Auto merge of #134681 - lnicola:sync-from-ra, r=lnicola

Subtree update of `rust-analyzer`

r? `@ghost`
Committed by bors on 2024-12-23 12:18:14 +00:00 (commit 485f5e80e6).
113 changed files with 3098 additions and 2167 deletions

Cargo.lock (generated)
View file

@ -1008,18 +1008,6 @@ version = "0.4.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24"
[[package]]
name = "lsp-server"
version = "0.7.7"
dependencies = [
"crossbeam-channel",
"ctrlc",
"log",
"lsp-types",
"serde",
"serde_json",
]
[[package]]
name = "lsp-server"
version = "0.7.7"
@ -1032,6 +1020,19 @@ dependencies = [
"serde_json",
]
[[package]]
name = "lsp-server"
version = "0.7.8"
dependencies = [
"crossbeam-channel",
"ctrlc",
"log",
"lsp-types",
"serde",
"serde_derive",
"serde_json",
]
[[package]]
name = "lsp-types"
version = "0.95.0"
@ -1289,7 +1290,6 @@ name = "paths"
version = "0.0.0"
dependencies = [
"camino",
"serde",
]
[[package]]
@ -1352,12 +1352,12 @@ dependencies = [
name = "proc-macro-api"
version = "0.0.0"
dependencies = [
"base-db",
"indexmap",
"intern",
"paths",
"rustc-hash 2.0.0",
"serde",
"serde_derive",
"serde_json",
"span",
"stdx",
@ -1369,7 +1369,6 @@ dependencies = [
name = "proc-macro-srv"
version = "0.0.0"
dependencies = [
"base-db",
"expect-test",
"intern",
"libloading",
@ -1448,6 +1447,7 @@ dependencies = [
"rustc-hash 2.0.0",
"semver",
"serde",
"serde_derive",
"serde_json",
"span",
"stdx",
@ -1507,9 +1507,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_abi"
version = "0.85.0"
version = "0.87.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "af462c3a2d524b84a51b6848b439787f01b35c6c1086d3e3086a5f5eea92ed9a"
checksum = "28b782af0a7a8df16ddf43cd70da9f17bc3b1ce712c9e4992b6edb16f5f53632"
dependencies = [
"bitflags 2.6.0",
"ra-ap-rustc_index",
@ -1518,9 +1518,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_index"
version = "0.85.0"
version = "0.87.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "be6bb8cb0ab78d94a222f1ffd3e87254cdfb57413382b8d6ebe26a85482f99d1"
checksum = "ce5742f134960482f543b35ecebec3cacc6d79a9a685713518b4d8d70c5f9aa8"
dependencies = [
"ra-ap-rustc_index_macros",
"smallvec",
@ -1528,9 +1528,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_index_macros"
version = "0.85.0"
version = "0.87.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c24b1641455b46e87435b7321219672077066e678963d239a4a2904732979b16"
checksum = "d7ea011fcf68309a8835ad01d91c032cb18444617b00e2cab21d45b208164441"
dependencies = [
"proc-macro2",
"quote",
@ -1539,9 +1539,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_lexer"
version = "0.85.0"
version = "0.87.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94daa86974417981fed2f12bd8fb00158dfa6fee561152bed689278c846d0272"
checksum = "eb76f0a4d4c20859e41f0a23bff0f37ab9ca9171c214a6c7dd72ea69434865dc"
dependencies = [
"unicode-properties",
"unicode-xid",
@ -1549,9 +1549,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_parse_format"
version = "0.85.0"
version = "0.87.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc07f6bd581746f358e39c4b6bfe8d455b3d6ad1a857821016d0d42eeb5e1e3e"
checksum = "06080bd35078305421a62da77f3c128482d8d44441b6da8ce9d146d1cd9cdb5b"
dependencies = [
"ra-ap-rustc_index",
"ra-ap-rustc_lexer",
@ -1559,9 +1559,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_pattern_analysis"
version = "0.85.0"
version = "0.87.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2f49b86e1276c1c3c72898410def29b699415f4e7d1dfb3531daf79794694372"
checksum = "68a3154fe4c20c177d7b3c678a2d3a97aba0cca156ddef88959915041889daf0"
dependencies = [
"ra-ap-rustc_index",
"rustc-hash 2.0.0",
@ -1676,7 +1676,7 @@ dependencies = [
"intern",
"itertools",
"load-cargo",
"lsp-server 0.7.7 (registry+https://github.com/rust-lang/crates.io-index)",
"lsp-server 0.7.7",
"lsp-types",
"memchr",
"mimalloc",
@ -1695,6 +1695,7 @@ dependencies = [
"scip",
"semver",
"serde",
"serde_derive",
"serde_json",
"stdx",
"syntax",
@ -1822,18 +1823,18 @@ dependencies = [
[[package]]
name = "serde"
version = "1.0.206"
version = "1.0.216"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5b3e4cd94123dd520a128bcd11e34d9e9e423e7e3e50425cb1b4b1e3549d0284"
checksum = "0b9781016e935a97e8beecf0c933758c97a5520d32930e460142b4cd80c6338e"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.206"
version = "1.0.216"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fabfb6138d2383ea8208cf98ccf69cdfb1aff4088460681d84189aa259762f97"
checksum = "46f859dbbf73865c6627ed570e78961cd3ac92407a2d117204c49232485da55e"
dependencies = [
"proc-macro2",
"quote",
@ -1924,12 +1925,6 @@ version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3b9b39299b249ad65f3b7e96443bad61c02ca5cd3589f46cb6d610a0fd6c0d6a"
[[package]]
name = "stable_deref_trait"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"
[[package]]
name = "stdx"
version = "0.0.0"
@ -1946,9 +1941,9 @@ dependencies = [
[[package]]
name = "syn"
version = "2.0.74"
version = "2.0.87"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1fceb41e3d546d0bd83421d3409b1460cc7444cd389341a4c880fe7a042cb3d7"
checksum = "25aa4ce346d03a6dcd68dd8b4010bcb74e54e62c90c573f394c46eae99aba32d"
dependencies = [
"proc-macro2",
"quote",
@ -2264,13 +2259,9 @@ dependencies = [
[[package]]
name = "triomphe"
version = "0.1.13"
version = "0.1.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e6631e42e10b40c0690bf92f404ebcfe6e1fdb480391d15f17cc8e96eeed5369"
dependencies = [
"serde",
"stable_deref_trait",
]
checksum = "ef8f7726da4807b58ea5c96fdc122f80702030edc33b35aff9190a51148ccc85"
[[package]]
name = "tt"

View file

@ -85,11 +85,11 @@ vfs-notify = { path = "./crates/vfs-notify", version = "0.0.0" }
vfs = { path = "./crates/vfs", version = "0.0.0" }
edition = { path = "./crates/edition", version = "0.0.0" }
ra-ap-rustc_lexer = { version = "0.85", default-features = false }
ra-ap-rustc_parse_format = { version = "0.85", default-features = false }
ra-ap-rustc_index = { version = "0.85", default-features = false }
ra-ap-rustc_abi = { version = "0.85", default-features = false }
ra-ap-rustc_pattern_analysis = { version = "0.85", default-features = false }
ra-ap-rustc_lexer = { version = "0.87", default-features = false }
ra-ap-rustc_parse_format = { version = "0.87", default-features = false }
ra-ap-rustc_index = { version = "0.87", default-features = false }
ra-ap-rustc_abi = { version = "0.87", default-features = false }
ra-ap-rustc_pattern_analysis = { version = "0.87", default-features = false }
# local crates that aren't published to crates.io. These should not have versions.
test-fixture = { path = "./crates/test-fixture" }
@ -138,7 +138,8 @@ pulldown-cmark = { version = "0.9.0", default-features = false }
rayon = "1.8.0"
rustc-hash = "2.0.0"
semver = "1.0.14"
serde = { version = "1.0.192", features = ["derive"] }
serde = { version = "1.0.192" }
serde_derive = { version = "1.0.192" }
serde_json = "1.0.108"
smallvec = { version = "1.10.0", features = [
"const_new",
@ -157,7 +158,7 @@ tracing-subscriber = { version = "0.3.18", default-features = false, features =
"time",
"tracing-log",
] }
triomphe = { version = "0.1.10", default-features = false, features = ["std"] }
triomphe = { version = "0.1.14", default-features = false, features = ["std"] }
url = "2.3.1"
xshell = "0.2.5"
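
Note on the dependency change above: the workspace now depends on `serde` without its `derive` feature and pulls `serde_derive` in as a separate dependency (matching the new `serde_derive` entries in `Cargo.lock`), presumably so `serde` itself no longer has to wait for the proc-macro crate to build. A minimal sketch of the consumer-side pattern, assuming `serde`, `serde_derive`, and `serde_json` are all declared as dependencies:

```rust
// Sketch, not from this commit: with serde's "derive" feature off, the
// derive macros are imported from the separate `serde_derive` crate.
use serde_derive::{Deserialize, Serialize};

#[derive(Serialize, Deserialize)]
struct Config {
    name: String,
}

fn main() {
    let config = Config { name: "ra".to_owned() };
    // The generated impls still target the `serde` traits.
    println!("{}", serde_json::to_string(&config).unwrap());
}
```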

View file

@ -18,6 +18,7 @@ use smallvec::SmallVec;
use span::{Edition, MacroFileId};
use syntax::{ast, AstPtr, SyntaxNodePtr};
use triomphe::Arc;
use tt::TextRange;
use crate::{
db::DefDatabase,
@ -143,15 +144,7 @@ pub struct BodySourceMap {
pub types: TypesSourceMap,
// FIXME: Make this a sane struct.
template_map: Option<
Box<(
// format_args!
FxHashMap<ExprId, (HygieneId, Vec<(syntax::TextRange, Name)>)>,
// asm!
FxHashMap<ExprId, Vec<Vec<(syntax::TextRange, usize)>>>,
)>,
>,
template_map: Option<Box<FormatTemplate>>,
expansions: FxHashMap<InFile<AstPtr<ast::MacroCall>>, MacroFileId>,
@ -160,6 +153,20 @@ pub struct BodySourceMap {
diagnostics: Vec<BodyDiagnostic>,
}
#[derive(Default, Debug, Eq, PartialEq)]
struct FormatTemplate {
/// A map from `format_args!()` expressions to their captures.
format_args_to_captures: FxHashMap<ExprId, (HygieneId, Vec<(syntax::TextRange, Name)>)>,
/// A map from `asm!()` expressions to their captures.
asm_to_captures: FxHashMap<ExprId, Vec<Vec<(syntax::TextRange, usize)>>>,
/// A map from desugared expressions of implicit captures to their source.
///
/// The value stored for each capture is its template literal and offset inside it. The template literal
/// is from the `format_args[_nl]!()` macro and so needs to be mapped up once to go to the user-written
/// template.
implicit_capture_to_source: FxHashMap<ExprId, InFile<(AstPtr<ast::Expr>, TextRange)>>,
}
#[derive(Debug, Eq, PartialEq)]
pub enum BodyDiagnostic {
InactiveCode { node: InFile<SyntaxNodePtr>, cfg: CfgExpr, opts: CfgOptions },
@ -798,18 +805,29 @@ impl BodySourceMap {
node: InFile<&ast::FormatArgsExpr>,
) -> Option<(HygieneId, &[(syntax::TextRange, Name)])> {
let src = node.map(AstPtr::new).map(AstPtr::upcast::<ast::Expr>);
let (hygiene, names) =
self.template_map.as_ref()?.0.get(&self.expr_map.get(&src)?.as_expr()?)?;
let (hygiene, names) = self
.template_map
.as_ref()?
.format_args_to_captures
.get(&self.expr_map.get(&src)?.as_expr()?)?;
Some((*hygiene, &**names))
}
pub fn format_args_implicit_capture(
&self,
capture_expr: ExprId,
) -> Option<InFile<(AstPtr<ast::Expr>, TextRange)>> {
self.template_map.as_ref()?.implicit_capture_to_source.get(&capture_expr).copied()
}
pub fn asm_template_args(
&self,
node: InFile<&ast::AsmExpr>,
) -> Option<(ExprId, &[Vec<(syntax::TextRange, usize)>])> {
let src = node.map(AstPtr::new).map(AstPtr::upcast::<ast::Expr>);
let expr = self.expr_map.get(&src)?.as_expr()?;
Some(expr).zip(self.template_map.as_ref()?.1.get(&expr).map(std::ops::Deref::deref))
Some(expr)
.zip(self.template_map.as_ref()?.asm_to_captures.get(&expr).map(std::ops::Deref::deref))
}
/// Get a reference to the body source map's diagnostics.
@ -835,8 +853,14 @@ impl BodySourceMap {
types,
} = self;
if let Some(template_map) = template_map {
template_map.0.shrink_to_fit();
template_map.1.shrink_to_fit();
let FormatTemplate {
format_args_to_captures,
asm_to_captures,
implicit_capture_to_source,
} = &mut **template_map;
format_args_to_captures.shrink_to_fit();
asm_to_captures.shrink_to_fit();
implicit_capture_to_source.shrink_to_fit();
}
expr_map.shrink_to_fit();
expr_map_back.shrink_to_fit();
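
Note: the anonymous two-map tuple behind `template_map` becomes the documented `FormatTemplate` struct, and the new `implicit_capture_to_source` table records where each implicit `format_args!` capture sits inside its template literal. A small illustration of the kind of capture being tracked (assumed example, not from the diff):

```rust
fn main() {
    let x = 1;
    // `{x}` is an implicit capture: it desugars to a path expression for `x`,
    // and the new map remembers the range of `x` inside the string literal so
    // a failed resolution can be reported at exactly that spot.
    println!("hello {x}");
}
```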

View file

@ -1957,8 +1957,10 @@ impl ExprCollector<'_> {
_ => None,
});
let mut mappings = vec![];
let (fmt, hygiene) = match template.and_then(|it| self.expand_macros_to_string(it)) {
Some((s, is_direct_literal)) => {
let (fmt, hygiene) = match template.and_then(|template| {
self.expand_macros_to_string(template.clone()).map(|it| (it, template))
}) {
Some(((s, is_direct_literal), template)) => {
let call_ctx = self.expander.syntax_context();
let hygiene = self.hygiene_id_for(s.syntax().text_range().start());
let fmt = format_args::parse(
@ -1966,8 +1968,18 @@ impl ExprCollector<'_> {
fmt_snippet,
args,
is_direct_literal,
|name| {
|name, range| {
let expr_id = self.alloc_expr_desugared(Expr::Path(Path::from(name)));
if let Some(range) = range {
self.source_map
.template_map
.get_or_insert_with(Default::default)
.implicit_capture_to_source
.insert(
expr_id,
self.expander.in_file((AstPtr::new(&template), range)),
);
}
if !hygiene.is_root() {
self.body.expr_hygiene.insert(expr_id, hygiene);
}
@ -2139,7 +2151,7 @@ impl ExprCollector<'_> {
self.source_map
.template_map
.get_or_insert_with(Default::default)
.0
.format_args_to_captures
.insert(idx, (hygiene, mappings));
idx
}
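
Note: `template_map` stays `None` until a body actually contains `format_args!` or `asm!`; `get_or_insert_with(Default::default)` builds the boxed struct on first insert. A self-contained model of that pattern, with simplified types:

```rust
use std::collections::HashMap;

#[derive(Default)]
struct FormatTemplate {
    format_args_to_captures: HashMap<u32, String>,
}

fn main() {
    let mut template_map: Option<Box<FormatTemplate>> = None;
    // Allocated lazily: bodies without format_args!/asm! never pay for it.
    template_map
        .get_or_insert_with(Default::default)
        .format_args_to_captures
        .insert(0, "x".to_owned());
    assert!(template_map.is_some());
}
```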

View file

@ -6,7 +6,7 @@ use syntax::{
ast::{self, HasName, IsString},
AstNode, AstPtr, AstToken, T,
};
use tt::{TextRange, TextSize};
use tt::TextRange;
use crate::{
body::lower::{ExprCollector, FxIndexSet},
@ -224,7 +224,7 @@ impl ExprCollector<'_> {
TextRange::new(
inner_span.start.try_into().unwrap(),
inner_span.end.try_into().unwrap(),
) - TextSize::from(str_style.map(|it| it + 1).unwrap_or(0) as u32 + 1)
)
})
};
for piece in unverified_pieces {
@ -268,7 +268,11 @@ impl ExprCollector<'_> {
Expr::InlineAsm(InlineAsm { operands: operands.into_boxed_slice(), options }),
syntax_ptr,
);
self.source_map.template_map.get_or_insert_with(Default::default).1.insert(idx, mappings);
self.source_map
.template_map
.get_or_insert_with(Default::default)
.asm_to_captures
.insert(idx, mappings);
idx
}
}
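
Note: this hunk and the matching one in `format_args.rs` below drop the `TextSize` correction. Parser spans are now kept relative to the start of the string token (prefix and quotes included), and the `semantics.rs` changes further down add back `string_start` instead of the end of the opening quote. A worked example with assumed offsets:

```rust
fn main() {
    let token = "\"hello {x}\""; // the string token, quotes included
    let inner = 8..9; // parser span of `x`, relative to the token start
    assert_eq!(&token[inner.clone()], "x");

    // Absolute position = token start + inner span; no per-quote arithmetic.
    let string_start: usize = 100; // assumed offset of the token in the file
    let absolute = (string_start + inner.start)..(string_start + inner.end);
    assert_eq!(absolute, 108..109);
}
```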

View file

@ -685,6 +685,7 @@ impl Printer<'_> {
self.print_binding(*id);
if let Some(pat) = subpat {
self.whitespace();
w!(self, "@ ");
self.print_pat(*pat);
}
}

View file

@ -426,3 +426,21 @@ fn f() {
"should have a binding for `B`",
);
}
#[test]
fn regression_pretty_print_bind_pat() {
let (db, body, owner) = lower(
r#"
fn foo() {
let v @ u = 123;
}
"#,
);
let printed = body.pretty_print(&db, owner, Edition::CURRENT);
assert_eq!(
printed,
r#"fn foo() -> () {
let v @ u = 123;
}"#
);
}

View file

@ -1,5 +1,6 @@
//! Parses `format_args` input.
use either::Either;
use hir_expand::name::Name;
use intern::Symbol;
use rustc_parse_format as parse;
@ -7,7 +8,7 @@ use span::SyntaxContextId;
use stdx::TupleExt;
use syntax::{
ast::{self, IsString},
TextRange, TextSize,
TextRange,
};
use crate::hir::ExprId;
@ -33,7 +34,7 @@ pub enum FormatArgsPiece {
Placeholder(FormatPlaceholder),
}
#[derive(Copy, Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct FormatPlaceholder {
/// Index into [`FormatArgs::arguments`].
pub argument: FormatArgPosition,
@ -45,11 +46,11 @@ pub struct FormatPlaceholder {
pub format_options: FormatOptions,
}
#[derive(Copy, Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct FormatArgPosition {
/// Which argument this position refers to (Ok),
/// or would've referred to if it existed (Err).
pub index: Result<usize, usize>,
pub index: Result<usize, Either<usize, Name>>,
/// What kind of position this is. See [`FormatArgPositionKind`].
pub kind: FormatArgPositionKind,
/// The span of the name or number.
@ -88,7 +89,7 @@ pub enum FormatTrait {
UpperHex,
}
#[derive(Copy, Clone, Default, Debug, PartialEq, Eq)]
#[derive(Clone, Default, Debug, PartialEq, Eq)]
pub struct FormatOptions {
/// The width. E.g. `{:5}` or `{:width$}`.
pub width: Option<FormatCount>,
@ -133,7 +134,7 @@ pub enum FormatAlignment {
Center,
}
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum FormatCount {
/// `{:5}` or `{:.5}`
Literal(usize),
@ -173,7 +174,7 @@ pub(crate) fn parse(
fmt_snippet: Option<String>,
mut args: FormatArgumentsCollector,
is_direct_literal: bool,
mut synth: impl FnMut(Name) -> ExprId,
mut synth: impl FnMut(Name, Option<TextRange>) -> ExprId,
mut record_usage: impl FnMut(Name, Option<TextRange>),
call_ctx: SyntaxContextId,
) -> FormatArgs {
@ -192,7 +193,6 @@ pub(crate) fn parse(
}
None => None,
};
let mut parser =
parse::Parser::new(&text, str_style, fmt_snippet, false, parse::ParseMode::Format);
@ -217,7 +217,6 @@ pub(crate) fn parse(
let to_span = |inner_span: parse::InnerSpan| {
is_source_literal.then(|| {
TextRange::new(inner_span.start.try_into().unwrap(), inner_span.end.try_into().unwrap())
- TextSize::from(str_style.map(|it| it + 1).unwrap_or(0) as u32 + 1)
})
};
@ -245,8 +244,8 @@ pub(crate) fn parse(
Ok(index)
} else {
// Doesn't exist as an explicit argument.
invalid_refs.push((index, span, used_as, kind));
Err(index)
invalid_refs.push((Either::Left(index), span, used_as, kind));
Err(Either::Left(index))
}
}
ArgRef::Name(name, span) => {
@ -265,16 +264,19 @@ pub(crate) fn parse(
// For the moment capturing variables from format strings expanded from macros is
// disabled (see RFC #2795)
// FIXME: Diagnose
}
invalid_refs.push((Either::Right(name.clone()), span, used_as, kind));
Err(Either::Right(name))
} else {
record_usage(name.clone(), span);
Ok(args.add(FormatArgument {
kind: FormatArgumentKind::Captured(name.clone()),
// FIXME: This is problematic, we might want to synthesize a dummy
// expression proper and/or desugar these.
expr: synth(name),
expr: synth(name, span),
}))
}
}
}
};
FormatArgPosition { index, kind, span }
};
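
Note: `FormatArgPosition::index` now remembers what failed to resolve, a positional index or a name, instead of collapsing both cases to a number; this is also why the `Copy` derives above disappear (`Name` is not `Copy`). A standalone sketch with simplified types:

```rust
enum Either<L, R> {
    Left(L),
    Right(R),
}

struct FormatArgPosition {
    // Ok(index) if the argument exists, Err(position or name) otherwise.
    index: Result<usize, Either<usize, String>>,
}

fn main() {
    let missing_positional = FormatArgPosition { index: Err(Either::Left(2)) };
    let missing_named = FormatArgPosition { index: Err(Either::Right("x".into())) };
    for pos in [missing_positional, missing_named] {
        match pos.index {
            Ok(i) => println!("argument {i} resolved"),
            Err(Either::Left(i)) => println!("unresolved positional argument {i}"),
            Err(Either::Right(name)) => println!("unresolved named argument `{name}`"),
        }
    }
}
```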

View file

@ -16,7 +16,7 @@ use syntax::ast;
use crate::{
db::DefDatabase,
per_ns::PerNs,
per_ns::{Item, MacrosItem, PerNs, TypesItem, ValuesItem},
visibility::{Visibility, VisibilityExplicitness},
AdtId, BuiltinType, ConstId, ExternCrateId, FxIndexMap, HasModule, ImplId, LocalModuleId,
Lookup, MacroId, ModuleDefId, ModuleId, TraitId, UseId,
@ -80,9 +80,9 @@ pub struct ItemScope {
/// Defs visible in this scope. This includes `declarations`, but also
/// imports. The imports belong to this module and can be resolved by using them on
/// the `use_imports_*` fields.
types: FxIndexMap<Name, (ModuleDefId, Visibility, Option<ImportOrExternCrate>)>,
values: FxIndexMap<Name, (ModuleDefId, Visibility, Option<ImportId>)>,
macros: FxIndexMap<Name, (MacroId, Visibility, Option<ImportId>)>,
types: FxIndexMap<Name, TypesItem>,
values: FxIndexMap<Name, ValuesItem>,
macros: FxIndexMap<Name, MacrosItem>,
unresolved: FxHashSet<Name>,
/// The defs declared in this scope. Each def has a single scope where it is
@ -92,7 +92,7 @@ pub struct ItemScope {
impls: Vec<ImplId>,
unnamed_consts: Vec<ConstId>,
/// Traits imported via `use Trait as _;`.
unnamed_trait_imports: FxHashMap<TraitId, (Visibility, Option<ImportId>)>,
unnamed_trait_imports: FxHashMap<TraitId, Item<()>>,
// the resolutions of the imports of this scope
use_imports_types: FxHashMap<ImportOrExternCrate, ImportOrDef>,
@ -187,7 +187,7 @@ impl ItemScope {
import = i;
}
ImportOrDef::Def(ModuleDefId::MacroId(def)) => {
res.macros = Some((def, Visibility::Public, None));
res.macros = Some(Item { def, vis: Visibility::Public, import: None });
break;
}
_ => break,
@ -203,7 +203,7 @@ impl ItemScope {
import = i;
}
ImportOrDef::Def(def) => {
res.types = Some((def, Visibility::Public, None));
res.types = Some(Item { def, vis: Visibility::Public, import: None });
break;
}
_ => break,
@ -219,7 +219,7 @@ impl ItemScope {
import = i;
}
ImportOrDef::Def(def) => {
res.values = Some((def, Visibility::Public, None));
res.values = Some(Item { def, vis: Visibility::Public, import: None });
break;
}
_ => break,
@ -253,8 +253,8 @@ impl ItemScope {
}
pub(crate) fn modules_in_scope(&self) -> impl Iterator<Item = (ModuleId, Visibility)> + '_ {
self.types.values().copied().filter_map(|(def, vis, _)| match def {
ModuleDefId::ModuleId(module) => Some((module, vis)),
self.types.values().filter_map(|ns| match ns.def {
ModuleDefId::ModuleId(module) => Some((module, ns.vis)),
_ => None,
})
}
@ -283,20 +283,20 @@ impl ItemScope {
}
pub(crate) fn type_(&self, name: &Name) -> Option<(ModuleDefId, Visibility)> {
self.types.get(name).copied().map(|(a, b, _)| (a, b))
self.types.get(name).map(|item| (item.def, item.vis))
}
/// XXX: this is O(N) rather than O(1), try to not introduce new usages.
pub(crate) fn name_of(&self, item: ItemInNs) -> Option<(&Name, Visibility, /*declared*/ bool)> {
match item {
ItemInNs::Macros(def) => self.macros.iter().find_map(|(name, &(other_def, vis, i))| {
(other_def == def).then_some((name, vis, i.is_none()))
ItemInNs::Macros(def) => self.macros.iter().find_map(|(name, other_def)| {
(other_def.def == def).then_some((name, other_def.vis, other_def.import.is_none()))
}),
ItemInNs::Types(def) => self.types.iter().find_map(|(name, &(other_def, vis, i))| {
(other_def == def).then_some((name, vis, i.is_none()))
ItemInNs::Types(def) => self.types.iter().find_map(|(name, other_def)| {
(other_def.def == def).then_some((name, other_def.vis, other_def.import.is_none()))
}),
ItemInNs::Values(def) => self.values.iter().find_map(|(name, &(other_def, vis, i))| {
(other_def == def).then_some((name, vis, i.is_none()))
ItemInNs::Values(def) => self.values.iter().find_map(|(name, other_def)| {
(other_def.def == def).then_some((name, other_def.vis, other_def.import.is_none()))
}),
}
}
@ -311,22 +311,34 @@ impl ItemScope {
ItemInNs::Macros(def) => self
.macros
.iter()
.filter_map(|(name, &(other_def, vis, i))| {
(other_def == def).then_some((name, vis, i.is_none()))
.filter_map(|(name, other_def)| {
(other_def.def == def).then_some((
name,
other_def.vis,
other_def.import.is_none(),
))
})
.find_map(|(a, b, c)| cb(a, b, c)),
ItemInNs::Types(def) => self
.types
.iter()
.filter_map(|(name, &(other_def, vis, i))| {
(other_def == def).then_some((name, vis, i.is_none()))
.filter_map(|(name, other_def)| {
(other_def.def == def).then_some((
name,
other_def.vis,
other_def.import.is_none(),
))
})
.find_map(|(a, b, c)| cb(a, b, c)),
ItemInNs::Values(def) => self
.values
.iter()
.filter_map(|(name, &(other_def, vis, i))| {
(other_def == def).then_some((name, vis, i.is_none()))
.filter_map(|(name, other_def)| {
(other_def.def == def).then_some((
name,
other_def.vis,
other_def.import.is_none(),
))
})
.find_map(|(a, b, c)| cb(a, b, c)),
}
@ -335,7 +347,7 @@ impl ItemScope {
pub(crate) fn traits(&self) -> impl Iterator<Item = TraitId> + '_ {
self.types
.values()
.filter_map(|&(def, _, _)| match def {
.filter_map(|def| match def.def {
ModuleDefId::TraitId(t) => Some(t),
_ => None,
})
@ -344,13 +356,13 @@ impl ItemScope {
pub(crate) fn resolutions(&self) -> impl Iterator<Item = (Option<Name>, PerNs)> + '_ {
self.entries().map(|(name, res)| (Some(name.clone()), res)).chain(
self.unnamed_trait_imports.iter().map(|(tr, (vis, i))| {
self.unnamed_trait_imports.iter().map(|(tr, trait_)| {
(
None,
PerNs::types(
ModuleDefId::TraitId(*tr),
*vis,
i.map(ImportOrExternCrate::Import),
trait_.vis,
trait_.import.map(ImportOrExternCrate::Import),
),
)
}),
@ -464,12 +476,12 @@ impl ItemScope {
// FIXME: This is only used in collection, we should move the relevant parts of it out of ItemScope
pub(crate) fn unnamed_trait_vis(&self, tr: TraitId) -> Option<Visibility> {
self.unnamed_trait_imports.get(&tr).copied().map(|(a, _)| a)
self.unnamed_trait_imports.get(&tr).map(|trait_| trait_.vis)
}
pub(crate) fn push_unnamed_trait(&mut self, tr: TraitId, vis: Visibility) {
// FIXME: import
self.unnamed_trait_imports.insert(tr, (vis, None));
self.unnamed_trait_imports.insert(tr, Item { def: (), vis, import: None });
}
pub(crate) fn push_res_with_import(
@ -502,7 +514,7 @@ impl ItemScope {
}
None | Some(ImportType::Glob(_)) => None,
};
let prev = std::mem::replace(&mut fld.2, import);
let prev = std::mem::replace(&mut fld.import, import);
if let Some(import) = import {
self.use_imports_types.insert(
import,
@ -513,7 +525,7 @@ impl ItemScope {
Some(ImportOrExternCrate::ExternCrate(import)) => {
ImportOrDef::ExternCrate(import)
}
None => ImportOrDef::Def(fld.0),
None => ImportOrDef::Def(fld.def),
},
);
}
@ -540,7 +552,7 @@ impl ItemScope {
}
None | Some(ImportType::Glob(_)) => None,
};
let prev = std::mem::replace(&mut fld.2, import);
let prev = std::mem::replace(&mut fld.import, import);
if let Some(import) = import {
self.use_imports_types.insert(
import,
@ -551,7 +563,7 @@ impl ItemScope {
Some(ImportOrExternCrate::ExternCrate(import)) => {
ImportOrDef::ExternCrate(import)
}
None => ImportOrDef::Def(fld.0),
None => ImportOrDef::Def(fld.def),
},
);
}
@ -579,13 +591,13 @@ impl ItemScope {
Some(ImportType::Import(import)) => Some(import),
_ => None,
};
let prev = std::mem::replace(&mut fld.2, import);
let prev = std::mem::replace(&mut fld.import, import);
if let Some(import) = import {
self.use_imports_values.insert(
import,
match prev {
Some(import) => ImportOrDef::Import(import),
None => ImportOrDef::Def(fld.0),
None => ImportOrDef::Def(fld.def),
},
);
}
@ -599,13 +611,13 @@ impl ItemScope {
Some(ImportType::Import(import)) => Some(import),
_ => None,
};
let prev = std::mem::replace(&mut fld.2, import);
let prev = std::mem::replace(&mut fld.import, import);
if let Some(import) = import {
self.use_imports_values.insert(
import,
match prev {
Some(import) => ImportOrDef::Import(import),
None => ImportOrDef::Def(fld.0),
None => ImportOrDef::Def(fld.def),
},
);
}
@ -631,13 +643,13 @@ impl ItemScope {
Some(ImportType::Import(import)) => Some(import),
_ => None,
};
let prev = std::mem::replace(&mut fld.2, import);
let prev = std::mem::replace(&mut fld.import, import);
if let Some(import) = import {
self.use_imports_macros.insert(
import,
match prev {
Some(import) => ImportOrDef::Import(import),
None => ImportOrDef::Def(fld.0.into()),
None => ImportOrDef::Def(fld.def.into()),
},
);
}
@ -651,13 +663,13 @@ impl ItemScope {
Some(ImportType::Import(import)) => Some(import),
_ => None,
};
let prev = std::mem::replace(&mut fld.2, import);
let prev = std::mem::replace(&mut fld.import, import);
if let Some(import) = import {
self.use_imports_macros.insert(
import,
match prev {
Some(import) => ImportOrDef::Import(import),
None => ImportOrDef::Def(fld.0.into()),
None => ImportOrDef::Def(fld.def.into()),
},
);
}
@ -680,19 +692,19 @@ impl ItemScope {
pub(crate) fn censor_non_proc_macros(&mut self, this_module: ModuleId) {
self.types
.values_mut()
.map(|(_, vis, _)| vis)
.chain(self.values.values_mut().map(|(_, vis, _)| vis))
.chain(self.unnamed_trait_imports.values_mut().map(|(vis, _)| vis))
.map(|def| &mut def.vis)
.chain(self.values.values_mut().map(|def| &mut def.vis))
.chain(self.unnamed_trait_imports.values_mut().map(|def| &mut def.vis))
.for_each(|vis| {
*vis = Visibility::Module(this_module, VisibilityExplicitness::Implicit)
});
for (mac, vis, import) in self.macros.values_mut() {
if matches!(mac, MacroId::ProcMacroId(_) if import.is_none()) {
for mac in self.macros.values_mut() {
if matches!(mac.def, MacroId::ProcMacroId(_) if mac.import.is_none()) {
continue;
}
*vis = Visibility::Module(this_module, VisibilityExplicitness::Implicit);
mac.vis = Visibility::Module(this_module, VisibilityExplicitness::Implicit);
}
}
@ -707,23 +719,23 @@ impl ItemScope {
name.map_or("_".to_owned(), |name| name.display(db, Edition::LATEST).to_string())
);
if let Some((.., i)) = def.types {
if let Some(Item { import, .. }) = def.types {
buf.push_str(" t");
match i {
match import {
Some(ImportOrExternCrate::Import(_)) => buf.push('i'),
Some(ImportOrExternCrate::ExternCrate(_)) => buf.push('e'),
None => (),
}
}
if let Some((.., i)) = def.values {
if let Some(Item { import, .. }) = def.values {
buf.push_str(" v");
if i.is_some() {
if import.is_some() {
buf.push('i');
}
}
if let Some((.., i)) = def.macros {
if let Some(Item { import, .. }) = def.macros {
buf.push_str(" m");
if i.is_some() {
if import.is_some() {
buf.push('i');
}
}
@ -781,19 +793,19 @@ impl ItemScope {
pub(crate) fn update_visibility_types(&mut self, name: &Name, vis: Visibility) {
let res =
self.types.get_mut(name).expect("tried to update visibility of non-existent type");
res.1 = vis;
res.vis = vis;
}
pub(crate) fn update_visibility_values(&mut self, name: &Name, vis: Visibility) {
let res =
self.values.get_mut(name).expect("tried to update visibility of non-existent value");
res.1 = vis;
res.vis = vis;
}
pub(crate) fn update_visibility_macros(&mut self, name: &Name, vis: Visibility) {
let res =
self.macros.get_mut(name).expect("tried to update visibility of non-existent macro");
res.1 = vis;
res.vis = vis;
}
}
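
Note: everything in this file is the same mechanical refactor — the positional `(def, vis, import)` tuples become the named `Item` record introduced in `per_ns.rs` below. In miniature, with stand-in types:

```rust
struct Item {
    def: &'static str,
    vis: u8, // stand-in for `Visibility`
    import: Option<u32>,
}

fn main() {
    // was: let mut res = ("Foo", 0u8, None::<u32>); res.1 = vis;
    let mut res = Item { def: "Foo", vis: 0, import: None };
    res.vis = 1; // named access replaces `res.1`
    assert_eq!(res.vis, 1);
    assert!(res.import.is_none()); // replaces `res.2`
    assert_eq!(res.def, "Foo"); // replaces `res.0`
}
```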

View file

@ -44,7 +44,7 @@ use crate::{
ResolveMode,
},
path::{ImportAlias, ModPath, PathKind},
per_ns::PerNs,
per_ns::{Item, PerNs},
tt,
visibility::{RawVisibility, Visibility},
AdtId, AstId, AstIdWithPath, ConstLoc, CrateRootModuleId, EnumLoc, EnumVariantLoc,
@ -523,7 +523,7 @@ impl DefCollector<'_> {
self.def_map.resolve_path(self.db, DefMap::ROOT, &path, BuiltinShadowMode::Other, None);
match per_ns.types {
Some((ModuleDefId::ModuleId(m), _, import)) => {
Some(Item { def: ModuleDefId::ModuleId(m), import, .. }) => {
// FIXME: This should specifically look for a glob import somehow and record that here
self.def_map.prelude = Some((
m,
@ -1069,9 +1069,9 @@ impl DefCollector<'_> {
//
// This has been historically allowed, but may be not allowed in future
// https://github.com/rust-lang/rust/issues/127909
if let Some((_, v, it)) = defs.types.as_mut() {
if let Some(def) = defs.types.as_mut() {
let is_extern_crate_reimport_without_prefix = || {
let Some(ImportOrExternCrate::ExternCrate(_)) = it else {
let Some(ImportOrExternCrate::ExternCrate(_)) = def.import else {
return false;
};
let Some(ImportType::Import(id)) = def_import_type else {
@ -1086,16 +1086,16 @@ impl DefCollector<'_> {
path.segments().len() < 2
};
if is_extern_crate_reimport_without_prefix() {
*v = vis;
def.vis = vis;
} else {
*v = v.min(vis, &self.def_map).unwrap_or(vis);
def.vis = def.vis.min(vis, &self.def_map).unwrap_or(vis);
}
}
if let Some((_, v, _)) = defs.values.as_mut() {
*v = v.min(vis, &self.def_map).unwrap_or(vis);
if let Some(def) = defs.values.as_mut() {
def.vis = def.vis.min(vis, &self.def_map).unwrap_or(vis);
}
if let Some((_, v, _)) = defs.macros.as_mut() {
*v = v.min(vis, &self.def_map).unwrap_or(vis);
if let Some(def) = defs.macros.as_mut() {
def.vis = def.vis.min(vis, &self.def_map).unwrap_or(vis);
}
let mut changed = false;
@ -1106,12 +1106,12 @@ impl DefCollector<'_> {
// Multiple globs may import the same item and they may override visibility from
// previously resolved globs. Handle overrides here and leave the rest to
// `ItemScope::push_res_with_import()`.
if let Some((def, def_vis, _)) = defs.types {
if let Some((prev_def, prev_vis, _)) = prev_defs.types {
if def == prev_def
if let Some(def) = defs.types {
if let Some(prev_def) = prev_defs.types {
if def.def == prev_def.def
&& self.from_glob_import.contains_type(module_id, name.clone())
&& def_vis != prev_vis
&& def_vis.max(prev_vis, &self.def_map) == Some(def_vis)
&& def.vis != prev_def.vis
&& def.vis.max(prev_def.vis, &self.def_map) == Some(def.vis)
{
changed = true;
// This import is being handled here, don't pass it down to
@ -1119,41 +1119,41 @@ impl DefCollector<'_> {
defs.types = None;
self.def_map.modules[module_id]
.scope
.update_visibility_types(name, def_vis);
.update_visibility_types(name, def.vis);
}
}
}
if let Some((def, def_vis, _)) = defs.values {
if let Some((prev_def, prev_vis, _)) = prev_defs.values {
if def == prev_def
if let Some(def) = defs.values {
if let Some(prev_def) = prev_defs.values {
if def.def == prev_def.def
&& self.from_glob_import.contains_value(module_id, name.clone())
&& def_vis != prev_vis
&& def_vis.max(prev_vis, &self.def_map) == Some(def_vis)
&& def.vis != prev_def.vis
&& def.vis.max(prev_def.vis, &self.def_map) == Some(def.vis)
{
changed = true;
// See comment above.
defs.values = None;
self.def_map.modules[module_id]
.scope
.update_visibility_values(name, def_vis);
.update_visibility_values(name, def.vis);
}
}
}
if let Some((def, def_vis, _)) = defs.macros {
if let Some((prev_def, prev_vis, _)) = prev_defs.macros {
if def == prev_def
if let Some(def) = defs.macros {
if let Some(prev_def) = prev_defs.macros {
if def.def == prev_def.def
&& self.from_glob_import.contains_macro(module_id, name.clone())
&& def_vis != prev_vis
&& def_vis.max(prev_vis, &self.def_map) == Some(def_vis)
&& def.vis != prev_def.vis
&& def.vis.max(prev_def.vis, &self.def_map) == Some(def.vis)
{
changed = true;
// See comment above.
defs.macros = None;
self.def_map.modules[module_id]
.scope
.update_visibility_macros(name, def_vis);
.update_visibility_macros(name, def.vis);
}
}
}

View file

@ -67,8 +67,8 @@ impl PerNs {
db: &dyn DefDatabase,
expected: Option<MacroSubNs>,
) -> Self {
self.macros = self.macros.filter(|&(id, _, _)| {
let this = MacroSubNs::from_id(db, id);
self.macros = self.macros.filter(|def| {
let this = MacroSubNs::from_id(db, def.def);
sub_namespace_match(Some(this), expected)
});
@ -411,7 +411,7 @@ impl DefMap {
original_module: LocalModuleId,
) -> ResolvePathResult {
for (i, segment) in segments {
let (curr, vis, imp) = match curr_per_ns.take_types_full() {
let curr = match curr_per_ns.take_types_full() {
Some(r) => r,
None => {
// we still have path segments left, but the path so far
@ -424,7 +424,7 @@ impl DefMap {
};
// resolve segment in curr
curr_per_ns = match curr {
curr_per_ns = match curr.def {
ModuleDefId::ModuleId(module) => {
if module.krate != self.krate {
let path = ModPath::from_segments(
@ -492,7 +492,7 @@ impl DefMap {
Some(res) => res,
None => {
return ResolvePathResult::new(
PerNs::types(e.into(), vis, imp),
PerNs::types(e.into(), curr.vis, curr.import),
ReachedFixedPoint::Yes,
Some(i),
false,
@ -510,7 +510,7 @@ impl DefMap {
);
return ResolvePathResult::new(
PerNs::types(s, vis, imp),
PerNs::types(s, curr.vis, curr.import),
ReachedFixedPoint::Yes,
Some(i),
false,

View file

@ -331,7 +331,7 @@ pub type Ty = ();
}
for (_, res) in module_data.scope.resolutions() {
match res.values.map(|(a, _, _)| a).or(res.types.map(|(a, _, _)| a)).unwrap() {
match res.values.map(|it| it.def).or(res.types.map(|it| it.def)).unwrap() {
ModuleDefId::FunctionId(f) => _ = db.function_data(f),
ModuleDefId::AdtId(adt) => match adt {
AdtId::StructId(it) => _ = db.struct_data(it),

View file

@ -28,11 +28,22 @@ bitflags! {
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Item<Def, Import = ImportId> {
pub def: Def,
pub vis: Visibility,
pub import: Option<Import>,
}
pub type TypesItem = Item<ModuleDefId, ImportOrExternCrate>;
pub type ValuesItem = Item<ModuleDefId>;
pub type MacrosItem = Item<MacroId>;
#[derive(Clone, Copy, Debug, Default, Eq, Hash, PartialEq)]
pub struct PerNs {
pub types: Option<(ModuleDefId, Visibility, Option<ImportOrExternCrate>)>,
pub values: Option<(ModuleDefId, Visibility, Option<ImportId>)>,
pub macros: Option<(MacroId, Visibility, Option<ImportId>)>,
pub types: Option<TypesItem>,
pub values: Option<ValuesItem>,
pub macros: Option<MacrosItem>,
}
impl PerNs {
@ -48,29 +59,33 @@ impl PerNs {
PerNs { types: None, values: None, macros: None }
}
pub fn values(t: ModuleDefId, v: Visibility, i: Option<ImportId>) -> PerNs {
PerNs { types: None, values: Some((t, v, i)), macros: None }
pub fn values(def: ModuleDefId, vis: Visibility, import: Option<ImportId>) -> PerNs {
PerNs { types: None, values: Some(Item { def, vis, import }), macros: None }
}
pub fn types(t: ModuleDefId, v: Visibility, i: Option<ImportOrExternCrate>) -> PerNs {
PerNs { types: Some((t, v, i)), values: None, macros: None }
pub fn types(def: ModuleDefId, vis: Visibility, import: Option<ImportOrExternCrate>) -> PerNs {
PerNs { types: Some(Item { def, vis, import }), values: None, macros: None }
}
pub fn both(
types: ModuleDefId,
values: ModuleDefId,
v: Visibility,
i: Option<ImportOrExternCrate>,
vis: Visibility,
import: Option<ImportOrExternCrate>,
) -> PerNs {
PerNs {
types: Some((types, v, i)),
values: Some((values, v, i.and_then(ImportOrExternCrate::into_import))),
types: Some(Item { def: types, vis, import }),
values: Some(Item {
def: values,
vis,
import: import.and_then(ImportOrExternCrate::into_import),
}),
macros: None,
}
}
pub fn macros(macro_: MacroId, v: Visibility, i: Option<ImportId>) -> PerNs {
PerNs { types: None, values: None, macros: Some((macro_, v, i)) }
pub fn macros(def: MacroId, vis: Visibility, import: Option<ImportId>) -> PerNs {
PerNs { types: None, values: None, macros: Some(Item { def, vis, import }) }
}
pub fn is_none(&self) -> bool {
@ -82,43 +97,43 @@ impl PerNs {
}
pub fn take_types(self) -> Option<ModuleDefId> {
self.types.map(|it| it.0)
self.types.map(|it| it.def)
}
pub fn take_types_full(self) -> Option<(ModuleDefId, Visibility, Option<ImportOrExternCrate>)> {
pub fn take_types_full(self) -> Option<TypesItem> {
self.types
}
pub fn take_values(self) -> Option<ModuleDefId> {
self.values.map(|it| it.0)
self.values.map(|it| it.def)
}
pub fn take_values_import(self) -> Option<(ModuleDefId, Option<ImportId>)> {
self.values.map(|it| (it.0, it.2))
self.values.map(|it| (it.def, it.import))
}
pub fn take_macros(self) -> Option<MacroId> {
self.macros.map(|it| it.0)
self.macros.map(|it| it.def)
}
pub fn take_macros_import(self) -> Option<(MacroId, Option<ImportId>)> {
self.macros.map(|it| (it.0, it.2))
self.macros.map(|it| (it.def, it.import))
}
pub fn filter_visibility(self, mut f: impl FnMut(Visibility) -> bool) -> PerNs {
let _p = tracing::info_span!("PerNs::filter_visibility").entered();
PerNs {
types: self.types.filter(|&(_, v, _)| f(v)),
values: self.values.filter(|&(_, v, _)| f(v)),
macros: self.macros.filter(|&(_, v, _)| f(v)),
types: self.types.filter(|def| f(def.vis)),
values: self.values.filter(|def| f(def.vis)),
macros: self.macros.filter(|def| f(def.vis)),
}
}
pub fn with_visibility(self, vis: Visibility) -> PerNs {
PerNs {
types: self.types.map(|(it, _, c)| (it, vis, c)),
values: self.values.map(|(it, _, c)| (it, vis, c)),
macros: self.macros.map(|(it, _, import)| (it, vis, import)),
types: self.types.map(|def| Item { vis, ..def }),
values: self.values.map(|def| Item { vis, ..def }),
macros: self.macros.map(|def| Item { vis, ..def }),
}
}
@ -141,15 +156,17 @@ impl PerNs {
pub fn iter_items(self) -> impl Iterator<Item = (ItemInNs, Option<ImportOrExternCrate>)> {
let _p = tracing::info_span!("PerNs::iter_items").entered();
self.types
.map(|it| (ItemInNs::Types(it.0), it.2))
.map(|it| (ItemInNs::Types(it.def), it.import))
.into_iter()
.chain(
self.values
.map(|it| (ItemInNs::Values(it.0), it.2.map(ImportOrExternCrate::Import))),
self.values.map(|it| {
(ItemInNs::Values(it.def), it.import.map(ImportOrExternCrate::Import))
}),
)
.chain(
self.macros
.map(|it| (ItemInNs::Macros(it.0), it.2.map(ImportOrExternCrate::Import))),
self.macros.map(|it| {
(ItemInNs::Macros(it.def), it.import.map(ImportOrExternCrate::Import))
}),
)
}
}
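
Note: the defaulted `Import` parameter lets one record type cover all three namespaces (`TypesItem` overrides it with `ImportOrExternCrate`), and `item_scope.rs` above reuses `Item<()>` for `use Trait as _;` imports that carry visibility and import data but no definition payload. Sketch of the generic shape with stand-in types:

```rust
struct Item<Def, Import = u32> {
    def: Def,
    vis: u8,
    import: Option<Import>,
}

type ValuesItem = Item<&'static str>; // `Import` defaults to u32 here
type TypesItem = Item<&'static str, char>; // overrides the import type

fn main() {
    let value: ValuesItem = Item { def: "f", vis: 0, import: Some(7) };
    let ty: TypesItem = Item { def: "T", vis: 0, import: Some('e') };
    let unnamed_trait: Item<()> = Item { def: (), vis: 0, import: Some(1) };
    assert_eq!((value.import, ty.import), (Some(7), Some('e')));
    assert_eq!((value.vis, ty.vis, unnamed_trait.def), (0, 0, ()));
    assert_eq!((value.def, ty.def), ("f", "T"));
}
```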

View file

@ -933,8 +933,8 @@ impl ModuleItemMap {
Some(ResolveValueResult::ValueNs(value, import))
}
Some(idx) => {
let (def, _, import) = module_def.take_types_full()?;
let ty = match def {
let def = module_def.take_types_full()?;
let ty = match def.def {
ModuleDefId::AdtId(it) => TypeNs::AdtId(it),
ModuleDefId::TraitId(it) => TypeNs::TraitId(it),
ModuleDefId::TraitAliasId(it) => TypeNs::TraitAliasId(it),
@ -948,7 +948,7 @@ impl ModuleItemMap {
| ModuleDefId::MacroId(_)
| ModuleDefId::StaticId(_) => return None,
};
Some(ResolveValueResult::Partial(ty, idx, import))
Some(ResolveValueResult::Partial(ty, idx, def.import))
}
}
}
@ -986,8 +986,8 @@ fn to_value_ns(per_ns: PerNs) -> Option<(ValueNs, Option<ImportId>)> {
}
fn to_type_ns(per_ns: PerNs) -> Option<(TypeNs, Option<ImportOrExternCrate>)> {
let (def, _, import) = per_ns.take_types_full()?;
let res = match def {
let def = per_ns.take_types_full()?;
let res = match def.def {
ModuleDefId::AdtId(it) => TypeNs::AdtId(it),
ModuleDefId::EnumVariantId(it) => TypeNs::EnumVariantId(it),
@ -1003,7 +1003,7 @@ fn to_type_ns(per_ns: PerNs) -> Option<(TypeNs, Option<ImportOrExternCrate>)> {
| ModuleDefId::StaticId(_)
| ModuleDefId::ModuleId(_) => return None,
};
Some((res, import))
Some((res, def.import))
}
#[derive(Default)]
@ -1019,14 +1019,14 @@ impl ScopeNames {
}
}
fn add_per_ns(&mut self, name: &Name, def: PerNs) {
if let &Some((ty, _, _)) = &def.types {
self.add(name, ScopeDef::ModuleDef(ty))
if let Some(ty) = &def.types {
self.add(name, ScopeDef::ModuleDef(ty.def))
}
if let &Some((def, _, _)) = &def.values {
self.add(name, ScopeDef::ModuleDef(def))
if let Some(def) = &def.values {
self.add(name, ScopeDef::ModuleDef(def.def))
}
if let &Some((mac, _, _)) = &def.macros {
self.add(name, ScopeDef::ModuleDef(ModuleDefId::MacroId(mac)))
if let Some(mac) = &def.macros {
self.add(name, ScopeDef::ModuleDef(ModuleDefId::MacroId(mac.def)))
}
if def.is_none() {
self.add(name, ScopeDef::Unknown)

View file

@ -153,13 +153,13 @@ fn syntax_context(db: &dyn ExpandDatabase, file: HirFileId) -> SyntaxContextId {
/// This expands the given macro call, but with different arguments. This is
/// used for completion, where we want to see what 'would happen' if we insert a
/// token. The `token_to_map` mapped down into the expansion, with the mapped
/// token returned.
/// token(s) returned with their priority.
pub fn expand_speculative(
db: &dyn ExpandDatabase,
actual_macro_call: MacroCallId,
speculative_args: &SyntaxNode,
token_to_map: SyntaxToken,
) -> Option<(SyntaxNode, SyntaxToken)> {
) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> {
let loc = db.lookup_intern_macro_call(actual_macro_call);
let (_, _, span) = db.macro_arg_considering_derives(actual_macro_call, &loc.kind);
@ -303,17 +303,19 @@ pub fn expand_speculative(
token_tree_to_syntax_node(&speculative_expansion.value, expand_to, loc.def.edition);
let syntax_node = node.syntax_node();
let (token, _) = rev_tmap
let token = rev_tmap
.ranges_with_span(span_map.span_for_range(token_to_map.text_range()))
.filter_map(|(range, ctx)| syntax_node.covering_element(range).into_token().zip(Some(ctx)))
.min_by_key(|(t, ctx)| {
.map(|(t, ctx)| {
// prefer tokens of the same kind and text, as well as non opaque marked ones
// Note the inversion of the score here, as we want to prefer the first token in case
// of all tokens having the same score
ctx.is_opaque(db) as u8
let ranking = ctx.is_opaque(db) as u8
+ 2 * (t.kind() != token_to_map.kind()) as u8
+ 4 * ((t.text() != token_to_map.text()) as u8)
})?;
+ 4 * ((t.text() != token_to_map.text()) as u8);
(t, ranking)
})
.collect();
Some((node.syntax_node(), token))
}
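
Note: `expand_speculative` now returns every candidate token together with its ranking instead of pre-selecting the minimum, leaving tie-breaking to callers. The score is a smaller-is-better bit encoding; a standalone model (simplified):

```rust
fn rank(is_opaque: bool, same_kind: bool, same_text: bool) -> u8 {
    // Weights order the criteria: text match > kind match > non-opaque context.
    is_opaque as u8 + 2 * (!same_kind as u8) + 4 * (!same_text as u8)
}

fn main() {
    assert_eq!(rank(false, true, true), 0); // ideal candidate
    assert!(rank(true, true, true) < rank(false, false, true)); // kind beats opacity
    assert!(rank(false, false, true) < rank(false, true, false)); // text beats kind
}
```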

View file

@ -22,7 +22,6 @@ use hir_def::{
use crate::{
db::{HirDatabase, InternedCoroutine},
display::HirDisplay,
from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id,
generics::generics,
make_binders, make_single_type_binders,
@ -823,13 +822,12 @@ pub(crate) fn impl_datum_query(
let _p = tracing::info_span!("impl_datum_query").entered();
debug!("impl_datum {:?}", impl_id);
let impl_: hir_def::ImplId = from_chalk(db, impl_id);
impl_def_datum(db, krate, impl_id, impl_)
impl_def_datum(db, krate, impl_)
}
fn impl_def_datum(
db: &dyn HirDatabase,
krate: CrateId,
chalk_id: ImplId,
impl_id: hir_def::ImplId,
) -> Arc<ImplDatum> {
let trait_ref = db
@ -850,13 +848,6 @@ fn impl_def_datum(
};
let where_clauses = convert_where_clauses(db, impl_id.into(), &bound_vars);
let negative = impl_data.is_negative;
debug!(
"impl {:?}: {}{} where {:?}",
chalk_id,
if negative { "!" } else { "" },
trait_ref.display(db, db.crate_graph()[krate].edition),
where_clauses
);
let polarity = if negative { rust_ir::Polarity::Negative } else { rust_ir::Polarity::Positive };

View file

@ -193,11 +193,20 @@ impl<'a> UnsafeVisitor<'a> {
self.resolver.reset_to_guard(guard);
}
Expr::Ref { expr, rawness: Rawness::RawPtr, mutability: _ } => {
if let Expr::Path(_) = self.body.exprs[*expr] {
match self.body.exprs[*expr] {
// Do not report unsafe for `addr_of[_mut]!(EXTERN_OR_MUT_STATIC)`,
// see https://github.com/rust-lang/rust/pull/125834.
Expr::Path(_) => return,
// https://github.com/rust-lang/rust/pull/129248
// Taking a raw ref to a deref place expr is always safe.
Expr::UnaryOp { expr, op: UnaryOp::Deref } => {
self.body
.walk_child_exprs_without_pats(expr, |child| self.walk_expr(child));
return;
}
_ => (),
}
}
Expr::MethodCall { .. } => {
if self
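
Note: this mirrors rust-lang/rust#129248 cited above — taking a raw reference to a deref place never reads memory, so it is always safe, and the visitor now only walks the nested expressions for other unsafety. An illustration that compiles without an `unsafe` block on toolchains that include that change:

```rust
fn address_of(ptr: *const i32) -> *const i32 {
    // Safe: this only computes an address; `ptr` is never read through.
    &raw const *ptr
}

fn main() {
    let x = 5;
    assert_eq!(unsafe { *address_of(&x) }, 5);
}
```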

View file

@ -262,7 +262,7 @@ pub struct UnresolvedAssocItem {
#[derive(Debug)]
pub struct UnresolvedIdent {
pub expr_or_pat: InFile<AstPtr<Either<ast::Expr, ast::Pat>>>,
pub node: InFile<(AstPtr<Either<ast::Expr, ast::Pat>>, Option<TextRange>)>,
}
#[derive(Debug)]
@ -550,11 +550,10 @@ impl AnyDiagnostic {
source_map: &hir_def::body::BodySourceMap,
) -> Option<AnyDiagnostic> {
let expr_syntax = |expr| {
source_map.expr_syntax(expr).inspect_err(|_| tracing::error!("synthetic syntax")).ok()
};
let pat_syntax = |pat| {
source_map.pat_syntax(pat).inspect_err(|_| tracing::error!("synthetic syntax")).ok()
source_map.expr_syntax(expr).inspect_err(|_| stdx::never!("synthetic syntax")).ok()
};
let pat_syntax =
|pat| source_map.pat_syntax(pat).inspect_err(|_| stdx::never!("synthetic syntax")).ok();
let expr_or_pat_syntax = |id| match id {
ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(|it| it.map(AstPtr::wrap_left)),
ExprOrPatId::PatId(pat) => pat_syntax(pat),
@ -626,8 +625,16 @@ impl AnyDiagnostic {
UnresolvedAssocItem { expr_or_pat }.into()
}
&InferenceDiagnostic::UnresolvedIdent { id } => {
let expr_or_pat = expr_or_pat_syntax(id)?;
UnresolvedIdent { expr_or_pat }.into()
let node = match id {
ExprOrPatId::ExprId(id) => match source_map.expr_syntax(id) {
Ok(syntax) => syntax.map(|it| (it.wrap_left(), None)),
Err(SyntheticSyntax) => source_map
.format_args_implicit_capture(id)?
.map(|(node, range)| (node.wrap_left(), Some(range))),
},
ExprOrPatId::PatId(id) => pat_syntax(id)?.map(|it| (it, None)),
};
UnresolvedIdent { node }.into()
}
&InferenceDiagnostic::BreakOutsideOfLoop { expr, is_break, bad_value_break } => {
let expr = expr_syntax(expr)?;

View file

@ -3105,10 +3105,10 @@ impl From<ModuleDef> for ItemInNs {
}
impl ItemInNs {
pub fn as_module_def(self) -> Option<ModuleDef> {
pub fn into_module_def(self) -> ModuleDef {
match self {
ItemInNs::Types(id) | ItemInNs::Values(id) => Some(id),
ItemInNs::Macros(_) => None,
ItemInNs::Types(id) | ItemInNs::Values(id) => id,
ItemInNs::Macros(id) => ModuleDef::Macro(id),
}
}

View file

@ -38,9 +38,9 @@ use span::{AstIdMap, EditionedFileId, FileId, HirFileIdRepr, SyntaxContextId};
use stdx::TupleExt;
use syntax::{
algo::skip_trivia_token,
ast::{self, HasAttrs as _, HasGenericParams, IsString as _},
AstNode, AstToken, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange,
TextSize,
ast::{self, HasAttrs as _, HasGenericParams},
AstNode, AstToken, Direction, SmolStr, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken,
TextRange, TextSize,
};
use triomphe::Arc;
@ -571,7 +571,7 @@ impl<'db> SemanticsImpl<'db> {
actual_macro_call: &ast::MacroCall,
speculative_args: &ast::TokenTree,
token_to_map: SyntaxToken,
) -> Option<(SyntaxNode, SyntaxToken)> {
) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> {
let SourceAnalyzer { file_id, resolver, .. } =
self.analyze_no_infer(actual_macro_call.syntax())?;
let macro_call = InFile::new(file_id, actual_macro_call);
@ -592,7 +592,7 @@ impl<'db> SemanticsImpl<'db> {
macro_file: MacroFileId,
speculative_args: &SyntaxNode,
token_to_map: SyntaxToken,
) -> Option<(SyntaxNode, SyntaxToken)> {
) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> {
hir_expand::db::expand_speculative(
self.db.upcast(),
macro_file.macro_call_id,
@ -608,7 +608,7 @@ impl<'db> SemanticsImpl<'db> {
actual_macro_call: &ast::Item,
speculative_args: &ast::Item,
token_to_map: SyntaxToken,
) -> Option<(SyntaxNode, SyntaxToken)> {
) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> {
let macro_call = self.wrap_node_infile(actual_macro_call.clone());
let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(macro_call.as_ref()))?;
hir_expand::db::expand_speculative(
@ -624,7 +624,7 @@ impl<'db> SemanticsImpl<'db> {
actual_macro_call: &ast::Attr,
speculative_args: &ast::Attr,
token_to_map: SyntaxToken,
) -> Option<(SyntaxNode, SyntaxToken)> {
) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> {
let attr = self.wrap_node_infile(actual_macro_call.clone());
let adt = actual_macro_call.syntax().parent().and_then(ast::Adt::cast)?;
let macro_call_id = self.with_ctx(|ctx| {
@ -643,8 +643,7 @@ impl<'db> SemanticsImpl<'db> {
&self,
string: &ast::String,
) -> Option<Vec<(TextRange, Option<Either<PathResolution, InlineAsmOperand>>)>> {
let quote = string.open_quote_text_range()?;
let string_start = string.syntax().text_range().start();
let token = self.wrap_token_infile(string.syntax().clone()).into_real_file().ok()?;
self.descend_into_macros_breakable(token, |token, _| {
(|| {
@ -658,7 +657,7 @@ impl<'db> SemanticsImpl<'db> {
let format_args = self.wrap_node_infile(format_args);
let res = source_analyzer
.as_format_args_parts(self.db, format_args.as_ref())?
.map(|(range, res)| (range + quote.end(), res.map(Either::Left)))
.map(|(range, res)| (range + string_start, res.map(Either::Left)))
.collect();
Some(res)
} else {
@ -672,7 +671,7 @@ impl<'db> SemanticsImpl<'db> {
.iter()
.map(|&(range, index)| {
(
range + quote.end(),
range + string_start,
Some(Either::Right(InlineAsmOperand { owner, expr, index })),
)
})
@ -690,17 +689,16 @@ impl<'db> SemanticsImpl<'db> {
original_token: SyntaxToken,
offset: TextSize,
) -> Option<(TextRange, Option<Either<PathResolution, InlineAsmOperand>>)> {
let original_string = ast::String::cast(original_token.clone())?;
let string_start = original_token.text_range().start();
let original_token = self.wrap_token_infile(original_token).into_real_file().ok()?;
let quote = original_string.open_quote_text_range()?;
self.descend_into_macros_breakable(original_token, |token, _| {
(|| {
let token = token.value;
self.resolve_offset_in_format_args(
ast::String::cast(token)?,
offset.checked_sub(quote.end())?,
offset.checked_sub(string_start)?,
)
.map(|(range, res)| (range + quote.end(), res))
.map(|(range, res)| (range + string_start, res))
})()
.map_or(ControlFlow::Continue(()), ControlFlow::Break)
})
@ -1542,6 +1540,21 @@ impl<'db> SemanticsImpl<'db> {
Some(items.iter_items().map(|(item, _)| item.into()))
}
pub fn resolve_mod_path_relative(
&self,
to: Module,
segments: impl IntoIterator<Item = SmolStr>,
) -> Option<impl Iterator<Item = ItemInNs>> {
let items = to.id.resolver(self.db.upcast()).resolve_module_path_in_items(
self.db.upcast(),
&ModPath::from_segments(
hir_def::path::PathKind::Plain,
segments.into_iter().map(|it| Name::new(&it, SyntaxContextId::ROOT)),
),
);
Some(items.iter_items().map(|(item, _)| item.into()))
}
fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantId> {
self.analyze(record_lit.syntax())?.resolve_variant(self.db, record_lit)
}
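
Note: the new `resolve_mod_path_relative` resolves a plain path starting from a given module rather than from the crate root. A toy model of that behavior (assumed and heavily simplified — the real method goes through the resolver and yields `ItemInNs` values):

```rust
use std::collections::HashMap;

#[derive(Default)]
struct Module {
    children: HashMap<&'static str, Module>,
}

fn resolve_relative<'a>(
    mut module: &'a Module,
    segments: impl IntoIterator<Item = &'static str>,
) -> Option<&'a Module> {
    for segment in segments {
        module = module.children.get(segment)?;
    }
    Some(module)
}

fn main() {
    let mut root = Module::default();
    root.children.insert("foo", Module::default());
    root.children.get_mut("foo").unwrap().children.insert("bar", Module::default());
    assert!(resolve_relative(&root, ["foo", "bar"]).is_some());
    assert!(resolve_relative(&root, ["baz"]).is_none());
}
```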

View file

@ -3,6 +3,7 @@
use hir::{FileRange, Semantics};
use ide_db::EditionedFileId;
use ide_db::{label::Label, FileId, RootDatabase};
use syntax::Edition;
use syntax::{
algo::{self, find_node_at_offset, find_node_at_range},
AstNode, AstToken, Direction, SourceFile, SyntaxElement, SyntaxKind, SyntaxToken, TextRange,
@ -94,6 +95,10 @@ impl<'a> AssistContext<'a> {
self.frange.file_id
}
pub(crate) fn edition(&self) -> Edition {
self.frange.file_id.edition()
}
pub(crate) fn has_empty_selection(&self) -> bool {
self.trimmed_range.is_empty()
}

View file

@ -1,10 +1,12 @@
use ide_db::text_edit::TextRange;
use ide_db::{
assists::{AssistId, AssistKind},
defs::Definition,
search::{FileReference, SearchScope, UsageSearchResult},
search::{FileReference, SearchScope},
syntax_helpers::suggest_name,
text_edit::TextRange,
};
use itertools::Itertools;
use syntax::SmolStr;
use syntax::{
ast::{self, make, AstNode, FieldExpr, HasName, IdentPat},
ted,
@ -122,33 +124,43 @@ fn collect_data(ident_pat: IdentPat, ctx: &AssistContext<'_>) -> Option<TupleDat
return None;
}
let name = ident_pat.name()?.to_string();
let usages = ctx.sema.to_def(&ident_pat).map(|def| {
let usages = ctx.sema.to_def(&ident_pat).and_then(|def| {
Definition::Local(def)
.usages(&ctx.sema)
.in_scope(&SearchScope::single_file(ctx.file_id()))
.all()
.iter()
.next()
.map(|(_, refs)| refs.to_vec())
});
let field_names = (0..field_types.len())
.map(|i| generate_name(ctx, i, &name, &ident_pat, &usages))
let mut name_generator = {
let mut names = vec![];
if let Some(scope) = ctx.sema.scope(ident_pat.syntax()) {
scope.process_all_names(&mut |name, scope| {
if let hir::ScopeDef::Local(_) = scope {
names.push(name.as_str().into())
}
})
}
suggest_name::NameGenerator::new_with_names(names.iter().map(|s: &SmolStr| s.as_str()))
};
let field_names = field_types
.into_iter()
.enumerate()
.map(|(id, ty)| {
match name_generator.for_type(&ty, ctx.db(), ctx.edition()) {
Some(name) => name,
None => name_generator.suggest_name(&format!("_{}", id)),
}
.to_string()
})
.collect::<Vec<_>>();
Some(TupleData { ident_pat, ref_type, field_names, usages })
}
fn generate_name(
_ctx: &AssistContext<'_>,
index: usize,
_tuple_name: &str,
_ident_pat: &IdentPat,
_usages: &Option<UsageSearchResult>,
) -> String {
// FIXME: detect if name already used
format!("_{index}")
}
enum RefType {
ReadOnly,
Mutable,
@ -157,7 +169,7 @@ struct TupleData {
ident_pat: IdentPat,
ref_type: Option<RefType>,
field_names: Vec<String>,
usages: Option<UsageSearchResult>,
usages: Option<Vec<FileReference>>,
}
fn edit_tuple_assignment(
ctx: &AssistContext<'_>,
@ -213,9 +225,6 @@ fn edit_tuple_usages(
ctx: &AssistContext<'_>,
in_sub_pattern: bool,
) -> Option<Vec<EditTupleUsage>> {
let mut current_file_usages = None;
if let Some(usages) = data.usages.as_ref() {
// We need to collect edits first before actually applying them
// as mapping nodes to their mutable node versions requires an
// unmodified syntax tree.
@ -224,31 +233,15 @@ fn edit_tuple_usages(
// tree mutation in the same file breaks when `builder.edit_file`
// is called
if let Some((_, refs)) = usages.iter().find(|(file_id, _)| *file_id == ctx.file_id()) {
current_file_usages = Some(
refs.iter()
.filter_map(|r| edit_tuple_usage(ctx, edit, r, data, in_sub_pattern))
.collect_vec(),
);
}
for (file_id, refs) in usages.iter() {
if file_id == ctx.file_id() {
continue;
}
edit.edit_file(file_id.file_id());
let tuple_edits = refs
let edits = data
.usages
.as_ref()?
.as_slice()
.iter()
.filter_map(|r| edit_tuple_usage(ctx, edit, r, data, in_sub_pattern))
.collect_vec();
tuple_edits.into_iter().for_each(|tuple_edit| tuple_edit.apply(edit))
}
}
current_file_usages
Some(edits)
}
fn edit_tuple_usage(
ctx: &AssistContext<'_>,
@ -1769,14 +1762,14 @@ struct S4 {
}
fn foo() -> Option<()> {
let ($0_0, _1, _2, _3, _4, _5) = &(0, (1,"1"), Some(2), [3;3], S4 { value: 4 }, &5);
let ($0_0, _1, _2, _3, s4, _5) = &(0, (1,"1"), Some(2), [3;3], S4 { value: 4 }, &5);
let v: i32 = *_0; // deref, no parens
let v: &i32 = _0; // no deref, no parens, remove `&`
f1(*_0); // deref, no parens
f2(_0); // `&*` -> cancel out -> no deref, no parens
// https://github.com/rust-lang/rust-analyzer/issues/1109#issuecomment-658868639
// let v: i32 = t.1.0; // no deref, no parens
let v: i32 = _4.value; // no deref, no parens
let v: i32 = s4.value; // no deref, no parens
(*_0).do_stuff(); // deref, parens
let v: i32 = (*_2)?; // deref, parens
let v: i32 = _3[0]; // no deref, no parens
@ -1815,8 +1808,8 @@ impl S {
}
fn main() {
let ($0_0, _1) = &(S,2);
let s = _0.f();
let ($0s, _1) = &(S,2);
let s = s.f();
}
"#,
)
@ -1845,8 +1838,8 @@ impl S {
}
fn main() {
let ($0_0, _1) = &(S,2);
let s = (*_0).f();
let ($0s, _1) = &(S,2);
let s = (*s).f();
}
"#,
)
@ -1882,8 +1875,8 @@ impl T for &S {
}
fn main() {
let ($0_0, _1) = &(S,2);
let s = (*_0).f();
let ($0s, _1) = &(S,2);
let s = (*s).f();
}
"#,
)
@ -1923,8 +1916,8 @@ impl T for &S {
}
fn main() {
let ($0_0, _1) = &(S,2);
let s = (*_0).f();
let ($0s, _1) = &(S,2);
let s = (*s).f();
}
"#,
)
@ -1951,8 +1944,8 @@ impl S {
fn do_stuff(&self) -> i32 { 42 }
}
fn main() {
let ($0_0, _1) = &(S,&S);
let v = _0.do_stuff();
let ($0s, s1) = &(S,&S);
let v = s.do_stuff();
}
"#,
)
@ -1973,7 +1966,7 @@ fn main() {
// `t.0` gets auto-refed -> no deref needed -> no parens
let v = t.0.do_stuff(); // no deref, no parens
let v = &t.0.do_stuff(); // `&` is for result -> no deref, no parens
// deref: `_1` is `&&S`, but method called is on `&S` -> there might be a method accepting `&&S`
// deref: `s1` is `&&S`, but method called is on `&S` -> there might be a method accepting `&&S`
let v = t.1.do_stuff(); // deref, parens
}
"#,
@ -1984,13 +1977,13 @@ impl S {
fn do_stuff(&self) -> i32 { 42 }
}
fn main() {
let ($0_0, _1) = &(S,&S);
let v = _0.do_stuff(); // no deref, remove parens
let ($0s, s1) = &(S,&S);
let v = s.do_stuff(); // no deref, remove parens
// `t.0` gets auto-refed -> no deref needed -> no parens
let v = _0.do_stuff(); // no deref, no parens
let v = &_0.do_stuff(); // `&` is for result -> no deref, no parens
// deref: `_1` is `&&S`, but method called is on `&S` -> there might be a method accepting `&&S`
let v = (*_1).do_stuff(); // deref, parens
let v = s.do_stuff(); // no deref, no parens
let v = &s.do_stuff(); // `&` is for result -> no deref, no parens
// deref: `s1` is `&&S`, but method called is on `&S` -> there might be a method accepting `&&S`
let v = (*s1).do_stuff(); // deref, parens
}
"#,
)
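
Note: the updated tests reflect the new naming strategy — field names now come from `NameGenerator`, seeded with every local already in scope, so `(S, &S)` destructures to `s` and `s1` instead of `_0` and `_1`. A self-contained sketch of that seeding idea (assumed, simplified):

```rust
use std::collections::HashSet;

struct NameGenerator {
    taken: HashSet<String>,
}

impl NameGenerator {
    fn new_with_names<'a>(names: impl Iterator<Item = &'a str>) -> Self {
        Self { taken: names.map(|name| name.to_owned()).collect() }
    }

    // Return `base` if free, otherwise `base1`, `base2`, ...
    fn suggest_name(&mut self, base: &str) -> String {
        let mut candidate = base.to_owned();
        let mut n = 0u32;
        while self.taken.contains(&candidate) {
            n += 1;
            candidate = format!("{base}{n}");
        }
        self.taken.insert(candidate.clone());
        candidate
    }
}

fn main() {
    let in_scope = ["s", "v"];
    let mut generator = NameGenerator::new_with_names(in_scope.iter().copied());
    assert_eq!(generator.suggest_name("s"), "s1"); // `s` is already bound
    assert_eq!(generator.suggest_name("x"), "x");
}
```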

View file

@ -1,5 +1,8 @@
use hir::{HirDisplay, TypeInfo};
use ide_db::{assists::GroupLabel, syntax_helpers::suggest_name};
use ide_db::{
assists::GroupLabel,
syntax_helpers::{suggest_name, LexedStr},
};
use syntax::{
ast::{
self, edit::IndentLevel, edit_in_place::Indent, make, syntax_factory::SyntaxFactory,
@ -320,24 +323,58 @@ impl ExtractionKind {
ctx: &AssistContext<'_>,
to_extract: &ast::Expr,
) -> (String, SyntaxNode) {
// We only do this sort of extraction for fields because they should have lowercase names
if let ExtractionKind::Variable = self {
let field_shorthand = to_extract
.syntax()
.parent()
.and_then(ast::RecordExprField::cast)
.filter(|field| field.name_ref().is_some());
let (var_name, expr_replace) = match field_shorthand {
Some(field) => (field.to_string(), field.syntax().clone()),
None => {
(suggest_name::for_variable(to_extract, &ctx.sema), to_extract.syntax().clone())
if let Some(field) = field_shorthand {
return (field.to_string(), field.syntax().clone());
}
}
let var_name = if let Some(literal_name) = get_literal_name(ctx, to_extract) {
literal_name
} else {
suggest_name::for_variable(to_extract, &ctx.sema)
};
let var_name = match self {
ExtractionKind::Variable => var_name,
ExtractionKind::Variable => var_name.to_lowercase(),
ExtractionKind::Constant | ExtractionKind::Static => var_name.to_uppercase(),
};
(var_name, expr_replace)
(var_name, to_extract.syntax().clone())
}
}
fn get_literal_name(ctx: &AssistContext<'_>, expr: &ast::Expr) -> Option<String> {
let literal = match expr {
ast::Expr::Literal(literal) => literal,
_ => return None,
};
let inner = match literal.kind() {
ast::LiteralKind::String(string) => string.value().ok()?.into_owned(),
ast::LiteralKind::ByteString(byte_string) => {
String::from_utf8(byte_string.value().ok()?.into_owned()).ok()?
}
ast::LiteralKind::CString(cstring) => {
String::from_utf8(cstring.value().ok()?.into_owned()).ok()?
}
_ => return None,
};
// Entirely arbitrary length limit
if inner.len() > 32 {
return None;
}
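// Only reuse the literal's contents as a name when they lex as a single identifier
// in this edition, e.g. `"hello"` yields `hello`, while `"hello world"` or `"42"`
// yield no suggestion.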
match LexedStr::single_token(ctx.file_id().edition(), &inner) {
Some((SyntaxKind::IDENT, None)) => Some(inner),
_ => None,
}
}
@ -493,7 +530,7 @@ fn main() {
"#,
r#"
fn main() {
let $0var_name = "hello";
let $0hello = "hello";
}
"#,
"Extract into variable",
@ -588,7 +625,7 @@ fn main() {
"#,
r#"
fn main() {
const $0VAR_NAME: &str = "hello";
const $0HELLO: &str = "hello";
}
"#,
"Extract into constant",
@ -683,7 +720,7 @@ fn main() {
"#,
r#"
fn main() {
static $0VAR_NAME: &str = "hello";
static $0HELLO: &str = "hello";
}
"#,
"Extract into static",
@ -2479,4 +2516,120 @@ fn foo() {
"Extract into variable",
);
}
#[test]
fn extract_string_literal() {
check_assist_by_label(
extract_variable,
r#"
struct Entry(&str);
fn foo() {
let entry = Entry($0"Hello"$0);
}
"#,
r#"
struct Entry(&str);
fn foo() {
let $0hello = "Hello";
let entry = Entry(hello);
}
"#,
"Extract into variable",
);
check_assist_by_label(
extract_variable,
r#"
struct Entry(&str);
fn foo() {
let entry = Entry($0"Hello"$0);
}
"#,
r#"
struct Entry(&str);
fn foo() {
const $0HELLO: &str = "Hello";
let entry = Entry(HELLO);
}
"#,
"Extract into constant",
);
check_assist_by_label(
extract_variable,
r#"
struct Entry(&str);
fn foo() {
let entry = Entry($0"Hello"$0);
}
"#,
r#"
struct Entry(&str);
fn foo() {
static $0HELLO: &str = "Hello";
let entry = Entry(HELLO);
}
"#,
"Extract into static",
);
}
#[test]
fn extract_variable_string_literal_use_field_shorthand() {
// When field shorthand is available, it should
// only be used when extracting into a variable
check_assist_by_label(
extract_variable,
r#"
struct Entry { message: &str }
fn foo() {
let entry = Entry { message: $0"Hello"$0 };
}
"#,
r#"
struct Entry { message: &str }
fn foo() {
let $0message = "Hello";
let entry = Entry { message };
}
"#,
"Extract into variable",
);
check_assist_by_label(
extract_variable,
r#"
struct Entry { message: &str }
fn foo() {
let entry = Entry { message: $0"Hello"$0 };
}
"#,
r#"
struct Entry { message: &str }
fn foo() {
const $0HELLO: &str = "Hello";
let entry = Entry { message: HELLO };
}
"#,
"Extract into constant",
);
check_assist_by_label(
extract_variable,
r#"
struct Entry { message: &str }
fn foo() {
let entry = Entry { message: $0"Hello"$0 };
}
"#,
r#"
struct Entry { message: &str }
fn foo() {
static $0HELLO: &str = "Hello";
let entry = Entry { message: HELLO };
}
"#,
"Extract into static",
);
}
}

View file

@ -86,7 +86,7 @@ fn item_for_path_search(db: &dyn HirDatabase, item: ItemInNs) -> Option<ItemInNs
}
fn item_as_assoc(db: &dyn HirDatabase, item: ItemInNs) -> Option<AssocItem> {
item.as_module_def().and_then(|module_def| module_def.as_assoc_item(db))
item.into_module_def().as_assoc_item(db)
}
#[cfg(test)]

View file

@ -51,7 +51,7 @@ pub(crate) fn qualify_path(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
let candidate = import_assets.import_candidate();
let qualify_candidate = match syntax_under_caret.clone() {
NodeOrToken::Node(syntax_under_caret) => match candidate {
ImportCandidate::Path(candidate) if candidate.qualifier.is_some() => {
ImportCandidate::Path(candidate) if !candidate.qualifier.is_empty() => {
cov_mark::hit!(qualify_path_qualifier_start);
let path = ast::Path::cast(syntax_under_caret)?;
let (prev_segment, segment) = (path.qualifier()?.segment()?, path.segment()?);
@ -219,11 +219,9 @@ fn find_trait_method(
}
fn item_as_trait(db: &RootDatabase, item: hir::ItemInNs) -> Option<hir::Trait> {
let item_module_def = item.as_module_def()?;
match item_module_def {
match item.into_module_def() {
hir::ModuleDef::Trait(trait_) => Some(trait_),
_ => item_module_def.as_assoc_item(db)?.container_trait(db),
item_module_def => item_module_def.as_assoc_item(db)?.container_trait(db),
}
}
@ -247,7 +245,7 @@ fn label(
let import_path = &import.import_path;
match candidate {
ImportCandidate::Path(candidate) if candidate.qualifier.is_none() => {
ImportCandidate::Path(candidate) if candidate.qualifier.is_empty() => {
format!("Qualify as `{}`", import_path.display(db, edition))
}
_ => format!("Qualify with `{}`", import_path.display(db, edition)),

View file

@ -78,7 +78,7 @@ pub(crate) fn replace_derive_with_manual_impl(
NameToImport::exact_case_sensitive(path.segments().last()?.to_string()),
items_locator::AssocSearchMode::Exclude,
)
.filter_map(|item| match item.as_module_def()? {
.filter_map(|item| match item.into_module_def() {
ModuleDef::Trait(trait_) => Some(trait_),
_ => None,
})

View file

@ -12,13 +12,15 @@ use syntax::{
use crate::{AssistContext, Assists};
// FIXME: This ought to be a diagnostic lint.
// Assist: unnecessary_async
//
// Removes the `async` mark from functions which have no `.await` in their body.
// Looks for calls to the function and removes the `.await` at each call site.
//
// ```
// pub async f$0n foo() {}
// pub asy$0nc fn foo() {}
// pub async fn bar() { foo().await }
// ```
// ->
@ -29,15 +31,11 @@ use crate::{AssistContext, Assists};
pub(crate) fn unnecessary_async(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let function: ast::Fn = ctx.find_node_at_offset()?;
// Do nothing if the cursor is not on the prototype. This is so that the check does not pollute
// when the user asks us for assists when in the middle of the function body.
// We consider the prototype to be anything that is before the body of the function.
let cursor_position = ctx.offset();
if cursor_position >= function.body()?.syntax().text_range().start() {
// Do nothing if the cursor isn't on the async token.
let async_token = function.async_token()?;
if !async_token.text_range().contains_inclusive(ctx.offset()) {
return None;
}
// Do nothing if the function isn't async.
function.async_token()?;
// Do nothing if the function has an `await` expression in its body.
if function.body()?.syntax().descendants().find_map(ast::AwaitExpr::cast).is_some() {
return None;
@ -138,27 +136,22 @@ mod tests {
#[test]
fn applies_on_empty_function() {
check_assist(unnecessary_async, "pub async f$0n f() {}", "pub fn f() {}")
check_assist(unnecessary_async, "pub asy$0nc fn f() {}", "pub fn f() {}")
}
#[test]
fn applies_and_removes_whitespace() {
check_assist(unnecessary_async, "pub async f$0n f() {}", "pub fn f() {}")
}
#[test]
fn does_not_apply_on_non_async_function() {
check_assist_not_applicable(unnecessary_async, "pub f$0n f() {}")
check_assist(unnecessary_async, "pub async$0 fn f() {}", "pub fn f() {}")
}
#[test]
fn applies_on_function_with_a_non_await_expr() {
check_assist(unnecessary_async, "pub async f$0n f() { f2() }", "pub fn f() { f2() }")
check_assist(unnecessary_async, "pub asy$0nc fn f() { f2() }", "pub fn f() { f2() }")
}
#[test]
fn does_not_apply_on_function_with_an_await_expr() {
check_assist_not_applicable(unnecessary_async, "pub async f$0n f() { f2().await }")
check_assist_not_applicable(unnecessary_async, "pub asy$0nc fn f() { f2().await }")
}
#[test]
@ -167,7 +160,7 @@ mod tests {
unnecessary_async,
r#"
pub async fn f4() { }
pub async f$0n f2() { }
pub asy$0nc fn f2() { }
pub async fn f() { f2().await }
pub async fn f3() { f2().await }"#,
r#"
@ -184,7 +177,7 @@ pub async fn f3() { f2() }"#,
unnecessary_async,
r#"
pub async fn f4() { }
mod a { pub async f$0n f2() { } }
mod a { pub asy$0nc fn f2() { } }
pub async fn f() { a::f2().await }
pub async fn f3() { a::f2().await }"#,
r#"
@ -202,7 +195,7 @@ pub async fn f3() { a::f2() }"#,
// Ensure that it is the first await on the 3rd line that is removed
r#"
pub async fn f() { f2().await }
pub async f$0n f2() -> i32 { 1 }
pub asy$0nc fn f2() -> i32 { 1 }
pub async fn f3() { f4(f2().await).await }
pub async fn f4(i: i32) { }"#,
r#"
@ -220,7 +213,7 @@ pub async fn f4(i: i32) { }"#,
// Ensure that it is the second await on the 3rd line that is removed
r#"
pub async fn f() { f2().await }
pub async f$0n f2(i: i32) { }
pub async$0 fn f2(i: i32) { }
pub async fn f3() { f2(f4().await).await }
pub async fn f4() -> i32 { 1 }"#,
r#"
@ -237,7 +230,7 @@ pub async fn f4() -> i32 { 1 }"#,
unnecessary_async,
r#"
pub struct S { }
impl S { pub async f$0n f2(&self) { } }
impl S { pub async$0 fn f2(&self) { } }
pub async fn f(s: &S) { s.f2().await }"#,
r#"
pub struct S { }
@ -250,13 +243,13 @@ pub async fn f(s: &S) { s.f2() }"#,
fn does_not_apply_on_function_with_a_nested_await_expr() {
check_assist_not_applicable(
unnecessary_async,
"async f$0n f() { if true { loop { f2().await } } }",
"async$0 fn f() { if true { loop { f2().await } } }",
)
}
#[test]
fn does_not_apply_when_not_on_prototype() {
check_assist_not_applicable(unnecessary_async, "pub async fn f() { $0f2() }")
fn does_not_apply_when_not_on_async_token() {
check_assist_not_applicable(unnecessary_async, "pub async fn$0 f() { f2() }")
}
#[test]

View file

@ -198,7 +198,7 @@ fn wrapper_alias(
);
ctx.sema.resolve_mod_path(ret_type.syntax(), &wrapper_path).and_then(|def| {
def.filter_map(|def| match def.as_module_def()? {
def.filter_map(|def| match def.into_module_def() {
hir::ModuleDef::TypeAlias(alias) => {
let enum_ty = alias.ty(ctx.db()).as_adt()?.as_enum()?;
(&enum_ty == core_wrapper).then_some(alias)

View file

@ -3280,7 +3280,7 @@ fn doctest_unnecessary_async() {
check_doc_test(
"unnecessary_async",
r#####"
pub async f$0n foo() {}
pub asy$0nc fn foo() {}
pub async fn bar() { foo().await }
"#####,
r#####"

View file

@ -718,7 +718,7 @@ impl<'a> CompletionContext<'a> {
expected: (expected_type, expected_name),
qualifier_ctx,
token,
offset,
original_offset,
} = expand_and_analyze(
&sema,
original_file.syntax().clone(),
@ -728,7 +728,7 @@ impl<'a> CompletionContext<'a> {
)?;
// adjust for macro input; this still fails if there is no token written yet
let scope = sema.scope_at_offset(&token.parent()?, offset)?;
let scope = sema.scope_at_offset(&token.parent()?, original_offset)?;
let krate = scope.krate();
let module = scope.module();

View file

@ -22,10 +22,14 @@ use crate::context::{
COMPLETION_MARKER,
};
#[derive(Debug)]
struct ExpansionResult {
original_file: SyntaxNode,
speculative_file: SyntaxNode,
offset: TextSize,
/// The offset in the original file.
original_offset: TextSize,
/// The offset in the speculatively expanded file.
speculative_offset: TextSize,
fake_ident_token: SyntaxToken,
derive_ctx: Option<(SyntaxNode, SyntaxNode, TextSize, ast::Attr)>,
}
@ -36,7 +40,8 @@ pub(super) struct AnalysisResult {
pub(super) qualifier_ctx: QualifierCtx,
/// the original token of the expanded file
pub(super) token: SyntaxToken,
pub(super) offset: TextSize,
/// The offset in the original file.
pub(super) original_offset: TextSize,
}
pub(super) fn expand_and_analyze(
@ -54,39 +59,96 @@ pub(super) fn expand_and_analyze(
// make the offset point to the start of the original token, as that is what the
// intermediate offsets calculated in expansion always point to
let offset = offset - relative_offset;
let expansion =
expand(sema, original_file, speculative_file, offset, fake_ident_token, relative_offset);
let expansion = expand(
sema,
original_file.clone(),
speculative_file.clone(),
offset,
fake_ident_token.clone(),
relative_offset,
)
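// If nothing could be expanded, fall back to completing in the unexpanded original
// and speculative files at the original offset.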
.unwrap_or(ExpansionResult {
original_file,
speculative_file,
original_offset: offset,
speculative_offset: fake_ident_token.text_range().start(),
fake_ident_token,
derive_ctx: None,
});
// add the relative offset back, so that left_biased finds the proper token
let offset = expansion.offset + relative_offset;
let token = expansion.original_file.token_at_offset(offset).left_biased()?;
let original_offset = expansion.original_offset + relative_offset;
let token = expansion.original_file.token_at_offset(original_offset).left_biased()?;
analyze(sema, expansion, original_token, &token).map(|(analysis, expected, qualifier_ctx)| {
AnalysisResult { analysis, expected, qualifier_ctx, token, offset }
AnalysisResult { analysis, expected, qualifier_ctx, token, original_offset }
})
}
/// Expand attributes and macro calls at the current cursor position for both the original file
/// and fake file repeatedly. As soon as one of the two expansions fails we stop so the original
/// and speculative states stay in sync.
///
/// We do this by recursively expanding all macros and picking the best possible match. We cannot just
/// choose the first expansion each time because macros can expand to something that does not include
/// our completion marker, e.g.:
/// ```
/// macro_rules! helper { ($v:ident) => {} }
/// macro_rules! my_macro {
/// ($v:ident) => {
/// helper!($v);
/// $v
/// };
/// }
///
/// my_macro!(complete_me_here)
/// ```
/// If we expanded only the first thing we encounter (which is in fact what this method used to do), we would
/// be unable to complete here, because we would be walking directly into the void. So we instead try
/// *every* possible path.
///
/// This can also create discrepancies between the speculative and real expansions: because we insert
/// tokens, we insert characters, which means if we try the second occurrence it may not be at the same
/// position in the original and speculative file. We take an educated guess here, and for each token
/// that we check, we subtract `COMPLETION_MARKER.len()`. This may not be accurate because proc macros
/// can insert the text of the completion marker in other places while removing the span, but this is
/// the best we can do.
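///
/// A rough, illustrative example of that guess (the numbers are made up): if the
/// marker were 8 bytes long and two candidate tokens containing it had already been
/// checked, the third candidate would sit `2 * 8` bytes further right in the
/// speculative file than its counterpart in the original file, so we subtract that
/// accumulated amount from its offset before recursing.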
fn expand(
sema: &Semantics<'_, RootDatabase>,
mut original_file: SyntaxNode,
mut speculative_file: SyntaxNode,
mut offset: TextSize,
mut fake_ident_token: SyntaxToken,
original_file: SyntaxNode,
speculative_file: SyntaxNode,
original_offset: TextSize,
fake_ident_token: SyntaxToken,
relative_offset: TextSize,
) -> ExpansionResult {
) -> Option<ExpansionResult> {
let _p = tracing::info_span!("CompletionContext::expand").entered();
let mut derive_ctx = None;
'expansion: loop {
if !sema.might_be_inside_macro_call(&fake_ident_token)
&& original_file
.token_at_offset(original_offset + relative_offset)
.right_biased()
.is_some_and(|original_token| !sema.might_be_inside_macro_call(&original_token))
{
// Recursion base case: neither the fake ident token nor the original token can be
// inside a macro call anymore, so there is nothing left to expand.
return Some(ExpansionResult {
original_file,
speculative_file,
original_offset,
speculative_offset: fake_ident_token.text_range().start(),
fake_ident_token,
derive_ctx: None,
});
}
let parent_item =
|item: &ast::Item| item.syntax().ancestors().skip(1).find_map(ast::Item::cast);
let ancestor_items = iter::successors(
Option::zip(
find_node_at_offset::<ast::Item>(&original_file, offset),
find_node_at_offset::<ast::Item>(&speculative_file, offset),
find_node_at_offset::<ast::Item>(&original_file, original_offset),
find_node_at_offset::<ast::Item>(
&speculative_file,
fake_ident_token.text_range().start(),
),
),
|(a, b)| parent_item(a).zip(parent_item(b)),
);
@ -106,40 +168,57 @@ fn expand(
// successful expansions
(
Some(ExpandResult { value: actual_expansion, err: _ }),
Some((fake_expansion, fake_mapped_token)),
Some((fake_expansion, fake_mapped_tokens)),
) => {
let new_offset = fake_mapped_token.text_range().start();
if new_offset + relative_offset > actual_expansion.text_range().end() {
let mut accumulated_offset_from_fake_tokens = 0;
let actual_range = actual_expansion.text_range().end();
let result = fake_mapped_tokens
.into_iter()
.filter_map(|(fake_mapped_token, rank)| {
let accumulated_offset = accumulated_offset_from_fake_tokens;
if !fake_mapped_token.text().contains(COMPLETION_MARKER) {
// Proc macros can make the same span with different text; we don't
// want them to participate in completion because the macro author probably
// didn't intend them to.
return None;
}
accumulated_offset_from_fake_tokens += COMPLETION_MARKER.len();
let new_offset = fake_mapped_token.text_range().start()
- TextSize::new(accumulated_offset as u32);
if new_offset + relative_offset > actual_range {
// offset is outside the bounds of the original expansion,
// stop here to prevent problems from happening
break 'expansion;
return None;
}
let result = expand(
sema,
actual_expansion.clone(),
fake_expansion.clone(),
new_offset,
fake_mapped_token,
relative_offset,
)?;
Some((result, rank))
})
.min_by_key(|(_, rank)| *rank)
.map(|(result, _)| result);
if result.is_some() {
return result;
}
original_file = actual_expansion;
speculative_file = fake_expansion;
fake_ident_token = fake_mapped_token;
offset = new_offset;
continue 'expansion;
}
// exactly one expansion failed, inconsistent state so stop expanding completely
_ => break 'expansion,
_ => break 'ancestors,
}
}
// No attributes have been expanded, so look for macro_call! token trees or derive token trees
let orig_tt = match ancestors_at_offset(&original_file, offset)
let orig_tt = ancestors_at_offset(&original_file, original_offset)
.map_while(Either::<ast::TokenTree, ast::Meta>::cast)
.last()
{
Some(it) => it,
None => break 'expansion,
};
let spec_tt = match ancestors_at_offset(&speculative_file, offset)
.last()?;
let spec_tt = ancestors_at_offset(&speculative_file, fake_ident_token.text_range().start())
.map_while(Either::<ast::TokenTree, ast::Meta>::cast)
.last()
{
Some(it) => it,
None => break 'expansion,
};
.last()?;
let (tts, attrs) = match (orig_tt, spec_tt) {
(Either::Left(orig_tt), Either::Left(spec_tt)) => {
@ -160,12 +239,12 @@ fn expand(
(Either::Right(orig_path), Either::Right(spec_path)) => {
(None, orig_path.parent_attr().zip(spec_path.parent_attr()))
}
_ => break 'expansion,
_ => return None,
};
// Expand pseudo-derive expansion aka `derive(Debug$0)`
if let Some((orig_attr, spec_attr)) = attrs {
if let (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) = (
if let (Some(actual_expansion), Some((fake_expansion, fake_mapped_tokens))) = (
sema.expand_derive_as_pseudo_attr_macro(&orig_attr),
sema.speculative_expand_derive_as_pseudo_attr_macro(
&orig_attr,
@ -173,13 +252,23 @@ fn expand(
fake_ident_token.clone(),
),
) {
derive_ctx = Some((
if let Some((fake_mapped_token, _)) =
fake_mapped_tokens.into_iter().min_by_key(|(_, rank)| *rank)
{
return Some(ExpansionResult {
original_file,
speculative_file,
original_offset,
speculative_offset: fake_ident_token.text_range().start(),
fake_ident_token,
derive_ctx: Some((
actual_expansion,
fake_expansion,
fake_mapped_token.text_range().start(),
orig_attr,
));
break 'expansion;
)),
});
}
}
if let Some(spec_adt) =
@ -193,53 +282,72 @@ fn expand(
// might be the path of a derive helper or a token tree inside of one
if let Some(helpers) = sema.derive_helper(&orig_attr) {
for (_mac, file) in helpers {
if let Some((fake_expansion, fake_mapped_token)) = sema
.speculative_expand_raw(
if let Some((fake_expansion, fake_mapped_tokens)) = sema.speculative_expand_raw(
file,
spec_adt.syntax(),
fake_ident_token.clone(),
)
{
) {
// we are inside a derive helper token tree; treat this as being inside
// the derive expansion
let actual_expansion = sema.parse_or_expand(file.into());
let new_offset = fake_mapped_token.text_range().start();
if new_offset + relative_offset > actual_expansion.text_range().end() {
let mut accumulated_offset_from_fake_tokens = 0;
let actual_range = actual_expansion.text_range().end();
let result = fake_mapped_tokens
.into_iter()
.filter_map(|(fake_mapped_token, rank)| {
let accumulated_offset = accumulated_offset_from_fake_tokens;
if !fake_mapped_token.text().contains(COMPLETION_MARKER) {
// Proc macros can make the same span with different text; we don't
// want them to participate in completion because the macro author probably
// didn't intend them to.
return None;
}
accumulated_offset_from_fake_tokens += COMPLETION_MARKER.len();
let new_offset = fake_mapped_token.text_range().start()
- TextSize::new(accumulated_offset as u32);
if new_offset + relative_offset > actual_range {
// offset is outside the bounds of the original expansion,
// stop here to prevent problems from happening
break 'expansion;
return None;
}
let result = expand(
sema,
actual_expansion.clone(),
fake_expansion.clone(),
new_offset,
fake_mapped_token,
relative_offset,
)?;
Some((result, rank))
})
.min_by_key(|(_, rank)| *rank)
.map(|(result, _)| result);
if result.is_some() {
return result;
}
original_file = actual_expansion;
speculative_file = fake_expansion;
fake_ident_token = fake_mapped_token;
offset = new_offset;
continue 'expansion;
}
}
}
}
// at this point we won't have any more successful expansions, so stop
break 'expansion;
return None;
}
// Expand fn-like macro calls
let Some((orig_tt, spec_tt)) = tts else { break 'expansion };
if let (Some(actual_macro_call), Some(macro_call_with_fake_ident)) = (
orig_tt.syntax().parent().and_then(ast::MacroCall::cast),
spec_tt.syntax().parent().and_then(ast::MacroCall::cast),
) {
let (orig_tt, spec_tt) = tts?;
let (actual_macro_call, macro_call_with_fake_ident) = (
orig_tt.syntax().parent().and_then(ast::MacroCall::cast)?,
spec_tt.syntax().parent().and_then(ast::MacroCall::cast)?,
);
let mac_call_path0 = actual_macro_call.path().as_ref().map(|s| s.syntax().text());
let mac_call_path1 =
macro_call_with_fake_ident.path().as_ref().map(|s| s.syntax().text());
let mac_call_path1 = macro_call_with_fake_ident.path().as_ref().map(|s| s.syntax().text());
// inconsistent state, stop expanding
if mac_call_path0 != mac_call_path1 {
break 'expansion;
return None;
}
let speculative_args = match macro_call_with_fake_ident.token_tree() {
Some(tt) => tt,
None => break 'expansion,
};
let speculative_args = macro_call_with_fake_ident.token_tree()?;
match (
sema.expand_macro_call(&actual_macro_call),
@ -250,32 +358,47 @@ fn expand(
),
) {
// successful expansions
(Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) => {
let new_offset = fake_mapped_token.text_range().start();
if new_offset + relative_offset > actual_expansion.text_range().end() {
(Some(actual_expansion), Some((fake_expansion, fake_mapped_tokens))) => {
let mut accumulated_offset_from_fake_tokens = 0;
let actual_range = actual_expansion.text_range().end();
fake_mapped_tokens
.into_iter()
.filter_map(|(fake_mapped_token, rank)| {
let accumulated_offset = accumulated_offset_from_fake_tokens;
if !fake_mapped_token.text().contains(COMPLETION_MARKER) {
// Proc macros can make the same span with different text; we don't
// want them to participate in completion because the macro author probably
// didn't intend them to.
return None;
}
accumulated_offset_from_fake_tokens += COMPLETION_MARKER.len();
let new_offset = fake_mapped_token.text_range().start()
- TextSize::new(accumulated_offset as u32);
if new_offset + relative_offset > actual_range {
// offset is outside the bounds of the original expansion,
// stop here to prevent problems from happening
break 'expansion;
return None;
}
original_file = actual_expansion;
speculative_file = fake_expansion;
fake_ident_token = fake_mapped_token;
offset = new_offset;
continue 'expansion;
let result = expand(
sema,
actual_expansion.clone(),
fake_expansion.clone(),
new_offset,
fake_mapped_token,
relative_offset,
)?;
Some((result, rank))
})
.min_by_key(|(_, rank)| *rank)
.map(|(result, _)| result)
}
// at least on expansion failed, we won't have anything to expand from this point
// at least one expansion failed, we won't have anything to expand from this point
// onwards so break out
_ => break 'expansion,
_ => None,
}
}
// none of our states have changed so stop the loop
break 'expansion;
}
ExpansionResult { original_file, speculative_file, offset, fake_ident_token, derive_ctx }
}
/// Fill the completion context, this is what does semantic reasoning about the surrounding context
/// of the completion location.
fn analyze(
@ -285,8 +408,14 @@ fn analyze(
self_token: &SyntaxToken,
) -> Option<(CompletionAnalysis, (Option<Type>, Option<ast::NameOrNameRef>), QualifierCtx)> {
let _p = tracing::info_span!("CompletionContext::analyze").entered();
let ExpansionResult { original_file, speculative_file, offset, fake_ident_token, derive_ctx } =
expansion_result;
let ExpansionResult {
original_file,
speculative_file,
original_offset: _,
speculative_offset,
fake_ident_token,
derive_ctx,
} = expansion_result;
// Overwrite the path kind for derives
if let Some((original_file, file_with_fake_ident, offset, origin_attr)) = derive_ctx {
@ -294,7 +423,8 @@ fn analyze(
find_node_at_offset(&file_with_fake_ident, offset)
{
let parent = name_ref.syntax().parent()?;
let (mut nameref_ctx, _) = classify_name_ref(sema, &original_file, name_ref, parent)?;
let (mut nameref_ctx, _) =
classify_name_ref(sema, &original_file, name_ref, offset, parent)?;
if let NameRefKind::Path(path_ctx) = &mut nameref_ctx.kind {
path_ctx.kind = PathKind::Derive {
existing_derives: sema
@ -314,7 +444,7 @@ fn analyze(
return None;
}
let Some(name_like) = find_node_at_offset(&speculative_file, offset) else {
let Some(name_like) = find_node_at_offset(&speculative_file, speculative_offset) else {
let analysis = if let Some(original) = ast::String::cast(original_token.clone()) {
CompletionAnalysis::String { original, expanded: ast::String::cast(self_token.clone()) }
} else {
@ -350,8 +480,13 @@ fn analyze(
}
ast::NameLike::NameRef(name_ref) => {
let parent = name_ref.syntax().parent()?;
let (nameref_ctx, qualifier_ctx) =
classify_name_ref(sema, &original_file, name_ref, parent)?;
let (nameref_ctx, qualifier_ctx) = classify_name_ref(
sema,
&original_file,
name_ref,
expansion_result.original_offset,
parent,
)?;
if let NameRefContext {
kind:
@ -636,9 +771,10 @@ fn classify_name_ref(
sema: &Semantics<'_, RootDatabase>,
original_file: &SyntaxNode,
name_ref: ast::NameRef,
original_offset: TextSize,
parent: SyntaxNode,
) -> Option<(NameRefContext, QualifierCtx)> {
let nameref = find_node_at_offset(original_file, name_ref.syntax().text_range().start());
let nameref = find_node_at_offset(original_file, original_offset);
let make_res = |kind| (NameRefContext { nameref: nameref.clone(), kind }, Default::default());
@ -760,7 +896,7 @@ fn classify_name_ref(
// We do not want to generate path completions when we are sandwiched between an item decl signature and its body.
// ex. trait Foo $0 {}
// in these cases parser recovery usually kicks in for our inserted identifier, causing it
// to either be parsed as an ExprStmt or a MacroCall, depending on whether it is in a block
// to either be parsed as an ExprStmt or an ItemRecovery, depending on whether it is in a block
// expression or an item list.
// The following code checks if the body is missing; if it is, we either cut off the body
// from the item or it was missing in the first place
@ -1088,15 +1224,10 @@ fn classify_name_ref(
PathKind::Type { location: location.unwrap_or(TypeLocation::Other) }
};
let mut kind_macro_call = |it: ast::MacroCall| {
path_ctx.has_macro_bang = it.excl_token().is_some();
let parent = it.syntax().parent()?;
// Any path in an item list will be treated as a macro call by the parser
let kind_item = |it: &SyntaxNode| {
let parent = it.parent()?;
let kind = match_ast! {
match parent {
ast::MacroExpr(expr) => make_path_kind_expr(expr.into()),
ast::MacroPat(it) => PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())},
ast::MacroType(ty) => make_path_kind_type(ty.into()),
ast::ItemList(_) => PathKind::Item { kind: ItemListKind::Module },
ast::AssocItemList(_) => PathKind::Item { kind: match parent.parent() {
Some(it) => match_ast! {
@ -1126,6 +1257,23 @@ fn classify_name_ref(
};
Some(kind)
};
let mut kind_macro_call = |it: ast::MacroCall| {
path_ctx.has_macro_bang = it.excl_token().is_some();
let parent = it.syntax().parent()?;
if let Some(kind) = kind_item(it.syntax()) {
return Some(kind);
}
let kind = match_ast! {
match parent {
ast::MacroExpr(expr) => make_path_kind_expr(expr.into()),
ast::MacroPat(it) => PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())},
ast::MacroType(ty) => make_path_kind_type(ty.into()),
_ => return None,
}
};
Some(kind)
};
let make_path_kind_attr = |meta: ast::Meta| {
let attr = meta.parent_attr()?;
let kind = attr.kind();
@ -1153,7 +1301,15 @@ fn classify_name_ref(
// Infer the path kind
let parent = path.syntax().parent()?;
let kind = match_ast! {
let kind = 'find_kind: {
if parent.kind() == SyntaxKind::ERROR {
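// Parser recovery wrapped the path in an error node. This typically happens when
// the path sits between an item declaration and its body, or is a bare path in an
// item list, so check those two cases.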
if let Some(kind) = inbetween_body_and_decl_check(parent.clone()) {
return Some(make_res(NameRefKind::Keyword(kind)));
}
break 'find_kind kind_item(&parent)?;
}
match_ast! {
match parent {
ast::PathType(it) => make_path_kind_type(it.into()),
ast::PathExpr(it) => {
@ -1189,11 +1345,6 @@ fn classify_name_ref(
PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())}
},
ast::MacroCall(it) => {
// A macro call in this position is usually a result of parsing recovery, so check that
if let Some(kind) = inbetween_body_and_decl_check(it.syntax().clone()) {
return Some(make_res(NameRefKind::Keyword(kind)));
}
kind_macro_call(it)?
},
ast::Meta(meta) => make_path_kind_attr(meta)?,
@ -1242,6 +1393,7 @@ fn classify_name_ref(
},
_ => return None,
}
}
};
path_ctx.kind = kind;
@ -1320,9 +1472,7 @@ fn classify_name_ref(
}
})
}
PathKind::Item { .. } => {
parent.ancestors().find(|it| ast::MacroCall::can_cast(it.kind()))
}
PathKind::Item { .. } => parent.ancestors().find(|it| it.kind() == SyntaxKind::ERROR),
_ => None,
};
if let Some(top) = top_node {

View file

@ -10,7 +10,7 @@ use ide_db::{
};
use itertools::Itertools;
use smallvec::SmallVec;
use stdx::{impl_from, never};
use stdx::{format_to, impl_from, never};
use syntax::{format_smolstr, Edition, SmolStr, TextRange, TextSize};
use crate::{
@ -27,10 +27,7 @@ use crate::{
#[non_exhaustive]
pub struct CompletionItem {
/// Label in the completion pop up which identifies completion.
pub label: SmolStr,
/// Additional label details in the completion pop up that are
/// displayed and aligned on the right side after the label.
pub label_detail: Option<SmolStr>,
pub label: CompletionItemLabel,
/// Range of identifier that is being completed.
///
@ -89,11 +86,23 @@ pub struct CompletionItem {
pub import_to_add: SmallVec<[(String, String); 1]>,
}
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct CompletionItemLabel {
/// The primary label for the completion item.
pub primary: SmolStr,
/// The left detail for the completion item, usually rendered right next to the primary label.
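/// Currently holds things like `(alias foo)`, `(use some::path)`, or `(as Trait)`.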
pub detail_left: Option<String>,
/// The right detail for the completion item, usually rendered right aligned at the end of the completion item.
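/// Currently this mirrors the item's `detail`, e.g. a signature such as `fn(&self) -> u32`.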
pub detail_right: Option<String>,
}
// We use custom debug for CompletionItem to make snapshot tests more readable.
impl fmt::Debug for CompletionItem {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let mut s = f.debug_struct("CompletionItem");
s.field("label", &self.label).field("source_range", &self.source_range);
s.field("label", &self.label.primary)
.field("detail_left", &self.label.detail_left)
.field("detail_right", &self.label.detail_right)
.field("source_range", &self.source_range);
if self.text_edit.len() == 1 {
let atom = self.text_edit.iter().next().unwrap();
s.field("delete", &atom.delete);
@ -102,7 +111,7 @@ impl fmt::Debug for CompletionItem {
s.field("text_edit", &self.text_edit);
}
s.field("kind", &self.kind);
if self.lookup() != self.label {
if self.lookup() != self.label.primary {
s.field("lookup", &self.lookup());
}
if let Some(detail) = &self.detail {
@ -434,7 +443,7 @@ impl CompletionItem {
self.ref_match.map(|(mutability, offset)| {
(
format!("&{}{}", mutability.as_keyword_for_ref(), self.label),
format!("&{}{}", mutability.as_keyword_for_ref(), self.label.primary),
ide_db::text_edit::Indel::insert(
offset,
format!("&{}", mutability.as_keyword_for_ref()),
@ -488,13 +497,13 @@ impl Builder {
let _p = tracing::info_span!("item::Builder::build").entered();
let label = self.label;
let mut label_detail = None;
let mut lookup = self.lookup.unwrap_or_else(|| label.clone());
let insert_text = self.insert_text.unwrap_or_else(|| label.to_string());
let mut detail_left = None;
if !self.doc_aliases.is_empty() {
let doc_aliases = self.doc_aliases.iter().join(", ");
label_detail.replace(format_smolstr!(" (alias {doc_aliases})"));
detail_left = Some(format!("(alias {doc_aliases})"));
let lookup_doc_aliases = self
.doc_aliases
.iter()
@ -516,16 +525,20 @@ impl Builder {
}
if let [import_edit] = &*self.imports_to_add {
// snippets can have multiple imports, but normal completions only have up to one
label_detail.replace(format_smolstr!(
let detail_left = detail_left.get_or_insert_with(String::new);
format_to!(
detail_left,
"{}(use {})",
label_detail.as_deref().unwrap_or_default(),
if detail_left.is_empty() { "" } else { " " },
import_edit.import_path.display(db, self.edition)
));
);
} else if let Some(trait_name) = self.trait_name {
label_detail.replace(format_smolstr!(
let detail_left = detail_left.get_or_insert_with(String::new);
format_to!(
detail_left,
"{}(as {trait_name})",
label_detail.as_deref().unwrap_or_default(),
));
if detail_left.is_empty() { "" } else { " " },
);
}
let text_edit = match self.text_edit {
@ -546,8 +559,11 @@ impl Builder {
CompletionItem {
source_range: self.source_range,
label,
label_detail,
label: CompletionItemLabel {
primary: label,
detail_left,
detail_right: self.detail.clone(),
},
text_edit,
is_snippet: self.is_snippet,
detail: self.detail,

View file

@ -749,8 +749,8 @@ mod tests {
let relevance = display_relevance(it.relevance);
items.push(format!(
"{tag} {} {} {relevance}\n",
it.label,
it.label_detail.clone().unwrap_or_default(),
it.label.primary,
it.label.detail_right.clone().unwrap_or_default(),
));
if let Some((label, _indel, relevance)) = it.ref_match() {
@ -812,13 +812,13 @@ fn main() {
}
"#,
expect![[r#"
st dep::test_mod_b::Struct {} [type_could_unify]
st dep::test_mod_b::Struct {} dep::test_mod_b::Struct { } [type_could_unify]
ex dep::test_mod_b::Struct { } [type_could_unify]
st Struct (use dep::test_mod_b::Struct) [type_could_unify+requires_import]
fn main() []
fn test() []
st Struct Struct [type_could_unify+requires_import]
fn main() fn() []
fn test() fn(Struct) []
md dep []
st Struct (use dep::test_mod_a::Struct) [requires_import]
st Struct Struct [requires_import]
"#]],
);
}
@ -852,11 +852,11 @@ fn main() {
}
"#,
expect![[r#"
un Union (use dep::test_mod_b::Union) [type_could_unify+requires_import]
fn main() []
fn test() []
un Union Union [type_could_unify+requires_import]
fn main() fn() []
fn test() fn(Union) []
md dep []
en Union (use dep::test_mod_a::Union) [requires_import]
en Union Union [requires_import]
"#]],
);
}
@ -888,13 +888,13 @@ fn main() {
}
"#,
expect![[r#"
ev dep::test_mod_b::Enum::variant [type_could_unify]
ev dep::test_mod_b::Enum::variant dep::test_mod_b::Enum::variant [type_could_unify]
ex dep::test_mod_b::Enum::variant [type_could_unify]
en Enum (use dep::test_mod_b::Enum) [type_could_unify+requires_import]
fn main() []
fn test() []
en Enum Enum [type_could_unify+requires_import]
fn main() fn() []
fn test() fn(Enum) []
md dep []
en Enum (use dep::test_mod_a::Enum) [requires_import]
en Enum Enum [requires_import]
"#]],
);
}
@ -926,10 +926,10 @@ fn main() {
}
"#,
expect![[r#"
ev dep::test_mod_b::Enum::Variant [type_could_unify]
ev dep::test_mod_b::Enum::Variant dep::test_mod_b::Enum::Variant [type_could_unify]
ex dep::test_mod_b::Enum::Variant [type_could_unify]
fn main() []
fn test() []
fn main() fn() []
fn test() fn(Enum) []
md dep []
"#]],
);
@ -958,11 +958,11 @@ fn main() {
}
"#,
expect![[r#"
fn main() []
fn test() []
fn main() fn() []
fn test() fn(fn(usize) -> i32) []
md dep []
fn function (use dep::test_mod_a::function) [requires_import]
fn function() (use dep::test_mod_b::function) [requires_import]
fn function fn(usize) -> i32 [requires_import]
fn function() fn(isize) -> i32 [requires_import]
"#]],
);
}
@ -990,11 +990,11 @@ fn main() {
}
"#,
expect![[r#"
ct CONST (use dep::test_mod_b::CONST) [type_could_unify+requires_import]
fn main() []
fn test() []
ct CONST i32 [type_could_unify+requires_import]
fn main() fn() []
fn test() fn(i32) []
md dep []
ct CONST (use dep::test_mod_a::CONST) [requires_import]
ct CONST i64 [requires_import]
"#]],
);
}
@ -1022,11 +1022,11 @@ fn main() {
}
"#,
expect![[r#"
sc STATIC (use dep::test_mod_b::STATIC) [type_could_unify+requires_import]
fn main() []
fn test() []
sc STATIC i32 [type_could_unify+requires_import]
fn main() fn() []
fn test() fn(i32) []
md dep []
sc STATIC (use dep::test_mod_a::STATIC) [requires_import]
sc STATIC i64 [requires_import]
"#]],
);
}
@ -1058,7 +1058,7 @@ fn main() {
"#,
expect![[r#"
me Function []
me Function fn(&self, i32) -> bool []
"#]],
);
}
@ -1081,14 +1081,14 @@ fn func(input: Struct) { }
"#,
expect![[r#"
st Struct [type]
st Self [type]
sp Self [type]
st Struct [type]
st Struct Struct [type]
st Self Self [type]
sp Self Struct [type]
st Struct Struct [type]
ex Struct [type]
lc self [local]
fn func() []
me self.test() []
lc self &Struct [local]
fn func() fn(Struct) []
me self.test() fn(&self) []
"#]],
);
}
@ -1109,13 +1109,13 @@ fn main() {
}
"#,
expect![[r#"
lc input [type+name+local]
lc input bool [type+name+local]
ex input [type]
ex true [type]
ex false [type]
lc inputbad [local]
fn main() []
fn test() []
lc inputbad i32 [local]
fn main() fn() []
fn test() fn(bool) []
"#]],
);
}
@ -1133,6 +1133,10 @@ fn main() { Foo::Fo$0 }
[
CompletionItem {
label: "Foo {…}",
detail_left: None,
detail_right: Some(
"Foo { x: i32, y: i32 }",
),
source_range: 54..56,
delete: 54..56,
insert: "Foo { x: ${1:()}, y: ${2:()} }$0",
@ -1161,6 +1165,10 @@ fn main() { Foo::Fo$0 }
[
CompletionItem {
label: "Foo(…)",
detail_left: None,
detail_right: Some(
"Foo(i32, i32)",
),
source_range: 46..48,
delete: 46..48,
insert: "Foo(${1:()}, ${2:()})$0",
@ -1189,6 +1197,10 @@ fn main() { fo$0 }
[
CompletionItem {
label: "foo(…)",
detail_left: None,
detail_right: Some(
"fn(u32, u32, T) -> (u32, T)",
),
source_range: 68..70,
delete: 68..70,
insert: "foo(${1:a}, ${2:b}, ${3:t})$0",
@ -1201,6 +1213,10 @@ fn main() { fo$0 }
},
CompletionItem {
label: "main()",
detail_left: None,
detail_right: Some(
"fn()",
),
source_range: 68..70,
delete: 68..70,
insert: "main();$0",
@ -1228,6 +1244,10 @@ fn main() { Foo::Fo$0 }
[
CompletionItem {
label: "Foo",
detail_left: None,
detail_right: Some(
"Foo",
),
source_range: 35..37,
delete: 35..37,
insert: "Foo$0",
@ -1260,6 +1280,10 @@ fn main() { let _: m::Spam = S$0 }
[
CompletionItem {
label: "main()",
detail_left: None,
detail_right: Some(
"fn()",
),
source_range: 75..76,
delete: 75..76,
insert: "main();$0",
@ -1271,6 +1295,8 @@ fn main() { let _: m::Spam = S$0 }
},
CompletionItem {
label: "m",
detail_left: None,
detail_right: None,
source_range: 75..76,
delete: 75..76,
insert: "m",
@ -1280,6 +1306,10 @@ fn main() { let _: m::Spam = S$0 }
},
CompletionItem {
label: "m::Spam::Bar(…)",
detail_left: None,
detail_right: Some(
"m::Spam::Bar(i32)",
),
source_range: 75..76,
delete: 75..76,
insert: "m::Spam::Bar(${1:()})$0",
@ -1305,6 +1335,10 @@ fn main() { let _: m::Spam = S$0 }
},
CompletionItem {
label: "m::Spam::Foo",
detail_left: None,
detail_right: Some(
"m::Spam::Foo",
),
source_range: 75..76,
delete: 75..76,
insert: "m::Spam::Foo$0",
@ -1347,6 +1381,10 @@ fn main() { som$0 }
[
CompletionItem {
label: "main()",
detail_left: None,
detail_right: Some(
"fn()",
),
source_range: 56..59,
delete: 56..59,
insert: "main();$0",
@ -1358,6 +1396,10 @@ fn main() { som$0 }
},
CompletionItem {
label: "something_deprecated()",
detail_left: None,
detail_right: Some(
"fn()",
),
source_range: 56..59,
delete: 56..59,
insert: "something_deprecated();$0",
@ -1382,6 +1424,10 @@ fn foo() { A { the$0 } }
[
CompletionItem {
label: "the_field",
detail_left: None,
detail_right: Some(
"u32",
),
source_range: 57..60,
delete: 57..60,
insert: "the_field",
@ -1429,6 +1475,10 @@ impl S {
[
CompletionItem {
label: "bar()",
detail_left: None,
detail_right: Some(
"fn(self)",
),
source_range: 94..94,
delete: 94..94,
insert: "bar();$0",
@ -1460,6 +1510,10 @@ impl S {
},
CompletionItem {
label: "foo",
detail_left: None,
detail_right: Some(
"{unknown}",
),
source_range: 94..94,
delete: 94..94,
insert: "foo",
@ -1498,6 +1552,8 @@ use self::E::*;
[
CompletionItem {
label: "my",
detail_left: None,
detail_right: None,
source_range: 10..12,
delete: 10..12,
insert: "my",
@ -1510,6 +1566,10 @@ use self::E::*;
},
CompletionItem {
label: "V",
detail_left: None,
detail_right: Some(
"V",
),
source_range: 10..12,
delete: 10..12,
insert: "V$0",
@ -1524,6 +1584,10 @@ use self::E::*;
},
CompletionItem {
label: "E",
detail_left: None,
detail_right: Some(
"E",
),
source_range: 10..12,
delete: 10..12,
insert: "E",
@ -1556,6 +1620,10 @@ fn foo(s: S) { s.$0 }
[
CompletionItem {
label: "the_method()",
detail_left: None,
detail_right: Some(
"fn(&self)",
),
source_range: 81..81,
delete: 81..81,
insert: "the_method();$0",
@ -1729,9 +1797,9 @@ fn test(bar: u32) { }
fn foo(s: S) { test(s.$0) }
"#,
expect![[r#"
fd bar [type+name]
fd baz [type]
fd foo []
fd bar u32 [type+name]
fd baz u32 [type]
fd foo i64 []
"#]],
);
}
@ -1745,9 +1813,9 @@ struct B { x: (), y: f32, bar: u32 }
fn foo(a: A) { B { bar: a.$0 }; }
"#,
expect![[r#"
fd bar [type+name]
fd baz [type]
fd foo []
fd bar u32 [type+name]
fd baz u32 [type]
fd foo i64 []
"#]],
)
}
@ -1768,6 +1836,10 @@ fn f() -> i32 {
[
CompletionItem {
label: "0",
detail_left: None,
detail_right: Some(
"i32",
),
source_range: 56..57,
delete: 56..57,
insert: "0",
@ -1804,9 +1876,9 @@ fn f(foo: i64) { }
fn foo(a: A) { B { bar: f(a.$0) }; }
"#,
expect![[r#"
fd foo [type+name]
fd bar []
fd baz []
fd foo i64 [type+name]
fd bar u32 []
fd baz u32 []
"#]],
);
check_relevance(
@ -1817,9 +1889,9 @@ fn f(foo: i64) { }
fn foo(a: A) { f(B { bar: a.$0 }); }
"#,
expect![[r#"
fd bar [type+name]
fd baz [type]
fd foo []
fd bar u32 [type+name]
fd baz u32 [type]
fd foo i64 []
"#]],
);
}
@ -1832,13 +1904,13 @@ struct WorldSnapshot { _f: () };
fn go(world: &WorldSnapshot) { go(w$0) }
"#,
expect![[r#"
lc world [type+name+local]
lc world &WorldSnapshot [type+name+local]
ex world [type]
st WorldSnapshot {} []
st WorldSnapshot {} WorldSnapshot { _f: () } []
st &WorldSnapshot {} [type]
st WorldSnapshot []
st WorldSnapshot WorldSnapshot []
st &WorldSnapshot [type]
fn go() []
fn go() fn(&WorldSnapshot) []
"#]],
);
}
@ -1852,9 +1924,9 @@ struct Foo;
fn f(foo: &Foo) { f(foo, w$0) }
"#,
expect![[r#"
lc foo [local]
st Foo []
fn f() []
lc foo &Foo [local]
st Foo Foo []
fn f() fn(&Foo) []
"#]],
);
}
@ -1869,12 +1941,12 @@ fn bar() -> u8 { 0 }
fn f() { A { bar: b$0 }; }
"#,
expect![[r#"
fn bar() [type+name]
fn baz() [type]
fn bar() fn() -> u8 [type+name]
fn baz() fn() -> u8 [type]
ex bar() [type]
ex baz() [type]
st A []
fn f() []
st A A []
fn f() fn() []
"#]],
);
}
@ -1895,9 +1967,9 @@ fn f() {
}
"#,
expect![[r#"
me aaa() [type+name]
me bbb() [type]
me ccc() []
me aaa() fn(&self) -> u32 [type+name]
me bbb() fn(&self) -> u32 [type]
me ccc() fn(&self) -> u64 []
"#]],
);
}
@ -1916,7 +1988,7 @@ fn f() {
}
"#,
expect![[r#"
me aaa() [name]
me aaa() fn(&self) -> u64 [name]
"#]],
);
}
@ -1934,14 +2006,14 @@ fn main() {
}
"#,
expect![[r#"
lc s [name+local]
lc s S [name+local]
lc &mut s [type+name+local]
st S []
st S S []
st &mut S [type]
st S []
st S S []
st &mut S [type]
fn foo() []
fn main() []
fn foo() fn(&mut S) []
fn main() fn() []
"#]],
);
check_relevance(
@ -1954,13 +2026,13 @@ fn main() {
}
"#,
expect![[r#"
lc s [type+name+local]
st S [type]
st S [type]
lc s S [type+name+local]
st S S [type]
st S S [type]
ex s [type]
ex S [type]
fn foo() []
fn main() []
fn foo() fn(&mut S) []
fn main() fn() []
"#]],
);
check_relevance(
@ -1973,13 +2045,13 @@ fn main() {
}
"#,
expect![[r#"
lc ssss [type+local]
st S [type]
st S [type]
lc ssss S [type+local]
st S S [type]
st S S [type]
ex ssss [type]
ex S [type]
fn foo() []
fn main() []
fn foo() fn(&mut S) []
fn main() fn() []
"#]],
);
}
@ -2010,18 +2082,18 @@ fn main() {
}
"#,
expect![[r#"
ex core::ops::Deref::deref(&t) (use core::ops::Deref) [type_could_unify]
lc m [local]
lc t [local]
ex core::ops::Deref::deref(&t) [type_could_unify]
lc m i32 [local]
lc t T [local]
lc &t [type+local]
st S []
st S S []
st &S [type]
st S []
st S S []
st &S [type]
st T []
st T T []
st &T [type]
fn foo() []
fn main() []
fn foo() fn(&S) []
fn main() fn() []
md core []
"#]],
)
@ -2059,18 +2131,18 @@ fn main() {
}
"#,
expect![[r#"
ex core::ops::DerefMut::deref_mut(&mut t) (use core::ops::DerefMut) [type_could_unify]
lc m [local]
lc t [local]
ex core::ops::DerefMut::deref_mut(&mut t) [type_could_unify]
lc m i32 [local]
lc t T [local]
lc &mut t [type+local]
st S []
st S S []
st &mut S [type]
st S []
st S S []
st &mut S [type]
st T []
st T T []
st &mut T [type]
fn foo() []
fn main() []
fn foo() fn(&mut S) []
fn main() fn() []
md core []
"#]],
)
@ -2087,9 +2159,9 @@ fn foo(bar: u32) {
}
"#,
expect![[r#"
lc baz [local]
lc bar [local]
fn foo() []
lc baz i32 [local]
lc bar u32 [local]
fn foo() fn(u32) []
"#]],
);
}
@ -2105,13 +2177,13 @@ fn foo() {
fn bar(t: Foo) {}
"#,
expect![[r#"
ev Foo::A [type]
ev Foo::B [type]
en Foo [type]
ev Foo::A Foo::A [type]
ev Foo::B Foo::B [type]
en Foo Foo [type]
ex Foo::A [type]
ex Foo::B [type]
fn bar() []
fn foo() []
fn bar() fn(Foo) []
fn foo() fn() []
"#]],
);
}
@ -2127,14 +2199,14 @@ fn foo() {
fn bar(t: &Foo) {}
"#,
expect![[r#"
ev Foo::A []
ev Foo::A Foo::A []
ev &Foo::A [type]
ev Foo::B []
ev Foo::B Foo::B []
ev &Foo::B [type]
en Foo []
en Foo Foo []
en &Foo [type]
fn bar() []
fn foo() []
fn bar() fn(&Foo) []
fn foo() fn() []
"#]],
);
}
@ -2163,17 +2235,17 @@ fn main() {
}
"#,
expect![[r#"
ex core::ops::Deref::deref(&bar()) (use core::ops::Deref) [type_could_unify]
st S []
ex core::ops::Deref::deref(&bar()) [type_could_unify]
st S S []
st &S [type]
st S []
st S S []
st &S [type]
st T []
st T T []
st &T [type]
fn bar() []
fn bar() fn() -> T []
fn &bar() [type]
fn foo() []
fn main() []
fn foo() fn(&S) []
fn main() fn() []
md core []
"#]],
)
@ -2191,7 +2263,7 @@ impl Sub for u32 {}
fn foo(a: u32) { a.$0 }
"#,
expect![[r#"
me sub() (as Sub) [op_method]
me sub() fn(self, Self) -> Self [op_method]
"#]],
);
check_relevance(
@ -2212,9 +2284,9 @@ fn main() {
}
"#,
expect![[r#"
fn new() []
me eq() (as PartialEq) [op_method]
me ne() (as PartialEq) [op_method]
fn new() fn() -> Foo []
me eq() fn(&self, &Rhs) -> bool [op_method]
me ne() fn(&self, &Rhs) -> bool [op_method]
"#]],
);
}
@ -2238,9 +2310,9 @@ fn test() {
}
"#,
expect![[r#"
fn fn_ctr() [type_could_unify]
fn fn_ctr_self() [type_could_unify]
fn fn_another() [type_could_unify]
fn fn_ctr() fn() -> Foo [type_could_unify]
fn fn_ctr_self() fn() -> Option<Foo> [type_could_unify]
fn fn_another() fn(u32) -> Other [type_could_unify]
"#]],
);
}
@ -2384,12 +2456,12 @@ fn test() {
// Constructor
// Others
expect![[r#"
fn fn_direct_ctr() [type_could_unify]
fn fn_ctr_with_args() [type_could_unify]
fn fn_builder() [type_could_unify]
fn fn_ctr() [type_could_unify]
me fn_no_ret() [type_could_unify]
fn fn_other() [type_could_unify]
fn fn_direct_ctr() fn() -> Foo [type_could_unify]
fn fn_ctr_with_args() fn(u32) -> Foo [type_could_unify]
fn fn_builder() fn() -> FooBuilder [type_could_unify]
fn fn_ctr() fn() -> Result<Foo> [type_could_unify]
me fn_no_ret() fn(&self) [type_could_unify]
fn fn_other() fn() -> Result<u32> [type_could_unify]
"#]],
);
@ -2420,13 +2492,13 @@ fn test() {
}
"#,
expect![[r#"
fn fn_direct_ctr() [type_could_unify]
fn fn_ctr_with_args() [type_could_unify]
fn fn_builder() [type_could_unify]
fn fn_ctr_wrapped() [type_could_unify]
fn fn_ctr_wrapped_2() [type_could_unify]
me fn_returns_unit() [type_could_unify]
fn fn_other() [type_could_unify]
fn fn_direct_ctr() fn() -> Foo<T> [type_could_unify]
fn fn_ctr_with_args() fn(T) -> Foo<T> [type_could_unify]
fn fn_builder() fn() -> FooBuilder [type_could_unify]
fn fn_ctr_wrapped() fn() -> Option<Foo<T>> [type_could_unify]
fn fn_ctr_wrapped_2() fn() -> Result<Foo<T>, u32> [type_could_unify]
me fn_returns_unit() fn(&self) [type_could_unify]
fn fn_other() fn() -> Option<u32> [type_could_unify]
"#]],
);
}
@ -2456,13 +2528,13 @@ fn test() {
}
"#,
expect![[r#"
fn fn_direct_ctr() [type_could_unify]
fn fn_ctr_with_args() [type_could_unify]
fn fn_builder() [type_could_unify]
fn fn_ctr() [type_could_unify]
fn fn_ctr2() [type_could_unify]
me fn_no_ret() [type_could_unify]
fn fn_other() [type_could_unify]
fn fn_direct_ctr() fn() -> Foo<T> [type_could_unify]
fn fn_ctr_with_args() fn(T) -> Foo<T> [type_could_unify]
fn fn_builder() fn() -> FooBuilder [type_could_unify]
fn fn_ctr() fn() -> Option<Foo<T>> [type_could_unify]
fn fn_ctr2() fn() -> Result<Foo<T>, u32> [type_could_unify]
me fn_no_ret() fn(&self) [type_could_unify]
fn fn_other() fn() -> Option<u32> [type_could_unify]
"#]],
);
}
@ -2484,6 +2556,10 @@ fn foo(f: Foo) { let _: &u32 = f.b$0 }
[
CompletionItem {
label: "baz()",
detail_left: None,
detail_right: Some(
"fn(&self) -> u32",
),
source_range: 109..110,
delete: 109..110,
insert: "baz()$0",
@ -2513,6 +2589,10 @@ fn foo(f: Foo) { let _: &u32 = f.b$0 }
},
CompletionItem {
label: "bar",
detail_left: None,
detail_right: Some(
"u32",
),
source_range: 109..110,
delete: 109..110,
insert: "bar",
@ -2524,6 +2604,10 @@ fn foo(f: Foo) { let _: &u32 = f.b$0 }
},
CompletionItem {
label: "qux",
detail_left: None,
detail_right: Some(
"fn()",
),
source_range: 109..110,
text_edit: TextEdit {
indels: [
@ -2562,6 +2646,10 @@ fn foo() {
[
CompletionItem {
label: "field",
detail_left: None,
detail_right: Some(
"fn()",
),
source_range: 76..78,
delete: 76..78,
insert: "field",
@ -2610,6 +2698,10 @@ fn main() {
[
CompletionItem {
label: "foo()",
detail_left: None,
detail_right: Some(
"fn() -> S",
),
source_range: 95..95,
delete: 95..95,
insert: "foo()$0",
@ -2661,15 +2753,15 @@ fn foo() {
}
"#,
expect![[r#"
lc foo [type+local]
lc foo Foo<u32> [type+local]
ex foo [type]
ex Foo::B [type]
ev Foo::A() [type_could_unify]
ev Foo::B [type_could_unify]
en Foo [type_could_unify]
fn foo() []
fn bar() []
fn baz() []
ev Foo::A() Foo::A(T) [type_could_unify]
ev Foo::B Foo::B [type_could_unify]
en Foo Foo<{unknown}> [type_could_unify]
fn foo() fn() []
fn bar() fn() -> Foo<u8> []
fn baz() fn() -> Foo<T> []
"#]],
);
}
@ -2697,20 +2789,20 @@ fn main() {
"#,
&[CompletionItemKind::Snippet, CompletionItemKind::SymbolKind(SymbolKind::Method)],
expect![[r#"
sn not [snippet]
me not() (use ops::Not) [type_could_unify+requires_import]
sn if []
sn while []
sn ref []
sn refm []
sn deref []
sn unsafe []
sn match []
sn box []
sn dbg []
sn dbgr []
sn call []
sn return []
sn not !expr [snippet]
me not() fn(self) -> <Self as Not>::Output [type_could_unify+requires_import]
sn if if expr {} []
sn while while expr {} []
sn ref &expr []
sn refm &mut expr []
sn deref *expr []
sn unsafe unsafe {} []
sn match match expr {} []
sn box Box::new(expr) []
sn dbg dbg!(expr) []
sn dbgr dbg!(&expr) []
sn call function(expr) []
sn return return expr []
"#]],
);
}
@ -2730,19 +2822,19 @@ fn main() {
"#,
&[CompletionItemKind::Snippet, CompletionItemKind::SymbolKind(SymbolKind::Method)],
expect![[r#"
me f() []
sn ref []
sn refm []
sn deref []
sn unsafe []
sn match []
sn box []
sn dbg []
sn dbgr []
sn call []
sn let []
sn letm []
sn return []
me f() fn(&self) []
sn ref &expr []
sn refm &mut expr []
sn deref *expr []
sn unsafe unsafe {} []
sn match match expr {} []
sn box Box::new(expr) []
sn dbg dbg!(expr) []
sn dbgr dbg!(&expr) []
sn call function(expr) []
sn let let []
sn letm let mut []
sn return return expr []
"#]],
);
}
@ -2765,12 +2857,12 @@ fn f() {
}
"#,
expect![[r#"
st Buffer []
fn f() []
st Buffer Buffer []
fn f() fn() []
md std []
tt BufRead (use std::io::BufRead) [requires_import]
st BufReader (use std::io::BufReader) [requires_import]
st BufWriter (use std::io::BufWriter) [requires_import]
tt BufRead [requires_import]
st BufReader BufReader [requires_import]
st BufWriter BufWriter [requires_import]
"#]],
);
}
@ -2979,6 +3071,12 @@ fn main() {
[
CompletionItem {
label: "flush()",
detail_left: Some(
"(as Write)",
),
detail_right: Some(
"fn(&self)",
),
source_range: 193..193,
delete: 193..193,
insert: "flush();$0",
@ -3006,6 +3104,12 @@ fn main() {
},
CompletionItem {
label: "write()",
detail_left: Some(
"(as Write)",
),
detail_right: Some(
"fn(&self)",
),
source_range: 193..193,
delete: 193..193,
insert: "write();$0",

View file

@ -118,10 +118,16 @@ fn completion_list_with_config_raw(
let items = get_all_items(config, ra_fixture, trigger_character);
items
.into_iter()
.filter(|it| it.kind != CompletionItemKind::BuiltinType || it.label == "u32")
.filter(|it| it.kind != CompletionItemKind::BuiltinType || it.label.primary == "u32")
.filter(|it| include_keywords || it.kind != CompletionItemKind::Keyword)
.filter(|it| include_keywords || it.kind != CompletionItemKind::Snippet)
.sorted_by_key(|it| (it.kind, it.label.clone(), it.detail.as_ref().map(ToOwned::to_owned)))
.sorted_by_key(|it| {
(
it.kind,
it.label.primary.clone(),
it.label.detail_left.as_ref().map(ToOwned::to_owned),
)
})
.collect()
}
@ -173,27 +179,30 @@ fn render_completion_list(completions: Vec<CompletionItem>) -> String {
let label_width = completions
.iter()
.map(|it| {
monospace_width(&it.label)
+ monospace_width(it.label_detail.as_deref().unwrap_or_default())
monospace_width(&it.label.primary)
+ monospace_width(it.label.detail_left.as_deref().unwrap_or_default())
+ monospace_width(it.label.detail_right.as_deref().unwrap_or_default())
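// plus one column of separating space for each detail that is present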
+ it.label.detail_left.is_some() as usize
+ it.label.detail_right.is_some() as usize
})
.max()
.unwrap_or_default()
.min(22);
.unwrap_or_default();
completions
.into_iter()
.map(|it| {
let tag = it.kind.tag();
let var_name = format!("{tag} {}", it.label);
let mut buf = var_name;
if let Some(ref label_detail) = it.label_detail {
let mut buf = format!("{tag} {}", it.label.primary);
if let Some(label_detail) = &it.label.detail_left {
format_to!(buf, " {label_detail}");
}
if let Some(detail) = it.detail {
let width = label_width.saturating_sub(
monospace_width(&it.label)
+ monospace_width(&it.label_detail.unwrap_or_default()),
if let Some(detail_right) = it.label.detail_right {
let pad_with = label_width.saturating_sub(
monospace_width(&it.label.primary)
+ monospace_width(it.label.detail_left.as_deref().unwrap_or_default())
+ monospace_width(&detail_right)
+ it.label.detail_left.is_some() as usize,
);
format_to!(buf, "{:width$} {}", "", detail, width = width);
format_to!(buf, "{:pad_with$}{detail_right}", "",);
}
if it.deprecated {
format_to!(buf, " DEPRECATED");

View file

@ -692,9 +692,9 @@ fn main() {
Foo::$0
}
",
expect![[r"
expect![[r#"
fn bar() fn(impl Trait<U>)
"]],
"#]],
);
}
@ -1320,3 +1320,73 @@ fn main() {
"#]],
);
}
#[test]
fn macro_that_ignores_completion_marker() {
check(
r#"
macro_rules! helper {
($v:ident) => {};
}
macro_rules! m {
($v:ident) => {{
helper!($v);
$v
}};
}
fn main() {
let variable = "test";
m!(v$0);
}
"#,
expect![[r#"
ct CONST Unit
en Enum Enum
fn function() fn()
fn main() fn()
lc variable &str
ma helper!() macro_rules! helper
ma m!() macro_rules! m
ma makro!() macro_rules! makro
md module
sc STATIC Unit
st Record Record
st Tuple Tuple
st Unit Unit
un Union Union
ev TupleV() TupleV(u32)
bt u32 u32
kw async
kw const
kw crate::
kw enum
kw extern
kw false
kw fn
kw for
kw if
kw if let
kw impl
kw let
kw loop
kw match
kw mod
kw self::
kw static
kw struct
kw trait
kw true
kw type
kw union
kw unsafe
kw use
kw while
kw while let
sn macro_rules
sn pd
sn ppd
"#]],
);
}

View file

@ -1720,3 +1720,45 @@ fn function() {
"#]],
);
}
#[test]
fn intrinsics() {
check(
r#"
//- /core.rs crate:core
pub mod intrinsics {
extern "rust-intrinsic" {
pub fn transmute<Src, Dst>(src: Src) -> Dst;
}
}
pub mod mem {
pub use crate::intrinsics::transmute;
}
//- /main.rs crate:main deps:core
fn function() {
transmute$0
}
"#,
expect![[r#"
fn transmute() (use core::mem::transmute) unsafe fn(Src) -> Dst
"#]],
);
check(
r#"
//- /core.rs crate:core
pub mod intrinsics {
extern "rust-intrinsic" {
pub fn transmute<Src, Dst>(src: Src) -> Dst;
}
}
pub mod mem {
pub use crate::intrinsics::transmute;
}
//- /main.rs crate:main deps:core
fn function() {
mem::transmute$0
}
"#,
expect![""],
);
}

View file

@ -1315,10 +1315,9 @@ use krate::e;
fn main() {
e::$0
}"#,
expect![
"fn i_am_public() fn()
"
],
expect![[r#"
fn i_am_public() fn()
"#]],
)
}

View file

@ -2,10 +2,10 @@
use hir::{
db::HirDatabase, AsAssocItem, AssocItem, AssocItemContainer, Crate, HasCrate, ImportPathConfig,
ItemInNs, ModPath, Module, ModuleDef, Name, PathResolution, PrefixKind, ScopeDef, Semantics,
ItemInNs, ModPath, Module, ModuleDef, PathResolution, PrefixKind, ScopeDef, Semantics,
SemanticsScope, Trait, TyFingerprint, Type,
};
use itertools::{EitherOrBoth, Itertools};
use itertools::Itertools;
use rustc_hash::{FxHashMap, FxHashSet};
use syntax::{
ast::{self, make, HasName},
@ -13,7 +13,6 @@ use syntax::{
};
use crate::{
helpers::item_name,
items_locator::{self, AssocSearchMode, DEFAULT_QUERY_SEARCH_LIMIT},
FxIndexSet, RootDatabase,
};
@ -52,7 +51,7 @@ pub struct TraitImportCandidate {
#[derive(Debug)]
pub struct PathImportCandidate {
/// Optional qualifier before name.
pub qualifier: Option<Vec<SmolStr>>,
pub qualifier: Vec<SmolStr>,
/// The name the item (struct, trait, enum, etc.) should have.
pub name: NameToImport,
}
@ -264,7 +263,6 @@ impl ImportAssets {
Some(it) => it,
None => return <FxIndexSet<_>>::default().into_iter(),
};
let krate = self.module_with_candidate.krate();
let scope_definitions = self.scope_definitions(sema);
let mod_path = |item| {
@ -279,11 +277,14 @@ impl ImportAssets {
};
match &self.import_candidate {
ImportCandidate::Path(path_candidate) => {
path_applicable_imports(sema, krate, path_candidate, mod_path, |item_to_import| {
!scope_definitions.contains(&ScopeDef::from(item_to_import))
})
}
ImportCandidate::Path(path_candidate) => path_applicable_imports(
sema,
&scope,
krate,
path_candidate,
mod_path,
|item_to_import| !scope_definitions.contains(&ScopeDef::from(item_to_import)),
),
ImportCandidate::TraitAssocItem(trait_candidate)
| ImportCandidate::TraitMethod(trait_candidate) => trait_applicable_items(
sema,
@ -315,6 +316,7 @@ impl ImportAssets {
fn path_applicable_imports(
sema: &Semantics<'_, RootDatabase>,
scope: &SemanticsScope<'_>,
current_crate: Crate,
path_candidate: &PathImportCandidate,
mod_path: impl Fn(ItemInNs) -> Option<ModPath> + Copy,
@ -322,8 +324,8 @@ fn path_applicable_imports(
) -> FxIndexSet<LocatedImport> {
let _p = tracing::info_span!("ImportAssets::path_applicable_imports").entered();
match &path_candidate.qualifier {
None => {
match &*path_candidate.qualifier {
[] => {
items_locator::items_with_name(
sema,
current_crate,
@ -348,89 +350,107 @@ fn path_applicable_imports(
.take(DEFAULT_QUERY_SEARCH_LIMIT.inner())
.collect()
}
Some(qualifier) => items_locator::items_with_name(
[first_qsegment, qualifier_rest @ ..] => items_locator::items_with_name(
sema,
current_crate,
path_candidate.name.clone(),
AssocSearchMode::Include,
NameToImport::Exact(first_qsegment.to_string(), true),
AssocSearchMode::Exclude,
)
.filter_map(|item| import_for_item(sema.db, mod_path, qualifier, item, scope_filter))
.filter_map(|item| {
import_for_item(
sema,
scope,
mod_path,
&path_candidate.name,
item,
qualifier_rest,
scope_filter,
)
})
.take(DEFAULT_QUERY_SEARCH_LIMIT.inner())
.collect(),
}
}
fn import_for_item(
db: &RootDatabase,
sema: &Semantics<'_, RootDatabase>,
scope: &SemanticsScope<'_>,
mod_path: impl Fn(ItemInNs) -> Option<ModPath>,
candidate: &NameToImport,
resolved_qualifier: ItemInNs,
unresolved_qualifier: &[SmolStr],
original_item: ItemInNs,
scope_filter: impl Fn(ItemInNs) -> bool,
) -> Option<LocatedImport> {
let _p = tracing::info_span!("ImportAssets::import_for_item").entered();
let [first_segment, ..] = unresolved_qualifier else { return None };
let item_as_assoc = item_as_assoc(db, original_item);
let (original_item_candidate, trait_item_to_import) = match item_as_assoc {
Some(assoc_item) => match assoc_item.container(db) {
AssocItemContainer::Trait(trait_) => {
let trait_ = ItemInNs::from(ModuleDef::from(trait_));
(trait_, Some(trait_))
}
AssocItemContainer::Impl(impl_) => {
(ItemInNs::from(ModuleDef::from(impl_.self_ty(db).as_adt()?)), None)
}
},
None => (original_item, None),
};
let import_path_candidate = mod_path(original_item_candidate)?;
let mut import_path_candidate_segments = import_path_candidate.segments().iter().rev();
let predicate = |it: EitherOrBoth<&SmolStr, &Name>| match it {
// segments match, check next one
EitherOrBoth::Both(a, b) if b.as_str() == &**a => None,
// segments mismatch / qualifier is longer than the path, bail out
EitherOrBoth::Both(..) | EitherOrBoth::Left(_) => Some(false),
// all segments match and we have exhausted the qualifier, proceed
EitherOrBoth::Right(_) => Some(true),
};
if item_as_assoc.is_none() {
let item_name = item_name(db, original_item)?;
let last_segment = import_path_candidate_segments.next()?;
if *last_segment != item_name {
return None;
}
}
let ends_with = unresolved_qualifier
.iter()
.rev()
.zip_longest(import_path_candidate_segments)
.find_map(predicate)
.unwrap_or(true);
if !ends_with {
return None;
}
let segment_import = find_import_for_segment(db, original_item_candidate, first_segment)?;
Some(match (segment_import == original_item_candidate, trait_item_to_import) {
(true, Some(_)) => {
// FIXME we should be able to import both the trait and the segment,
// but it's unclear what to do with overlapping edits (merge imports?)
// especially in case of lazy completion edit resolutions.
return None;
}
(false, Some(trait_to_import)) if scope_filter(trait_to_import) => {
LocatedImport::new(mod_path(trait_to_import)?, trait_to_import, original_item)
}
(true, None) if scope_filter(original_item_candidate) => {
LocatedImport::new(import_path_candidate, original_item_candidate, original_item)
}
(false, None) if scope_filter(segment_import) => {
LocatedImport::new(mod_path(segment_import)?, segment_import, original_item)
let qualifier = {
let mut adjusted_resolved_qualifier = resolved_qualifier;
if !unresolved_qualifier.is_empty() {
match resolved_qualifier {
ItemInNs::Types(ModuleDef::Module(module)) => {
adjusted_resolved_qualifier = sema
.resolve_mod_path_relative(module, unresolved_qualifier.iter().cloned())?
.next()?;
}
// can't resolve multiple segments for non-module item path bases
_ => return None,
}
}
match adjusted_resolved_qualifier {
ItemInNs::Types(def) => def,
_ => return None,
}
};
let import_path_candidate = mod_path(resolved_qualifier)?;
let ty = match qualifier {
ModuleDef::Module(module) => {
return items_locator::items_with_name_in_module(
sema,
module,
candidate.clone(),
AssocSearchMode::Exclude,
)
.find(|&it| scope_filter(it))
.map(|item| LocatedImport::new(import_path_candidate, resolved_qualifier, item))
}
// FIXME
ModuleDef::Trait(_) => return None,
// FIXME
ModuleDef::TraitAlias(_) => return None,
ModuleDef::TypeAlias(alias) => alias.ty(sema.db),
ModuleDef::BuiltinType(builtin) => builtin.ty(sema.db),
ModuleDef::Adt(adt) => adt.ty(sema.db),
_ => return None,
};
ty.iterate_path_candidates(sema.db, scope, &FxHashSet::default(), None, None, |assoc| {
// FIXME: Support extra trait imports
if assoc.container_or_implemented_trait(sema.db).is_some() {
return None;
}
let name = assoc.name(sema.db)?;
let is_match = match candidate {
NameToImport::Prefix(text, true) => name.as_str().starts_with(text),
NameToImport::Prefix(text, false) => {
name.as_str().chars().zip(text.chars()).all(|(name_char, candidate_char)| {
name_char.eq_ignore_ascii_case(&candidate_char)
})
}
NameToImport::Exact(text, true) => name.as_str() == text,
NameToImport::Exact(text, false) => name.as_str().eq_ignore_ascii_case(text),
NameToImport::Fuzzy(text, true) => text.chars().all(|c| name.as_str().contains(c)),
NameToImport::Fuzzy(text, false) => text
.chars()
.all(|c| name.as_str().chars().any(|name_char| name_char.eq_ignore_ascii_case(&c))),
};
if !is_match {
return None;
}
Some(LocatedImport::new(
import_path_candidate.clone(),
resolved_qualifier,
assoc_to_item(assoc),
))
})
}
@ -453,45 +473,6 @@ fn item_for_path_search_assoc(db: &RootDatabase, assoc_item: AssocItem) -> Optio
})
}
fn find_import_for_segment(
db: &RootDatabase,
original_item: ItemInNs,
unresolved_first_segment: &str,
) -> Option<ItemInNs> {
let segment_is_name = item_name(db, original_item)
.map(|name| name.eq_ident(unresolved_first_segment))
.unwrap_or(false);
Some(if segment_is_name {
original_item
} else {
let matching_module =
module_with_segment_name(db, unresolved_first_segment, original_item)?;
ItemInNs::from(ModuleDef::from(matching_module))
})
}
fn module_with_segment_name(
db: &RootDatabase,
segment_name: &str,
candidate: ItemInNs,
) -> Option<Module> {
let mut current_module = match candidate {
ItemInNs::Types(module_def_id) => module_def_id.module(db),
ItemInNs::Values(module_def_id) => module_def_id.module(db),
ItemInNs::Macros(macro_def_id) => ModuleDef::from(macro_def_id).module(db),
};
while let Some(module) = current_module {
if let Some(module_name) = module.name(db) {
if module_name.eq_ident(segment_name) {
return Some(module);
}
}
current_module = module.parent(db);
}
None
}
fn trait_applicable_items(
sema: &Semantics<'_, RootDatabase>,
current_crate: Crate,
@ -703,7 +684,7 @@ impl ImportCandidate {
return None;
}
Some(ImportCandidate::Path(PathImportCandidate {
qualifier: None,
qualifier: vec![],
name: NameToImport::exact_case_sensitive(name.to_string()),
}))
}
@ -730,7 +711,7 @@ fn path_import_candidate(
.segments()
.map(|seg| seg.name_ref().map(|name| SmolStr::new(name.text())))
.collect::<Option<Vec<_>>>()?;
ImportCandidate::Path(PathImportCandidate { qualifier: Some(qualifier), name })
ImportCandidate::Path(PathImportCandidate { qualifier, name })
} else {
return None;
}
@ -754,10 +735,10 @@ fn path_import_candidate(
}
Some(_) => return None,
},
None => ImportCandidate::Path(PathImportCandidate { qualifier: None, name }),
None => ImportCandidate::Path(PathImportCandidate { qualifier: vec![], name }),
})
}
fn item_as_assoc(db: &RootDatabase, item: ItemInNs) -> Option<AssocItem> {
item.as_module_def().and_then(|module_def| module_def.as_assoc_item(db))
item.into_module_def().as_assoc_item(db)
}

View file

@ -3,10 +3,14 @@
//! The main reason for this module to exist is the fact that the project's items and dependencies' items
//! are located in different caches, with different APIs.
use either::Either;
use hir::{import_map, Crate, ItemInNs, Semantics};
use hir::{import_map, Crate, ItemInNs, Module, Semantics};
use limit::Limit;
use crate::{imports::import_assets::NameToImport, symbol_index, RootDatabase};
use crate::{
imports::import_assets::NameToImport,
symbol_index::{self, SymbolsDatabase as _},
RootDatabase,
};
/// A value to use when uncertain which limit to pick.
pub static DEFAULT_QUERY_SEARCH_LIMIT: Limit = Limit::new(100);
@ -20,8 +24,7 @@ pub fn items_with_name<'a>(
name: NameToImport,
assoc_item_search: AssocSearchMode,
) -> impl Iterator<Item = ItemInNs> + 'a {
let krate_name = krate.display_name(sema.db).map(|name| name.to_string());
let _p = tracing::info_span!("items_with_name", name = name.text(), assoc_item_search = ?assoc_item_search, crate = ?krate_name)
let _p = tracing::info_span!("items_with_name", name = name.text(), assoc_item_search = ?assoc_item_search, crate = ?krate.display_name(sema.db).map(|name| name.to_string()))
.entered();
let prefix = matches!(name, NameToImport::Prefix(..));
@ -66,6 +69,54 @@ pub fn items_with_name<'a>(
find_items(sema, krate, local_query, external_query)
}
/// Searches for importable items with the given name in the given module.
pub fn items_with_name_in_module<'a>(
sema: &'a Semantics<'_, RootDatabase>,
module: Module,
name: NameToImport,
assoc_item_search: AssocSearchMode,
) -> impl Iterator<Item = ItemInNs> + 'a {
let _p = tracing::info_span!("items_with_name_in", name = name.text(), assoc_item_search = ?assoc_item_search, ?module)
.entered();
let prefix = matches!(name, NameToImport::Prefix(..));
let local_query = match name {
NameToImport::Prefix(exact_name, case_sensitive)
| NameToImport::Exact(exact_name, case_sensitive) => {
let mut local_query = symbol_index::Query::new(exact_name.clone());
local_query.assoc_search_mode(assoc_item_search);
if prefix {
local_query.prefix();
} else {
local_query.exact();
}
if case_sensitive {
local_query.case_sensitive();
}
local_query
}
NameToImport::Fuzzy(fuzzy_search_string, case_sensitive) => {
let mut local_query = symbol_index::Query::new(fuzzy_search_string.clone());
local_query.fuzzy();
local_query.assoc_search_mode(assoc_item_search);
if case_sensitive {
local_query.case_sensitive();
}
local_query
}
};
let mut local_results = Vec::new();
local_query.search(&[sema.db.module_symbols(module)], |local_candidate| {
local_results.push(match local_candidate.def {
hir::ModuleDef::Macro(macro_def) => ItemInNs::Macros(macro_def),
def => ItemInNs::from(def),
})
});
local_results.into_iter()
}
fn find_items<'a>(
sema: &'a Semantics<'_, RootDatabase>,
krate: Crate,

View file

@ -377,6 +377,8 @@ fn name_of_type(ty: &hir::Type, db: &RootDatabase, edition: Edition) -> Option<S
return None;
}
name
} else if let Some(inner_ty) = ty.remove_ref() {
return name_of_type(&inner_ty, db, edition);
} else {
return None;
};
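A short illustration of the new remove_ref branch (expected behavior inferred from the hunk, not taken from the test suite): remove_ref peels one layer of &, and the recursion keeps peeling, so reference types now get named like their pointees.
// Illustrative expectations (assumed):
// name_of_type(Foo)   -> Some("foo")
// name_of_type(&Foo)  -> Some("foo")  // new: recurses through the reference
// name_of_type(&&Foo) -> Some("foo")  // each level peeled in turn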

View file

@ -778,4 +778,20 @@ fn bar(mut v: Union2) {
"#,
)
}
#[test]
fn raw_ref_reborrow_is_safe() {
check_diagnostics(
r#"
fn main() {
let ptr: *mut i32;
let _addr = &raw const *ptr;
let local = 1;
let ptr = &local as *const i32;
let _addr = &raw const *ptr;
}
"#,
)
}
}

View file

@ -90,7 +90,9 @@ fn field_fix(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedField) -> Option<A
make::ty("()")
};
if !is_editable_crate(target_module.krate(), ctx.sema.db) {
if !is_editable_crate(target_module.krate(), ctx.sema.db)
|| SyntaxKind::from_keyword(field_name, ctx.edition).is_some()
{
return None;
}
@ -501,4 +503,19 @@ fn main() {}
"#,
)
}
#[test]
fn regression_18683() {
check_diagnostics(
r#"
struct S;
impl S {
fn f(self) {
self.self
// ^^^^ error: no field `self` on type `S`
}
}
"#,
);
}
}

View file

@ -7,12 +7,12 @@ pub(crate) fn unresolved_ident(
ctx: &DiagnosticsContext<'_>,
d: &hir::UnresolvedIdent,
) -> Diagnostic {
Diagnostic::new_with_syntax_node_ptr(
ctx,
DiagnosticCode::RustcHardError("E0425"),
"no such value in this scope",
d.expr_or_pat.map(Into::into),
)
let mut range =
ctx.sema.diagnostics_display_range(d.node.map(|(node, _)| node.syntax_node_ptr()));
if let Some(in_node_range) = d.node.value.1 {
range.range = in_node_range + range.range.start();
}
Diagnostic::new(DiagnosticCode::RustcHardError("E0425"), "no such value in this scope", range)
.experimental()
}
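The range arithmetic above is easy to misread, so a worked example with illustrative offsets: if the node is displayed starting at offset 100 and the unresolved ident occupies 5..15 inside it, the reported range becomes 105..115.
// Sketch with text-size types (values assumed; `TextRange + TextSize` is the
// same operator used in the hunk above).
let node_start = TextSize::from(100u32);
let in_node_range = TextRange::new(5.into(), 15.into());
assert_eq!(in_node_range + node_start, TextRange::new(105.into(), 115.into()));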
@ -20,7 +20,6 @@ pub(crate) fn unresolved_ident(
mod tests {
use crate::tests::check_diagnostics;
// FIXME: This should show a diagnostic
#[test]
fn feature() {
check_diagnostics(
@ -28,6 +27,7 @@ mod tests {
//- minicore: fmt
fn main() {
format_args!("{unresolved}");
// ^^^^^^^^^^ error: no such value in this scope
}
"#,
)

View file

@ -16,7 +16,7 @@ use ide_db::{
};
use itertools::Itertools;
use span::{Edition, TextSize};
use stdx::{always, format_to};
use stdx::format_to;
use syntax::{
ast::{self, AstNode},
SmolStr, SyntaxNode, ToSmolStr,
@ -130,14 +130,7 @@ pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> {
// In case an expansion creates multiple runnables, we want to name them to avoid emitting a bunch of equally named runnables.
let mut in_macro_expansion = FxHashMap::<hir::HirFileId, Vec<Runnable>>::default();
let mut add_opt = |runnable: Option<Runnable>, def| {
if let Some(runnable) = runnable.filter(|runnable| {
always!(
runnable.nav.file_id == file_id,
"tried adding a runnable pointing to a different file: {:?} for {:?}",
runnable.kind,
file_id
)
}) {
if let Some(runnable) = runnable.filter(|runnable| runnable.nav.file_id == file_id) {
if let Some(def) = def {
let file_id = match def {
Definition::Module(it) => {
@ -161,13 +154,7 @@ pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> {
Definition::SelfType(impl_) => runnable_impl(&sema, &impl_),
_ => None,
};
add_opt(
runnable
.or_else(|| module_def_doctest(sema.db, def))
// #[macro_export] mbe macros are declared in the root, while their definition may reside in a different module
.filter(|it| it.nav.file_id == file_id),
Some(def),
);
add_opt(runnable.or_else(|| module_def_doctest(sema.db, def)), Some(def));
if let Definition::SelfType(impl_) = def {
impl_.items(db).into_iter().for_each(|assoc| {
let runnable = match assoc {

View file

@ -174,6 +174,9 @@ fn on_delimited_node_typed(
kinds: &[fn(SyntaxKind) -> bool],
) -> Option<TextEdit> {
let t = reparsed.syntax().token_at_offset(offset).right_biased()?;
if t.prev_token().map_or(false, |t| t.kind().is_any_identifier()) {
return None;
}
let (filter, node) = t
.parent_ancestors()
.take_while(|n| n.text_range().start() == offset)
@ -1091,6 +1094,22 @@ fn f() {
);
}
#[test]
fn preceding_whitespace_is_significant_for_closing_brackets() {
type_char_noop(
'(',
r#"
fn f() { a.b$0if true {} }
"#,
);
type_char_noop(
'(',
r#"
fn f() { foo$0{} }
"#,
);
}
#[test]
fn adds_closing_parenthesis_for_pat() {
type_char(

View file

@ -487,7 +487,7 @@ impl ProcMacroExpander for Expander {
match self.0.expand(
subtree,
attrs,
env.clone(),
env.clone().into(),
def_site,
call_site,
mixed_site,

View file

@ -72,8 +72,19 @@ pub(super) fn item_or_macro(p: &mut Parser<'_>, stop_on_r_curly: bool, is_in_ext
// macro_rules! ()
// macro_rules! []
if paths::is_use_path_start(p) {
paths::use_path(p);
// Do not create a MACRO_CALL node here if this isn't a macro call, as that causes problems with completion.
// test_err path_item_without_excl
// foo
if p.at(T![!]) {
macro_call(p, m);
return;
} else {
m.complete(p, ERROR);
p.error("expected an item");
return;
}
}
m.abandon(p);
@ -410,8 +421,7 @@ fn fn_(p: &mut Parser<'_>, m: Marker) {
}
fn macro_call(p: &mut Parser<'_>, m: Marker) {
assert!(paths::is_use_path_start(p));
paths::use_path(p);
assert!(p.at(T![!]));
match macro_call_after_excl(p) {
BlockLike::Block => (),
BlockLike::NotBlock => {

View file

@ -33,7 +33,7 @@ fn source_file() {
SOURCE_FILE
ERROR
AT "@"
MACRO_CALL
ERROR
PATH
PATH_SEGMENT
NAME_REF
@ -41,9 +41,7 @@ fn source_file() {
ERROR
AT "@"
error 0: expected an item
error 6: expected BANG
error 6: expected `{`, `[`, `(`
error 6: expected SEMICOLON
error 6: expected an item
error 6: expected an item
"#]],
);

View file

@ -775,6 +775,10 @@ mod err {
run_and_expect_errors("test_data/parser/inline/err/missing_fn_param_type.rs");
}
#[test]
fn path_item_without_excl() {
run_and_expect_errors("test_data/parser/inline/err/path_item_without_excl.rs");
}
#[test]
fn pointer_type_no_mutability() {
run_and_expect_errors("test_data/parser/inline/err/pointer_type_no_mutability.rs");
}

View file

@ -10,20 +10,20 @@ SOURCE_FILE
USE_KW "use"
ERROR
SLASH "/"
MACRO_CALL
ERROR
PATH
PATH_SEGMENT
NAME_REF
IDENT "bin"
ERROR
SLASH "/"
MACRO_CALL
ERROR
PATH
PATH_SEGMENT
NAME_REF
IDENT "env"
WHITESPACE " "
MACRO_CALL
ERROR
PATH
PATH_SEGMENT
NAME_REF
@ -33,13 +33,7 @@ error 23: expected `[`
error 23: expected an item
error 27: expected one of `*`, `::`, `{`, `self`, `super` or an identifier
error 28: expected SEMICOLON
error 31: expected BANG
error 31: expected `{`, `[`, `(`
error 31: expected SEMICOLON
error 31: expected an item
error 35: expected BANG
error 35: expected `{`, `[`, `(`
error 35: expected SEMICOLON
error 41: expected BANG
error 41: expected `{`, `[`, `(`
error 41: expected SEMICOLON
error 31: expected an item
error 35: expected an item
error 41: expected an item

View file

@ -14,13 +14,14 @@ SOURCE_FILE
WHITESPACE "\n"
R_CURLY "}"
WHITESPACE "\n\n"
MACRO_CALL
ERROR
PATH
PATH_SEGMENT
NAME_REF
IDENT "bar"
TOKEN_TREE
ERROR
L_PAREN "("
ERROR
R_PAREN ")"
WHITESPACE " "
ERROR
@ -75,6 +76,7 @@ SOURCE_FILE
WHITESPACE "\n"
R_CURLY "}"
WHITESPACE "\n"
error 17: expected BANG
error 19: expected SEMICOLON
error 17: expected an item
error 17: expected an item
error 18: expected an item
error 20: expected an item

View file

@ -46,7 +46,7 @@ SOURCE_FILE
ERROR
AT "@"
WHITESPACE " "
MACRO_CALL
ERROR
PATH
PATH_SEGMENT
NAME_REF
@ -72,9 +72,7 @@ error 67: expected R_ANGLE
error 67: expected R_PAREN
error 67: expected SEMICOLON
error 67: expected an item
error 72: expected BANG
error 72: expected `{`, `[`, `(`
error 72: expected SEMICOLON
error 72: expected an item
error 72: expected an item
error 73: expected an item
error 79: expected an item

View file

@ -26,13 +26,14 @@ SOURCE_FILE
ERROR
FN_KW "fn"
WHITESPACE " "
MACRO_CALL
ERROR
PATH
PATH_SEGMENT
NAME_REF
IDENT "bar"
TOKEN_TREE
ERROR
L_PAREN "("
ERROR
R_PAREN ")"
WHITESPACE " "
ERROR
@ -43,6 +44,7 @@ error 6: expected fn, trait or impl
error 38: expected a name
error 40: missing type for `const` or `static`
error 40: expected SEMICOLON
error 44: expected BANG
error 46: expected SEMICOLON
error 44: expected an item
error 44: expected an item
error 45: expected an item
error 47: expected an item

View file

@ -12,15 +12,16 @@ SOURCE_FILE
ERROR
USE_KW "use"
WHITESPACE " "
MACRO_CALL
ERROR
PATH
PATH_SEGMENT
NAME_REF
IDENT "std"
ERROR
SEMICOLON ";"
WHITESPACE "\n"
error 8: expected R_ANGLE
error 8: expected type
error 11: expected `{`
error 15: expected BANG
error 15: expected `{`, `[`, `(`
error 15: expected an item
error 15: expected an item

View file

@ -1,5 +1,5 @@
SOURCE_FILE
MACRO_CALL
ERROR
PATH
PATH_SEGMENT
NAME_REF
@ -22,7 +22,7 @@ SOURCE_FILE
ERROR
ASYNC_KW "async"
WHITESPACE " "
MACRO_CALL
ERROR
PATH
PATH_SEGMENT
NAME_REF
@ -42,10 +42,6 @@ SOURCE_FILE
L_CURLY "{"
R_CURLY "}"
WHITESPACE "\n"
error 3: expected BANG
error 3: expected `{`, `[`, `(`
error 3: expected SEMICOLON
error 3: expected an item
error 24: expected fn, trait or impl
error 28: expected BANG
error 28: expected `{`, `[`, `(`
error 28: expected SEMICOLON
error 28: expected an item

View file

@ -0,0 +1,8 @@
SOURCE_FILE
ERROR
PATH
PATH_SEGMENT
NAME_REF
IDENT "foo"
WHITESPACE "\n"
error 3: expected an item

View file

@ -14,10 +14,9 @@ doctest = false
[dependencies]
camino.workspace = true
serde = { workspace = true, optional = true }
[features]
serde1 = ["camino/serde1", "dep:serde"]
serde1 = ["camino/serde1"]
[lints]
workspace = true

View file

@ -14,6 +14,7 @@ doctest = false
[dependencies]
serde.workspace = true
serde_derive.workspace = true
serde_json = { workspace = true, features = ["unbounded_depth"] }
tracing.workspace = true
rustc-hash.workspace = true
@ -23,11 +24,9 @@ indexmap.workspace = true
paths = { workspace = true, features = ["serde1"] }
tt.workspace = true
stdx.workspace = true
# Ideally this crate would not depend on salsa things, but we need span information here which wraps
# InternIds for the syntax context
span.workspace = true
# only here due to the `Env` newtype :/
base-db.workspace = true
# span = {workspace = true, default-features = false} does not work
span = { path = "../span", version = "0.0.0", default-features = false}
intern.workspace = true
[lints]

View file

@ -9,7 +9,6 @@ pub mod json;
pub mod msg;
mod process;
use base_db::Env;
use paths::{AbsPath, AbsPathBuf};
use span::Span;
use std::{fmt, io, sync::Arc};
@ -148,7 +147,7 @@ impl ProcMacro {
&self,
subtree: &tt::Subtree<Span>,
attr: Option<&tt::Subtree<Span>>,
env: Env,
env: Vec<(String, String)>,
def_site: Span,
call_site: Span,
mixed_site: Span,
@ -179,7 +178,7 @@ impl ProcMacro {
},
},
lib: self.dylib_path.to_path_buf().into(),
env: env.into(),
env,
current_dir,
};

View file

@ -4,7 +4,8 @@ pub(crate) mod flat;
use std::io::{self, BufRead, Write};
use paths::Utf8PathBuf;
use serde::{de::DeserializeOwned, Deserialize, Serialize};
use serde::de::DeserializeOwned;
use serde_derive::{Deserialize, Serialize};
use crate::ProcMacroKind;
@ -123,7 +124,7 @@ impl ExpnGlobals {
}
}
pub trait Message: Serialize + DeserializeOwned {
pub trait Message: serde::Serialize + DeserializeOwned {
fn read<R: BufRead>(
from_proto: ProtocolRead<R>,
inp: &mut R,

View file

@ -39,10 +39,10 @@ use std::collections::VecDeque;
use intern::Symbol;
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use serde_derive::{Deserialize, Serialize};
use span::{EditionedFileId, ErasedFileAstId, Span, SpanAnchor, SyntaxContextId, TextRange};
use crate::msg::EXTENDED_LEAF_DATA;
use crate::msg::{ENCODE_CLOSE_SPAN_VERSION, EXTENDED_LEAF_DATA};
pub type SpanDataIndexMap =
indexmap::IndexSet<Span, std::hash::BuildHasherDefault<rustc_hash::FxHasher>>;
@ -145,7 +145,11 @@ impl FlatTree {
w.write(subtree);
FlatTree {
subtree: write_vec(w.subtree, SubtreeRepr::write),
subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
write_vec(w.subtree, SubtreeRepr::write_with_close_span)
} else {
write_vec(w.subtree, SubtreeRepr::write)
},
literal: if version >= EXTENDED_LEAF_DATA {
write_vec(w.literal, LiteralRepr::write_with_kind)
} else {
@ -179,7 +183,11 @@ impl FlatTree {
w.write(subtree);
FlatTree {
subtree: write_vec(w.subtree, SubtreeRepr::write),
subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
write_vec(w.subtree, SubtreeRepr::write_with_close_span)
} else {
write_vec(w.subtree, SubtreeRepr::write)
},
literal: if version >= EXTENDED_LEAF_DATA {
write_vec(w.literal, LiteralRepr::write_with_kind)
} else {
@ -202,7 +210,11 @@ impl FlatTree {
span_data_table: &SpanDataIndexMap,
) -> tt::Subtree<Span> {
Reader {
subtree: read_vec(self.subtree, SubtreeRepr::read),
subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
read_vec(self.subtree, SubtreeRepr::read_with_close_span)
} else {
read_vec(self.subtree, SubtreeRepr::read)
},
literal: if version >= EXTENDED_LEAF_DATA {
read_vec(self.literal, LiteralRepr::read_with_kind)
} else {
@ -224,7 +236,11 @@ impl FlatTree {
pub fn to_subtree_unresolved(self, version: u32) -> tt::Subtree<TokenId> {
Reader {
subtree: read_vec(self.subtree, SubtreeRepr::read),
subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
read_vec(self.subtree, SubtreeRepr::read_with_close_span)
} else {
read_vec(self.subtree, SubtreeRepr::read)
},
literal: if version >= EXTENDED_LEAF_DATA {
read_vec(self.literal, LiteralRepr::read_with_kind)
} else {
@ -257,7 +273,26 @@ fn write_vec<T, F: Fn(T) -> [u32; N], const N: usize>(xs: Vec<T>, f: F) -> Vec<u
}
impl SubtreeRepr {
fn write(self) -> [u32; 5] {
fn write(self) -> [u32; 4] {
let kind = match self.kind {
tt::DelimiterKind::Invisible => 0,
tt::DelimiterKind::Parenthesis => 1,
tt::DelimiterKind::Brace => 2,
tt::DelimiterKind::Bracket => 3,
};
[self.open.0, kind, self.tt[0], self.tt[1]]
}
fn read([open, kind, lo, len]: [u32; 4]) -> SubtreeRepr {
let kind = match kind {
0 => tt::DelimiterKind::Invisible,
1 => tt::DelimiterKind::Parenthesis,
2 => tt::DelimiterKind::Brace,
3 => tt::DelimiterKind::Bracket,
other => panic!("bad kind {other}"),
};
SubtreeRepr { open: TokenId(open), close: TokenId(!0), kind, tt: [lo, len] }
}
fn write_with_close_span(self) -> [u32; 5] {
let kind = match self.kind {
tt::DelimiterKind::Invisible => 0,
tt::DelimiterKind::Parenthesis => 1,
@ -266,7 +301,7 @@ impl SubtreeRepr {
};
[self.open.0, self.close.0, kind, self.tt[0], self.tt[1]]
}
fn read([open, close, kind, lo, len]: [u32; 5]) -> SubtreeRepr {
fn read_with_close_span([open, close, kind, lo, len]: [u32; 5]) -> SubtreeRepr {
let kind = match kind {
0 => tt::DelimiterKind::Invisible,
1 => tt::DelimiterKind::Parenthesis,
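A hedged round-trip sketch of the two record formats above (assumes SubtreeRepr is Copy and the TokenId constructor is reachable here): the legacy 4-word format drops the close span, which read backfills as TokenId(!0), while the 5-word format preserves it.
let repr = SubtreeRepr {
    open: TokenId(1),
    close: TokenId(7),
    kind: tt::DelimiterKind::Brace,
    tt: [0, 3],
};
// Legacy 4-word encoding: the close span is lost and backfilled.
assert_eq!(SubtreeRepr::read(repr.write()).close, TokenId(!0));
// 5-word encoding: the close span survives the round trip.
assert_eq!(SubtreeRepr::read_with_close_span(repr.write_with_close_span()).close, TokenId(7));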

View file

@ -56,25 +56,8 @@ impl ProcMacroProcessSrv {
match srv.version_check() {
Ok(v) if v > CURRENT_API_VERSION => Err(io::Error::new(
io::ErrorKind::Other,
format!(
"The version of the proc-macro server ({v}) in your Rust toolchain \
is newer than the version supported by your rust-analyzer ({CURRENT_API_VERSION}).
\
This will prevent proc-macro expansion from working. \
Please consider updating your rust-analyzer to ensure compatibility with your \
current toolchain."
),
)),
Ok(v) if v < RUST_ANALYZER_SPAN_SUPPORT => Err(io::Error::new(
io::ErrorKind::Other,
format!(
"The version of the proc-macro server ({v}) in your Rust toolchain \
is too old and no longer supported by your rust-analyzer which requires\
version {RUST_ANALYZER_SPAN_SUPPORT} or higher.
\
This will prevent proc-macro expansion from working. \
Please consider updating your toolchain or downgrading your rust-analyzer \
to ensure compatibility with your current toolchain."
format!( "The version of the proc-macro server ({v}) in your Rust toolchain is newer than the version supported by your rust-analyzer ({CURRENT_API_VERSION}).
This will prevent proc-macro expansion from working. Please consider updating your rust-analyzer to ensure compatibility with your current toolchain."
),
)),
Ok(v) => {
@ -89,10 +72,10 @@ impl ProcMacroProcessSrv {
tracing::info!("Proc-macro server span mode: {:?}", srv.mode);
Ok(srv)
}
Err(e) => Err(io::Error::new(
io::ErrorKind::Other,
format!("Failed to fetch proc-macro server version: {e}"),
)),
Err(e) => {
tracing::info!(%e, "proc-macro version check failed, restarting and assuming version 0");
create_srv(false)
}
}
}

View file

@ -21,8 +21,8 @@ stdx.workspace = true
tt.workspace = true
syntax-bridge.workspace = true
paths.workspace = true
base-db.workspace = true
span.workspace = true
# span = {workspace = true, default-features = false} does not work
span = { path = "../span", version = "0.0.0", default-features = false}
proc-macro-api.workspace = true
intern.workspace = true

View file

@ -19,6 +19,7 @@ rustc-hash.workspace = true
semver.workspace = true
serde_json.workspace = true
serde.workspace = true
serde_derive.workspace = true
tracing.workspace = true
triomphe.workspace = true
la-arena.workspace = true

View file

@ -15,7 +15,7 @@ use itertools::Itertools;
use la_arena::ArenaMap;
use paths::{AbsPath, AbsPathBuf, Utf8PathBuf};
use rustc_hash::{FxHashMap, FxHashSet};
use serde::Deserialize;
use serde::Deserialize as _;
use toolchain::Tool;
use crate::{

View file

@ -8,7 +8,7 @@ use cargo_metadata::{CargoOpt, MetadataCommand};
use la_arena::{Arena, Idx};
use paths::{AbsPath, AbsPathBuf, Utf8PathBuf};
use rustc_hash::{FxHashMap, FxHashSet};
use serde::Deserialize;
use serde_derive::Deserialize;
use serde_json::from_value;
use span::Edition;
use toolchain::Tool;

View file

@ -20,7 +20,7 @@ parking_lot = "0.12.1"
rustc-hash = "2.0.0"
smallvec = "1.0.0"
oorandom = "11"
triomphe = "0.1.11"
triomphe.workspace = true
itertools.workspace = true
ra-salsa-macros = { version = "0.0.0", path = "ra-salsa-macros", package = "salsa-macros" }

View file

@ -36,6 +36,7 @@ rayon.workspace = true
rustc-hash.workspace = true
serde_json = { workspace = true, features = ["preserve_order"] }
serde.workspace = true
serde_derive.workspace = true
tenthash = "0.4.0"
num_cpus = "1.15.0"
mimalloc = { version = "0.1.30", default-features = false, optional = true }

View file

@ -644,7 +644,8 @@ config_data! {
/// Aliased as `"checkOnSave.targets"`.
check_targets | checkOnSave_targets | checkOnSave_target: Option<CheckOnSaveTargets> = None,
/// Whether `--workspace` should be passed to `cargo check`.
/// If false, `-p <package>` will be passed instead.
/// If false, `-p <package>` will be passed instead where applicable; if no package
/// applies, no check will be performed.
check_workspace: bool = true,
/// These proc-macros will be ignored when trying to expand them.
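To make the documented fallback concrete, here is a minimal sketch of the flag selection (function name and signature are assumed; the real logic lives in the flycheck command builder):
fn cargo_check_scope_args(check_workspace: bool, package: Option<&str>) -> Option<Vec<&str>> {
    match (check_workspace, package) {
        // `--workspace` wins whenever the setting is on.
        (true, _) => Some(vec!["--workspace"]),
        // Otherwise check just the saved file's package, if one applies.
        (false, Some(pkg)) => Some(vec!["-p", pkg]),
        // No applicable package: no check is performed at all.
        (false, None) => None,
    }
}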

View file

@ -3,6 +3,7 @@ pub(crate) mod to_proto;
use std::mem;
use cargo_metadata::PackageId;
use ide::FileId;
use ide_db::FxHashMap;
use itertools::Itertools;
@ -13,7 +14,8 @@ use triomphe::Arc;
use crate::{global_state::GlobalStateSnapshot, lsp, lsp_ext, main_loop::DiagnosticsTaskKind};
pub(crate) type CheckFixes = Arc<IntMap<usize, IntMap<FileId, Vec<Fix>>>>;
pub(crate) type CheckFixes =
Arc<IntMap<usize, FxHashMap<Option<Arc<PackageId>>, IntMap<FileId, Vec<Fix>>>>>;
#[derive(Debug, Default, Clone)]
pub struct DiagnosticsMapConfig {
@ -31,7 +33,10 @@ pub(crate) struct DiagnosticCollection {
pub(crate) native_syntax: IntMap<FileId, (DiagnosticsGeneration, Vec<lsp_types::Diagnostic>)>,
pub(crate) native_semantic: IntMap<FileId, (DiagnosticsGeneration, Vec<lsp_types::Diagnostic>)>,
// FIXME: should be Vec<flycheck::Diagnostic>
pub(crate) check: IntMap<usize, IntMap<FileId, Vec<lsp_types::Diagnostic>>>,
pub(crate) check: IntMap<
usize,
FxHashMap<Option<Arc<PackageId>>, IntMap<FileId, Vec<lsp_types::Diagnostic>>>,
>,
pub(crate) check_fixes: CheckFixes,
changes: IntSet<FileId>,
/// Counter for supplying a new generation number for diagnostics.
@ -50,18 +55,37 @@ pub(crate) struct Fix {
impl DiagnosticCollection {
pub(crate) fn clear_check(&mut self, flycheck_id: usize) {
if let Some(it) = Arc::make_mut(&mut self.check_fixes).get_mut(&flycheck_id) {
it.clear();
}
if let Some(it) = self.check.get_mut(&flycheck_id) {
self.changes.extend(it.drain().map(|(key, _value)| key));
let Some(check) = self.check.get_mut(&flycheck_id) else {
return;
};
self.changes.extend(check.drain().flat_map(|(_, v)| v.into_keys()));
if let Some(fixes) = Arc::make_mut(&mut self.check_fixes).get_mut(&flycheck_id) {
fixes.clear();
}
}
pub(crate) fn clear_check_all(&mut self) {
Arc::make_mut(&mut self.check_fixes).clear();
self.changes
.extend(self.check.values_mut().flat_map(|it| it.drain().map(|(key, _value)| key)))
self.changes.extend(
self.check.values_mut().flat_map(|it| it.drain().flat_map(|(_, v)| v.into_keys())),
)
}
pub(crate) fn clear_check_for_package(
&mut self,
flycheck_id: usize,
package_id: Arc<PackageId>,
) {
let Some(check) = self.check.get_mut(&flycheck_id) else {
return;
};
let package_id = Some(package_id);
if let Some(checks) = check.remove(&package_id) {
self.changes.extend(checks.into_keys());
}
if let Some(fixes) = Arc::make_mut(&mut self.check_fixes).get_mut(&flycheck_id) {
fixes.remove(&package_id);
}
}
pub(crate) fn clear_native_for(&mut self, file_id: FileId) {
@ -73,11 +97,19 @@ impl DiagnosticCollection {
pub(crate) fn add_check_diagnostic(
&mut self,
flycheck_id: usize,
package_id: &Option<Arc<PackageId>>,
file_id: FileId,
diagnostic: lsp_types::Diagnostic,
fix: Option<Box<Fix>>,
) {
let diagnostics = self.check.entry(flycheck_id).or_default().entry(file_id).or_default();
let diagnostics = self
.check
.entry(flycheck_id)
.or_default()
.entry(package_id.clone())
.or_default()
.entry(file_id)
.or_default();
for existing_diagnostic in diagnostics.iter() {
if are_diagnostics_equal(existing_diagnostic, &diagnostic) {
return;
@ -86,7 +118,14 @@ impl DiagnosticCollection {
if let Some(fix) = fix {
let check_fixes = Arc::make_mut(&mut self.check_fixes);
check_fixes.entry(flycheck_id).or_default().entry(file_id).or_default().push(*fix);
check_fixes
.entry(flycheck_id)
.or_default()
.entry(package_id.clone())
.or_default()
.entry(file_id)
.or_default()
.push(*fix);
}
diagnostics.push(diagnostic);
self.changes.insert(file_id);
@ -135,7 +174,12 @@ impl DiagnosticCollection {
) -> impl Iterator<Item = &lsp_types::Diagnostic> {
let native_syntax = self.native_syntax.get(&file_id).into_iter().flat_map(|(_, d)| d);
let native_semantic = self.native_semantic.get(&file_id).into_iter().flat_map(|(_, d)| d);
let check = self.check.values().filter_map(move |it| it.get(&file_id)).flatten();
let check = self
.check
.values()
.flat_map(|it| it.values())
.filter_map(move |it| it.get(&file_id))
.flatten();
native_syntax.chain(native_semantic).chain(check)
}
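Since CheckFixes is now three maps deep (flycheck id, then Option<Arc<PackageId>> where None holds rustc-level diagnostics, then file), a hedged sketch of a read path may help; it mirrors what handle_code_action does later in this diff, but the free function and its name are assumed:
fn fixes_for_file(check_fixes: &CheckFixes, file_id: FileId) -> impl Iterator<Item = &Fix> {
    check_fixes
        .values() // one entry per flycheck instance
        .flat_map(|by_package| by_package.values()) // packages, plus the None bucket
        .filter_map(move |by_file| by_file.get(&file_id))
        .flatten()
}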

View file

@ -1,17 +1,20 @@
//! Flycheck provides the functionality needed to run `cargo check` and turn its
//! output into LSP diagnostics.
use std::{fmt, io, process::Command, time::Duration};
use std::{fmt, io, mem, process::Command, time::Duration};
use cargo_metadata::PackageId;
use crossbeam_channel::{select_biased, unbounded, Receiver, Sender};
use paths::{AbsPath, AbsPathBuf, Utf8PathBuf};
use rustc_hash::FxHashMap;
use serde::Deserialize;
use serde::Deserialize as _;
use serde_derive::Deserialize;
pub(crate) use cargo_metadata::diagnostic::{
Applicability, Diagnostic, DiagnosticCode, DiagnosticLevel, DiagnosticSpan,
};
use toolchain::Tool;
use triomphe::Arc;
use crate::command::{CommandHandle, ParseFromLine};
@ -150,10 +153,19 @@ impl FlycheckHandle {
pub(crate) enum FlycheckMessage {
/// Request adding a diagnostic with fixes included to a file
AddDiagnostic { id: usize, workspace_root: AbsPathBuf, diagnostic: Diagnostic },
AddDiagnostic {
id: usize,
workspace_root: Arc<AbsPathBuf>,
diagnostic: Diagnostic,
package_id: Option<Arc<PackageId>>,
},
/// Request clearing all previous diagnostics
ClearDiagnostics { id: usize },
/// Request clearing all outdated diagnostics.
ClearDiagnostics {
id: usize,
/// The package whose diagnostics to clear, or if unspecified, all diagnostics.
package_id: Option<Arc<PackageId>>,
},
/// Request check progress notification to client
Progress {
@ -166,15 +178,18 @@ pub(crate) enum FlycheckMessage {
impl fmt::Debug for FlycheckMessage {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
FlycheckMessage::AddDiagnostic { id, workspace_root, diagnostic } => f
FlycheckMessage::AddDiagnostic { id, workspace_root, diagnostic, package_id } => f
.debug_struct("AddDiagnostic")
.field("id", id)
.field("workspace_root", workspace_root)
.field("package_id", package_id)
.field("diagnostic_code", &diagnostic.code.as_ref().map(|it| &it.code))
.finish(),
FlycheckMessage::ClearDiagnostics { id } => {
f.debug_struct("ClearDiagnostics").field("id", id).finish()
}
FlycheckMessage::ClearDiagnostics { id, package_id } => f
.debug_struct("ClearDiagnostics")
.field("id", id)
.field("package_id", package_id)
.finish(),
FlycheckMessage::Progress { id, progress } => {
f.debug_struct("Progress").field("id", id).field("progress", progress).finish()
}
@ -200,12 +215,13 @@ enum StateChange {
struct FlycheckActor {
/// The workspace id of this flycheck instance.
id: usize,
sender: Sender<FlycheckMessage>,
config: FlycheckConfig,
manifest_path: Option<AbsPathBuf>,
/// Either the workspace root of the workspace we are flychecking,
/// or the project root of the project.
root: AbsPathBuf,
root: Arc<AbsPathBuf>,
sysroot_root: Option<AbsPathBuf>,
/// CargoHandle exists to wrap around the communication needed to be able to
/// run `cargo check` without blocking. Currently the Rust standard library
@ -215,8 +231,13 @@ struct FlycheckActor {
command_handle: Option<CommandHandle<CargoCheckMessage>>,
/// The receiver side of the channel mentioned above.
command_receiver: Option<Receiver<CargoCheckMessage>>,
package_status: FxHashMap<Arc<PackageId>, DiagnosticReceived>,
}
status: FlycheckStatus,
#[derive(PartialEq, Eq, Copy, Clone, Debug)]
enum DiagnosticReceived {
Yes,
No,
}
#[allow(clippy::large_enum_variant)]
@ -225,13 +246,6 @@ enum Event {
CheckEvent(Option<CargoCheckMessage>),
}
#[derive(PartialEq)]
enum FlycheckStatus {
Started,
DiagnosticSent,
Finished,
}
pub(crate) const SAVED_FILE_PLACEHOLDER: &str = "$saved_file";
impl FlycheckActor {
@ -249,11 +263,11 @@ impl FlycheckActor {
sender,
config,
sysroot_root,
root: workspace_root,
root: Arc::new(workspace_root),
manifest_path,
command_handle: None,
command_receiver: None,
status: FlycheckStatus::Finished,
package_status: FxHashMap::default(),
}
}
@ -306,13 +320,11 @@ impl FlycheckActor {
self.command_handle = Some(command_handle);
self.command_receiver = Some(receiver);
self.report_progress(Progress::DidStart);
self.status = FlycheckStatus::Started;
}
Err(error) => {
self.report_progress(Progress::DidFailToRestart(format!(
"Failed to run the following command: {formatted_command} error={error}"
)));
self.status = FlycheckStatus::Finished;
}
}
}
@ -332,37 +344,62 @@ impl FlycheckActor {
error
);
}
if self.status == FlycheckStatus::Started {
self.send(FlycheckMessage::ClearDiagnostics { id: self.id });
if self.package_status.is_empty() {
// We finished without receiving any diagnostics.
// That means all of them are stale.
self.send(FlycheckMessage::ClearDiagnostics {
id: self.id,
package_id: None,
});
} else {
for (package_id, status) in mem::take(&mut self.package_status) {
if let DiagnosticReceived::No = status {
self.send(FlycheckMessage::ClearDiagnostics {
id: self.id,
package_id: Some(package_id),
});
}
}
}
self.report_progress(Progress::DidFinish(res));
self.status = FlycheckStatus::Finished;
}
Event::CheckEvent(Some(message)) => match message {
CargoCheckMessage::CompilerArtifact(msg) => {
tracing::trace!(
flycheck_id = self.id,
artifact = msg.target.name,
package_id = msg.package_id.repr,
"artifact received"
);
self.report_progress(Progress::DidCheckCrate(msg.target.name));
self.package_status
.entry(Arc::new(msg.package_id))
.or_insert(DiagnosticReceived::No);
}
CargoCheckMessage::Diagnostic(msg) => {
CargoCheckMessage::Diagnostic { diagnostic, package_id } => {
tracing::trace!(
flycheck_id = self.id,
message = msg.message,
message = diagnostic.message,
package_id = package_id.as_ref().map(|it| &it.repr),
"diagnostic received"
);
if self.status == FlycheckStatus::Started {
self.send(FlycheckMessage::ClearDiagnostics { id: self.id });
if let Some(package_id) = &package_id {
if !self.package_status.contains_key(package_id) {
self.package_status
.insert(package_id.clone(), DiagnosticReceived::Yes);
self.send(FlycheckMessage::ClearDiagnostics {
id: self.id,
package_id: Some(package_id.clone()),
});
}
}
self.send(FlycheckMessage::AddDiagnostic {
id: self.id,
package_id,
workspace_root: self.root.clone(),
diagnostic: msg,
diagnostic,
});
self.status = FlycheckStatus::DiagnosticSent;
}
},
}
@ -380,7 +417,7 @@ impl FlycheckActor {
command_handle.cancel();
self.command_receiver.take();
self.report_progress(Progress::DidCancel);
self.status = FlycheckStatus::Finished;
self.package_status.clear();
}
}
@ -400,7 +437,7 @@ impl FlycheckActor {
cmd.env("RUSTUP_TOOLCHAIN", AsRef::<std::path::Path>::as_ref(sysroot_root));
}
cmd.arg(command);
cmd.current_dir(&self.root);
cmd.current_dir(&*self.root);
match package {
Some(pkg) => cmd.arg("-p").arg(pkg),
@ -442,11 +479,11 @@ impl FlycheckActor {
match invocation_strategy {
InvocationStrategy::Once => {
cmd.current_dir(&self.root);
cmd.current_dir(&*self.root);
}
InvocationStrategy::PerWorkspace => {
// FIXME: cmd.current_dir(&affected_workspace);
cmd.current_dir(&self.root);
cmd.current_dir(&*self.root);
}
}
@ -486,7 +523,7 @@ impl FlycheckActor {
#[allow(clippy::large_enum_variant)]
enum CargoCheckMessage {
CompilerArtifact(cargo_metadata::Artifact),
Diagnostic(Diagnostic),
Diagnostic { diagnostic: Diagnostic, package_id: Option<Arc<PackageId>> },
}
impl ParseFromLine for CargoCheckMessage {
@ -501,11 +538,16 @@ impl ParseFromLine for CargoCheckMessage {
Some(CargoCheckMessage::CompilerArtifact(artifact))
}
cargo_metadata::Message::CompilerMessage(msg) => {
Some(CargoCheckMessage::Diagnostic(msg.message))
Some(CargoCheckMessage::Diagnostic {
diagnostic: msg.message,
package_id: Some(Arc::new(msg.package_id)),
})
}
_ => None,
},
JsonMessage::Rustc(message) => Some(CargoCheckMessage::Diagnostic(message)),
JsonMessage::Rustc(message) => {
Some(CargoCheckMessage::Diagnostic { diagnostic: message, package_id: None })
}
};
}

View file

@ -92,7 +92,7 @@ pub(crate) struct GlobalState {
// status
pub(crate) shutdown_requested: bool,
pub(crate) last_reported_status: Option<lsp_ext::ServerStatusParams>,
pub(crate) last_reported_status: lsp_ext::ServerStatusParams,
// proc macros
pub(crate) proc_macro_clients: Arc<[anyhow::Result<ProcMacroServer>]>,
@ -238,7 +238,11 @@ impl GlobalState {
mem_docs: MemDocs::default(),
semantic_tokens_cache: Arc::new(Default::default()),
shutdown_requested: false,
last_reported_status: None,
last_reported_status: lsp_ext::ServerStatusParams {
health: lsp_ext::Health::Ok,
quiescent: true,
message: None,
},
source_root_config: SourceRootConfig::default(),
local_roots_parent_map: Arc::new(FxHashMap::default()),
config_errors: Default::default(),

View file

@ -126,7 +126,7 @@ impl RequestDispatcher<'_> {
/// Dispatches a non-latency-sensitive request onto the thread pool. When the VFS is marked not
/// ready, this will return a `default`-constructed [`R::Result`].
pub(crate) fn on_with<R>(
pub(crate) fn on_with_vfs_default<R>(
&mut self,
f: fn(GlobalStateSnapshot, R::Params) -> anyhow::Result<R::Result>,
default: impl FnOnce() -> R::Result,

View file

@ -189,7 +189,7 @@ pub(crate) fn handle_did_save_text_document(
if !state.config.check_on_save(Some(sr)) || run_flycheck(state, vfs_path) {
return Ok(());
}
} else if state.config.check_on_save(None) {
} else if state.config.check_on_save(None) && state.config.flycheck_workspace(None) {
// No specific flycheck was triggered, so let's trigger all of them.
for flycheck in state.flycheck.iter() {
flycheck.restart_workspace(None);
@ -293,7 +293,7 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool {
let file_id = state.vfs.read().0.file_id(&vfs_path);
if let Some(file_id) = file_id {
let world = state.snapshot();
let source_root_id = world.analysis.source_root_id(file_id).ok();
let may_flycheck_workspace = state.config.flycheck_workspace(None);
let mut updated = false;
let task = move || -> std::result::Result<(), ide::Cancelled> {
// Is the target binary? If so we let flycheck run only for the workspace that contains the crate.
@ -375,21 +375,22 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool {
let saved_file = vfs_path.as_path().map(|p| p.to_owned());
// Find and trigger corresponding flychecks
for flycheck in world.flycheck.iter() {
'flychecks: for flycheck in world.flycheck.iter() {
for (id, package) in workspace_ids.clone() {
if id == flycheck.id() {
updated = true;
match package.filter(|_| !world.config.flycheck_workspace(source_root_id)) {
Some(package) => flycheck
.restart_for_package(package, target.clone().map(TupleExt::head)),
None => flycheck.restart_workspace(saved_file.clone()),
if may_flycheck_workspace {
flycheck.restart_workspace(saved_file.clone())
} else if let Some(package) = package {
flycheck
.restart_for_package(package, target.clone().map(TupleExt::head))
}
continue;
continue 'flychecks;
}
}
}
// No specific flycheck was triggered, so let's trigger all of them.
if !updated {
if !updated && may_flycheck_workspace {
for flycheck in world.flycheck.iter() {
flycheck.restart_workspace(saved_file.clone());
}
@ -432,9 +433,11 @@ pub(crate) fn handle_run_flycheck(
}
}
// No specific flycheck was triggered, so let's trigger all of them.
if state.config.flycheck_workspace(None) {
for flycheck in state.flycheck.iter() {
flycheck.restart_workspace(None);
}
}
Ok(())
}

View file

@ -481,27 +481,28 @@ pub(crate) fn handle_document_diagnostics(
snap: GlobalStateSnapshot,
params: lsp_types::DocumentDiagnosticParams,
) -> anyhow::Result<lsp_types::DocumentDiagnosticReportResult> {
const EMPTY: lsp_types::DocumentDiagnosticReportResult =
let empty = || {
lsp_types::DocumentDiagnosticReportResult::Report(
lsp_types::DocumentDiagnosticReport::Full(
lsp_types::RelatedFullDocumentDiagnosticReport {
related_documents: None,
full_document_diagnostic_report: lsp_types::FullDocumentDiagnosticReport {
result_id: None,
result_id: Some("rust-analyzer".to_owned()),
items: vec![],
},
},
),
);
)
};
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let source_root = snap.analysis.source_root_id(file_id)?;
if !snap.analysis.is_local_source_root(source_root)? {
return Ok(EMPTY);
return Ok(empty());
}
let config = snap.config.diagnostics(Some(source_root));
if !config.enabled {
return Ok(EMPTY);
return Ok(empty());
}
let line_index = snap.file_line_index(file_id)?;
let supports_related = snap.config.text_document_diagnostic_related_document_support();
@ -529,7 +530,7 @@ pub(crate) fn handle_document_diagnostics(
Ok(lsp_types::DocumentDiagnosticReportResult::Report(
lsp_types::DocumentDiagnosticReport::Full(lsp_types::RelatedFullDocumentDiagnosticReport {
full_document_diagnostic_report: lsp_types::FullDocumentDiagnosticReport {
result_id: None,
result_id: Some("rust-analyzer".to_owned()),
items: diagnostics.collect(),
},
related_documents: related_documents.is_empty().not().then(|| {
@ -539,7 +540,10 @@ pub(crate) fn handle_document_diagnostics(
(
to_proto::url(&snap, id),
lsp_types::DocumentDiagnosticReportKind::Full(
lsp_types::FullDocumentDiagnosticReport { result_id: None, items },
lsp_types::FullDocumentDiagnosticReport {
result_id: Some("rust-analyzer".to_owned()),
items,
},
),
)
})
@ -1144,7 +1148,7 @@ pub(crate) fn handle_completion_resolve(
let Some(corresponding_completion) = completions.into_iter().find(|completion_item| {
// Avoid computing hashes for items that obviously do not match
// r-a might append a detail-based suffix to the label, so we cannot check for equality
original_completion.label.starts_with(completion_item.label.as_str())
original_completion.label.starts_with(completion_item.label.primary.as_str())
&& resolve_data_hash == completion_item_hash(completion_item, resolve_data.for_ref)
}) else {
return Ok(original_completion);
@ -1441,7 +1445,13 @@ pub(crate) fn handle_code_action(
}
// Fixes from `cargo check`.
for fix in snap.check_fixes.values().filter_map(|it| it.get(&frange.file_id)).flatten() {
for fix in snap
.check_fixes
.values()
.flat_map(|it| it.values())
.filter_map(|it| it.get(&frange.file_id))
.flatten()
{
// FIXME: this mapping is awkward and shouldn't exist. Refactor
// `snap.check_fixes` to not convert to LSP prematurely.
let intersect_fix_range = fix

View file

@ -114,8 +114,11 @@ fn completion_item_hash(item: &CompletionItem, is_ref_completion: bool) -> [u8;
u8::from(item.deprecated),
u8::from(item.trigger_call_info),
]);
hasher.update(&item.label);
if let Some(label_detail) = &item.label_detail {
hasher.update(&item.label.primary);
if let Some(label_detail) = &item.label.detail_left {
hasher.update(label_detail);
}
if let Some(label_detail) = &item.label.detail_right {
hasher.update(label_detail);
}
// NB: do not hash edits or source range, as those may change between the time the client sends the resolve request

View file

@ -823,8 +823,11 @@ impl Request for OnTypeFormatting {
#[derive(Debug, Serialize, Deserialize)]
pub struct CompletionResolveData {
pub position: lsp_types::TextDocumentPositionParams,
#[serde(skip_serializing_if = "Vec::is_empty", default)]
pub imports: Vec<CompletionImport>,
#[serde(skip_serializing_if = "Option::is_none", default)]
pub version: Option<i32>,
#[serde(skip_serializing_if = "Option::is_none", default)]
pub trigger_character: Option<char>,
pub for_ref: bool,
pub hash: String,
@ -836,6 +839,7 @@ pub struct InlayHintResolveData {
// This is a string instead of a u64 as JavaScript can't represent u64 fully
pub hash: String,
pub resolve_range: lsp_types::Range,
#[serde(skip_serializing_if = "Option::is_none", default)]
pub version: Option<i32>,
}

View file

@ -2,6 +2,7 @@
use std::{
iter::once,
mem,
ops::Not as _,
sync::atomic::{AtomicU32, Ordering},
};
@ -353,14 +354,17 @@ fn completion_item(
};
let mut lsp_item = lsp_types::CompletionItem {
label: item.label.to_string(),
label: item.label.primary.to_string(),
detail,
filter_text,
kind: Some(completion_item_kind(item.kind)),
text_edit,
additional_text_edits: Some(additional_text_edits),
additional_text_edits: additional_text_edits
.is_empty()
.not()
.then_some(additional_text_edits),
documentation,
deprecated: Some(item.deprecated),
deprecated: item.deprecated.then_some(item.deprecated),
tags,
command,
insert_text_format,
@ -368,15 +372,17 @@ fn completion_item(
};
if config.completion_label_details_support() {
let has_label_details =
item.label.detail_left.is_some() || item.label.detail_right.is_some();
if fields_to_resolve.resolve_label_details {
something_to_resolve |= true;
} else {
something_to_resolve |= has_label_details;
} else if has_label_details {
lsp_item.label_details = Some(lsp_types::CompletionItemLabelDetails {
detail: item.label_detail.as_ref().map(ToString::to_string),
description: item.detail.clone(),
detail: item.label.detail_left.clone(),
description: item.label.detail_right.clone(),
});
}
} else if let Some(label_detail) = &item.label_detail {
} else if let Some(label_detail) = &item.label.detail_left {
lsp_item.label.push_str(label_detail.as_str());
}
@ -1578,7 +1584,9 @@ pub(crate) fn code_lens(
};
let lens_config = snap.config.lens();
if lens_config.run && client_commands_config.run_single && has_root {
if has_root {
if lens_config.run && client_commands_config.run_single {
let command = command::run_single(&r, &title);
acc.push(lsp_types::CodeLens {
range: annotation_range,
@ -1594,6 +1602,8 @@ pub(crate) fn code_lens(
data: None,
})
}
}
if lens_config.interpret {
let command = command::interpret_single(&r);
acc.push(lsp_types::CodeLens {

View file

@ -408,7 +408,10 @@ impl GlobalState {
if self.is_quiescent() {
let became_quiescent = !was_quiescent;
if became_quiescent {
if self.config.check_on_save(None) {
if self.config.check_on_save(None)
&& self.config.flycheck_workspace(None)
&& !self.fetch_build_data_queue.op_requested()
{
// Project has loaded properly, kick off initial flycheck
self.flycheck.iter().for_each(|flycheck| flycheck.restart_workspace(None));
}
@ -656,8 +659,8 @@ impl GlobalState {
fn update_status_or_notify(&mut self) {
let status = self.current_status();
if self.last_reported_status.as_ref() != Some(&status) {
self.last_reported_status = Some(status.clone());
if self.last_reported_status != status {
self.last_reported_status = status.clone();
if self.config.server_status_notification() {
self.send_notification::<lsp_ext::ServerStatusNotification>(status);
@ -715,6 +718,7 @@ impl GlobalState {
error!("FetchWorkspaceError: {e}");
}
self.wants_to_switch = Some("fetched workspace".to_owned());
self.diagnostics.clear_check_all();
(Progress::End, None)
}
};
@ -956,7 +960,7 @@ impl GlobalState {
fn handle_flycheck_msg(&mut self, message: FlycheckMessage) {
match message {
FlycheckMessage::AddDiagnostic { id, workspace_root, diagnostic } => {
FlycheckMessage::AddDiagnostic { id, workspace_root, diagnostic, package_id } => {
let snap = self.snapshot();
let diagnostics = crate::diagnostics::to_proto::map_rust_diagnostic_to_lsp(
&self.config.diagnostics_map(None),
@ -968,6 +972,7 @@ impl GlobalState {
match url_to_file_id(&self.vfs.read().0, &diag.url) {
Ok(file_id) => self.diagnostics.add_check_diagnostic(
id,
&package_id,
file_id,
diag.diagnostic,
diag.fix,
@ -981,9 +986,12 @@ impl GlobalState {
};
}
}
FlycheckMessage::ClearDiagnostics { id } => self.diagnostics.clear_check(id),
FlycheckMessage::ClearDiagnostics { id, package_id: None } => {
self.diagnostics.clear_check(id)
}
FlycheckMessage::ClearDiagnostics { id, package_id: Some(package_id) } => {
self.diagnostics.clear_check_for_package(id, package_id)
}
FlycheckMessage::Progress { id, progress } => {
let (state, message) = match progress {
flycheck::Progress::DidStart => (Progress::Begin, None),
@ -1090,12 +1098,12 @@ impl GlobalState {
.on_latency_sensitive::<NO_RETRY, lsp_request::SemanticTokensRangeRequest>(handlers::handle_semantic_tokens_range)
// FIXME: Some of these NO_RETRY could be retries if the file they are interested in didn't change.
// All other request handlers
.on_with::<lsp_request::DocumentDiagnosticRequest>(handlers::handle_document_diagnostics, || lsp_types::DocumentDiagnosticReportResult::Report(
.on_with_vfs_default::<lsp_request::DocumentDiagnosticRequest>(handlers::handle_document_diagnostics, || lsp_types::DocumentDiagnosticReportResult::Report(
lsp_types::DocumentDiagnosticReport::Full(
lsp_types::RelatedFullDocumentDiagnosticReport {
related_documents: None,
full_document_diagnostic_report: lsp_types::FullDocumentDiagnosticReport {
result_id: None,
result_id: Some("rust-analyzer".to_owned()),
items: vec![],
},
},

View file

@ -70,7 +70,6 @@ impl GlobalState {
/// are ready to do semantic work.
pub(crate) fn is_quiescent(&self) -> bool {
self.vfs_done
&& self.last_reported_status.is_some()
&& !self.fetch_workspaces_queue.op_in_progress()
&& !self.fetch_build_data_queue.op_in_progress()
&& !self.fetch_proc_macros_queue.op_in_progress()

View file

@ -5,7 +5,8 @@ use std::process::Command;
use crossbeam_channel::Sender;
use paths::AbsPath;
use serde::Deserialize;
use serde::Deserialize as _;
use serde_derive::Deserialize;
use toolchain::Tool;
use crate::{

View file

@ -54,7 +54,7 @@ where
fn on_event(&self, _event: &Event<'_>, _ctx: Context<'_, S>) {}
fn on_close(&self, id: Id, ctx: Context<'_, S>) {
#[derive(serde::Serialize)]
#[derive(serde_derive::Serialize)]
struct JsonDataInner {
name: &'static str,
elapsed_ms: u128,

View file

@ -1,7 +1,7 @@
use std::{
cell::{Cell, RefCell},
env, fs,
sync::{Once, OnceLock},
sync::Once,
time::Duration,
};
@ -141,34 +141,15 @@ impl Project<'_> {
/// file in the config dir after the server is run, something where our naive approach falls short.
/// Using a `prelock` allows us to force a lock when we know we need it.
pub(crate) fn server_with_lock(self, config_lock: bool) -> Server {
static CONFIG_DIR_LOCK: OnceLock<(Utf8PathBuf, Mutex<()>)> = OnceLock::new();
static CONFIG_DIR_LOCK: Mutex<()> = Mutex::new(());
let config_dir_guard = if config_lock {
Some({
let (path, mutex) = CONFIG_DIR_LOCK.get_or_init(|| {
let value = TestDir::new().keep().path().to_owned();
let guard = CONFIG_DIR_LOCK.lock();
let test_dir = TestDir::new();
let value = test_dir.path().to_owned();
env::set_var("__TEST_RA_USER_CONFIG_DIR", &value);
(value, Mutex::new(()))
});
#[allow(dyn_drop)]
(mutex.lock(), {
Box::new({
struct Dropper(Utf8PathBuf);
impl Drop for Dropper {
fn drop(&mut self) {
for entry in fs::read_dir(&self.0).unwrap() {
let path = entry.unwrap().path();
if path.is_file() {
fs::remove_file(path).unwrap();
} else if path.is_dir() {
fs::remove_dir_all(path).unwrap();
}
}
}
}
Dropper(path.clone())
}) as Box<dyn Drop>
})
(guard, test_dir)
})
} else {
None
@@ -311,14 +292,12 @@ pub(crate) struct Server {
client: Connection,
/// XXX: remove the tempdir last
dir: TestDir,
#[allow(dyn_drop)]
_config_dir_guard: Option<(MutexGuard<'static, ()>, Box<dyn Drop>)>,
_config_dir_guard: Option<(MutexGuard<'static, ()>, TestDir)>,
}
impl Server {
#[allow(dyn_drop)]
fn new(
config_dir_guard: Option<(MutexGuard<'static, ()>, Box<dyn Drop>)>,
config_dir_guard: Option<(MutexGuard<'static, ()>, TestDir)>,
dir: TestDir,
config: Config,
) -> Server {
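The locking pattern above, reduced to std-only pieces (the env var name is from the diff; `lock_config_dir` is a made-up helper, and the real code deletes the directory by dropping a `TestDir` instead of reusing a fixed path):

use std::path::PathBuf;
use std::sync::{Mutex, MutexGuard};
use std::{env, fs};

static CONFIG_DIR_LOCK: Mutex<()> = Mutex::new(());

// The env var is process-global, so the guard must be held for the whole
// test; concurrent tests would otherwise see each other's config dir.
fn lock_config_dir() -> (MutexGuard<'static, ()>, PathBuf) {
    let guard = CONFIG_DIR_LOCK.lock().unwrap();
    let dir = env::temp_dir().join(format!("ra-user-config-{}", std::process::id()));
    fs::create_dir_all(&dir).unwrap();
    env::set_var("__TEST_RA_USER_CONFIG_DIR", &dir);
    (guard, dir)
}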

View file

@@ -12,7 +12,7 @@ authors.workspace = true
[dependencies]
la-arena.workspace = true
ra-salsa.workspace = true
ra-salsa = { workspace = true, optional = true }
rustc-hash.workspace = true
hashbrown.workspace = true
text-size.workspace = true
@@ -22,5 +22,8 @@ vfs.workspace = true
syntax.workspace = true
stdx.workspace = true
[features]
default = ["ra-salsa"]
[lints]
workspace = true

View file

@@ -21,6 +21,9 @@
//! `ExpnData::call_site` in rustc, [`MacroCallLoc::call_site`] in rust-analyzer.
use std::fmt;
#[cfg(not(feature = "ra-salsa"))]
use crate::InternId;
#[cfg(feature = "ra-salsa")]
use ra_salsa::{InternId, InternValue};
use crate::MacroCallId;
@@ -39,6 +42,7 @@ impl fmt::Debug for SyntaxContextId {
}
}
#[cfg(feature = "ra-salsa")]
impl ra_salsa::InternKey for SyntaxContextId {
fn from_intern_id(v: ra_salsa::InternId) -> Self {
SyntaxContextId(v)
@@ -92,6 +96,7 @@ pub struct SyntaxContextData {
pub opaque_and_semitransparent: SyntaxContextId,
}
#[cfg(feature = "ra-salsa")]
impl InternValue for SyntaxContextData {
type Key = (SyntaxContextId, Option<MacroCallId>, Transparency);

View file

@@ -1,6 +1,7 @@
//! File and span related types.
use std::fmt::{self, Write};
#[cfg(feature = "ra-salsa")]
use ra_salsa::InternId;
mod ast_id;
@@ -261,8 +262,9 @@ pub struct MacroFileId {
/// `MacroCallId` identifies a particular macro invocation, like
/// `println!("Hello, {}", world)`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct MacroCallId(ra_salsa::InternId);
pub struct MacroCallId(InternId);
#[cfg(feature = "ra-salsa")]
impl ra_salsa::InternKey for MacroCallId {
fn from_intern_id(v: ra_salsa::InternId) -> Self {
MacroCallId(v)
@@ -355,3 +357,72 @@ impl HirFileId {
}
}
}
#[cfg(not(feature = "ra-salsa"))]
mod intern_id_proxy {
use std::fmt;
use std::num::NonZeroU32;
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub(super) struct InternId {
value: NonZeroU32,
}
impl InternId {
pub(super) const MAX: u32 = 0xFFFF_FF00;
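/// # Safety
///
/// `value` must be less than [`InternId::MAX`]; `value + 1` then cannot
/// wrap to zero, upholding the `NonZeroU32` invariant (double-checked in
/// debug builds by the assertion below).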
pub(super) const unsafe fn new_unchecked(value: u32) -> Self {
debug_assert!(value < InternId::MAX);
let value = unsafe { NonZeroU32::new_unchecked(value + 1) };
InternId { value }
}
pub(super) fn as_u32(self) -> u32 {
self.value.get() - 1
}
pub(super) fn as_usize(self) -> usize {
self.as_u32() as usize
}
}
impl From<InternId> for u32 {
fn from(raw: InternId) -> u32 {
raw.as_u32()
}
}
impl From<InternId> for usize {
fn from(raw: InternId) -> usize {
raw.as_usize()
}
}
impl From<u32> for InternId {
fn from(id: u32) -> InternId {
assert!(id < InternId::MAX);
unsafe { InternId::new_unchecked(id) }
}
}
impl From<usize> for InternId {
fn from(id: usize) -> InternId {
assert!(id < (InternId::MAX as usize));
unsafe { InternId::new_unchecked(id as u32) }
}
}
impl fmt::Debug for InternId {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.as_usize().fmt(f)
}
}
impl fmt::Display for InternId {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.as_usize().fmt(f)
}
}
}
#[cfg(not(feature = "ra-salsa"))]
use intern_id_proxy::InternId;
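A quick round-trip (a hypothetical test, not part of this change) showing that the `+ 1` storage offset of the proxy never leaks to callers:

#[test]
fn intern_id_roundtrip() {
    let id = InternId::from(42u32);
    assert_eq!(u32::from(id), 42);
    assert_eq!(usize::from(id), 42);
}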

View file

@@ -21,7 +21,8 @@ syntax.workspace = true
parser.workspace = true
tt.workspace = true
stdx.workspace = true
span.workspace = true
# span = {workspace = true, default-features = false} does not work
span = { path = "../span", version = "0.0.0", default-features = false}
intern.workspace = true
[dev-dependencies]

View file

@@ -414,7 +414,7 @@ AsmClobberAbi = 'clobber_abi' '(' ('@string' (',' '@string')* ','?) ')'
// option := "pure" / "nomem" / "readonly" / "preserves_flags" / "noreturn" / "nostack" / "att_syntax" / "raw"
AsmOption = 'pure' | 'nomem' | 'readonly' | 'preserves_flags' | 'noreturn' | 'nostack' | 'att_syntax' | 'raw' | 'may_unwind'
// options := "options(" option *("," option) [","] ")"
AsmOptions = 'options' '(' AsmOption *(',' AsmOption) ','? ')'
AsmOptions = 'options' '(' (AsmOption (',' AsmOption)*) ','? ')'
AsmLabel = 'label' BlockExpr
AsmSym = 'sym' Path
AsmConst = 'const' Expr

View file

@@ -211,8 +211,6 @@ pub struct AsmOptions {
pub(crate) syntax: SyntaxNode,
}
impl AsmOptions {
#[inline]
pub fn asm_option(&self) -> Option<AsmOption> { support::child(&self.syntax) }
#[inline]
pub fn asm_options(&self) -> AstChildren<AsmOption> { support::children(&self.syntax) }
#[inline]

View file

@@ -1,5 +1,5 @@
<!---
lsp/ext.rs hash: 14b7fb1309f5bb00
lsp/ext.rs hash: 9790509d87670c22
If you need to change the above hash to make the test pass, please check if you
need to adjust this doc as well and ping this issue:

View file

@@ -270,7 +270,8 @@ Aliased as `"checkOnSave.targets"`.
+
--
Whether `--workspace` should be passed to `cargo check`.
If false, `-p <package>` will be passed instead.
If false, `-p <package>` will be passed instead if applicable. In case it is not, no
check will be performed.
--
[[rust-analyzer.completion.addSemicolonToUnit]]rust-analyzer.completion.addSemicolonToUnit (default: `true`)::
+

View file

@@ -1098,7 +1098,7 @@
"title": "check",
"properties": {
"rust-analyzer.check.workspace": {
"markdownDescription": "Whether `--workspace` should be passed to `cargo check`.\nIf false, `-p <package>` will be passed instead.",
"markdownDescription": "Whether `--workspace` should be passed to `cargo check`.\nIf false, `-p <package>` will be passed instead if applicable. In case it is not, no\ncheck will be performed.",
"default": true,
"type": "boolean"
}

View file

@@ -347,6 +347,8 @@ export class Ctx implements RustAnalyzerExtensionApi {
}
log.info("Disposing language client");
this.updateCommands("disable");
// we give the server 100ms to stop gracefully
await this.client?.stop(100).catch((_) => {});
await this.disposeClient();
}

View file

@@ -1,6 +1,6 @@
[package]
name = "lsp-server"
version = "0.7.7"
version = "0.7.8"
description = "Generic LSP server scaffold."
license = "MIT OR Apache-2.0"
repository = "https://github.com/rust-lang/rust-analyzer/tree/master/lib/lsp-server"
@@ -9,7 +9,8 @@ edition = "2021"
[dependencies]
log = "0.4.17"
serde_json = "1.0.108"
serde = { version = "1.0.192", features = ["derive"] }
serde = { version = "1.0.216" }
serde_derive = { version = "1.0.216" }
crossbeam-channel.workspace = true
[dev-dependencies]

View file

@@ -3,7 +3,8 @@ use std::{
io::{self, BufRead, Write},
};
use serde::{de::DeserializeOwned, Deserialize, Serialize};
use serde::de::DeserializeOwned;
use serde_derive::{Deserialize, Serialize};
use crate::error::ExtractError;
@@ -196,7 +197,7 @@ impl Message {
}
impl Response {
pub fn new_ok<R: Serialize>(id: RequestId, result: R) -> Response {
pub fn new_ok<R: serde::Serialize>(id: RequestId, result: R) -> Response {
Response { id, result: Some(serde_json::to_value(result).unwrap()), error: None }
}
pub fn new_err(id: RequestId, code: i32, message: String) -> Response {
@@ -206,7 +207,7 @@ impl Response {
}
impl Request {
pub fn new<P: Serialize>(id: RequestId, method: String, params: P) -> Request {
pub fn new<P: serde::Serialize>(id: RequestId, method: String, params: P) -> Request {
Request { id, method, params: serde_json::to_value(params).unwrap() }
}
pub fn extract<P: DeserializeOwned>(
@@ -231,7 +232,7 @@ impl Request {
}
impl Notification {
pub fn new(method: String, params: impl Serialize) -> Notification {
pub fn new(method: String, params: impl serde::Serialize) -> Notification {
Notification { method, params: serde_json::to_value(params).unwrap() }
}
pub fn extract<P: DeserializeOwned>(
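Callers are unaffected by the import shuffle; the bounds still accept any serde-serializable value. A minimal usage sketch (`example` is hypothetical; `serde_json` is already a dependency of this crate):

fn example() -> lsp_server::Response {
    use lsp_server::{RequestId, Response};
    // Any `serde::Serialize` value can serve as the result payload.
    Response::new_ok(RequestId::from(1), serde_json::json!({ "ok": true }))
}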

View file

@@ -1,7 +1,5 @@
use std::collections::HashMap;
use serde::Serialize;
use crate::{ErrorCode, Request, RequestId, Response, ResponseError};
/// Manages the set of pending requests, both incoming and outgoing.
@@ -56,7 +54,7 @@ impl<I> Incoming<I> {
}
impl<O> Outgoing<O> {
pub fn register<P: Serialize>(&mut self, method: String, params: P, data: O) -> Request {
pub fn register<P: serde::Serialize>(&mut self, method: String, params: P, data: O) -> Request {
let id = RequestId::from(self.next_id);
self.pending.insert(id.clone(), data);
self.next_id += 1;

View file

@@ -1 +1 @@
5a6036a1802262f8cf02192b02026688d396f1d7
0eca4dd3205a01dba4bd7b7c140ec370aff03440

View file

@@ -16,7 +16,8 @@ xflags = "0.3.0"
time = { version = "0.3", default-features = false }
zip = { version = "0.6", default-features = false, features = ["deflate", "time"] }
stdx.workspace = true
proc-macro2 = "1.0.47"
# https://github.com/dtolnay/proc-macro2/issues/475
proc-macro2 = "=1.0.86"
quote = "1.0.20"
ungrammar = "1.16.1"
either.workspace = true