Mirror of https://github.com/rust-lang/rust-analyzer, synced 2025-01-12 13:18:47 +00:00

Commit a2336ab277: Merge from rustc

72 changed files with 8775 additions and 2159 deletions
.github/workflows/ci.yaml (vendored): 2 changes
@@ -220,7 +220,7 @@ jobs:
     timeout-minutes: 10
     env:
       FORCE_COLOR: 1
-      TYPOS_VERSION: v1.18.0
+      TYPOS_VERSION: v1.28.3
     steps:
       - name: download typos
         run: curl -LsSf https://github.com/crate-ci/typos/releases/download/$TYPOS_VERSION/typos-$TYPOS_VERSION-x86_64-unknown-linux-musl.tar.gz | tar zxf - -C ${CARGO_HOME:-~/.cargo}/bin

@@ -16,7 +16,8 @@ extend-ignore-re = [
     "raison d'être",
     "inout",
     "INOUT",
-    "optin"
+    "optin",
+    "=Pn",
 ]
 
 [default.extend-words]
@@ -26,8 +27,12 @@ fo = "fo"
 ket = "ket"
 makro = "makro"
 trivias = "trivias"
+thir = "thir"
+jod = "jod"
 
 [default.extend-identifiers]
+anc = "anc"
 datas = "datas"
 impl_froms = "impl_froms"
 selfs = "selfs"
+taits = "taits"

Cargo.lock (generated): 6 changes
@@ -389,6 +389,10 @@ version = "0.1.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "9bda8e21c04aca2ae33ffc2fd8c23134f3cac46db123ba97bd9d3f3b8a4a85e1"
 
+[[package]]
+name = "edition"
+version = "0.0.0"
+
 [[package]]
 name = "either"
 version = "1.13.0"
@@ -1266,6 +1270,7 @@ name = "parser"
 version = "0.0.0"
 dependencies = [
  "drop_bomb",
+ "edition",
  "expect-test",
  "limit",
  "ra-ap-rustc_lexer",
@@ -2662,6 +2667,7 @@ version = "0.1.0"
 dependencies = [
  "anyhow",
  "directories",
+ "edition",
  "either",
  "flate2",
  "itertools",

Cargo.toml

@@ -83,6 +83,7 @@ toolchain = { path = "./crates/toolchain", version = "0.0.0" }
 tt = { path = "./crates/tt", version = "0.0.0" }
 vfs-notify = { path = "./crates/vfs-notify", version = "0.0.0" }
 vfs = { path = "./crates/vfs", version = "0.0.0" }
+edition = { path = "./crates/edition", version = "0.0.0" }
 
 ra-ap-rustc_lexer = { version = "0.85", default-features = false }
 ra-ap-rustc_parse_format = { version = "0.85", default-features = false }

crates/edition/Cargo.toml (new file): 13 additions
@@ -0,0 +1,13 @@
+[package]
+name = "edition"
+version = "0.0.0"
+rust-version.workspace = true
+edition.workspace = true
+license.workspace = true
+authors.workspace = true
+repository.workspace = true
+
+[dependencies]
+
+[lints]
+workspace = true
@@ -1,6 +1,5 @@
 //! The edition of the Rust language used in a crate.
-// Ideally this would be defined in the span crate, but the dependency chain is all over the place
-// wrt to span, parser and syntax.
+// This should live in a separate crate because we use it in both actual code and codegen.
 use std::fmt;
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
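
The new `edition` crate exists so that both the real code and the codegen step can share one definition. Only the doc comment and derive line are visible in this hunk; the following is a hedged sketch of what the crate's core type plausibly looks like (the variant set and the `Display` impl are assumptions, not shown in this diff):

    use std::fmt;

    #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
    pub enum Edition {
        Edition2015,
        Edition2018,
        Edition2021,
        Edition2024,
    }

    impl fmt::Display for Edition {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            // Render as the year string used in manifests, e.g. "2021".
            f.write_str(match self {
                Edition::Edition2015 => "2015",
                Edition::Edition2018 => "2018",
                Edition::Edition2021 => "2021",
                Edition::Edition2024 => "2024",
            })
        }
    }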
@@ -74,7 +74,7 @@ impl BuiltinFnLikeExpander {
     }
 
     pub fn is_asm(&self) -> bool {
-        matches!(self, Self::Asm | Self::GlobalAsm)
+        matches!(self, Self::Asm | Self::GlobalAsm | Self::NakedAsm)
     }
 }
 
@@ -122,6 +122,7 @@ register_builtin! {
     (stringify, Stringify) => stringify_expand,
     (asm, Asm) => asm_expand,
     (global_asm, GlobalAsm) => asm_expand,
+    (naked_asm, NakedAsm) => asm_expand,
     (cfg, Cfg) => cfg_expand,
     (core_panic, CorePanic) => panic_expand,
     (std_panic, StdPanic) => panic_expand,
@@ -237,7 +237,7 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[
         template!(List: "address, kcfi, memory, thread"), DuplicatesOk,
         experimental!(no_sanitize)
     ),
-    gated!(coverage, Normal, template!(Word, List: "on|off"), WarnFollowing, coverage_attribute, experimental!(coverage)),
+    ungated!(coverage, Normal, template!(Word, List: "on|off"), WarnFollowing),
 
     ungated!(
         doc, Normal, template!(List: "hidden|inline|...", NameValueStr: "string"), DuplicatesOk
@@ -28,6 +28,7 @@ use rustc_hash::FxHashMap;
 use stdx::TupleExt;
 use triomphe::Arc;
 
+use core::fmt;
 use std::hash::Hash;
 
 use base_db::{ra_salsa::InternValueTrivial, CrateId};
@@ -147,6 +148,10 @@ impl ExpandError {
     pub fn span(&self) -> Span {
         self.inner.1
     }
+
+    pub fn render_to_string(&self, db: &dyn ExpandDatabase) -> RenderedExpandError {
+        self.inner.0.render_to_string(db)
+    }
 }
 
 #[derive(Debug, PartialEq, Eq, Clone, Hash)]
@@ -164,18 +169,18 @@ pub enum ExpandErrorKind {
     ProcMacroPanic(Box<str>),
 }
 
-impl ExpandError {
-    pub fn render_to_string(&self, db: &dyn ExpandDatabase) -> RenderedExpandError {
-        self.inner.0.render_to_string(db)
-    }
-}
-
 pub struct RenderedExpandError {
     pub message: String,
     pub error: bool,
     pub kind: &'static str,
 }
 
+impl fmt::Display for RenderedExpandError {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(f, "{}", self.message)
+    }
+}
+
 impl RenderedExpandError {
     const GENERAL_KIND: &str = "macro-error";
 }
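
Net effect of the three hunks above: `render_to_string` moves from its own `impl ExpandError` block into the main one next to `span`, and `RenderedExpandError` gains a `fmt::Display` impl. A hedged caller-side sketch (the `report` helper is hypothetical, not part of the diff):

    // With the new Display impl, the rendered error can be formatted directly
    // instead of reaching into the `message` field by hand.
    fn report(err: &ExpandError, db: &dyn ExpandDatabase) {
        let rendered = err.render_to_string(db);
        eprintln!("{rendered}"); // previously: eprintln!("{}", rendered.message)
    }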
@@ -1053,8 +1053,21 @@ impl HirDisplay for Ty {
                     generic_args_sans_defaults(f, Some(generic_def_id), parameters);
                 assert!(params_len >= parameters.len());
                 let defaults = params_len - parameters.len();
-                let without_impl =
-                    self_param as usize + type_ + const_ + lifetime - defaults;
+                // Normally, functions cannot have default parameters, but they can
+                // for function-like things such as struct names or enum variants.
+                // The former cannot have defaults but can have parents, and the
+                // latter cannot have parents but can have defaults.
+                // So, if `parent_len` > 0, the item has a parent and thus no
+                // defaults of its own; we shouldn't subtract defaults, because
+                // those defaults come from the parents.
+                // And if `parent_len` == 0, either parents don't exist or they
+                // have no defaults. Thus, we can - and should - subtract defaults.
+                let without_impl = if parent_len > 0 {
+                    params_len - parent_len - impl_
+                } else {
+                    params_len - parent_len - impl_ - defaults
+                };
                 // parent's params (those from enclosing impl or trait, if any).
                 let (fn_params, parent_params) = parameters.split_at(without_impl + impl_);
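
A worked example with made-up counts may help; none of these numbers come from the diff:

    // Suppose params_len = 6, parent_len = 2 (generics of an enclosing impl),
    // impl_ = 1, and defaults = 1 (a default on the parent's generics).
    // Since parent_len > 0, the defaults belong to the parent and must not be
    // subtracted from this item's own parameter count:
    //   without_impl = 6 - 2 - 1     = 3   (the new, correct branch)
    // Subtracting defaults anyway would drop a real parameter:
    //   without_impl = 6 - 2 - 1 - 1 = 2   (wrong in this case)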
@@ -55,7 +55,7 @@ impl chalk_ir::interner::Interner for Interner {
     type InternedConst = Interned<InternedWrapper<ConstData>>;
     type InternedConcreteConst = ConstScalar;
     type InternedGenericArg = GenericArgData;
-    // We could do the following, but that saves "only" 20mb on self while increasing inferecene
+    // We could do the following, but that saves "only" 20mb on self while increasing inference
     // time by ~2.5%
     // type InternedGoal = Interned<InternedWrapper<GoalData>>;
     type InternedGoal = Arc<GoalData>;
@@ -4380,7 +4380,7 @@ fn test() {
 fn associated_type_in_struct_expr_path() {
     // FIXME: All annotation should be resolvable.
     // For lines marked as unstable, see rust-lang/rust#86935.
-    // FIXME: Remove the comments once stablized.
+    // FIXME: Remove the comments once stabilized.
     check_types(
         r#"
 trait Trait {
@@ -4416,7 +4416,7 @@ impl Trait for () {
 fn associated_type_in_struct_expr_path_enum() {
     // FIXME: All annotation should be resolvable.
     // For lines marked as unstable, see rust-lang/rust#86935.
-    // FIXME: Remove the comments once stablized.
+    // FIXME: Remove the comments once stabilized.
     check_types(
         r#"
 trait Trait {
@@ -2649,24 +2649,31 @@ impl Const {
         Type::from_value_def(db, self.id)
     }
 
-    /// Evaluate the constant and return the result as a string.
-    ///
-    /// This function is intended for IDE assistance, different from [`Const::render_eval`].
-    pub fn eval(self, db: &dyn HirDatabase) -> Result<String, ConstEvalError> {
-        let c = db.const_eval(self.id.into(), Substitution::empty(Interner), None)?;
-        Ok(format!("{}", c.display(db, self.krate(db).edition(db))))
+    /// Evaluate the constant.
+    pub fn eval(self, db: &dyn HirDatabase) -> Result<EvaluatedConst, ConstEvalError> {
+        db.const_eval(self.id.into(), Substitution::empty(Interner), None)
+            .map(|it| EvaluatedConst { const_: it, def: self.id.into() })
+    }
+}
+
+impl HasVisibility for Const {
+    fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+        db.const_visibility(self.id)
+    }
+}
+
+pub struct EvaluatedConst {
+    def: DefWithBodyId,
+    const_: hir_ty::Const,
+}
+
+impl EvaluatedConst {
+    pub fn render(&self, db: &dyn HirDatabase, edition: Edition) -> String {
+        format!("{}", self.const_.display(db, edition))
     }
 
-    /// Evaluate the constant and return the result as a string, with more detailed information.
-    ///
-    /// This function is intended for user-facing display.
-    pub fn render_eval(
-        self,
-        db: &dyn HirDatabase,
-        edition: Edition,
-    ) -> Result<String, ConstEvalError> {
-        let c = db.const_eval(self.id.into(), Substitution::empty(Interner), None)?;
-        let data = &c.data(Interner);
+    pub fn render_debug(&self, db: &dyn HirDatabase) -> Result<String, MirEvalError> {
+        let data = self.const_.data(Interner);
         if let TyKind::Scalar(s) = data.ty.kind(Interner) {
             if matches!(s, Scalar::Int(_) | Scalar::Uint(_)) {
                 if let hir_ty::ConstValue::Concrete(c) = &data.value {
@@ -2689,17 +2696,7 @@ impl Const {
                 }
             }
         }
-        if let Ok(s) = mir::render_const_using_debug_impl(db, self.id.into(), &c) {
-            Ok(s)
-        } else {
-            Ok(format!("{}", c.display(db, edition)))
-        }
-    }
-}
-
-impl HasVisibility for Const {
-    fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
-        db.const_visibility(self.id)
+        mir::render_const_using_debug_impl(db, self.def, &self.const_)
     }
 }
@@ -2729,51 +2726,10 @@ impl Static {
         Type::from_value_def(db, self.id)
     }
 
-    /// Evaluate the static and return the result as a string.
-    ///
-    /// This function is intended for IDE assistance, different from [`Static::render_eval`].
-    pub fn eval(self, db: &dyn HirDatabase) -> Result<String, ConstEvalError> {
-        let c = db.const_eval(self.id.into(), Substitution::empty(Interner), None)?;
-        Ok(format!("{}", c.display(db, self.krate(db).edition(db))))
-    }
-
-    /// Evaluate the static and return the result as a string, with more detailed information.
-    ///
-    /// This function is intended for user-facing display.
-    pub fn render_eval(
-        self,
-        db: &dyn HirDatabase,
-        edition: Edition,
-    ) -> Result<String, ConstEvalError> {
-        let c = db.const_eval(self.id.into(), Substitution::empty(Interner), None)?;
-        let data = &c.data(Interner);
-        if let TyKind::Scalar(s) = data.ty.kind(Interner) {
-            if matches!(s, Scalar::Int(_) | Scalar::Uint(_)) {
-                if let hir_ty::ConstValue::Concrete(c) = &data.value {
-                    if let hir_ty::ConstScalar::Bytes(b, _) = &c.interned {
-                        let value = u128::from_le_bytes(mir::pad16(b, false));
-                        let value_signed =
-                            i128::from_le_bytes(mir::pad16(b, matches!(s, Scalar::Int(_))));
-                        let mut result = if let Scalar::Int(_) = s {
-                            value_signed.to_string()
-                        } else {
-                            value.to_string()
-                        };
-                        if value >= 10 {
-                            format_to!(result, " ({value:#X})");
-                            return Ok(result);
-                        } else {
-                            return Ok(result);
-                        }
-                    }
-                }
-            }
-        }
-        if let Ok(s) = mir::render_const_using_debug_impl(db, self.id.into(), &c) {
-            Ok(s)
-        } else {
-            Ok(format!("{}", c.display(db, edition)))
-        }
+    /// Evaluate the static initializer.
+    pub fn eval(self, db: &dyn HirDatabase) -> Result<EvaluatedConst, ConstEvalError> {
+        db.const_eval(self.id.into(), Substitution::empty(Interner), None)
+            .map(|it| EvaluatedConst { const_: it, def: self.id.into() })
     }
 }
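
With these two hunks, `Const::eval` and `Static::eval` return a structured `EvaluatedConst` instead of an eagerly formatted string, and rendering becomes an explicit second step. A hedged caller-side sketch (variable names are illustrative):

    // konst: hir::Const, db: &dyn HirDatabase
    let evaluated = konst.eval(db)?;                // EvaluatedConst
    let edition = konst.krate(db).edition(db);
    let plain = evaluated.render(db, edition);      // replaces the old eval() string
    let debug = evaluated.render_debug(db);         // replaces the old render_eval()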
@@ -28,7 +28,7 @@ use hir_expand::{
     hygiene::SyntaxContextExt as _,
     inert_attr_macro::find_builtin_attr_idx,
     name::AsName,
-    FileRange, InMacroFile, MacroCallId, MacroFileId, MacroFileIdExt,
+    ExpandResult, FileRange, InMacroFile, MacroCallId, MacroFileId, MacroFileIdExt,
 };
 use intern::Symbol;
 use itertools::Itertools;
@@ -381,7 +381,13 @@ impl<'db> SemanticsImpl<'db> {
         node
     }
 
-    pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
+    pub fn expand(&self, file_id: MacroFileId) -> ExpandResult<SyntaxNode> {
+        let res = self.db.parse_macro_expansion(file_id).map(|it| it.0.syntax_node());
+        self.cache(res.value.clone(), file_id.into());
+        res
+    }
+
+    pub fn expand_macro_call(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
         let sa = self.analyze_no_infer(macro_call.syntax())?;
 
         let macro_call = InFile::new(sa.file_id, macro_call);
@@ -412,7 +418,10 @@ impl<'db> SemanticsImpl<'db> {
 
     /// Expands the macro if it isn't one of the built-in ones that expand to custom syntax or dummy
     /// expansions.
-    pub fn expand_allowed_builtins(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
+    pub fn expand_allowed_builtins(
+        &self,
+        macro_call: &ast::MacroCall,
+    ) -> Option<ExpandResult<SyntaxNode>> {
         let sa = self.analyze_no_infer(macro_call.syntax())?;
 
         let macro_call = InFile::new(sa.file_id, macro_call);
@@ -434,6 +443,7 @@ impl<'db> SemanticsImpl<'db> {
                 | BuiltinFnLikeExpander::ModulePath
                 | BuiltinFnLikeExpander::Asm
                 | BuiltinFnLikeExpander::GlobalAsm
+                | BuiltinFnLikeExpander::NakedAsm
                 | BuiltinFnLikeExpander::LogSyntax
                 | BuiltinFnLikeExpander::TraceMacros
                 | BuiltinFnLikeExpander::FormatArgs
@@ -447,15 +457,15 @@ impl<'db> SemanticsImpl<'db> {
             return None;
         }
 
-        let node = self.parse_or_expand(file_id.into());
+        let node = self.expand(file_id);
         Some(node)
     }
 
     /// If `item` has an attribute macro attached to it, expands it.
-    pub fn expand_attr_macro(&self, item: &ast::Item) -> Option<SyntaxNode> {
+    pub fn expand_attr_macro(&self, item: &ast::Item) -> Option<ExpandResult<SyntaxNode>> {
         let src = self.wrap_node_infile(item.clone());
         let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(src.as_ref()))?;
-        Some(self.parse_or_expand(macro_call_id.as_file()))
+        Some(self.expand(macro_call_id.as_macro_file()))
     }
 
     pub fn expand_derive_as_pseudo_attr_macro(&self, attr: &ast::Attr) -> Option<SyntaxNode> {
@@ -479,15 +489,16 @@ impl<'db> SemanticsImpl<'db> {
         })
     }
 
-    pub fn expand_derive_macro(&self, attr: &ast::Attr) -> Option<Vec<SyntaxNode>> {
+    pub fn expand_derive_macro(&self, attr: &ast::Attr) -> Option<Vec<ExpandResult<SyntaxNode>>> {
         let res: Vec<_> = self
             .derive_macro_calls(attr)?
             .into_iter()
             .flat_map(|call| {
-                let file_id = call?.as_file();
-                let node = self.db.parse_or_expand(file_id);
-                self.cache(node.clone(), file_id);
-                Some(node)
+                let file_id = call?.as_macro_file();
+                let ExpandResult { value, err } = self.db.parse_macro_expansion(file_id);
+                let root_node = value.0.syntax_node();
+                self.cache(root_node.clone(), file_id.into());
+                Some(ExpandResult { value: root_node, err })
             })
             .collect();
         Some(res)
@@ -555,7 +566,7 @@ impl<'db> SemanticsImpl<'db> {
 
     /// Expand the macro call with a different token tree, mapping the `token_to_map` down into the
     /// expansion. `token_to_map` should be a token from the `speculative args` node.
-    pub fn speculative_expand(
+    pub fn speculative_expand_macro_call(
         &self,
         actual_macro_call: &ast::MacroCall,
         speculative_args: &ast::TokenTree,
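
Across these hunks `Semantics::expand` changes meaning: it now takes a `MacroFileId` and returns an `ExpandResult<SyntaxNode>` that carries the expansion error alongside the tree, while the `ast::MacroCall`-based entry points gain a `_macro_call` suffix. A hedged migration sketch for downstream callers:

    // Old: let node: Option<SyntaxNode> = sema.expand(&macro_call);
    // New:
    let node: Option<SyntaxNode> = sema.expand_macro_call(&macro_call);

    // Entry points that now return ExpandResult also surface partial failures:
    if let Some(ExpandResult { value, err }) = sema.expand_attr_macro(&item) {
        if let Some(err) = err {
            // the expansion still produced a tree, but reported a problem
        }
        consume(value); // `consume` is a hypothetical caller
    }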
@@ -7,6 +7,7 @@ use hir::{
     TypeInfo, TypeParam,
 };
 use ide_db::{
+    assists::GroupLabel,
     defs::{Definition, NameRefClass},
     famous_defs::FamousDefs,
     helpers::mod_path_to_ast,
@@ -104,7 +105,8 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
 
     let scope = ImportScope::find_insert_use_container(&node, &ctx.sema)?;
 
-    acc.add(
+    acc.add_group(
+        &GroupLabel("Extract into...".to_owned()),
         AssistId("extract_function", crate::AssistKind::RefactorExtract),
         "Extract into function",
         target_range,
@@ -5009,7 +5011,7 @@ fn $0fun_name(bar: &str) {
 }
 
 #[test]
-fn unresolveable_types_default_to_placeholder() {
+fn unresolvable_types_default_to_placeholder() {
     check_assist(
         extract_function,
         r#"
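
Switching from `acc.add` to `acc.add_group` is what places "Extract into function" under the shared "Extract into..." group, alongside the variable/constant/static extractions touched elsewhere in this commit. A hedged sketch of the registration pattern, with the `add_group` shape inferred from this one call site:

    acc.add_group(
        &GroupLabel("Extract into...".to_owned()),            // shared UI group
        AssistId("extract_function", crate::AssistKind::RefactorExtract),
        "Extract into function",
        target_range,
        |builder| { /* edit construction elided */ },
    );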
(File diff suppressed because it is too large.)
@@ -2055,7 +2055,7 @@ fn bar(closure: impl Fn(i64) -> i64) {
 }
 
 #[test]
-fn unresolveable_types_default_to_placeholder() {
+fn unresolvable_types_default_to_placeholder() {
     check_assist(
         generate_function,
         r"
@@ -1,3 +1,4 @@
+use hir::HasCrate;
 use syntax::{ast, AstNode};
 
 use crate::{AssistContext, AssistId, AssistKind, Assists};
@@ -51,7 +52,10 @@ pub(crate) fn inline_const_as_literal(acc: &mut Assists, ctx: &AssistContext<'_>
         | ast::Expr::MatchExpr(_)
         | ast::Expr::MacroExpr(_)
         | ast::Expr::BinExpr(_)
-        | ast::Expr::CallExpr(_) => konst.eval(ctx.sema.db).ok()?,
+        | ast::Expr::CallExpr(_) => konst
+            .eval(ctx.sema.db)
+            .ok()?
+            .render(ctx.sema.db, konst.krate(ctx.sema.db).edition(ctx.sema.db)),
         _ => return None,
     };
@@ -36,7 +36,7 @@ pub(crate) fn move_guard_to_arm_body(acc: &mut Assists, ctx: &AssistContext<'_>)
     let match_arm = ctx.find_node_at_offset::<MatchArm>()?;
     let guard = match_arm.guard()?;
     if ctx.offset() > guard.syntax().text_range().end() {
-        cov_mark::hit!(move_guard_unapplicable_in_arm_body);
+        cov_mark::hit!(move_guard_inapplicable_in_arm_body);
         return None;
     }
     let space_before_guard = guard.syntax().prev_sibling_or_token();
@@ -219,7 +219,7 @@ mod tests {
 
     #[test]
     fn move_guard_to_arm_body_range() {
-        cov_mark::check!(move_guard_unapplicable_in_arm_body);
+        cov_mark::check!(move_guard_inapplicable_in_arm_body);
         check_assist_not_applicable(
             move_guard_to_arm_body,
             r#"
@@ -1,19 +1,17 @@
-use hir::{HirDisplay, ModuleDef, PathResolution, Semantics};
+use hir::HirDisplay;
 use ide_db::{
     assists::{AssistId, AssistKind},
     defs::Definition,
-    syntax_helpers::node_ext::preorder_expr,
-    RootDatabase,
 };
 use stdx::to_upper_snake_case;
 use syntax::{
     ast::{self, make, HasName},
-    ted, AstNode, WalkEvent,
+    ted, AstNode,
 };
 
 use crate::{
     assist_context::{AssistContext, Assists},
-    utils,
+    utils::{self},
 };
 
 // Assist: promote_local_to_const
@@ -63,7 +61,7 @@ pub(crate) fn promote_local_to_const(acc: &mut Assists, ctx: &AssistContext<'_>)
     };
 
     let initializer = let_stmt.initializer()?;
-    if !is_body_const(&ctx.sema, &initializer) {
+    if !utils::is_body_const(&ctx.sema, &initializer) {
         cov_mark::hit!(promote_local_non_const);
         return None;
     }
@@ -103,40 +101,6 @@ pub(crate) fn promote_local_to_const(acc: &mut Assists, ctx: &AssistContext<'_>)
     )
 }
 
-fn is_body_const(sema: &Semantics<'_, RootDatabase>, expr: &ast::Expr) -> bool {
-    let mut is_const = true;
-    preorder_expr(expr, &mut |ev| {
-        let expr = match ev {
-            WalkEvent::Enter(_) if !is_const => return true,
-            WalkEvent::Enter(expr) => expr,
-            WalkEvent::Leave(_) => return false,
-        };
-        match expr {
-            ast::Expr::CallExpr(call) => {
-                if let Some(ast::Expr::PathExpr(path_expr)) = call.expr() {
-                    if let Some(PathResolution::Def(ModuleDef::Function(func))) =
-                        path_expr.path().and_then(|path| sema.resolve_path(&path))
-                    {
-                        is_const &= func.is_const(sema.db);
-                    }
-                }
-            }
-            ast::Expr::MethodCallExpr(call) => {
-                is_const &=
-                    sema.resolve_method_call(&call).map(|it| it.is_const(sema.db)).unwrap_or(true)
-            }
-            ast::Expr::ForExpr(_)
-            | ast::Expr::ReturnExpr(_)
-            | ast::Expr::TryExpr(_)
-            | ast::Expr::YieldExpr(_)
-            | ast::Expr::AwaitExpr(_) => is_const = false,
-            _ => (),
-        }
-        !is_const
-    });
-    is_const
-}
-
 #[cfg(test)]
 mod tests {
     use crate::tests::{check_assist, check_assist_not_applicable};
@@ -139,7 +139,7 @@ fn add_assist(
     let trait_path = make::ty_path(replace_trait_path.clone());
 
     match (ctx.config.snippet_cap, impl_def_with_items) {
-        (None, _) => {
+        (None, None) => {
             let impl_def = generate_trait_impl(adt, trait_path);
 
             ted::insert_all(
@@ -147,6 +147,12 @@ fn add_assist(
                 vec![make::tokens::blank_line().into(), impl_def.syntax().clone().into()],
             );
         }
+        (None, Some((impl_def, _))) => {
+            ted::insert_all(
+                insert_after,
+                vec![make::tokens::blank_line().into(), impl_def.syntax().clone().into()],
+            );
+        }
         (Some(cap), None) => {
             let impl_def = generate_trait_impl(adt, trait_path);
 
@@ -272,7 +278,7 @@ fn update_attribute(
 
 #[cfg(test)]
 mod tests {
-    use crate::tests::{check_assist, check_assist_not_applicable};
+    use crate::tests::{check_assist, check_assist_no_snippet_cap, check_assist_not_applicable};
 
     use super::*;
 
@@ -297,6 +303,30 @@ impl core::fmt::Debug for Foo {
         f.debug_struct("Foo").field("bar", &self.bar).finish()
     }
 }
+"#,
+        )
+    }
+
+    #[test]
+    fn add_custom_impl_without_snippet() {
+        check_assist_no_snippet_cap(
+            replace_derive_with_manual_impl,
+            r#"
+//- minicore: fmt, derive
+#[derive(Debu$0g)]
+struct Foo {
+    bar: String,
+}
+"#,
+            r#"
+struct Foo {
+    bar: String,
+}
+
+impl core::fmt::Debug for Foo {
+    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+        f.debug_struct("Foo").field("bar", &self.bar).finish()
+    }
+}
 "#,
         )
     }
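
The old `(None, _)` arm ignored a pre-built impl when the client lacks snippet support; the split into `(None, None)` and `(None, Some((impl_def, _)))` inserts that impl as plain text instead, which the new `check_assist_no_snippet_cap` test pins down. A hedged sketch of the resulting match shape (the last arm is not shown in the diff and is assumed):

    match (ctx.config.snippet_cap, impl_def_with_items) {
        (None, None) => { /* build a fresh empty impl, insert as plain text */ }
        (None, Some((impl_def, _))) => { /* insert the pre-built impl as plain text */ }
        (Some(cap), None) => { /* fresh impl with snippet placeholders */ }
        (Some(cap), Some(_)) => { /* assumed: snippet into the existing impl */ }
    }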
@@ -360,7 +360,7 @@ mod tests {
     use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target};
 
     #[test]
-    fn test_if_let_with_match_unapplicable_for_simple_ifs() {
+    fn test_if_let_with_match_inapplicable_for_simple_ifs() {
         check_assist_not_applicable(
             replace_if_let_with_match,
             r#"
@@ -127,7 +127,7 @@ pub(crate) fn desugar_async_into_impl_future(
 
     let rparen = function.param_list()?.r_paren_token()?;
     let return_type = match function.ret_type() {
-        // unable to get a `ty` makes the action unapplicable
+        // unable to get a `ty` makes the action inapplicable
         Some(ret_type) => Some(ret_type.ty()?),
         // No type means `-> ()`
         None => None,
@@ -362,8 +362,7 @@ pub fn test_some_range(a: int) -> bool {
 
         expect![[r#"
             Convert integer base
-            Extract into variable
-            Extract into function
+            Extract into...
             Replace if let with match
         "#]]
         .assert_eq(&expected);
@@ -391,8 +390,7 @@ pub fn test_some_range(a: int) -> bool {
 
         expect![[r#"
             Convert integer base
-            Extract into variable
-            Extract into function
+            Extract into...
             Replace if let with match
         "#]]
         .assert_eq(&expected);
@@ -405,8 +403,7 @@ pub fn test_some_range(a: int) -> bool {
         let expected = labels(&assists);
 
         expect![[r#"
-            Extract into variable
-            Extract into function
+            Extract into...
         "#]]
         .assert_eq(&expected);
     }
@@ -440,7 +437,7 @@ pub fn test_some_range(a: int) -> bool {
 
     {
         let assists = assists(&db, &cfg, AssistResolveStrategy::None, frange.into());
-        assert_eq!(2, assists.len());
+        assert_eq!(4, assists.len());
        let mut assists = assists.into_iter();
 
         let extract_into_variable_assist = assists.next().unwrap();
@@ -451,7 +448,11 @@ pub fn test_some_range(a: int) -> bool {
                     RefactorExtract,
                 ),
                 label: "Extract into variable",
-                group: None,
+                group: Some(
+                    GroupLabel(
+                        "Extract into...",
+                    ),
+                ),
                 target: 59..60,
                 source_change: None,
                 command: None,
@@ -459,6 +460,46 @@ pub fn test_some_range(a: int) -> bool {
         "#]]
         .assert_debug_eq(&extract_into_variable_assist);
 
+        let extract_into_constant_assist = assists.next().unwrap();
+        expect![[r#"
+            Assist {
+                id: AssistId(
+                    "extract_constant",
+                    RefactorExtract,
+                ),
+                label: "Extract into constant",
+                group: Some(
+                    GroupLabel(
+                        "Extract into...",
+                    ),
+                ),
+                target: 59..60,
+                source_change: None,
+                command: None,
+            }
+        "#]]
+        .assert_debug_eq(&extract_into_constant_assist);
+
+        let extract_into_static_assist = assists.next().unwrap();
+        expect![[r#"
+            Assist {
+                id: AssistId(
+                    "extract_static",
+                    RefactorExtract,
+                ),
+                label: "Extract into static",
+                group: Some(
+                    GroupLabel(
+                        "Extract into...",
+                    ),
+                ),
+                target: 59..60,
+                source_change: None,
+                command: None,
+            }
+        "#]]
+        .assert_debug_eq(&extract_into_static_assist);
+
         let extract_into_function_assist = assists.next().unwrap();
         expect![[r#"
             Assist {
@@ -467,7 +508,11 @@ pub fn test_some_range(a: int) -> bool {
                     RefactorExtract,
                 ),
                 label: "Extract into function",
-                group: None,
+                group: Some(
+                    GroupLabel(
+                        "Extract into...",
+                    ),
+                ),
                 target: 59..60,
                 source_change: None,
                 command: None,
@@ -486,7 +531,7 @@ pub fn test_some_range(a: int) -> bool {
             }),
             frange.into(),
         );
-        assert_eq!(2, assists.len());
+        assert_eq!(4, assists.len());
         let mut assists = assists.into_iter();
 
         let extract_into_variable_assist = assists.next().unwrap();
@@ -497,7 +542,11 @@ pub fn test_some_range(a: int) -> bool {
                     RefactorExtract,
                 ),
                 label: "Extract into variable",
-                group: None,
+                group: Some(
+                    GroupLabel(
+                        "Extract into...",
+                    ),
+                ),
                 target: 59..60,
                 source_change: None,
                 command: None,
@@ -505,6 +554,46 @@ pub fn test_some_range(a: int) -> bool {
         "#]]
         .assert_debug_eq(&extract_into_variable_assist);
 
+        let extract_into_constant_assist = assists.next().unwrap();
+        expect![[r#"
+            Assist {
+                id: AssistId(
+                    "extract_constant",
+                    RefactorExtract,
+                ),
+                label: "Extract into constant",
+                group: Some(
+                    GroupLabel(
+                        "Extract into...",
+                    ),
+                ),
+                target: 59..60,
+                source_change: None,
+                command: None,
+            }
+        "#]]
+        .assert_debug_eq(&extract_into_constant_assist);
+
+        let extract_into_static_assist = assists.next().unwrap();
+        expect![[r#"
+            Assist {
+                id: AssistId(
+                    "extract_static",
+                    RefactorExtract,
+                ),
+                label: "Extract into static",
+                group: Some(
+                    GroupLabel(
+                        "Extract into...",
+                    ),
+                ),
+                target: 59..60,
+                source_change: None,
+                command: None,
+            }
+        "#]]
+        .assert_debug_eq(&extract_into_static_assist);
+
         let extract_into_function_assist = assists.next().unwrap();
         expect![[r#"
             Assist {
@@ -513,7 +602,11 @@ pub fn test_some_range(a: int) -> bool {
                     RefactorExtract,
                 ),
                 label: "Extract into function",
-                group: None,
+                group: Some(
+                    GroupLabel(
+                        "Extract into...",
+                    ),
+                ),
                 target: 59..60,
                 source_change: None,
                 command: None,
@@ -532,7 +625,7 @@ pub fn test_some_range(a: int) -> bool {
             }),
             frange.into(),
         );
-        assert_eq!(2, assists.len());
+        assert_eq!(4, assists.len());
         let mut assists = assists.into_iter();
 
         let extract_into_variable_assist = assists.next().unwrap();
@@ -543,7 +636,11 @@ pub fn test_some_range(a: int) -> bool {
                     RefactorExtract,
                 ),
                 label: "Extract into variable",
-                group: None,
+                group: Some(
+                    GroupLabel(
+                        "Extract into...",
+                    ),
+                ),
                 target: 59..60,
                 source_change: Some(
                     SourceChange {
@@ -594,6 +691,46 @@ pub fn test_some_range(a: int) -> bool {
         "#]]
         .assert_debug_eq(&extract_into_variable_assist);
 
+        let extract_into_constant_assist = assists.next().unwrap();
+        expect![[r#"
+            Assist {
+                id: AssistId(
+                    "extract_constant",
+                    RefactorExtract,
+                ),
+                label: "Extract into constant",
+                group: Some(
+                    GroupLabel(
+                        "Extract into...",
+                    ),
+                ),
+                target: 59..60,
+                source_change: None,
+                command: None,
+            }
+        "#]]
+        .assert_debug_eq(&extract_into_constant_assist);
+
+        let extract_into_static_assist = assists.next().unwrap();
+        expect![[r#"
+            Assist {
+                id: AssistId(
+                    "extract_static",
+                    RefactorExtract,
+                ),
+                label: "Extract into static",
+                group: Some(
+                    GroupLabel(
+                        "Extract into...",
+                    ),
+                ),
+                target: 59..60,
+                source_change: None,
+                command: None,
+            }
+        "#]]
+        .assert_debug_eq(&extract_into_static_assist);
+
         let extract_into_function_assist = assists.next().unwrap();
         expect![[r#"
             Assist {
|
||||||
|
|
||||||
{
|
{
|
||||||
let assists = assists(&db, &cfg, AssistResolveStrategy::All, frange.into());
|
let assists = assists(&db, &cfg, AssistResolveStrategy::All, frange.into());
|
||||||
assert_eq!(2, assists.len());
|
assert_eq!(4, assists.len());
|
||||||
let mut assists = assists.into_iter();
|
let mut assists = assists.into_iter();
|
||||||
|
|
||||||
let extract_into_variable_assist = assists.next().unwrap();
|
let extract_into_variable_assist = assists.next().unwrap();
|
||||||
|
@ -624,7 +765,11 @@ pub fn test_some_range(a: int) -> bool {
|
||||||
RefactorExtract,
|
RefactorExtract,
|
||||||
),
|
),
|
||||||
label: "Extract into variable",
|
label: "Extract into variable",
|
||||||
group: None,
|
group: Some(
|
||||||
|
GroupLabel(
|
||||||
|
"Extract into...",
|
||||||
|
),
|
||||||
|
),
|
||||||
target: 59..60,
|
target: 59..60,
|
||||||
source_change: Some(
|
source_change: Some(
|
||||||
SourceChange {
|
SourceChange {
|
||||||
|
@@ -675,6 +820,140 @@ pub fn test_some_range(a: int) -> bool {
         "#]]
         .assert_debug_eq(&extract_into_variable_assist);
 
+        let extract_into_constant_assist = assists.next().unwrap();
+        expect![[r#"
+            Assist {
+                id: AssistId(
+                    "extract_constant",
+                    RefactorExtract,
+                ),
+                label: "Extract into constant",
+                group: Some(
+                    GroupLabel(
+                        "Extract into...",
+                    ),
+                ),
+                target: 59..60,
+                source_change: Some(
+                    SourceChange {
+                        source_file_edits: {
+                            FileId(
+                                0,
+                            ): (
+                                TextEdit {
+                                    indels: [
+                                        Indel {
+                                            insert: "const",
+                                            delete: 45..47,
+                                        },
+                                        Indel {
+                                            insert: "VAR_NAME:",
+                                            delete: 48..60,
+                                        },
+                                        Indel {
+                                            insert: "i32",
+                                            delete: 61..81,
+                                        },
+                                        Indel {
+                                            insert: "=",
+                                            delete: 82..86,
+                                        },
+                                        Indel {
+                                            insert: "5;\n if let 2..6 = VAR_NAME {\n true\n } else {\n false\n }",
+                                            delete: 87..108,
+                                        },
+                                    ],
+                                },
+                                Some(
+                                    SnippetEdit(
+                                        [
+                                            (
+                                                0,
+                                                51..51,
+                                            ),
+                                        ],
+                                    ),
+                                ),
+                            ),
+                        },
+                        file_system_edits: [],
+                        is_snippet: true,
+                    },
+                ),
+                command: Some(
+                    Rename,
+                ),
+            }
+        "#]]
+        .assert_debug_eq(&extract_into_constant_assist);
+
+        let extract_into_static_assist = assists.next().unwrap();
+        expect![[r#"
+            Assist {
+                id: AssistId(
+                    "extract_static",
+                    RefactorExtract,
+                ),
+                label: "Extract into static",
+                group: Some(
+                    GroupLabel(
+                        "Extract into...",
+                    ),
+                ),
+                target: 59..60,
+                source_change: Some(
+                    SourceChange {
+                        source_file_edits: {
+                            FileId(
+                                0,
+                            ): (
+                                TextEdit {
+                                    indels: [
+                                        Indel {
+                                            insert: "static",
+                                            delete: 45..47,
+                                        },
+                                        Indel {
+                                            insert: "VAR_NAME:",
+                                            delete: 48..60,
+                                        },
+                                        Indel {
+                                            insert: "i32",
+                                            delete: 61..81,
+                                        },
+                                        Indel {
+                                            insert: "=",
+                                            delete: 82..86,
+                                        },
+                                        Indel {
+                                            insert: "5;\n if let 2..6 = VAR_NAME {\n true\n } else {\n false\n }",
+                                            delete: 87..108,
+                                        },
+                                    ],
+                                },
+                                Some(
+                                    SnippetEdit(
+                                        [
+                                            (
+                                                0,
+                                                52..52,
+                                            ),
+                                        ],
+                                    ),
+                                ),
+                            ),
+                        },
+                        file_system_edits: [],
+                        is_snippet: true,
+                    },
+                ),
+                command: Some(
+                    Rename,
+                ),
+            }
+        "#]]
+        .assert_debug_eq(&extract_into_static_assist);
+
         let extract_into_function_assist = assists.next().unwrap();
         expect![[r#"
             Assist {
@@ -683,7 +962,11 @@ pub fn test_some_range(a: int) -> bool {
                     RefactorExtract,
                 ),
                 label: "Extract into function",
-                group: None,
+                group: Some(
+                    GroupLabel(
+                        "Extract into...",
+                    ),
+                ),
                 target: 59..60,
                 source_change: Some(
                     SourceChange {
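
The recurring `assert_eq!(2, ...)` to `assert_eq!(4, ...)` bumps follow from this commit's new assists: the same selection now offers four extractions, all sharing one group label. A hedged recap using only labels that appear verbatim in the expectations above:

    let expected_labels = [
        "Extract into variable",
        "Extract into constant",
        "Extract into static",
        "Extract into function",
    ];
    assert_eq!(4, expected_labels.len()); // hence assert_eq!(4, assists.len())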
@@ -932,6 +932,24 @@ enum TheEnum {
     )
 }
 
+#[test]
+fn doctest_extract_constant() {
+    check_doc_test(
+        "extract_constant",
+        r#####"
+fn main() {
+    $0(1 + 2)$0 * 4;
+}
+"#####,
+        r#####"
+fn main() {
+    const $0VAR_NAME: i32 = 1 + 2;
+    VAR_NAME * 4;
+}
+"#####,
+    )
+}
+
 #[test]
 fn doctest_extract_expressions_from_format_string() {
     check_doc_test(
@@ -1006,6 +1024,24 @@ fn bar(name: i32) -> i32 {
     )
 }
 
+#[test]
+fn doctest_extract_static() {
+    check_doc_test(
+        "extract_static",
+        r#####"
+fn main() {
+    $0(1 + 2)$0 * 4;
+}
+"#####,
+        r#####"
+fn main() {
+    static $0VAR_NAME: i32 = 1 + 2;
+    VAR_NAME * 4;
+}
+"#####,
+    )
+}
+
 #[test]
 fn doctest_extract_struct_from_enum_variant() {
     check_doc_test(
@@ -3,11 +3,13 @@
 pub(crate) use gen_trait_fn_body::gen_trait_fn_body;
 use hir::{
     db::{ExpandDatabase, HirDatabase},
-    HasAttrs as HirHasAttrs, HirDisplay, InFile, Semantics,
+    HasAttrs as HirHasAttrs, HirDisplay, InFile, ModuleDef, PathResolution, Semantics,
 };
 use ide_db::{
-    famous_defs::FamousDefs, path_transform::PathTransform,
-    syntax_helpers::prettify_macro_expansion, RootDatabase,
+    famous_defs::FamousDefs,
+    path_transform::PathTransform,
+    syntax_helpers::{node_ext::preorder_expr, prettify_macro_expansion},
+    RootDatabase,
 };
 use stdx::format_to;
 use syntax::{
@@ -19,7 +21,7 @@ use syntax::{
     },
     ted, AstNode, AstToken, Direction, Edition, NodeOrToken, SourceFile,
     SyntaxKind::*,
-    SyntaxNode, SyntaxToken, TextRange, TextSize, T,
+    SyntaxNode, SyntaxToken, TextRange, TextSize, WalkEvent, T,
 };
 
 use crate::assist_context::{AssistContext, SourceChangeBuilder};
@@ -966,3 +968,37 @@ pub(crate) fn tt_from_syntax(node: SyntaxNode) -> Vec<NodeOrToken<ast::TokenTree
 
     tt_stack.pop().expect("parent token tree was closed before it was completed").1
 }
+
+pub fn is_body_const(sema: &Semantics<'_, RootDatabase>, expr: &ast::Expr) -> bool {
+    let mut is_const = true;
+    preorder_expr(expr, &mut |ev| {
+        let expr = match ev {
+            WalkEvent::Enter(_) if !is_const => return true,
+            WalkEvent::Enter(expr) => expr,
+            WalkEvent::Leave(_) => return false,
+        };
+        match expr {
+            ast::Expr::CallExpr(call) => {
+                if let Some(ast::Expr::PathExpr(path_expr)) = call.expr() {
+                    if let Some(PathResolution::Def(ModuleDef::Function(func))) =
+                        path_expr.path().and_then(|path| sema.resolve_path(&path))
+                    {
+                        is_const &= func.is_const(sema.db);
+                    }
+                }
+            }
+            ast::Expr::MethodCallExpr(call) => {
+                is_const &=
+                    sema.resolve_method_call(&call).map(|it| it.is_const(sema.db)).unwrap_or(true)
+            }
+            ast::Expr::ForExpr(_)
+            | ast::Expr::ReturnExpr(_)
+            | ast::Expr::TryExpr(_)
+            | ast::Expr::YieldExpr(_)
+            | ast::Expr::AwaitExpr(_) => is_const = false,
+            _ => (),
+        }
+        !is_const
+    });
+    is_const
+}
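
`is_body_const` moves here from `promote_local_to_const` (where it is deleted above) and becomes `pub`, presumably so the new constant/static extraction assists can share the check. A hedged usage sketch mirroring the call site shown earlier in this diff:

    let initializer = let_stmt.initializer()?;
    if !utils::is_body_const(&ctx.sema, &initializer) {
        return None; // the body calls non-const code, so it cannot become a const item
    }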
@@ -11,7 +11,7 @@ pub(super) fn complete_lint(
     existing_lints: &[ast::Path],
     lints_completions: &[Lint],
 ) {
-    for &Lint { label, description } in lints_completions {
+    for &Lint { label, description, .. } in lints_completions {
         let (qual, name) = {
             // FIXME: change `Lint`'s label to not store a path in it but split the prefix off instead?
             let mut parts = label.split("::");
@@ -1,7 +1,7 @@
 //! Module responsible for analyzing the code surrounding the cursor for completion.
 use std::iter;
 
-use hir::{Semantics, Type, TypeInfo, Variant};
+use hir::{ExpandResult, Semantics, Type, TypeInfo, Variant};
 use ide_db::{active_parameter::ActiveParameter, RootDatabase};
 use itertools::Either;
 use syntax::{
@@ -104,7 +104,10 @@ fn expand(
         // maybe parent items have attributes, so continue walking the ancestors
         (None, None) => continue 'ancestors,
         // successful expansions
-        (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) => {
+        (
+            Some(ExpandResult { value: actual_expansion, err: _ }),
+            Some((fake_expansion, fake_mapped_token)),
+        ) => {
             let new_offset = fake_mapped_token.text_range().start();
             if new_offset + relative_offset > actual_expansion.text_range().end() {
                 // offset outside of bounds from the original expansion,
@@ -239,8 +242,8 @@ fn expand(
     };
 
     match (
-        sema.expand(&actual_macro_call),
-        sema.speculative_expand(
+        sema.expand_macro_call(&actual_macro_call),
+        sema.speculative_expand_macro_call(
             &actual_macro_call,
             &speculative_args,
             fake_ident_token.clone(),
(File diff suppressed because it is too large.)
@@ -327,3 +327,11 @@ impl<'a> Ranker<'a> {
             | ((no_tt_parent as usize) << 3)
     }
 }
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub enum Severity {
+    Error,
+    Warning,
+    WeakWarning,
+    Allow,
+}
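
`Severity` is added here in `ide-db` and deleted from `ide-diagnostics` further down in this diff, which then re-imports it. A hedged sketch of the downstream change (the old import path is assumed):

    // Before (assumed): use ide_diagnostics::Severity;
    use ide_db::Severity;

    fn is_hard_error(sev: Severity) -> bool {
        matches!(sev, Severity::Error)
    }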
@@ -184,7 +184,7 @@ impl<'a> PathTransform<'a> {
         if let Some(expr) = v.expr() {
             // FIXME: expressions in curly brackets can cause ambiguity after insertion
             // (e.g. `N * 2` -> `{1 + 1} * 2`; it's unclear whether `{1 + 1}`
-            // is a standalone statement or a part of another expresson)
+            // is a standalone statement or a part of another expression)
             // and sometimes require slight modifications; see
             // https://doc.rust-lang.org/reference/statements.html#expression-statements
             // (default values in curly brackets can cause the same problem)
@ -59,7 +59,7 @@ pub(crate) fn invalid_cast(ctx: &DiagnosticsContext<'_>, d: &hir::InvalidCast) -
|
||||||
DiagnosticCode::RustcHardError("E0606"),
|
DiagnosticCode::RustcHardError("E0606"),
|
||||||
format_ty!(
|
format_ty!(
|
||||||
ctx,
|
ctx,
|
||||||
"casting `{}` as `{}` is invalid: needs defererence or removal of unneeded borrow",
|
"casting `{}` as `{}` is invalid: needs dereference or removal of unneeded borrow",
|
||||||
d.expr_ty,
|
d.expr_ty,
|
||||||
d.cast_ty
|
d.cast_ty
|
||||||
),
|
),
|
||||||
|
|
|
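Note: the corrected E0606 wording describes casts through a borrow. A standalone illustration of code that trips this rustc error and its fix (plain language semantics, independent of this diff):

```rust
fn main() {
    let x: u8 = 10;
    let r: &u8 = &x;
    // This would be rejected with E0606: casting `&u8` as `u32` is invalid,
    // needs dereference or removal of unneeded borrow:
    // let _bad = r as u32;
    let _ok = *r as u32; // dereference first, then cast
    println!("{_ok}");
}
```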
@@ -586,14 +586,47 @@ fn main() {
 }

 #[test]
-fn unsafe_op_in_unsafe_fn_allowed_by_default() {
+fn unsafe_op_in_unsafe_fn_allowed_by_default_in_edition_2021() {
 check_diagnostics(
 r#"
+//- /lib.rs crate:foo edition:2021
 unsafe fn foo(p: *mut i32) {
 *p = 123;
 }
 "#,
-)
+);
+check_diagnostics(
+r#"
+//- /lib.rs crate:foo edition:2021
+#![deny(warnings)]
+unsafe fn foo(p: *mut i32) {
+*p = 123;
+}
+"#,
+);
+}
+
+#[test]
+fn unsafe_op_in_unsafe_fn_warn_by_default_in_edition_2024() {
+check_diagnostics(
+r#"
+//- /lib.rs crate:foo edition:2024
+unsafe fn foo(p: *mut i32) {
+*p = 123;
+//^^💡 warn: dereference of raw pointer is unsafe and requires an unsafe function or block
+}
+"#,
+);
+check_diagnostics(
+r#"
+//- /lib.rs crate:foo edition:2024
+#![deny(warnings)]
+unsafe fn foo(p: *mut i32) {
+*p = 123;
+//^^💡 error: dereference of raw pointer is unsafe and requires an unsafe function or block
+}
+"#,
+);
 }

 #[test]
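Note: these tests encode the edition gate for `unsafe_op_in_unsafe_fn`: allow-by-default on edition 2021, warn-by-default on edition 2024. Outside the test harness, the lint looks like this; the explicit block is the suggested fix on every edition:

```rust
// In edition 2024 the body of an `unsafe fn` is no longer implicitly an
// unsafe block, so the raw-pointer write below needs its own `unsafe {}`.
unsafe fn write(p: *mut i32) {
    unsafe {
        // Explicit block: silences unsafe_op_in_unsafe_fn on every edition.
        *p = 123;
    }
}

fn main() {
    let mut x = 0;
    unsafe { write(&mut x as *mut i32) };
    assert_eq!(x, 123);
}
```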
@@ -84,12 +84,12 @@ use hir::{db::ExpandDatabase, diagnostics::AnyDiagnostic, Crate, HirFileId, InFi
 use ide_db::{
 assists::{Assist, AssistId, AssistKind, AssistResolveStrategy},
 base_db::SourceDatabase,
-generated::lints::{LintGroup, CLIPPY_LINT_GROUPS, DEFAULT_LINT_GROUPS},
+generated::lints::{Lint, LintGroup, CLIPPY_LINT_GROUPS, DEFAULT_LINTS, DEFAULT_LINT_GROUPS},
 imports::insert_use::InsertUseConfig,
 label::Label,
 source_change::SourceChange,
 syntax_helpers::node_ext::parse_tt_as_comma_sep_paths,
-EditionedFileId, FileId, FileRange, FxHashMap, FxHashSet, RootDatabase, SnippetCap,
+EditionedFileId, FileId, FileRange, FxHashMap, FxHashSet, RootDatabase, Severity, SnippetCap,
 };
 use itertools::Itertools;
 use syntax::{
@@ -210,14 +210,6 @@ impl Diagnostic {
 }
 }

-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-pub enum Severity {
-Error,
-Warning,
-WeakWarning,
-Allow,
-}
-
 #[derive(Clone, Debug, PartialEq, Eq)]
 pub enum ExprFillDefaultMode {
 Todo,
@@ -568,26 +560,35 @@ fn handle_diag_from_macros(

 // `__RA_EVERY_LINT` is a fake lint group to allow every lint in proc macros

-static RUSTC_LINT_GROUPS_DICT: LazyLock<FxHashMap<&str, Vec<&str>>> =
-LazyLock::new(|| build_group_dict(DEFAULT_LINT_GROUPS, &["warnings", "__RA_EVERY_LINT"], ""));
-
-static CLIPPY_LINT_GROUPS_DICT: LazyLock<FxHashMap<&str, Vec<&str>>> =
-LazyLock::new(|| build_group_dict(CLIPPY_LINT_GROUPS, &["__RA_EVERY_LINT"], "clippy::"));
+struct BuiltLint {
+lint: &'static Lint,
+groups: Vec<&'static str>,
+}
+
+static RUSTC_LINTS: LazyLock<FxHashMap<&str, BuiltLint>> =
+LazyLock::new(|| build_lints_map(DEFAULT_LINTS, DEFAULT_LINT_GROUPS, ""));
+
+static CLIPPY_LINTS: LazyLock<FxHashMap<&str, BuiltLint>> = LazyLock::new(|| {
+build_lints_map(ide_db::generated::lints::CLIPPY_LINTS, CLIPPY_LINT_GROUPS, "clippy::")
+});

 // FIXME: Autogenerate this instead of enumerating by hand.
 static LINTS_TO_REPORT_IN_EXTERNAL_MACROS: LazyLock<FxHashSet<&str>> =
 LazyLock::new(|| FxHashSet::from_iter([]));

-fn build_group_dict(
+fn build_lints_map(
+lints: &'static [Lint],
 lint_group: &'static [LintGroup],
-all_groups: &'static [&'static str],
 prefix: &'static str,
-) -> FxHashMap<&'static str, Vec<&'static str>> {
-let mut map_with_prefixes: FxHashMap<&str, Vec<&str>> = FxHashMap::default();
+) -> FxHashMap<&'static str, BuiltLint> {
+let mut map_with_prefixes: FxHashMap<_, _> = lints
+.iter()
+.map(|lint| (lint.label, BuiltLint { lint, groups: vec![lint.label, "__RA_EVERY_LINT"] }))
+.collect();
 for g in lint_group {
 let mut add_children = |label: &'static str| {
 for child in g.children {
-map_with_prefixes.entry(child).or_default().push(label);
+map_with_prefixes.get_mut(child).unwrap().groups.push(label);
 }
 };
 add_children(g.lint.label);
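Note: the rewrite replaces two label-to-groups dictionaries with one label-to-`BuiltLint` map that carries the lint metadata and its group memberships, seeded so each lint belongs to itself plus the `__RA_EVERY_LINT` catch-all. A condensed, self-contained sketch of that construction (field set simplified relative to the generated `Lint` tables):

```rust
use std::collections::HashMap;

// Simplified stand-ins for the generated lint tables.
struct Lint { label: &'static str }
struct LintGroup { lint: Lint, children: &'static [&'static str] }
struct BuiltLint { label: &'static str, groups: Vec<&'static str> }

fn build_lints_map(
    lints: &'static [Lint],
    lint_groups: &'static [LintGroup],
) -> HashMap<&'static str, BuiltLint> {
    // Seed: every lint is in its own group plus the catch-all group.
    let mut map: HashMap<_, _> = lints
        .iter()
        .map(|l| (l.label, BuiltLint { label: l.label, groups: vec![l.label, "__RA_EVERY_LINT"] }))
        .collect();
    // Record group membership for each group's children.
    for g in lint_groups {
        for child in g.children {
            map.get_mut(child).expect("group child must be a known lint").groups.push(g.lint.label);
        }
    }
    map
}

fn main() {
    static LINTS: &[Lint] = &[Lint { label: "unused_variables" }];
    static GROUPS: &[LintGroup] =
        &[LintGroup { lint: Lint { label: "unused" }, children: &["unused_variables"] }];
    let map = build_lints_map(LINTS, GROUPS);
    assert_eq!(map["unused_variables"].groups, ["unused_variables", "__RA_EVERY_LINT", "unused"]);
}
```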
@@ -597,18 +598,9 @@ fn build_group_dict(
 add_children("bad_style");
 }
 }
-for (lint, groups) in map_with_prefixes.iter_mut() {
-groups.push(lint);
-groups.extend_from_slice(all_groups);
-}
 map_with_prefixes.into_iter().map(|(k, v)| (k.strip_prefix(prefix).unwrap(), v)).collect()
 }

-/// Thd default severity for lints that are not warn by default.
-// FIXME: Autogenerate this instead of write manually.
-static LINTS_DEFAULT_SEVERITY: LazyLock<FxHashMap<&str, Severity>> =
-LazyLock::new(|| FxHashMap::from_iter([("unsafe_op_in_unsafe_fn", Severity::Allow)]));
-
 fn handle_lints(
 sema: &Semantics<'_, RootDatabase>,
 cache: &mut FxHashMap<HirFileId, FxHashMap<SmolStr, SeverityAttr>>,
@@ -618,10 +610,12 @@ fn handle_lints(
 ) {
 for (node, diag) in diagnostics {
 let lint = match diag.code {
-DiagnosticCode::RustcLint(lint) | DiagnosticCode::Clippy(lint) => lint,
+DiagnosticCode::RustcLint(lint) => RUSTC_LINTS[lint].lint,
+DiagnosticCode::Clippy(lint) => CLIPPY_LINTS[lint].lint,
 _ => panic!("non-lint passed to `handle_lints()`"),
 };
-if let Some(&default_severity) = LINTS_DEFAULT_SEVERITY.get(lint) {
+let default_severity = default_lint_severity(lint, edition);
+if !(default_severity == Severity::Allow && diag.severity == Severity::WeakWarning) {
 diag.severity = default_severity;
 }

@@ -639,6 +633,16 @@ fn handle_lints(
 }
 }

+fn default_lint_severity(lint: &Lint, edition: Edition) -> Severity {
+if lint.deny_since.is_some_and(|e| edition >= e) {
+Severity::Error
+} else if lint.warn_since.is_some_and(|e| edition >= e) {
+Severity::Warning
+} else {
+lint.default_severity
+}
+}
+
 fn find_outline_mod_lint_severity(
 sema: &Semantics<'_, RootDatabase>,
 node: &InFile<SyntaxNode>,
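Note: `default_lint_severity` is where editions change lint defaults: generated lint metadata may carry `warn_since`/`deny_since` editions, and the effective severity escalates once the crate edition reaches them. The same decision in a runnable, simplified form (local `Edition`/`Lint`/`Severity` stand-ins):

```rust
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
enum Edition { E2015, E2018, E2021, E2024 }

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum Severity { Error, Warning, Allow }

struct Lint {
    default_severity: Severity,
    warn_since: Option<Edition>,
    deny_since: Option<Edition>,
}

fn default_lint_severity(lint: &Lint, edition: Edition) -> Severity {
    if lint.deny_since.is_some_and(|e| edition >= e) {
        Severity::Error
    } else if lint.warn_since.is_some_and(|e| edition >= e) {
        Severity::Warning
    } else {
        lint.default_severity
    }
}

fn main() {
    // An unsafe_op_in_unsafe_fn-like lint: allow by default, warn from 2024.
    let lint = Lint {
        default_severity: Severity::Allow,
        warn_since: Some(Edition::E2024),
        deny_since: None,
    };
    assert_eq!(default_lint_severity(&lint, Edition::E2021), Severity::Allow);
    assert_eq!(default_lint_severity(&lint, Edition::E2024), Severity::Warning);
}
```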
@@ -654,14 +658,14 @@ fn find_outline_mod_lint_severity(
 let mod_def = sema.to_module_def(&mod_node)?;
 let module_source_file = sema.module_definition_node(mod_def);
 let mut result = None;
-let lint_groups = lint_groups(&diag.code);
+let lint_groups = lint_groups(&diag.code, edition);
 lint_attrs(
 sema,
 ast::AnyHasAttrs::cast(module_source_file.value).expect("SourceFile always has attrs"),
 edition,
 )
 .for_each(|(lint, severity)| {
-if lint_groups.contains(&&*lint) {
+if lint_groups.contains(&lint) {
 result = Some(severity);
 }
 });
@@ -737,9 +741,9 @@ fn fill_lint_attrs(
 }
 });

-let all_matching_groups = lint_groups(&diag.code)
+let all_matching_groups = lint_groups(&diag.code, edition)
 .iter()
-.filter_map(|lint_group| cached.get(&**lint_group));
+.filter_map(|lint_group| cached.get(lint_group));
 let cached_severity =
 all_matching_groups.min_by_key(|it| it.depth).map(|it| it.severity);

@@ -751,7 +755,7 @@ fn fill_lint_attrs(
 // Insert this node's descendants' attributes into any outline descendant, but not including this node.
 // This must come before inserting this node's own attributes to preserve order.
 collected_lint_attrs.drain().for_each(|(lint, severity)| {
-if diag_severity.is_none() && lint_groups(&diag.code).contains(&&*lint) {
+if diag_severity.is_none() && lint_groups(&diag.code, edition).contains(&lint) {
 diag_severity = Some(severity.severity);
 }

@@ -774,7 +778,7 @@ fn fill_lint_attrs(
 if let Some(ancestor) = ast::AnyHasAttrs::cast(ancestor) {
 // Insert this node's attributes into any outline descendant, including this node.
 lint_attrs(sema, ancestor, edition).for_each(|(lint, severity)| {
-if diag_severity.is_none() && lint_groups(&diag.code).contains(&&*lint) {
+if diag_severity.is_none() && lint_groups(&diag.code, edition).contains(&lint) {
 diag_severity = Some(severity);
 }

@@ -804,7 +808,7 @@ fn fill_lint_attrs(
 return diag_severity;
 } else if let Some(ancestor) = ast::AnyHasAttrs::cast(ancestor) {
 lint_attrs(sema, ancestor, edition).for_each(|(lint, severity)| {
-if diag_severity.is_none() && lint_groups(&diag.code).contains(&&*lint) {
+if diag_severity.is_none() && lint_groups(&diag.code, edition).contains(&lint) {
 diag_severity = Some(severity);
 }

@@ -905,16 +909,37 @@ fn cfg_attr_lint_attrs(
 }
 }

-fn lint_groups(lint: &DiagnosticCode) -> &'static [&'static str] {
-match lint {
+#[derive(Debug)]
+struct LintGroups {
+groups: &'static [&'static str],
+inside_warnings: bool,
+}
+
+impl LintGroups {
+fn contains(&self, group: &str) -> bool {
+self.groups.contains(&group) || (self.inside_warnings && group == "warnings")
+}
+
+fn iter(&self) -> impl Iterator<Item = &'static str> {
+self.groups.iter().copied().chain(self.inside_warnings.then_some("warnings"))
+}
+}
+
+fn lint_groups(lint: &DiagnosticCode, edition: Edition) -> LintGroups {
+let (groups, inside_warnings) = match lint {
 DiagnosticCode::RustcLint(name) => {
-RUSTC_LINT_GROUPS_DICT.get(name).map(|it| &**it).unwrap_or_default()
+let lint = &RUSTC_LINTS[name];
+let inside_warnings = default_lint_severity(lint.lint, edition) == Severity::Warning;
+(&lint.groups, inside_warnings)
 }
 DiagnosticCode::Clippy(name) => {
-CLIPPY_LINT_GROUPS_DICT.get(name).map(|it| &**it).unwrap_or_default()
+let lint = &CLIPPY_LINTS[name];
+let inside_warnings = default_lint_severity(lint.lint, edition) == Severity::Warning;
+(&lint.groups, inside_warnings)
 }
-_ => &[],
+_ => panic!("non-lint passed to `handle_lints()`"),
-}
+};
+LintGroups { groups, inside_warnings }
 }

 fn fix(id: &'static str, label: &str, source_change: SourceChange, target: TextRange) -> Assist {
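Note: `LintGroups` makes membership in the implicit `warnings` group depend on the edition-resolved default: a lint is only covered by `warnings` when its effective default is warn. A minimal sketch of the container:

```rust
struct LintGroups {
    groups: &'static [&'static str],
    // True only when the lint's effective default severity is warn,
    // so attributes like `#[allow(warnings)]` apply to it.
    inside_warnings: bool,
}

impl LintGroups {
    fn contains(&self, group: &str) -> bool {
        self.groups.contains(&group) || (self.inside_warnings && group == "warnings")
    }
}

fn main() {
    let warn_by_default =
        LintGroups { groups: &["unsafe_op_in_unsafe_fn", "__RA_EVERY_LINT"], inside_warnings: true };
    assert!(warn_by_default.contains("warnings"));

    let allow_by_default =
        LintGroups { groups: &["unsafe_op_in_unsafe_fn", "__RA_EVERY_LINT"], inside_warnings: false };
    assert!(!allow_by_default.contains("warnings"));
}
```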
@@ -286,7 +286,7 @@ impl<'db> MatchFinder<'db> {
 });
 }
 } else if let Some(macro_call) = ast::MacroCall::cast(node.clone()) {
-if let Some(expanded) = self.sema.expand(&macro_call) {
+if let Some(expanded) = self.sema.expand_macro_call(&macro_call) {
 if let Some(tt) = macro_call.token_tree() {
 self.output_debug_for_nodes_at_range(
 &expanded,

@@ -189,7 +189,7 @@ impl MatchFinder<'_> {
 // If we've got a macro call, we already tried matching it pre-expansion, which is the only
 // way to match the whole macro, now try expanding it and matching the expansion.
 if let Some(macro_call) = ast::MacroCall::cast(code.clone()) {
-if let Some(expanded) = self.sema.expand(&macro_call) {
+if let Some(expanded) = self.sema.expand_macro_call(&macro_call) {
 if let Some(tt) = macro_call.token_tree() {
 // When matching within a macro expansion, we only want to allow matches of
 // nodes that originated entirely from within the token tree of the macro call.
@@ -1,10 +1,11 @@
 use hir::db::ExpandDatabase;
-use hir::{InFile, MacroFileIdExt, Semantics};
+use hir::{ExpandResult, InFile, MacroFileIdExt, Semantics};
 use ide_db::base_db::CrateId;
 use ide_db::{
 helpers::pick_best_token, syntax_helpers::prettify_macro_expansion, FileId, RootDatabase,
 };
 use span::{Edition, SpanMap, SyntaxContextId, TextRange, TextSize};
+use stdx::format_to;
 use syntax::{ast, ted, AstNode, NodeOrToken, SyntaxKind, SyntaxNode, T};

 use crate::FilePosition;
@@ -63,10 +64,10 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<
 .take_while(|it| it != &token)
 .filter(|it| it.kind() == T![,])
 .count();
-let expansion = expansions.get(idx)?.clone();
+let ExpandResult { err, value: expansion } = expansions.get(idx)?.clone();
 let expansion_file_id = sema.hir_file_for(&expansion).macro_file()?;
 let expansion_span_map = db.expansion_span_map(expansion_file_id);
-let expansion = format(
+let mut expansion = format(
 db,
 SyntaxKind::MACRO_ITEMS,
 position.file_id,
@@ -74,6 +75,12 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<
 &expansion_span_map,
 krate,
 );
+if let Some(err) = err {
+expansion.insert_str(
+0,
+&format!("Expansion had errors: {}\n\n", err.render_to_string(sema.db)),
+);
+}
 Some(ExpandedMacro { name, expansion })
 });

@@ -83,6 +90,7 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<

 let mut anc = tok.parent_ancestors();
 let mut span_map = SpanMap::empty();
+let mut error = String::new();
 let (name, expanded, kind) = loop {
 let node = anc.next()?;

@@ -97,7 +105,7 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<
 .unwrap_or(Edition::CURRENT),
 )
 .to_string(),
-expand_macro_recur(&sema, &item, &mut span_map, TextSize::new(0))?,
+expand_macro_recur(&sema, &item, &mut error, &mut span_map, TextSize::new(0))?,
 SyntaxKind::MACRO_ITEMS,
 );
 }
@@ -112,6 +120,7 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<
 expand_macro_recur(
 &sema,
 &ast::Item::MacroCall(mac),
+&mut error,
 &mut span_map,
 TextSize::new(0),
 )?,
@@ -123,24 +132,31 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<
 // FIXME:
 // macro expansion may lose all white space information
 // But we hope someday we can use ra_fmt for that
-let expansion = format(db, kind, position.file_id, expanded, &span_map, krate);
+let mut expansion = format(db, kind, position.file_id, expanded, &span_map, krate);
+
+if !error.is_empty() {
+expansion.insert_str(0, &format!("Expansion had errors:{error}\n\n"));
+}
 Some(ExpandedMacro { name, expansion })
 }

 fn expand_macro_recur(
 sema: &Semantics<'_, RootDatabase>,
 macro_call: &ast::Item,
+error: &mut String,
 result_span_map: &mut SpanMap<SyntaxContextId>,
 offset_in_original_node: TextSize,
 ) -> Option<SyntaxNode> {
-let expanded = match macro_call {
-item @ ast::Item::MacroCall(macro_call) => sema
-.expand_attr_macro(item)
-.or_else(|| sema.expand_allowed_builtins(macro_call))?
-.clone_for_update(),
-item => sema.expand_attr_macro(item)?.clone_for_update(),
+let ExpandResult { value: expanded, err } = match macro_call {
+item @ ast::Item::MacroCall(macro_call) => {
+sema.expand_attr_macro(item).or_else(|| sema.expand_allowed_builtins(macro_call))?
+}
+item => sema.expand_attr_macro(item)?,
 };
+let expanded = expanded.clone_for_update();
+if let Some(err) = err {
+format_to!(error, "\n{}", err.render_to_string(sema.db));
+}
 let file_id =
 sema.hir_file_for(&expanded).macro_file().expect("expansion must produce a macro file");
 let expansion_span_map = sema.db.expansion_span_map(file_id);
@@ -149,12 +165,13 @@ fn expand_macro_recur(
 expanded.text_range().len(),
 &expansion_span_map,
 );
-Some(expand(sema, expanded, result_span_map, u32::from(offset_in_original_node) as i32))
+Some(expand(sema, expanded, error, result_span_map, u32::from(offset_in_original_node) as i32))
 }

 fn expand(
 sema: &Semantics<'_, RootDatabase>,
 expanded: SyntaxNode,
+error: &mut String,
 result_span_map: &mut SpanMap<SyntaxContextId>,
 mut offset_in_original_node: i32,
 ) -> SyntaxNode {
@@ -165,6 +182,7 @@ fn expand(
 if let Some(new_node) = expand_macro_recur(
 sema,
 &child,
+error,
 result_span_map,
 TextSize::new(
 (offset_in_original_node + (u32::from(child.syntax().text_range().start()) as i32))
@@ -495,6 +513,9 @@ fn main() {
 "#,
 expect![[r#"
 foo!
+Expansion had errors:
+expected ident: `BAD`
+
 "#]],
 );
 }
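Note: Expand Macro now threads a `&mut String` error accumulator through the recursion and prepends collected messages to the rendered expansion, matching the updated test expectation above. The accumulator pattern in isolation (using `std::fmt::Write` in place of rust-analyzer's `format_to!`):

```rust
use std::fmt::Write;

// Pretend expansion step: produces output and may append an error message.
fn expand_one(input: &str, error: &mut String) -> String {
    if input.contains("BAD") {
        // Accumulate instead of aborting, so partial expansions still render.
        let _ = write!(error, "\nexpected ident: `BAD`");
    }
    input.replace("BAD", "/* error */")
}

fn main() {
    let mut error = String::new();
    let mut expansion = expand_one("fn BAD() {}", &mut error);
    if !error.is_empty() {
        expansion.insert_str(0, &format!("Expansion had errors:{error}\n\n"));
    }
    assert!(expansion.starts_with("Expansion had errors:"));
    println!("{expansion}");
}
```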
@@ -608,7 +608,7 @@ impl<'a> WalkExpandedExprCtx<'a> {

 if let ast::Expr::MacroExpr(expr) = expr {
 if let Some(expanded) =
-expr.macro_call().and_then(|call| self.sema.expand(&call))
+expr.macro_call().and_then(|call| self.sema.expand_macro_call(&call))
 {
 match_ast! {
 match expanded {
@@ -1,5 +1,5 @@
 //! Logic for rendering the different hover messages
-use std::{mem, ops::Not};
+use std::{env, mem, ops::Not};

 use either::Either;
 use hir::{
@@ -28,6 +28,7 @@ use syntax::{algo, ast, match_ast, AstNode, AstToken, Direction, SyntaxToken, T}
 use crate::{
 doc_links::{remove_links, rewrite_links},
 hover::{notable_traits, walk_and_push_ty},
+interpret::render_const_eval_error,
 HoverAction, HoverConfig, HoverResult, Markup, MemoryLayoutHoverConfig,
 MemoryLayoutHoverRenderKind,
 };
@@ -464,41 +465,77 @@ pub(super) fn definition(
 Ok(it) => {
 Some(if it >= 10 { format!("{it} ({it:#X})") } else { format!("{it}") })
 }
-Err(_) => it.value(db).map(|it| format!("{it:?}")),
+Err(err) => {
+let res = it.value(db).map(|it| format!("{it:?}"));
+if env::var_os("RA_DEV").is_some() {
+let res = res.as_deref().unwrap_or("");
+Some(format!("{res} ({})", render_const_eval_error(db, err, edition)))
+} else {
+res
+}
+}
 }
 } else {
 None
 }
 }
 Definition::Const(it) => {
-let body = it.render_eval(db, edition);
-match body {
-Ok(it) => Some(it),
-Err(_) => {
+let body = it.eval(db);
+Some(match body {
+Ok(it) => match it.render_debug(db) {
+Ok(it) => it,
+Err(err) => {
+let it = it.render(db, edition);
+if env::var_os("RA_DEV").is_some() {
+format!("{it}\n{}", render_const_eval_error(db, err.into(), edition))
+} else {
+it
+}
+}
+},
+Err(err) => {
 let source = it.source(db)?;
 let mut body = source.value.body()?.syntax().clone();
 if let Some(macro_file) = source.file_id.macro_file() {
 let span_map = db.expansion_span_map(macro_file);
 body = prettify_macro_expansion(db, body, &span_map, it.krate(db).into());
 }
-Some(body.to_string())
+if env::var_os("RA_DEV").is_some() {
+format!("{body}\n{}", render_const_eval_error(db, err, edition))
+} else {
+body.to_string()
+}
 }
-}
+})
 }
 Definition::Static(it) => {
-let body = it.render_eval(db, edition);
-match body {
-Ok(it) => Some(it),
-Err(_) => {
+let body = it.eval(db);
+Some(match body {
+Ok(it) => match it.render_debug(db) {
+Ok(it) => it,
+Err(err) => {
+let it = it.render(db, edition);
+if env::var_os("RA_DEV").is_some() {
+format!("{it}\n{}", render_const_eval_error(db, err.into(), edition))
+} else {
+it
+}
+}
+},
+Err(err) => {
 let source = it.source(db)?;
 let mut body = source.value.body()?.syntax().clone();
 if let Some(macro_file) = source.file_id.macro_file() {
 let span_map = db.expansion_span_map(macro_file);
 body = prettify_macro_expansion(db, body, &span_map, it.krate(db).into());
 }
-Some(body.to_string())
+if env::var_os("RA_DEV").is_some() {
+format!("{body}\n{}", render_const_eval_error(db, err, edition))
+} else {
+body.to_string()
+}
 }
-}
+})
 }
 _ => None,
 };

@@ -6413,7 +6413,7 @@ fn hover_feature() {
 by the codegen backend, but not the MIR inliner.

 ```rust
-#![feature(rustc_attrs)]
+#![feature(intrinsics)]
 #![allow(internal_features)]

 #[rustc_intrinsic]
@@ -6423,7 +6423,7 @@ fn hover_feature() {
 Since these are just regular functions, it is perfectly ok to create the intrinsic twice:

 ```rust
-#![feature(rustc_attrs)]
+#![feature(intrinsics)]
 #![allow(internal_features)]

 #[rustc_intrinsic]
@@ -9465,4 +9465,39 @@ fn main() {
 size = 0, align = 1
 "#]],
 );
+
+check(
+r#"
+//- minicore: eq
+pub struct RandomState;
+pub struct HashMap<K, V, S = RandomState>(K, V, S);
+
+impl<K, V> HashMap<K, V, RandomState> {
+pub fn new() -> HashMap<K, V, RandomState> {
+loop {}
+}
+}
+
+impl<K, V, S> PartialEq for HashMap<K, V, S> {
+fn eq(&self, other: &HashMap<K, V, S>) -> bool {
+false
+}
+}
+
+fn main() {
+let s$0 = HashMap::<_, u64>::ne;
+}
+"#,
+expect![[r#"
+*s*
+
+```rust
+let s: fn ne<HashMap<{unknown}, u64>>(&HashMap<{unknown}, u64>, &HashMap<{unknown}, u64>) -> bool
+```
+
+---
+
+size = 0, align = 1
+"#]],
+);
 }
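Note: hover only appends const-eval error details when the `RA_DEV` environment variable is set, keeping ordinary hovers clean while giving rust-analyzer developers extra context. The gating pattern, reduced to essentials (the strings are hypothetical, not the real hover pipeline):

```rust
use std::env;

fn render_value(value: &str, eval_error: Option<&str>) -> String {
    match eval_error {
        // Developer build: append the const-eval error after the value.
        Some(err) if env::var_os("RA_DEV").is_some() => format!("{value}\n{err}"),
        // Regular users just see the (possibly partial) rendered value.
        _ => value.to_owned(),
    }
}

fn main() {
    // Run with `RA_DEV=1` to see the error text appended.
    println!("{}", render_value("const X: i32 = _", Some("cannot evaluate: division by zero")));
}
```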
@@ -1,5 +1,6 @@
-use hir::{DefWithBody, Semantics};
+use hir::{ConstEvalError, DefWithBody, Semantics};
 use ide_db::{base_db::SourceRootDatabase, FilePosition, LineIndexDatabase, RootDatabase};
+use span::Edition;
 use std::time::{Duration, Instant};
 use stdx::format_to;
 use syntax::{algo::ancestors_at_offset, ast, AstNode, TextRange};
@@ -47,18 +48,36 @@ fn find_and_interpret(db: &RootDatabase, position: FilePosition) -> Option<(Dura
 None => format!("file://{path} range {text_range:?}"),
 }
 };
+let edition = def.module(db).krate().edition(db);
 let start_time = Instant::now();
 let res = match def {
 DefWithBody::Function(it) => it.eval(db, span_formatter),
-DefWithBody::Static(it) => it.eval(db),
-DefWithBody::Const(it) => it.eval(db),
+DefWithBody::Static(it) => it.eval(db).map(|it| it.render(db, edition)),
+DefWithBody::Const(it) => it.eval(db).map(|it| it.render(db, edition)),
 _ => unreachable!(),
 };
-let res = res.unwrap_or_else(|e| {
-let mut r = String::new();
-_ = e.pretty_print(&mut r, db, span_formatter, def.module(db).krate().edition(db));
-r
-});
+let res = res.unwrap_or_else(|e| render_const_eval_error(db, e, edition));
 let duration = Instant::now() - start_time;
 Some((duration, res))
 }

+pub(crate) fn render_const_eval_error(
+db: &RootDatabase,
+e: ConstEvalError,
+edition: Edition,
+) -> String {
+let span_formatter = |file_id, text_range: TextRange| {
+let path = &db
+.source_root(db.file_source_root(file_id))
+.path_for_file(&file_id)
+.map(|x| x.to_string());
+let path = path.as_deref().unwrap_or("<unknown file>");
+match db.line_index(file_id).try_line_col(text_range.start()) {
+Some(line_col) => format!("file://{path}:{}:{}", line_col.line + 1, line_col.col),
+None => format!("file://{path} range {text_range:?}"),
+}
+};
+let mut r = String::new();
+_ = e.pretty_print(&mut r, db, span_formatter, edition);
+r
+}
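Note: the extracted `render_const_eval_error` formats spans as `file://path:line:col`, bumping the zero-based line index by one because editor links are one-based. A small sketch of that formatting decision (with a simplified `LineCol` stand-in):

```rust
// Zero-based position as stored by a line index; hypothetical simplified type.
struct LineCol { line: u32, col: u32 }

fn format_span(path: &str, pos: Option<LineCol>) -> String {
    match pos {
        // +1 on the line: editors and `file://` links count lines from one.
        Some(lc) => format!("file://{path}:{}:{}", lc.line + 1, lc.col),
        None => format!("file://{path} range <unknown>"),
    }
}

fn main() {
    assert_eq!(
        format_span("/tmp/lib.rs", Some(LineCol { line: 0, col: 4 })),
        "file:///tmp/lib.rs:1:4"
    );
}
```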
@@ -132,11 +132,9 @@ pub use ide_db::{
 search::{ReferenceCategory, SearchScope},
 source_change::{FileSystemEdit, SnippetEdit, SourceChange},
 symbol_index::Query,
-FileId, FilePosition, FileRange, RootDatabase, SymbolKind,
+FileId, FilePosition, FileRange, RootDatabase, Severity, SymbolKind,
 };
-pub use ide_diagnostics::{
-Diagnostic, DiagnosticCode, DiagnosticsConfig, ExprFillDefaultMode, Severity,
-};
+pub use ide_diagnostics::{Diagnostic, DiagnosticCode, DiagnosticsConfig, ExprFillDefaultMode};
 pub use ide_ssr::SsrError;
 pub use span::Edition;
 pub use syntax::{TextRange, TextSize};
@@ -301,7 +299,7 @@ impl Analysis {

 /// Gets the syntax tree of the file.
 pub fn parse(&self, file_id: FileId) -> Cancellable<SourceFile> {
-// FIXME editiojn
+// FIXME edition
 self.with_db(|db| db.parse(EditionedFileId::current_edition(file_id)).tree())
 }

@@ -542,7 +540,7 @@ impl Analysis {
 /// Returns URL(s) for the documentation of the symbol under the cursor.
 /// # Arguments
 /// * `position` - Position in the file.
-/// * `target_dir` - Directory where the build output is storeda.
+/// * `target_dir` - Directory where the build output is stored.
 pub fn external_docs(
 &self,
 position: FilePosition,

@@ -307,6 +307,7 @@ define_symbols! {
 module_path,
 mul_assign,
 mul,
+naked_asm,
 ne,
 neg,
 Neg,
@@ -15,9 +15,7 @@ use ide_db::{
 };
 use itertools::Itertools;
 use proc_macro_api::{MacroDylib, ProcMacroServer};
-use project_model::{
-CargoConfig, PackageRoot, ProjectManifest, ProjectWorkspace, ProjectWorkspaceKind,
-};
+use project_model::{CargoConfig, PackageRoot, ProjectManifest, ProjectWorkspace};
 use span::Span;
 use vfs::{
 file_set::FileSetConfig,
@@ -244,6 +242,9 @@ impl ProjectFolders {
 }
 }

+if dirs.include.is_empty() {
+continue;
+}
 vfs::loader::Entry::Directories(dirs)
 };

@@ -258,43 +259,6 @@ impl ProjectFolders {
 fsc.add_file_set(file_set_roots)
 }

-// register the workspace manifest as well, note that this currently causes duplicates for
-// non-virtual cargo workspaces! We ought to fix that
-for ws in workspaces.iter() {
-let mut file_set_roots: Vec<VfsPath> = vec![];
-let mut entries = vec![];
-
-if let Some(manifest) = ws.manifest().map(|it| it.to_path_buf()) {
-file_set_roots.push(VfsPath::from(manifest.to_owned()));
-entries.push(manifest.to_owned());
-}
-
-for buildfile in ws.buildfiles() {
-file_set_roots.push(VfsPath::from(buildfile.to_owned()));
-entries.push(buildfile.to_owned());
-}
-
-// In case of detached files we do **not** look for a rust-analyzer.toml.
-if !matches!(ws.kind, ProjectWorkspaceKind::DetachedFile { .. }) {
-let ws_root = ws.workspace_root();
-let ratoml_path = {
-let mut p = ws_root.to_path_buf();
-p.push("rust-analyzer.toml");
-p
-};
-file_set_roots.push(VfsPath::from(ratoml_path.to_owned()));
-entries.push(ratoml_path.to_owned());
-}
-
-if !file_set_roots.is_empty() {
-let entry = vfs::loader::Entry::Files(entries);
-res.watch.push(res.load.len());
-res.load.push(entry);
-local_filesets.push(fsc.len() as u64);
-fsc.add_file_set(file_set_roots)
-}
-}
-
 if let Some(user_config_path) = user_config_dir_path {
 let ratoml_path = {
 let mut p = user_config_path.to_path_buf();
@@ -303,7 +267,7 @@ impl ProjectFolders {
 };

 let file_set_roots = vec![VfsPath::from(ratoml_path.to_owned())];
-let entry = vfs::loader::Entry::Files(vec![ratoml_path.to_owned()]);
+let entry = vfs::loader::Entry::Files(vec![ratoml_path]);

 res.watch.push(res.load.len());
 res.load.push(entry);
@@ -18,6 +18,8 @@ ra-ap-rustc_lexer.workspace = true
 limit.workspace = true
 tracing = { workspace = true, optional = true }

+edition.workspace = true
+
 [dev-dependencies]
 expect-test = "1.4.0"

@@ -25,7 +25,6 @@ extern crate ra_ap_rustc_lexer as rustc_lexer;
 #[cfg(feature = "in-rust-tree")]
 extern crate rustc_lexer;

-mod edition;
 mod event;
 mod grammar;
 mod input;
@@ -41,8 +40,9 @@ mod tests;

 pub(crate) use token_set::TokenSet;

+pub use edition::Edition;
+
 pub use crate::{
-edition::Edition,
 input::Input,
 lexed_str::LexedStr,
 output::{Output, Step},
@@ -42,7 +42,7 @@ use rustc_hash::FxHashMap;
 use serde::{Deserialize, Serialize};
 use span::{EditionedFileId, ErasedFileAstId, Span, SpanAnchor, SyntaxContextId, TextRange};

-use crate::msg::{ENCODE_CLOSE_SPAN_VERSION, EXTENDED_LEAF_DATA};
+use crate::msg::EXTENDED_LEAF_DATA;

 pub type SpanDataIndexMap =
 indexmap::IndexSet<Span, std::hash::BuildHasherDefault<rustc_hash::FxHasher>>;
@@ -145,11 +145,7 @@ impl FlatTree {
 w.write(subtree);

 FlatTree {
-subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
-write_vec(w.subtree, SubtreeRepr::write_with_close_span)
-} else {
-write_vec(w.subtree, SubtreeRepr::write)
-},
+subtree: write_vec(w.subtree, SubtreeRepr::write),
 literal: if version >= EXTENDED_LEAF_DATA {
 write_vec(w.literal, LiteralRepr::write_with_kind)
 } else {
@@ -183,11 +179,7 @@ impl FlatTree {
 w.write(subtree);

 FlatTree {
-subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
-write_vec(w.subtree, SubtreeRepr::write_with_close_span)
-} else {
-write_vec(w.subtree, SubtreeRepr::write)
-},
+subtree: write_vec(w.subtree, SubtreeRepr::write),
 literal: if version >= EXTENDED_LEAF_DATA {
 write_vec(w.literal, LiteralRepr::write_with_kind)
 } else {
@@ -210,11 +202,7 @@ impl FlatTree {
 span_data_table: &SpanDataIndexMap,
 ) -> tt::Subtree<Span> {
 Reader {
-subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
-read_vec(self.subtree, SubtreeRepr::read_with_close_span)
-} else {
-read_vec(self.subtree, SubtreeRepr::read)
-},
+subtree: read_vec(self.subtree, SubtreeRepr::read),
 literal: if version >= EXTENDED_LEAF_DATA {
 read_vec(self.literal, LiteralRepr::read_with_kind)
 } else {
@@ -236,11 +224,7 @@ impl FlatTree {

 pub fn to_subtree_unresolved(self, version: u32) -> tt::Subtree<TokenId> {
 Reader {
-subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
-read_vec(self.subtree, SubtreeRepr::read_with_close_span)
-} else {
-read_vec(self.subtree, SubtreeRepr::read)
-},
+subtree: read_vec(self.subtree, SubtreeRepr::read),
 literal: if version >= EXTENDED_LEAF_DATA {
 read_vec(self.literal, LiteralRepr::read_with_kind)
 } else {
@@ -273,26 +257,7 @@ fn write_vec<T, F: Fn(T) -> [u32; N], const N: usize>(xs: Vec<T>, f: F) -> Vec<u
 }

 impl SubtreeRepr {
-fn write(self) -> [u32; 4] {
-let kind = match self.kind {
-tt::DelimiterKind::Invisible => 0,
-tt::DelimiterKind::Parenthesis => 1,
-tt::DelimiterKind::Brace => 2,
-tt::DelimiterKind::Bracket => 3,
-};
-[self.open.0, kind, self.tt[0], self.tt[1]]
-}
-fn read([open, kind, lo, len]: [u32; 4]) -> SubtreeRepr {
-let kind = match kind {
-0 => tt::DelimiterKind::Invisible,
-1 => tt::DelimiterKind::Parenthesis,
-2 => tt::DelimiterKind::Brace,
-3 => tt::DelimiterKind::Bracket,
-other => panic!("bad kind {other}"),
-};
-SubtreeRepr { open: TokenId(open), close: TokenId(!0), kind, tt: [lo, len] }
-}
-fn write_with_close_span(self) -> [u32; 5] {
+fn write(self) -> [u32; 5] {
 let kind = match self.kind {
 tt::DelimiterKind::Invisible => 0,
 tt::DelimiterKind::Parenthesis => 1,
@@ -301,7 +266,7 @@ impl SubtreeRepr {
 };
 [self.open.0, self.close.0, kind, self.tt[0], self.tt[1]]
 }
-fn read_with_close_span([open, close, kind, lo, len]: [u32; 5]) -> SubtreeRepr {
+fn read([open, close, kind, lo, len]: [u32; 5]) -> SubtreeRepr {
 let kind = match kind {
 0 => tt::DelimiterKind::Invisible,
 1 => tt::DelimiterKind::Parenthesis,
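Note: with the pre-`ENCODE_CLOSE_SPAN_VERSION` format removed, a subtree always serializes to five `u32`s: open token, close token, delimiter kind, and the `[lo, len]` token-tree slice. A self-contained round-trip of that layout, with simplified local types standing in for the `tt` ones:

```rust
#[derive(Clone, Copy, Debug, PartialEq)]
enum DelimiterKind { Invisible, Parenthesis, Brace, Bracket }

#[derive(Clone, Copy, Debug, PartialEq)]
struct SubtreeRepr { open: u32, close: u32, kind: DelimiterKind, tt: [u32; 2] }

impl SubtreeRepr {
    fn write(self) -> [u32; 5] {
        let kind = match self.kind {
            DelimiterKind::Invisible => 0,
            DelimiterKind::Parenthesis => 1,
            DelimiterKind::Brace => 2,
            DelimiterKind::Bracket => 3,
        };
        [self.open, self.close, kind, self.tt[0], self.tt[1]]
    }
    fn read([open, close, kind, lo, len]: [u32; 5]) -> SubtreeRepr {
        let kind = match kind {
            0 => DelimiterKind::Invisible,
            1 => DelimiterKind::Parenthesis,
            2 => DelimiterKind::Brace,
            3 => DelimiterKind::Bracket,
            other => panic!("bad kind {other}"),
        };
        SubtreeRepr { open, close, kind, tt: [lo, len] }
    }
}

fn main() {
    let repr = SubtreeRepr { open: 1, close: 2, kind: DelimiterKind::Brace, tt: [0, 7] };
    assert_eq!(SubtreeRepr::read(repr.write()), repr);
}
```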
@@ -56,8 +56,25 @@ impl ProcMacroProcessSrv {
 match srv.version_check() {
 Ok(v) if v > CURRENT_API_VERSION => Err(io::Error::new(
 io::ErrorKind::Other,
-format!( "The version of the proc-macro server ({v}) in your Rust toolchain is newer than the version supported by your rust-analyzer ({CURRENT_API_VERSION}).
-This will prevent proc-macro expansion from working. Please consider updating your rust-analyzer to ensure compatibility with your current toolchain."
+format!(
+"The version of the proc-macro server ({v}) in your Rust toolchain \
+is newer than the version supported by your rust-analyzer ({CURRENT_API_VERSION}).
+\
+This will prevent proc-macro expansion from working. \
+Please consider updating your rust-analyzer to ensure compatibility with your \
+current toolchain."
+),
+)),
+Ok(v) if v < RUST_ANALYZER_SPAN_SUPPORT => Err(io::Error::new(
+io::ErrorKind::Other,
+format!(
+"The version of the proc-macro server ({v}) in your Rust toolchain \
+is too old and no longer supported by your rust-analyzer which requires\
+version {RUST_ANALYZER_SPAN_SUPPORT} or higher.
+\
+This will prevent proc-macro expansion from working. \
+Please consider updating your toolchain or downgrading your rust-analyzer \
+to ensure compatibility with your current toolchain."
 ),
 )),
 Ok(v) => {
@@ -72,10 +89,10 @@ impl ProcMacroProcessSrv {
 tracing::info!("Proc-macro server span mode: {:?}", srv.mode);
 Ok(srv)
 }
-Err(e) => {
-tracing::info!(%e, "proc-macro version check failed, restarting and assuming version 0");
-create_srv(false)
-}
+Err(e) => Err(io::Error::new(
+io::ErrorKind::Other,
+format!("Failed to fetch proc-macro server version: {e}"),
+)),
 }
 }
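Note: the handshake now fails fast in three cases — a server newer than the client's API version, one older than the span-support floor, or a failed version fetch — instead of silently restarting and assuming version 0. The gate reduces to a range check; the constants here are hypothetical stand-ins for `CURRENT_API_VERSION` and `RUST_ANALYZER_SPAN_SUPPORT`:

```rust
use std::io;

const CURRENT_API_VERSION: u32 = 4; // hypothetical upper bound
const SPAN_SUPPORT: u32 = 2;        // hypothetical lower bound

fn check_version(v: io::Result<u32>) -> io::Result<u32> {
    match v {
        Ok(v) if v > CURRENT_API_VERSION => Err(io::Error::new(
            io::ErrorKind::Other,
            format!("server version {v} is newer than supported ({CURRENT_API_VERSION})"),
        )),
        Ok(v) if v < SPAN_SUPPORT => Err(io::Error::new(
            io::ErrorKind::Other,
            format!("server version {v} is older than required ({SPAN_SUPPORT})"),
        )),
        Ok(v) => Ok(v),
        Err(e) => Err(io::Error::new(
            io::ErrorKind::Other,
            format!("failed to fetch server version: {e}"),
        )),
    }
}

fn main() {
    assert!(check_version(Ok(3)).is_ok());
    assert!(check_version(Ok(5)).is_err()); // too new
    assert!(check_version(Ok(1)).is_err()); // too old
}
```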
@@ -6,21 +6,16 @@
 #[cfg(feature = "in-rust-tree")]
 extern crate rustc_driver as _;

-use proc_macro_api::json::{read_json, write_json};
-
 use std::io;

 fn main() -> std::io::Result<()> {
 let v = std::env::var("RUST_ANALYZER_INTERNALS_DO_NOT_USE");
-match v.as_deref() {
-Ok("this is unstable") => {
-// very well, if you must
-}
-_ => {
-eprintln!("If you're rust-analyzer, you can use this tool by exporting RUST_ANALYZER_INTERNALS_DO_NOT_USE='this is unstable'.");
-eprintln!("If not, you probably shouldn't use this tool. But do what you want: I'm an error message, not a cop.");
-std::process::exit(122);
-}
+if v.is_err() {
+eprintln!("This is an IDE implementation detail, you can use this tool by exporting RUST_ANALYZER_INTERNALS_DO_NOT_USE.");
+eprintln!(
+"Note that this tool's API is highly unstable and may break without prior notice"
+);
+std::process::exit(122);
 }

 run()
@@ -28,40 +23,19 @@ fn main() -> std::io::Result<()> {

 #[cfg(not(any(feature = "sysroot-abi", rust_analyzer)))]
 fn run() -> io::Result<()> {
-let err = "proc-macro-srv-cli needs to be compiled with the `sysroot-abi` feature to function";
-eprintln!("{err}");
-use proc_macro_api::msg::{self, Message};
-
-let read_request =
-|buf: &mut String| msg::Request::read(read_json, &mut io::stdin().lock(), buf);
-
-let write_response = |msg: msg::Response| msg.write(write_json, &mut io::stdout().lock());
-
-let mut buf = String::new();
-
-while let Some(req) = read_request(&mut buf)? {
-let res = match req {
-msg::Request::ListMacros { .. } => msg::Response::ListMacros(Err(err.to_owned())),
-msg::Request::ExpandMacro(_) => {
-msg::Response::ExpandMacro(Err(msg::PanicMessage(err.to_owned())))
-}
-msg::Request::ApiVersionCheck {} => {
-msg::Response::ApiVersionCheck(proc_macro_api::msg::CURRENT_API_VERSION)
-}
-msg::Request::SetConfig(_) => {
-msg::Response::SetConfig(proc_macro_api::msg::ServerConfig {
-span_mode: msg::SpanMode::Id,
-})
-}
-};
-write_response(res)?
-}
-Ok(())
+Err(io::Error::new(
+io::ErrorKind::Unsupported,
+"proc-macro-srv-cli needs to be compiled with the `sysroot-abi` feature to function"
+.to_owned(),
+))
 }

 #[cfg(any(feature = "sysroot-abi", rust_analyzer))]
 fn run() -> io::Result<()> {
-use proc_macro_api::msg::{self, Message};
+use proc_macro_api::{
+json::{read_json, write_json},
+msg::{self, Message},
+};
 use proc_macro_srv::EnvSnapshot;

 let read_request =
@@ -69,7 +43,7 @@ fn run() -> io::Result<()> {

 let write_response = |msg: msg::Response| msg.write(write_json, &mut io::stdout().lock());

-let env = EnvSnapshot::new();
+let env = EnvSnapshot::default();
 let mut srv = proc_macro_srv::ProcMacroSrv::new(&env);
 let mut buf = String::new();
|
@ -3,12 +3,11 @@
|
||||||
mod version;
|
mod version;
|
||||||
|
|
||||||
use proc_macro::bridge;
|
use proc_macro::bridge;
|
||||||
use std::{fmt, fs::File, io};
|
use std::{fmt, fs, io, time::SystemTime};
|
||||||
|
|
||||||
use libloading::Library;
|
use libloading::Library;
|
||||||
use memmap2::Mmap;
|
|
||||||
use object::Object;
|
use object::Object;
|
||||||
use paths::{AbsPath, Utf8Path, Utf8PathBuf};
|
use paths::{Utf8Path, Utf8PathBuf};
|
||||||
use proc_macro_api::ProcMacroKind;
|
use proc_macro_api::ProcMacroKind;
|
||||||
|
|
||||||
use crate::ProcMacroSrvSpan;
|
use crate::ProcMacroSrvSpan;
|
||||||
|
@ -23,14 +22,9 @@ fn is_derive_registrar_symbol(symbol: &str) -> bool {
|
||||||
symbol.contains(NEW_REGISTRAR_SYMBOL)
|
symbol.contains(NEW_REGISTRAR_SYMBOL)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn find_registrar_symbol(file: &Utf8Path) -> io::Result<Option<String>> {
|
fn find_registrar_symbol(obj: &object::File<'_>) -> object::Result<Option<String>> {
|
||||||
let file = File::open(file)?;
|
Ok(obj
|
||||||
let buffer = unsafe { Mmap::map(&file)? };
|
.exports()?
|
||||||
|
|
||||||
Ok(object::File::parse(&*buffer)
|
|
||||||
.map_err(invalid_data_err)?
|
|
||||||
.exports()
|
|
||||||
.map_err(invalid_data_err)?
|
|
||||||
.into_iter()
|
.into_iter()
|
         .map(|export| export.name())
         .filter_map(|sym| String::from_utf8(sym.into()).ok())
@@ -113,17 +107,18 @@ struct ProcMacroLibraryLibloading {
 }
 
 impl ProcMacroLibraryLibloading {
-    fn open(file: &Utf8Path) -> Result<Self, LoadProcMacroDylibError> {
-        let symbol_name = find_registrar_symbol(file)?.ok_or_else(|| {
-            invalid_data_err(format!("Cannot find registrar symbol in file {file}"))
-        })?;
+    fn open(path: &Utf8Path) -> Result<Self, LoadProcMacroDylibError> {
+        let file = fs::File::open(path)?;
+        let file = unsafe { memmap2::Mmap::map(&file) }?;
+        let obj = object::File::parse(&*file)
+            .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;
+        let version_info = version::read_dylib_info(&obj)?;
+        let symbol_name =
+            find_registrar_symbol(&obj).map_err(invalid_data_err)?.ok_or_else(|| {
+                invalid_data_err(format!("Cannot find registrar symbol in file {path}"))
+            })?;
 
-        let abs_file: &AbsPath = file
-            .try_into()
-            .map_err(|_| invalid_data_err(format!("expected an absolute path, got {file}")))?;
-        let version_info = version::read_dylib_info(abs_file)?;
-
-        let lib = load_library(file).map_err(invalid_data_err)?;
+        let lib = load_library(path).map_err(invalid_data_err)?;
         let proc_macros = crate::proc_macros::ProcMacros::from_lib(
             &lib,
             symbol_name,
@@ -133,30 +128,33 @@ impl ProcMacroLibraryLibloading {
     }
 }
 
-pub(crate) struct Expander {
-    inner: ProcMacroLibraryLibloading,
-    path: Utf8PathBuf,
-}
-
-impl Drop for Expander {
+struct RemoveFileOnDrop(Utf8PathBuf);
+impl Drop for RemoveFileOnDrop {
     fn drop(&mut self) {
         #[cfg(windows)]
-        std::fs::remove_file(&self.path).ok();
-        _ = self.path;
+        std::fs::remove_file(&self.0).unwrap();
+        _ = self.0;
     }
 }
 
+// Drop order matters as we can't remove the dylib before the library is unloaded
+pub(crate) struct Expander {
+    inner: ProcMacroLibraryLibloading,
+    _remove_on_drop: RemoveFileOnDrop,
+    modified_time: SystemTime,
+}
+
 impl Expander {
     pub(crate) fn new(lib: &Utf8Path) -> Result<Expander, LoadProcMacroDylibError> {
         // Some libraries for dynamic loading require canonicalized path even when it is
         // already absolute
         let lib = lib.canonicalize_utf8()?;
+        let modified_time = fs::metadata(&lib).and_then(|it| it.modified())?;
 
         let path = ensure_file_with_lock_free_access(&lib)?;
 
         let library = ProcMacroLibraryLibloading::open(path.as_ref())?;
 
-        Ok(Expander { inner: library, path })
+        Ok(Expander { inner: library, _remove_on_drop: RemoveFileOnDrop(path), modified_time })
     }
 
     pub(crate) fn expand<S: ProcMacroSrvSpan>(
@@ -181,6 +179,10 @@ impl Expander {
     pub(crate) fn list_macros(&self) -> Vec<(String, ProcMacroKind)> {
         self.inner.proc_macros.list_macros()
     }
+
+    pub(crate) fn modified_time(&self) -> SystemTime {
+        self.modified_time
+    }
 }
 
 /// Copy the dylib to temp directory to prevent locking in Windows
@@ -194,20 +196,20 @@ fn ensure_file_with_lock_free_access(path: &Utf8Path) -> io::Result<Utf8PathBuf> {
     }
 
     let mut to = Utf8PathBuf::from_path_buf(std::env::temp_dir()).unwrap();
+    to.push("rust-analyzer-proc-macros");
+    _ = fs::create_dir(&to);
 
-    let file_name = path.file_name().ok_or_else(|| {
+    let file_name = path.file_stem().ok_or_else(|| {
         io::Error::new(io::ErrorKind::InvalidInput, format!("File path is invalid: {path}"))
     })?;
 
-    // Generate a unique number by abusing `HashMap`'s hasher.
-    // Maybe this will also "inspire" a libs team member to finally put `rand` in libstd.
-    let t = RandomState::new().build_hasher().finish();
-
-    let mut unique_name = t.to_string();
-    unique_name.push_str(file_name);
-
-    to.push(unique_name);
-    std::fs::copy(path, &to)?;
+    to.push({
+        // Generate a unique number by abusing `HashMap`'s hasher.
+        // Maybe this will also "inspire" a libs team member to finally put `rand` in libstd.
+        let unique_name = RandomState::new().build_hasher().finish();
+        format!("{file_name}-{unique_name}.dll")
+    });
+    fs::copy(path, &to)?;
     Ok(to)
 }
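Review note: the correctness of the new `RemoveFileOnDrop` split hinges on Rust dropping struct fields in declaration order, so the library handle is unloaded before the guard deletes the copied dylib. A minimal, self-contained sketch of that guarantee (names are illustrative, not the crate's API):

// Fields drop in declaration order, so the dylib handle must be declared
// before the guard that deletes the backing file.
struct Guard(&'static str);
impl Drop for Guard {
    fn drop(&mut self) {
        println!("dropping {}", self.0);
    }
}

struct Expander {
    inner: Guard,           // stands in for the loaded library; dropped first
    _remove_on_drop: Guard, // stands in for RemoveFileOnDrop; dropped second
}

fn main() {
    let _e = Expander {
        inner: Guard("library handle"),
        _remove_on_drop: Guard("file remover"),
    };
    // Prints "dropping library handle", then "dropping file remover".
}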
@@ -1,13 +1,8 @@
 //! Reading proc-macro rustc version information from binary data
 
-use std::{
-    fs::File,
-    io::{self, Read},
-};
+use std::io::{self, Read};
 
-use memmap2::Mmap;
-use object::read::{File as BinaryFile, Object, ObjectSection};
-use paths::AbsPath;
+use object::read::{Object, ObjectSection};
 
 #[derive(Debug)]
 #[allow(dead_code)]
@@ -21,14 +16,14 @@ pub struct RustCInfo {
 }
 
 /// Read rustc dylib information
-pub fn read_dylib_info(dylib_path: &AbsPath) -> io::Result<RustCInfo> {
+pub fn read_dylib_info(obj: &object::File<'_>) -> io::Result<RustCInfo> {
     macro_rules! err {
         ($e:literal) => {
             io::Error::new(io::ErrorKind::InvalidData, $e)
         };
     }
 
-    let ver_str = read_version(dylib_path)?;
+    let ver_str = read_version(obj)?;
     let mut items = ver_str.split_whitespace();
     let tag = items.next().ok_or_else(|| err!("version format error"))?;
     if tag != "rustc" {
@@ -75,10 +70,8 @@ pub fn read_dylib_info(dylib_path: &AbsPath) -> io::Result<RustCInfo> {
 
 /// This is used inside read_version() to locate the ".rustc" section
 /// from a proc macro crate's binary file.
-fn read_section<'a>(dylib_binary: &'a [u8], section_name: &str) -> io::Result<&'a [u8]> {
-    BinaryFile::parse(dylib_binary)
-        .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?
-        .section_by_name(section_name)
+fn read_section<'a>(obj: &object::File<'a>, section_name: &str) -> io::Result<&'a [u8]> {
+    obj.section_by_name(section_name)
         .ok_or_else(|| io::Error::new(io::ErrorKind::InvalidData, "section read error"))?
         .data()
         .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))
@@ -106,11 +99,8 @@ fn read_section<'a>(obj: &object::File<'a>, section_name: &str) -> io::Result<&'a [u8]> {
 ///
 /// Check this issue for more about the bytes layout:
 /// <https://github.com/rust-lang/rust-analyzer/issues/6174>
-pub fn read_version(dylib_path: &AbsPath) -> io::Result<String> {
-    let dylib_file = File::open(dylib_path)?;
-    let dylib_mmapped = unsafe { Mmap::map(&dylib_file) }?;
-
-    let dot_rustc = read_section(&dylib_mmapped, ".rustc")?;
+pub fn read_version(obj: &object::File<'_>) -> io::Result<String> {
+    let dot_rustc = read_section(obj, ".rustc")?;
 
     // check if magic is valid
     if &dot_rustc[0..4] != b"rust" {
@@ -159,8 +149,12 @@ pub fn read_version(obj: &object::File<'_>) -> io::Result<String> {
 
 #[test]
 fn test_version_check() {
-    let path = paths::AbsPathBuf::assert(crate::proc_macro_test_dylib_path());
-    let info = read_dylib_info(&path).unwrap();
+    let info = read_dylib_info(
+        &object::File::parse(&*std::fs::read(crate::proc_macro_test_dylib_path()).unwrap())
+            .unwrap(),
+    )
+    .unwrap();
     assert_eq!(
         info.version_string,
         crate::RUSTC_VERSION_STRING,
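For context, the refactor above makes callers parse the dylib once with the `object` crate and pass the parsed `object::File` around, instead of each helper re-opening and re-parsing the file. A minimal sketch of that parse-once pattern, assuming the `object` and `memmap2` crates; error handling is simplified and the function name is made up:

use std::{fs, io};

use object::{Object, ObjectSection};

fn rustc_section(path: &str) -> io::Result<Vec<u8>> {
    let file = fs::File::open(path)?;
    // Mapping is unsafe because the file must not be mutated while mapped.
    let mmap = unsafe { memmap2::Mmap::map(&file) }?;
    // Parse once; every later lookup reuses this `object::File`.
    let obj = object::File::parse(&*mmap)
        .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;
    let data = obj
        .section_by_name(".rustc")
        .ok_or_else(|| io::Error::new(io::ErrorKind::InvalidData, "no .rustc section"))?
        .data()
        .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;
    Ok(data.to_vec())
}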
@@ -13,7 +13,7 @@
 #![cfg(any(feature = "sysroot-abi", rust_analyzer))]
 #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
 #![feature(proc_macro_internals, proc_macro_diagnostic, proc_macro_span)]
-#![allow(unreachable_pub, internal_features)]
+#![allow(unreachable_pub, internal_features, clippy::disallowed_types, clippy::print_stderr)]
 
 extern crate proc_macro;
 #[cfg(feature = "in-rust-tree")]
@@ -35,7 +35,6 @@ use std::{
     fs,
     path::{Path, PathBuf},
     thread,
-    time::SystemTime,
 };
 
 use paths::{Utf8Path, Utf8PathBuf};
@@ -53,7 +52,7 @@ use crate::server_impl::TokenStream;
 pub const RUSTC_VERSION_STRING: &str = env!("RUSTC_VERSION");
 
 pub struct ProcMacroSrv<'env> {
-    expanders: HashMap<(Utf8PathBuf, SystemTime), dylib::Expander>,
+    expanders: HashMap<Utf8PathBuf, dylib::Expander>,
     span_mode: SpanMode,
     env: &'env EnvSnapshot,
 }
@@ -66,7 +65,7 @@ impl<'env> ProcMacroSrv<'env> {
 
 const EXPANDER_STACK_SIZE: usize = 8 * 1024 * 1024;
 
-impl<'env> ProcMacroSrv<'env> {
+impl ProcMacroSrv<'_> {
     pub fn set_span_mode(&mut self, span_mode: SpanMode) {
         self.span_mode = span_mode;
     }
@@ -81,10 +80,9 @@ impl ProcMacroSrv<'_> {
     ) -> Result<(msg::FlatTree, Vec<u32>), msg::PanicMessage> {
         let span_mode = self.span_mode;
         let snapped_env = self.env;
-        let expander = self.expander(lib.as_ref()).map_err(|err| {
-            debug_assert!(false, "should list macros before asking to expand");
-            msg::PanicMessage(format!("failed to load macro: {err}"))
-        })?;
+        let expander = self
+            .expander(lib.as_ref())
+            .map_err(|err| msg::PanicMessage(format!("failed to load macro: {err}")))?;
 
         let prev_env = EnvChange::apply(snapped_env, env, current_dir.as_ref().map(<_>::as_ref));
 
@@ -107,16 +105,20 @@ impl ProcMacroSrv<'_> {
     }
 
     fn expander(&mut self, path: &Utf8Path) -> Result<&dylib::Expander, String> {
-        let time = fs::metadata(path)
-            .and_then(|it| it.modified())
-            .map_err(|err| format!("Failed to get file metadata for {path}: {err}",))?;
+        let expander = || {
+            dylib::Expander::new(path)
+                .map_err(|err| format!("Cannot create expander for {path}: {err}",))
+        };
 
-        Ok(match self.expanders.entry((path.to_path_buf(), time)) {
-            Entry::Vacant(v) => v.insert(
-                dylib::Expander::new(path)
-                    .map_err(|err| format!("Cannot create expander for {path}: {err}",))?,
-            ),
-            Entry::Occupied(e) => e.into_mut(),
+        Ok(match self.expanders.entry(path.to_path_buf()) {
+            Entry::Vacant(v) => v.insert(expander()?),
+            Entry::Occupied(mut e) => {
+                let time = fs::metadata(path).and_then(|it| it.modified()).ok();
+                if Some(e.get().modified_time()) != time {
+                    e.insert(expander()?);
+                }
+                e.into_mut()
+            }
         })
     }
 }
@@ -246,8 +248,8 @@ pub struct EnvSnapshot {
     vars: HashMap<OsString, OsString>,
 }
 
-impl EnvSnapshot {
-    pub fn new() -> EnvSnapshot {
+impl Default for EnvSnapshot {
+    fn default() -> EnvSnapshot {
         EnvSnapshot { vars: env::vars_os().collect() }
     }
 }
@@ -303,7 +305,7 @@ impl Drop for EnvChange<'_> {
         }
 
         if let Some(dir) = &self.prev_working_dir {
-            if let Err(err) = std::env::set_current_dir(&dir) {
+            if let Err(err) = std::env::set_current_dir(dir) {
                 eprintln!(
                     "Failed to set the current working dir to {}. Error: {:?}",
                     dir.display(),
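The cache change above is the usual mtime-invalidation pattern: key the map by path only, and on a hit compare the stored modification time with the file's current one, rebuilding when they differ. A standalone sketch of the same scheme (illustrative names, not the crate's API):

use std::{collections::hash_map::Entry, collections::HashMap, fs, time::SystemTime};

struct Loaded {
    modified_time: SystemTime,
    // ... the expensive-to-build value would live here
}

fn load(path: &str) -> std::io::Result<Loaded> {
    Ok(Loaded { modified_time: fs::metadata(path)?.modified()? })
}

fn get<'a>(cache: &'a mut HashMap<String, Loaded>, path: &str) -> std::io::Result<&'a Loaded> {
    Ok(match cache.entry(path.to_owned()) {
        Entry::Vacant(v) => v.insert(load(path)?),
        Entry::Occupied(mut e) => {
            let time = fs::metadata(path).and_then(|it| it.modified()).ok();
            if Some(e.get().modified_time) != time {
                // The file changed on disk; rebuild the cached value.
                e.insert(load(path)?);
            }
            e.into_mut()
        }
    })
}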
@@ -13,7 +13,7 @@ pub(crate) struct ProcMacros {
 
 impl From<bridge::PanicMessage> for crate::PanicMessage {
     fn from(p: bridge::PanicMessage) -> Self {
-        Self { message: p.as_str().map(|s| s.to_string()) }
+        Self { message: p.as_str().map(|s| s.to_owned()) }
     }
 }
 
@@ -498,7 +498,7 @@ mod tests {
             })),
             tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
                 sym: Symbol::intern("T"),
-                span: span,
+                span,
                 is_raw: tt::IdentIsRaw::No,
             })),
             tt::TokenTree::Subtree(tt::Subtree {
@@ -99,7 +99,7 @@ pub(super) struct TokenStreamBuilder<S> {
 }
 
 /// pub(super)lic implementation details for the `TokenStream` type, such as iterators.
-pub(super) mod token_stream {
+pub(super) mod token_stream_impls {
 
     use core::fmt;
 
@@ -137,6 +137,7 @@ pub(super) mod token_stream_impls {
     }
 }
 
+#[allow(clippy::to_string_trait_impl)]
 impl<S> ToString for TokenStream<S> {
     fn to_string(&self) -> String {
         ::tt::pretty(&self.token_trees)
@@ -150,7 +151,7 @@ impl<S> TokenStreamBuilder<S> {
     }
 
     pub(super) fn push(&mut self, stream: TokenStream<S>) {
-        self.acc.extend(stream.into_iter())
+        self.acc.extend(stream)
     }
 
     pub(super) fn build(self) -> TokenStream<S> {
@@ -97,7 +97,7 @@ fn assert_expand_impl(
 
 pub(crate) fn list() -> Vec<String> {
     let dylib_path = proc_macro_test_dylib_path();
-    let env = EnvSnapshot::new();
+    let env = EnvSnapshot::default();
     let mut srv = ProcMacroSrv::new(&env);
     let res = srv.list_macros(&dylib_path).unwrap();
     res.into_iter().map(|(name, kind)| format!("{name} [{kind:?}]")).collect()
@@ -33,6 +33,7 @@ pub struct CargoWorkspace {
     workspace_root: AbsPathBuf,
     target_directory: AbsPathBuf,
     manifest_path: ManifestPath,
+    is_virtual_workspace: bool,
 }
 
 impl ops::Index<Package> for CargoWorkspace {
@@ -384,13 +385,20 @@ impl CargoWorkspace {
         .with_context(|| format!("Failed to run `{:?}`", meta.cargo_command()))
     }
 
-    pub fn new(mut meta: cargo_metadata::Metadata, manifest_path: ManifestPath) -> CargoWorkspace {
+    pub fn new(
+        mut meta: cargo_metadata::Metadata,
+        ws_manifest_path: ManifestPath,
+    ) -> CargoWorkspace {
         let mut pkg_by_id = FxHashMap::default();
         let mut packages = Arena::default();
         let mut targets = Arena::default();
 
         let ws_members = &meta.workspace_members;
 
+        let workspace_root = AbsPathBuf::assert(meta.workspace_root);
+        let target_directory = AbsPathBuf::assert(meta.target_directory);
+        let mut is_virtual_workspace = true;
+
         meta.packages.sort_by(|a, b| a.id.cmp(&b.id));
         for meta_pkg in meta.packages {
             let cargo_metadata::Package {
@@ -429,12 +437,13 @@ impl CargoWorkspace {
             let is_local = source.is_none();
             let is_member = ws_members.contains(&id);
 
-            let manifest = AbsPathBuf::assert(manifest_path);
+            let manifest = ManifestPath::try_from(AbsPathBuf::assert(manifest_path)).unwrap();
+            is_virtual_workspace &= manifest != ws_manifest_path;
             let pkg = packages.alloc(PackageData {
                 id: id.repr.clone(),
                 name,
                 version,
-                manifest: manifest.clone().try_into().unwrap(),
+                manifest: manifest.clone(),
                 targets: Vec::new(),
                 is_local,
                 is_member,
@@ -468,7 +477,7 @@ impl CargoWorkspace {
                     // modified manifest file into a special target dir which is then used as
                     // the source path. We don't want that, we want the original here so map it
                     // back
-                    manifest.clone()
+                    manifest.clone().into()
                 } else {
                     AbsPathBuf::assert(src_path)
                 },
@@ -493,11 +502,14 @@ impl CargoWorkspace {
             packages[source].active_features.extend(node.features);
         }
 
-        let workspace_root = AbsPathBuf::assert(meta.workspace_root);
-
-        let target_directory = AbsPathBuf::assert(meta.target_directory);
-
-        CargoWorkspace { packages, targets, workspace_root, target_directory, manifest_path }
+        CargoWorkspace {
+            packages,
+            targets,
+            workspace_root,
+            target_directory,
+            manifest_path: ws_manifest_path,
+            is_virtual_workspace,
+        }
     }
 
     pub fn packages(&self) -> impl ExactSizeIterator<Item = Package> + '_ {
@@ -579,6 +591,10 @@ impl CargoWorkspace {
     fn is_unique(&self, name: &str) -> bool {
        self.packages.iter().filter(|(_, v)| v.name == name).count() == 1
    }
+
+    pub fn is_virtual_workspace(&self) -> bool {
+        self.is_virtual_workspace
+    }
 }
 
 fn find_list_of_build_targets(
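A toy model of the new `is_virtual_workspace` flag: the workspace counts as virtual when none of the manifests listed in the metadata equals the workspace's own manifest, i.e. the root `Cargo.toml` defines no package of its own. Paths here are made up:

// Mirrors `is_virtual_workspace &= manifest != ws_manifest_path;` from the hunk above.
fn is_virtual(ws_manifest: &str, package_manifests: &[&str]) -> bool {
    let mut is_virtual_workspace = true;
    for manifest in package_manifests {
        is_virtual_workspace &= *manifest != ws_manifest;
    }
    is_virtual_workspace
}

fn main() {
    // The root manifest also defines a package => not a virtual workspace.
    assert!(!is_virtual("/ws/Cargo.toml", &["/ws/Cargo.toml", "/ws/a/Cargo.toml"]));
    // Only sub-crate manifests are listed => virtual workspace.
    assert!(is_virtual("/ws/Cargo.toml", &["/ws/a/Cargo.toml", "/ws/b/Cargo.toml"]));
}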
@@ -29,6 +29,12 @@ impl TryFrom<AbsPathBuf> for ManifestPath {
     }
 }
 
+impl From<ManifestPath> for AbsPathBuf {
+    fn from(it: ManifestPath) -> Self {
+        it.file
+    }
+}
+
 impl ManifestPath {
     // Shadow `parent` from `Deref`.
     pub fn parent(&self) -> &AbsPath {
@@ -11,8 +11,9 @@ use base_db::{
 };
 use cfg::{CfgAtom, CfgDiff, CfgOptions};
 use intern::{sym, Symbol};
+use itertools::Itertools;
 use paths::{AbsPath, AbsPathBuf};
-use rustc_hash::{FxHashMap, FxHashSet};
+use rustc_hash::FxHashMap;
 use semver::Version;
 use span::{Edition, FileId};
 use toolchain::Tool;
@@ -41,7 +42,9 @@ pub type FileLoader<'a> = &'a mut dyn for<'b> FnMut(&'b AbsPath) -> Option<FileId>;
 pub struct PackageRoot {
     /// Is from the local filesystem and may be edited
     pub is_local: bool,
+    /// Directories to include
     pub include: Vec<AbsPathBuf>,
+    /// Directories to exclude
     pub exclude: Vec<AbsPathBuf>,
 }
 
@@ -553,17 +556,6 @@ impl ProjectWorkspace {
         }
     }
 
-    pub fn buildfiles(&self) -> Vec<AbsPathBuf> {
-        match &self.kind {
-            ProjectWorkspaceKind::Json(project) => project
-                .crates()
-                .filter_map(|(_, krate)| krate.build.as_ref().map(|build| build.build_file.clone()))
-                .map(|build_file| self.workspace_root().join(build_file))
-                .collect(),
-            _ => vec![],
-        }
-    }
-
     pub fn find_sysroot_proc_macro_srv(&self) -> anyhow::Result<AbsPathBuf> {
         self.sysroot.discover_proc_macro_srv()
     }
@@ -608,15 +600,25 @@ impl ProjectWorkspace {
         match &self.kind {
             ProjectWorkspaceKind::Json(project) => project
                 .crates()
-                .map(|(_, krate)| PackageRoot {
-                    is_local: krate.is_workspace_member,
-                    include: krate.include.clone(),
-                    exclude: krate.exclude.clone(),
+                .map(|(_, krate)| {
+                    let build_files = project
+                        .crates()
+                        .filter_map(|(_, krate)| {
+                            krate.build.as_ref().map(|build| build.build_file.clone())
+                        })
+                        // FIXME: PackageRoots dont allow specifying files, only directories
+                        .filter_map(|build_file| {
+                            self.workspace_root().join(build_file).parent().map(ToOwned::to_owned)
+                        });
+                    PackageRoot {
+                        is_local: krate.is_workspace_member,
+                        include: krate.include.iter().cloned().chain(build_files).collect(),
+                        exclude: krate.exclude.clone(),
+                    }
                 })
-                .collect::<FxHashSet<_>>()
-                .into_iter()
                 .chain(mk_sysroot())
-                .collect::<Vec<_>>(),
+                .unique()
+                .collect(),
             ProjectWorkspaceKind::Cargo {
                 cargo,
                 rustc,
@@ -671,6 +673,11 @@ impl ProjectWorkspace {
                         exclude: Vec::new(),
                     })
                 }))
+                .chain(cargo.is_virtual_workspace().then(|| PackageRoot {
+                    is_local: true,
+                    include: vec![cargo.workspace_root().to_path_buf()],
+                    exclude: Vec::new(),
+                }))
                 .collect()
             }
             ProjectWorkspaceKind::DetachedFile { file, cargo: cargo_script, .. } => {
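The `collect::<FxHashSet<_>>()` round trip is replaced with `Itertools::unique`, which deduplicates lazily while keeping first-occurrence order. A minimal demonstration (requires the `itertools` crate):

use itertools::Itertools;

fn main() {
    let roots = ["a", "b", "a", "c", "b"];
    // `unique()` yields each item once, in the order first seen.
    let deduped: Vec<_> = roots.iter().unique().collect();
    assert_eq!(deduped, [&"a", &"b", &"c"]);
}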
@@ -11,7 +11,7 @@
 /// case), and we know that the query only used inputs of medium
 /// durability or higher, then we can skip that enumeration.
 ///
-/// Typically, one assigns low durabilites to inputs that the user is
+/// Typically, one assigns low durabilities to inputs that the user is
 /// frequently editing. Medium or high durabilities are used for
 /// configuration, the source from library crates, or other things
 /// that are unlikely to be edited.
@@ -291,7 +291,7 @@ pub trait ParallelDatabase: Database + Send {
     /// # Panics
     ///
     /// It is not permitted to create a snapshot from inside of a
-    /// query. Attepting to do so will panic.
+    /// query. Attempting to do so will panic.
     ///
     /// # Deadlock warning
     ///
@@ -255,7 +255,7 @@ fn cycle_revalidate_unchanged_twice() {
     db.set_b_invokes(CycleQuery::A);
 
     assert!(db.cycle_a().is_err());
-    db.set_c_invokes(CycleQuery::A); // force new revisi5on
+    db.set_c_invokes(CycleQuery::A); // force new revision
 
     // on this run
     expect![[r#"
@@ -51,7 +51,9 @@ fn actual_main() -> anyhow::Result<ExitCode> {
         }
     }
 
-    setup_logging(flags.log_file.clone())?;
+    if let Err(e) = setup_logging(flags.log_file.clone()) {
+        eprintln!("Failed to setup logging: {e:#}");
+    }
 
     let verbosity = flags.verbosity();
 
@@ -331,8 +331,8 @@ impl flags::AnalysisStats {
         let mut fail = 0;
         for &b in bodies {
             let res = match b {
-                DefWithBody::Const(c) => c.render_eval(db, Edition::LATEST),
-                DefWithBody::Static(s) => s.render_eval(db, Edition::LATEST),
+                DefWithBody::Const(c) => c.eval(db),
+                DefWithBody::Static(s) => s.eval(db),
                 _ => continue,
             };
             all += 1;
@@ -827,6 +827,7 @@ impl Config {
         let mut should_update = false;
 
         if let Some(change) = change.user_config_change {
+            tracing::info!("updating config from user config toml: {:#}", change);
             if let Ok(table) = toml::from_str(&change) {
                 let mut toml_errors = vec![];
                 validate_toml_table(
@@ -919,7 +920,7 @@ impl Config {
             RatomlFileKind::Crate => {
                 if let Some(text) = text {
                     let mut toml_errors = vec![];
-                    tracing::info!("updating ra-toml config: {:#}", text);
+                    tracing::info!("updating ra-toml crate config: {:#}", text);
                     match toml::from_str(&text) {
                         Ok(table) => {
                             validate_toml_table(
@@ -961,6 +962,7 @@ impl Config {
             }
             RatomlFileKind::Workspace => {
                 if let Some(text) = text {
+                    tracing::info!("updating ra-toml workspace config: {:#}", text);
                     let mut toml_errors = vec![];
                     match toml::from_str(&text) {
                         Ok(table) => {
@@ -417,8 +417,10 @@ impl GlobalState {
             })
             .collect_vec();
 
-        for (file_id, (_change_kind, vfs_path)) in modified_ratoml_files {
+        for (file_id, (change_kind, vfs_path)) in modified_ratoml_files {
+            tracing::info!(%vfs_path, ?change_kind, "Processing rust-analyzer.toml changes");
             if vfs_path.as_path() == user_config_abs_path {
+                tracing::info!(%vfs_path, ?change_kind, "Use config rust-analyzer.toml changes");
                 change.change_user_config(Some(db.file_text(file_id)));
                 continue;
             }
@@ -430,12 +432,14 @@ impl GlobalState {
 
             if !sr.is_library {
                 let entry = if workspace_ratoml_paths.contains(&vfs_path) {
+                    tracing::info!(%vfs_path, ?sr_id, "workspace rust-analyzer.toml changes");
                     change.change_workspace_ratoml(
                         sr_id,
                         vfs_path.clone(),
                         Some(db.file_text(file_id)),
                     )
                 } else {
+                    tracing::info!(%vfs_path, ?sr_id, "crate rust-analyzer.toml changes");
                     change.change_ratoml(
                         sr_id,
                         vfs_path.clone(),
@@ -446,7 +450,7 @@ impl GlobalState {
                 if let Some((kind, old_path, old_text)) = entry {
                     // SourceRoot has more than 1 RATOML files. In this case lexicographically smaller wins.
                     if old_path < vfs_path {
-                        span!(Level::ERROR, "Two `rust-analyzer.toml` files were found inside the same crate. {vfs_path} has no effect.");
+                        tracing::error!("Two `rust-analyzer.toml` files were found inside the same crate. {vfs_path} has no effect.");
                         // Put the old one back in.
                         match kind {
                             RatomlFileKind::Crate => {
@@ -459,8 +463,7 @@ impl GlobalState {
                     }
                 }
             } else {
-                // Mapping to a SourceRoot should always end up in `Ok`
-                span!(Level::ERROR, "Mapping to SourceRootId failed.");
+                tracing::info!(%vfs_path, "Ignoring library rust-analyzer.toml");
             }
         }
         change.change_source_root_parent_map(self.local_roots_parent_map.clone());
@@ -895,7 +895,29 @@ pub fn item_const(
         None => String::new(),
         Some(it) => format!("{it} "),
     };
-    ast_from_text(&format!("{visibility} const {name}: {ty} = {expr};"))
+    ast_from_text(&format!("{visibility}const {name}: {ty} = {expr};"))
+}
+
+pub fn item_static(
+    visibility: Option<ast::Visibility>,
+    is_unsafe: bool,
+    is_mut: bool,
+    name: ast::Name,
+    ty: ast::Type,
+    expr: Option<ast::Expr>,
+) -> ast::Static {
+    let visibility = match visibility {
+        None => String::new(),
+        Some(it) => format!("{it} "),
+    };
+    let is_unsafe = if is_unsafe { "unsafe " } else { "" };
+    let is_mut = if is_mut { "mut " } else { "" };
+    let expr = match expr {
+        Some(it) => &format!(" = {it}"),
+        None => "",
+    };
+
+    ast_from_text(&format!("{visibility}{is_unsafe}static {is_mut}{name}: {ty}{expr};"))
 }
 
 pub fn unnamed_param(ty: ast::Type) -> ast::Param {
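For a quick sanity check of the format string in the new `make::item_static`, here is the text it should produce for a couple of inputs (a sketch mirroring the formatting logic, not the actual `make` API):

fn render(
    vis: Option<&str>,
    is_unsafe: bool,
    is_mut: bool,
    name: &str,
    ty: &str,
    expr: Option<&str>,
) -> String {
    let visibility = vis.map(|v| format!("{v} ")).unwrap_or_default();
    let is_unsafe = if is_unsafe { "unsafe " } else { "" };
    let is_mut = if is_mut { "mut " } else { "" };
    let expr = expr.map(|e| format!(" = {e}")).unwrap_or_default();
    format!("{visibility}{is_unsafe}static {is_mut}{name}: {ty}{expr};")
}

fn main() {
    assert_eq!(render(None, false, false, "FOO", "u8", Some("0")), "static FOO: u8 = 0;");
    assert_eq!(
        render(Some("pub"), true, true, "BAR", "u8", None),
        "pub unsafe static mut BAR: u8;"
    );
}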
@@ -188,6 +188,73 @@ impl SyntaxFactory {
         ast
     }
 
+    pub fn item_const(
+        &self,
+        visibility: Option<ast::Visibility>,
+        name: ast::Name,
+        ty: ast::Type,
+        expr: ast::Expr,
+    ) -> ast::Const {
+        let ast = make::item_const(visibility.clone(), name.clone(), ty.clone(), expr.clone())
+            .clone_for_update();
+
+        if let Some(mut mapping) = self.mappings() {
+            let mut builder = SyntaxMappingBuilder::new(ast.syntax().clone());
+            if let Some(visibility) = visibility {
+                builder.map_node(
+                    visibility.syntax().clone(),
+                    ast.visibility().unwrap().syntax().clone(),
+                );
+            }
+            builder.map_node(name.syntax().clone(), ast.name().unwrap().syntax().clone());
+            builder.map_node(ty.syntax().clone(), ast.ty().unwrap().syntax().clone());
+            builder.map_node(expr.syntax().clone(), ast.body().unwrap().syntax().clone());
+            builder.finish(&mut mapping);
+        }
+
+        ast
+    }
+
+    pub fn item_static(
+        &self,
+        visibility: Option<ast::Visibility>,
+        is_unsafe: bool,
+        is_mut: bool,
+        name: ast::Name,
+        ty: ast::Type,
+        expr: Option<ast::Expr>,
+    ) -> ast::Static {
+        let ast = make::item_static(
+            visibility.clone(),
+            is_unsafe,
+            is_mut,
+            name.clone(),
+            ty.clone(),
+            expr.clone(),
+        )
+        .clone_for_update();
+
+        if let Some(mut mapping) = self.mappings() {
+            let mut builder = SyntaxMappingBuilder::new(ast.syntax().clone());
+            if let Some(visibility) = visibility {
+                builder.map_node(
+                    visibility.syntax().clone(),
+                    ast.visibility().unwrap().syntax().clone(),
+                );
+            }
+
+            builder.map_node(name.syntax().clone(), ast.name().unwrap().syntax().clone());
+            builder.map_node(ty.syntax().clone(), ast.ty().unwrap().syntax().clone());
+
+            if let Some(expr) = expr {
+                builder.map_node(expr.syntax().clone(), ast.body().unwrap().syntax().clone());
+            }
+            builder.finish(&mut mapping);
+        }
+
+        ast
+    }
+
     pub fn turbofish_generic_arg_list(
         &self,
         args: impl IntoIterator<Item = ast::GenericArg> + Clone,
@@ -580,7 +580,7 @@ Unfortunately, it downloads an old version of `rust-analyzer`, but you can set t
 
 There is a package named `ra_ap_rust_analyzer` available on https://crates.io/crates/ra_ap_rust-analyzer[crates.io], for someone who wants to use it programmatically.
 
-For more details, see https://github.com/rust-lang/rust-analyzer/blob/master/.github/workflows/publish.yml[the publish workflow].
+For more details, see https://github.com/rust-lang/rust-analyzer/blob/master/.github/workflows/autopublish.yaml[the publish workflow].
 
 === Zed
 
@@ -324,7 +324,7 @@ class ExperimentalFeatures implements lc.StaticFeature {
     }
     fillClientCapabilities(capabilities: lc.ClientCapabilities): void {
         capabilities.experimental = {
-            snippetTextEdit: false,
+            snippetTextEdit: true,
             codeActionGroup: true,
             hoverActions: true,
             serverStatusNotification: true,
@@ -21,6 +21,7 @@ quote = "1.0.20"
 ungrammar = "1.16.1"
 either.workspace = true
 itertools.workspace = true
+edition.workspace = true
 # Avoid adding more dependencies to this crate
 
 [lints]
@@ -1,7 +1,15 @@
 //! Generates descriptor structures for unstable features from the unstable book
 //! and lints from rustc, rustdoc, and clippy.
-use std::{borrow::Cow, fs, path::Path};
+#![allow(clippy::disallowed_types)]
+
+use std::{
+    collections::{hash_map, HashMap},
+    fs,
+    path::Path,
+    str::FromStr,
+};
+
+use edition::Edition;
 use stdx::format_to;
 use xshell::{cmd, Shell};
 
|
||||||
|
|
||||||
let mut contents = String::from(
|
let mut contents = String::from(
|
||||||
r"
|
r"
|
||||||
|
use span::Edition;
|
||||||
|
|
||||||
|
use crate::Severity;
|
||||||
|
|
||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
pub struct Lint {
|
pub struct Lint {
|
||||||
pub label: &'static str,
|
pub label: &'static str,
|
||||||
pub description: &'static str,
|
pub description: &'static str,
|
||||||
|
pub default_severity: Severity,
|
||||||
|
pub warn_since: Option<Edition>,
|
||||||
|
pub deny_since: Option<Edition>,
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct LintGroup {
|
pub struct LintGroup {
|
||||||
|
@@ -68,7 +83,7 @@ pub struct LintGroup {
     let lints_json = project_root().join("./target/clippy_lints.json");
     cmd!(
         sh,
-        "curl https://rust-lang.github.io/rust-clippy/master/lints.json --output {lints_json}"
+        "curl https://rust-lang.github.io/rust-clippy/stable/lints.json --output {lints_json}"
     )
     .run()
     .unwrap();
@@ -85,6 +100,48 @@ pub struct LintGroup {
     );
 }
 
+#[derive(Debug, PartialEq, Eq, PartialOrd, Ord)]
+enum Severity {
+    Allow,
+    Warn,
+    Deny,
+}
+
+impl std::fmt::Display for Severity {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(
+            f,
+            "Severity::{}",
+            match self {
+                Severity::Allow => "Allow",
+                Severity::Warn => "Warning",
+                Severity::Deny => "Error",
+            }
+        )
+    }
+}
+
+impl FromStr for Severity {
+    type Err = &'static str;
+
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        match s {
+            "allow" => Ok(Self::Allow),
+            "warn" => Ok(Self::Warn),
+            "deny" => Ok(Self::Deny),
+            _ => Err("invalid severity"),
+        }
+    }
+}
+
+#[derive(Debug)]
+struct Lint {
+    description: String,
+    default_severity: Severity,
+    warn_since: Option<Edition>,
+    deny_since: Option<Edition>,
+}
+
 /// Parses the output of `rustdoc -Whelp` and prints `Lint` and `LintGroup` constants into `buf`.
 ///
 /// As of writing, the output of `rustc -Whelp` (not rustdoc) has the following format:
@@ -108,52 +165,203 @@ pub struct LintGroup {
 /// `rustdoc -Whelp` (and any other custom `rustc` driver) adds another two
 /// tables after the `rustc` ones, with a different title but the same format.
 fn generate_lint_descriptor(sh: &Shell, buf: &mut String) {
-    let stdout = cmd!(sh, "rustdoc -Whelp").read().unwrap();
-    let lints_pat = "---- ------- -------\n";
-    let lint_groups_pat = "---- ---------\n";
-    let lints = find_and_slice(&stdout, lints_pat);
-    let lint_groups = find_and_slice(lints, lint_groups_pat);
-    let lints_rustdoc = find_and_slice(lint_groups, lints_pat);
-    let lint_groups_rustdoc = find_and_slice(lints_rustdoc, lint_groups_pat);
+    fn get_lints_as_text(
+        stdout: &str,
+    ) -> (
+        impl Iterator<Item = (String, &str, Severity)> + '_,
+        impl Iterator<Item = (String, Lint, impl Iterator<Item = String> + '_)> + '_,
+        impl Iterator<Item = (String, &str, Severity)> + '_,
+        impl Iterator<Item = (String, Lint, impl Iterator<Item = String> + '_)> + '_,
+    ) {
+        let lints_pat = "---- ------- -------\n";
+        let lint_groups_pat = "---- ---------\n";
+        let lints = find_and_slice(stdout, lints_pat);
+        let lint_groups = find_and_slice(lints, lint_groups_pat);
+        let lints_rustdoc = find_and_slice(lint_groups, lints_pat);
+        let lint_groups_rustdoc = find_and_slice(lints_rustdoc, lint_groups_pat);
+
+        let lints = lints.lines().take_while(|l| !l.is_empty()).map(|line| {
+            let (name, rest) = line.trim().split_once(char::is_whitespace).unwrap();
+            let (severity, description) = rest.trim().split_once(char::is_whitespace).unwrap();
+            (name.trim().replace('-', "_"), description.trim(), severity.parse().unwrap())
+        });
+        let lint_groups = lint_groups.lines().take_while(|l| !l.is_empty()).map(|line| {
+            let (name, lints) = line.trim().split_once(char::is_whitespace).unwrap();
+            let label = name.trim().replace('-', "_");
+            let lint = Lint {
+                description: format!("lint group for: {}", lints.trim()),
+                default_severity: Severity::Allow,
+                warn_since: None,
+                deny_since: None,
+            };
+            let children = lints
+                .split_ascii_whitespace()
+                .map(|s| s.trim().trim_matches(',').replace('-', "_"));
+            (label, lint, children)
+        });
+
+        let lints_rustdoc = lints_rustdoc.lines().take_while(|l| !l.is_empty()).map(|line| {
+            let (name, rest) = line.trim().split_once(char::is_whitespace).unwrap();
+            let (severity, description) = rest.trim().split_once(char::is_whitespace).unwrap();
+            (name.trim().replace('-', "_"), description.trim(), severity.parse().unwrap())
+        });
+        let lint_groups_rustdoc =
+            lint_groups_rustdoc.lines().take_while(|l| !l.is_empty()).map(|line| {
+                let (name, lints) = line.trim().split_once(char::is_whitespace).unwrap();
+                let label = name.trim().replace('-', "_");
+                let lint = Lint {
+                    description: format!("lint group for: {}", lints.trim()),
+                    default_severity: Severity::Allow,
+                    warn_since: None,
+                    deny_since: None,
+                };
+                let children = lints
+                    .split_ascii_whitespace()
+                    .map(|s| s.trim().trim_matches(',').replace('-', "_"));
+                (label, lint, children)
+            });
+
+        (lints, lint_groups, lints_rustdoc, lint_groups_rustdoc)
+    }
+
+    fn insert_lints<'a>(
+        edition: Edition,
+        lints_map: &mut HashMap<String, Lint>,
+        lint_groups_map: &mut HashMap<String, (Lint, Vec<String>)>,
+        lints: impl Iterator<Item = (String, &'a str, Severity)>,
+        lint_groups: impl Iterator<Item = (String, Lint, impl Iterator<Item = String>)>,
+    ) {
+        for (lint_name, lint_description, lint_severity) in lints {
+            let lint = lints_map.entry(lint_name).or_insert_with(|| Lint {
+                description: lint_description.to_owned(),
+                default_severity: Severity::Allow,
+                warn_since: None,
+                deny_since: None,
+            });
+            if lint_severity == Severity::Warn
+                && lint.warn_since.is_none()
+                && lint.default_severity < Severity::Warn
+            {
+                lint.warn_since = Some(edition);
+            }
+            if lint_severity == Severity::Deny
+                && lint.deny_since.is_none()
+                && lint.default_severity < Severity::Deny
+            {
+                lint.deny_since = Some(edition);
+            }
+        }
+
+        for (group_name, lint, children) in lint_groups {
+            match lint_groups_map.entry(group_name) {
+                hash_map::Entry::Vacant(entry) => {
+                    entry.insert((lint, Vec::from_iter(children)));
+                }
+                hash_map::Entry::Occupied(mut entry) => {
+                    // Overwrite, because some groups (such as edition incompatibility) are changed.
+                    *entry.get_mut() = (lint, Vec::from_iter(children));
+                }
+            }
+        }
+    }
+
+    fn get_lints(
+        sh: &Shell,
+        edition: Edition,
+        lints_map: &mut HashMap<String, Lint>,
+        lint_groups_map: &mut HashMap<String, (Lint, Vec<String>)>,
+        lints_rustdoc_map: &mut HashMap<String, Lint>,
+        lint_groups_rustdoc_map: &mut HashMap<String, (Lint, Vec<String>)>,
+    ) {
+        let edition_str = edition.to_string();
+        let stdout = cmd!(sh, "rustdoc +nightly -Whelp -Zunstable-options --edition={edition_str}")
+            .read()
+            .unwrap();
+        let (lints, lint_groups, lints_rustdoc, lint_groups_rustdoc) = get_lints_as_text(&stdout);
+
+        insert_lints(edition, lints_map, lint_groups_map, lints, lint_groups);
+        insert_lints(
+            edition,
+            lints_rustdoc_map,
+            lint_groups_rustdoc_map,
+            lints_rustdoc,
+            lint_groups_rustdoc,
+        );
+    }
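The escalation rule in `insert_lints` can be read as: record the first edition at which rustc starts warning on (or denying) a lint whose base severity is lower, and never overwrite an earlier recording. A toy model of the same bookkeeping, with editions as plain numbers:

#[derive(Clone, Copy, PartialEq, PartialOrd)]
enum Severity { Allow, Warn, Deny }

struct Lint {
    default_severity: Severity,
    warn_since: Option<u16>,
    deny_since: Option<u16>,
}

// Mirrors the two `if` blocks in `insert_lints` above.
fn observe(lint: &mut Lint, edition: u16, severity: Severity) {
    if severity == Severity::Warn
        && lint.warn_since.is_none()
        && lint.default_severity < Severity::Warn
    {
        lint.warn_since = Some(edition);
    }
    if severity == Severity::Deny
        && lint.deny_since.is_none()
        && lint.default_severity < Severity::Deny
    {
        lint.deny_since = Some(edition);
    }
}

fn main() {
    let mut lint = Lint { default_severity: Severity::Allow, warn_since: None, deny_since: None };
    observe(&mut lint, 2018, Severity::Allow);
    observe(&mut lint, 2021, Severity::Warn); // first warned in 2021
    observe(&mut lint, 2024, Severity::Deny); // first denied in 2024
    assert_eq!(lint.warn_since, Some(2021));
    assert_eq!(lint.deny_since, Some(2024));
}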
+
+    let basic_lints = cmd!(sh, "rustdoc +nightly -Whelp --edition=2015").read().unwrap();
+    let (lints, lint_groups, lints_rustdoc, lint_groups_rustdoc) = get_lints_as_text(&basic_lints);
+
+    let mut lints = lints
+        .map(|(label, description, severity)| {
+            (
+                label,
+                Lint {
+                    description: description.to_owned(),
+                    default_severity: severity,
+                    warn_since: None,
+                    deny_since: None,
+                },
+            )
+        })
+        .collect::<HashMap<_, _>>();
+    let mut lint_groups = lint_groups
+        .map(|(label, lint, children)| (label, (lint, Vec::from_iter(children))))
+        .collect::<HashMap<_, _>>();
+    let mut lints_rustdoc = lints_rustdoc
+        .map(|(label, description, severity)| {
+            (
+                label,
+                Lint {
+                    description: description.to_owned(),
+                    default_severity: severity,
+                    warn_since: None,
+                    deny_since: None,
+                },
+            )
+        })
+        .collect::<HashMap<_, _>>();
+    let mut lint_groups_rustdoc = lint_groups_rustdoc
+        .map(|(label, lint, children)| (label, (lint, Vec::from_iter(children))))
+        .collect::<HashMap<_, _>>();
+
+    for edition in Edition::iter().skip(1) {
+        get_lints(
+            sh,
+            edition,
+            &mut lints,
+            &mut lint_groups,
+            &mut lints_rustdoc,
+            &mut lint_groups_rustdoc,
+        );
+    }
+
+    let mut lints = Vec::from_iter(lints);
+    lints.sort_unstable_by(|a, b| a.0.cmp(&b.0));
+    let mut lint_groups = Vec::from_iter(lint_groups);
+    lint_groups.sort_unstable_by(|a, b| a.0.cmp(&b.0));
+    let mut lints_rustdoc = Vec::from_iter(lints_rustdoc);
+    lints_rustdoc.sort_unstable_by(|a, b| a.0.cmp(&b.0));
+    let mut lint_groups_rustdoc = Vec::from_iter(lint_groups_rustdoc);
+    lint_groups_rustdoc.sort_unstable_by(|a, b| a.0.cmp(&b.0));
 
     buf.push_str(r#"pub const DEFAULT_LINTS: &[Lint] = &["#);
     buf.push('\n');
 
-    let lints = lints.lines().take_while(|l| !l.is_empty()).map(|line| {
-        let (name, rest) = line.trim().split_once(char::is_whitespace).unwrap();
-        let (_default_level, description) = rest.trim().split_once(char::is_whitespace).unwrap();
-        (name.trim(), Cow::Borrowed(description.trim()), vec![])
-    });
-    let lint_groups = lint_groups.lines().take_while(|l| !l.is_empty()).map(|line| {
-        let (name, lints) = line.trim().split_once(char::is_whitespace).unwrap();
-        (
-            name.trim(),
-            format!("lint group for: {}", lints.trim()).into(),
-            lints
-                .split_ascii_whitespace()
-                .map(|s| s.trim().trim_matches(',').replace('-', "_"))
-                .collect(),
-        )
-    });
-
-    let mut lints = lints.chain(lint_groups).collect::<Vec<_>>();
-    lints.sort_by(|(ident, ..), (ident2, ..)| ident.cmp(ident2));
-
-    for (name, description, ..) in &lints {
-        push_lint_completion(buf, &name.replace('-', "_"), description);
+    for (name, lint) in &lints {
+        push_lint_completion(buf, name, lint);
+    }
+    for (name, (group, _)) in &lint_groups {
+        push_lint_completion(buf, name, group);
     }
     buf.push_str("];\n\n");
 
     buf.push_str(r#"pub const DEFAULT_LINT_GROUPS: &[LintGroup] = &["#);
-    for (name, description, children) in &lints {
-        if !children.is_empty() {
-            // HACK: warnings is emitted with a general description, not with its members
-            if name == &"warnings" {
-                push_lint_group(buf, name, description, &Vec::new());
-                continue;
-            }
-            push_lint_group(buf, &name.replace('-', "_"), description, children);
-        }
+    for (name, (lint, children)) in &lint_groups {
+        if name == "warnings" {
+            continue;
+        }
+        push_lint_group(buf, name, lint, children);
     }
     buf.push('\n');
     buf.push_str("];\n");
@@ -164,37 +372,17 @@ fn generate_lint_descriptor(sh: &Shell, buf: &mut String) {
     buf.push_str(r#"pub const RUSTDOC_LINTS: &[Lint] = &["#);
     buf.push('\n');
 
-    let lints_rustdoc = lints_rustdoc.lines().take_while(|l| !l.is_empty()).map(|line| {
-        let (name, rest) = line.trim().split_once(char::is_whitespace).unwrap();
-        let (_default_level, description) = rest.trim().split_once(char::is_whitespace).unwrap();
-        (name.trim(), Cow::Borrowed(description.trim()), vec![])
-    });
-    let lint_groups_rustdoc =
-        lint_groups_rustdoc.lines().take_while(|l| !l.is_empty()).map(|line| {
-            let (name, lints) = line.trim().split_once(char::is_whitespace).unwrap();
-            (
-                name.trim(),
-                format!("lint group for: {}", lints.trim()).into(),
-                lints
-                    .split_ascii_whitespace()
-                    .map(|s| s.trim().trim_matches(',').replace('-', "_"))
-                    .collect(),
-            )
-        });
-
-    let mut lints_rustdoc = lints_rustdoc.chain(lint_groups_rustdoc).collect::<Vec<_>>();
-    lints_rustdoc.sort_by(|(ident, ..), (ident2, ..)| ident.cmp(ident2));
-
-    for (name, description, ..) in &lints_rustdoc {
-        push_lint_completion(buf, &name.replace('-', "_"), description)
+    for (name, lint) in &lints_rustdoc {
+        push_lint_completion(buf, name, lint);
+    }
+    for (name, (group, _)) in &lint_groups_rustdoc {
+        push_lint_completion(buf, name, group);
     }
     buf.push_str("];\n\n");
 
     buf.push_str(r#"pub const RUSTDOC_LINT_GROUPS: &[LintGroup] = &["#);
-    for (name, description, children) in &lints_rustdoc {
-        if !children.is_empty() {
-            push_lint_group(buf, &name.replace('-', "_"), description, children);
-        }
+    for (name, (lint, children)) in &lint_groups_rustdoc {
+        push_lint_group(buf, name, lint, children);
     }
     buf.push('\n');
     buf.push_str("];\n");
@@ -228,13 +416,19 @@ fn generate_feature_descriptor(buf: &mut String, src_dir: &Path) {
 
     buf.push_str(r#"pub const FEATURES: &[Lint] = &["#);
     for (feature_ident, doc) in features.into_iter() {
-        push_lint_completion(buf, &feature_ident, &doc)
+        let lint = Lint {
+            description: doc,
+            default_severity: Severity::Allow,
+            warn_since: None,
+            deny_since: None,
+        };
+        push_lint_completion(buf, &feature_ident, &lint);
     }
     buf.push('\n');
     buf.push_str("];\n");
 }
 
-#[derive(Default)]
+#[derive(Debug, Default)]
 struct ClippyLint {
     help: String,
     id: String,
@@ -295,8 +489,14 @@ fn generate_descriptor_clippy(buf: &mut String, path: &Path) {
     buf.push('\n');
     for clippy_lint in clippy_lints.into_iter() {
         let lint_ident = format!("clippy::{}", clippy_lint.id);
-        let doc = clippy_lint.help;
-        push_lint_completion(buf, &lint_ident, &doc);
+        let lint = Lint {
+            description: clippy_lint.help,
+            // Allow clippy lints by default, not all users want them.
+            default_severity: Severity::Allow,
+            warn_since: None,
+            deny_since: None,
+        };
+        push_lint_completion(buf, &lint_ident, &lint);
     }
     buf.push_str("];\n");
 
@@ -306,33 +506,59 @@ fn generate_descriptor_clippy(buf: &mut String, path: &Path) {
         if !children.is_empty() {
             let lint_ident = format!("clippy::{id}");
             let description = format!("lint group for: {}", children.join(", "));
-            push_lint_group(buf, &lint_ident, &description, &children);
+            let lint = Lint {
+                description,
+                default_severity: Severity::Allow,
+                warn_since: None,
+                deny_since: None,
+            };
+            push_lint_group(buf, &lint_ident, &lint, &children);
         }
     }
     buf.push('\n');
     buf.push_str("];\n");
 }
 
-fn push_lint_completion(buf: &mut String, label: &str, description: &str) {
+fn push_lint_completion(buf: &mut String, name: &str, lint: &Lint) {
     format_to!(
         buf,
         r###"    Lint {{
         label: "{}",
         description: r##"{}"##,
-    }},"###,
-        label,
-        description,
+        default_severity: {},
+        warn_since: "###,
+        name,
+        lint.description,
+        lint.default_severity,
+    );
+    match lint.warn_since {
+        Some(edition) => format_to!(buf, "Some(Edition::Edition{edition})"),
+        None => buf.push_str("None"),
+    }
+    format_to!(
+        buf,
+        r###",
+        deny_since: "###
+    );
+    match lint.deny_since {
+        Some(edition) => format_to!(buf, "Some(Edition::Edition{edition})"),
+        None => buf.push_str("None"),
+    }
+    format_to!(
+        buf,
+        r###",
+    }},"###
     );
 }
 
-fn push_lint_group(buf: &mut String, label: &str, description: &str, children: &[String]) {
+fn push_lint_group(buf: &mut String, name: &str, lint: &Lint, children: &[String]) {
     buf.push_str(
         r###"    LintGroup {
         lint:
 "###,
     );
 
-    push_lint_completion(buf, label, description);
+    push_lint_completion(buf, name, lint);
 
     let children = format!(
         "&[{}]",