Mirror of https://github.com/rust-lang/rust-analyzer, synced 2024-12-25 12:33:33 +00:00
Merge branch 'master' into add-disable-diagnostics
commit b50bb800a5
134 changed files with 1907 additions and 1282 deletions
.github/workflows/ci.yaml (vendored): 32 changed lines
@@ -16,20 +16,6 @@ env:
   RUSTUP_MAX_RETRIES: 10
 
 jobs:
-  # rust-audit:
-  #   name: Audit Rust vulnerabilities
-  #   runs-on: ubuntu-latest
-  #   steps:
-  #   - name: Checkout repository
-  #     uses: actions/checkout@v2
-
-  #   - uses: actions-rs/install@v0.1
-  #     with:
-  #       crate: cargo-audit
-  #       use-tool-cache: true
-
-  #   - run: cargo audit
-
   rust:
     name: Rust
     runs-on: ${{ matrix.os }}
@@ -88,11 +74,14 @@ jobs:
       if: matrix.os == 'windows-latest'
       run: Remove-Item ./target/debug/xtask.exe, ./target/debug/deps/xtask.exe
 
-  # Weird target to catch non-portable code
-  rust-power:
-    name: Rust Power
+  # Weird targets to catch non-portable code
+  rust-cross:
+    name: Rust Cross
     runs-on: ubuntu-latest
 
+    env:
+      targets: "powerpc-unknown-linux-gnu x86_64-unknown-linux-musl"
+
    steps:
     - name: Checkout repository
       uses: actions/checkout@v2
@@ -103,7 +92,9 @@ jobs:
         toolchain: stable
         profile: minimal
         override: true
-        target: 'powerpc-unknown-linux-gnu'
+
+    - name: Install Rust targets
+      run: rustup target add ${{ env.targets }}
 
     - name: Cache cargo directories
       uses: actions/cache@v2
@@ -114,7 +105,10 @@ jobs:
         key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
 
     - name: Check
-      run: cargo check --target=powerpc-unknown-linux-gnu --all-targets
+      run: |
+        for target in ${{ env.targets }}; do
+          cargo check --target=$target --all-targets
+        done
 
   typescript:
     name: TypeScript
Cargo.lock (generated): 36 changed lines
@@ -125,9 +125,9 @@ checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822"
 
 [[package]]
 name = "chalk-derive"
-version = "0.19.0"
+version = "0.21.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "654c611946ba2629c5028cb7708687af975faf2c29d731824cb294c873df4697"
+checksum = "c1df0dbb57d74b4acd20f20fa66ab2acd09776b79eaeb9d8f947b2f3e01c40bf"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -137,9 +137,9 @@ dependencies = [
 
 [[package]]
 name = "chalk-ir"
-version = "0.19.0"
+version = "0.21.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0a5341fbc654ca886b73b804a36aebf0e621057ccc1a68e9815b5b39b3ac9ae8"
+checksum = "44361a25dbdb1dc428f56ad7a3c21ba9ca12f3225c26a47919ff6fcb10a583d4"
 dependencies = [
  "chalk-derive",
  "lazy_static",
@@ -147,9 +147,9 @@ dependencies = [
 
 [[package]]
 name = "chalk-recursive"
-version = "0.19.0"
+version = "0.21.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4484807b155b5a411e6135d330295f9ba5042e2920b8712c6574ca6ea91e9ee5"
+checksum = "dd89556b98de156d5eaf21077d297cd2198628f10f2df140798ea3a5dd84bc86"
 dependencies = [
  "chalk-derive",
  "chalk-ir",
@@ -160,9 +160,9 @@ dependencies = [
 
 [[package]]
 name = "chalk-solve"
-version = "0.19.0"
+version = "0.21.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "281f82facd2538997fbe52132b1941ed213d266748215c31d15f62a8664429ad"
+checksum = "a886da37a0dc457057d86f78f026f7a09c6d8088aa13f4f4127fdb8dc80119a3"
 dependencies = [
  "chalk-derive",
  "chalk-ir",
@@ -418,9 +418,9 @@ dependencies = [
 
 [[package]]
 name = "hashbrown"
-version = "0.8.1"
+version = "0.8.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "34f595585f103464d8d2f6e9864682d74c1601fed5e07d62b1c9058dba8246fb"
+checksum = "e91b62f79061a0bc2e046024cb7ba44b08419ed238ecbd9adbd787434b9e8c25"
 dependencies = [
  "autocfg",
 ]
@@ -465,9 +465,9 @@ dependencies = [
 
 [[package]]
 name = "indexmap"
-version = "1.5.0"
+version = "1.5.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5b88cd59ee5f71fea89a62248fc8f387d44400cefe05ef548466d61ced9029a7"
+checksum = "86b45e59b16c76b11bf9738fd5d38879d3bd28ad292d7b313608becb17ae2df9"
 dependencies = [
  "autocfg",
  "hashbrown",
@@ -852,9 +852,9 @@ dependencies = [
 
 [[package]]
 name = "perf-event-open-sys"
-version = "0.3.1"
+version = "0.3.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "95db63e37862bc1b842135d2234ef9418f222cc660c6752f45e7cf9ddfb97f96"
+checksum = "83e7183862f36d10263d0a1ccaef50fef734ade948bf026afd1bd97355c78273"
 dependencies = [
  "libc",
 ]
@@ -871,9 +871,9 @@ dependencies = [
 
 [[package]]
 name = "pico-args"
-version = "0.3.3"
+version = "0.3.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1b1eee8b1f4966c8343d7ca0f5a8452cd35d5610a2e0efbe2a68cae44bef2046"
+checksum = "28b9b4df73455c861d7cbf8be42f01d3b373ed7f02e378d55fa84eafc6f638b1"
 
 [[package]]
 name = "plain"
@@ -1694,9 +1694,9 @@ dependencies = [
 
 [[package]]
 name = "tracing-tree"
-version = "0.1.4"
+version = "0.1.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "37ee7f0f53ed2093971a698db799ef56a2dfd89b32e3aeb5165f0e637a02be04"
+checksum = "e1a3dc4774db3a6b2d66a4f8d8de670e874ec3ed55615860c994927419b32c5f"
 dependencies = [
  "ansi_term",
  "atty",
@@ -74,7 +74,7 @@ impl fmt::Display for Position
 impl Expect {
     pub fn assert_eq(&self, actual: &str) {
         let trimmed = self.trimmed();
-        if &trimmed == actual {
+        if trimmed == actual {
             return;
         }
         Runtime::fail_expect(self, &trimmed, actual);
@@ -1,4 +1,4 @@
-//! cargo_check provides the functionality needed to run `cargo check` or
+//! Flycheck provides the functionality needed to run `cargo check` or
 //! another compatible command (f.x. clippy) in a background thread and provide
 //! LSP diagnostics based on the output of the command.
 
@@ -147,6 +147,12 @@ impl FlycheckActor {
                 // avoid busy-waiting.
                 let cargo_handle = self.cargo_handle.take().unwrap();
                 let res = cargo_handle.join();
+                if res.is_err() {
+                    log::error!(
+                        "Flycheck failed to run the following command: {:?}",
+                        self.check_command()
+                    )
+                }
                 self.send(Message::Progress(Progress::DidFinish(res)));
             }
             Event::CheckEvent(Some(message)) => match message {
@@ -253,7 +259,7 @@ impl CargoHandle {
             return Err(io::Error::new(
                 io::ErrorKind::Other,
                 format!(
-                    "Cargo watcher failed,the command produced no valid metadata (exit code: {:?})",
+                    "Cargo watcher failed, the command produced no valid metadata (exit code: {:?})",
                     exit_status
                 ),
             ));
@@ -51,7 +51,7 @@ impl<'a> SubstituteTypeParams<'a> {
             // this is a trait impl, so we need to skip the first type parameter -- this is a bit hacky
             .skip(1)
             // The actual list of trait type parameters may be longer than the one
-            // used in the `impl` block due to trailing default type parametrs.
+            // used in the `impl` block due to trailing default type parameters.
             // For that case we extend the `substs` with an empty iterator so we
             // can still hit those trailing values and check if they actually have
             // a default type. If they do, go for that type from `hir` to `ast` so
@@ -1,10 +1,10 @@
+use itertools::Itertools;
 use ra_syntax::{
     ast::{self, AstNode},
     Direction, SmolStr,
     SyntaxKind::{IDENT, WHITESPACE},
     TextRange, TextSize,
 };
-use stdx::SepBy;
 
 use crate::{
     assist_context::{AssistContext, Assists},
@@ -61,9 +61,9 @@ pub(crate) fn add_custom_impl(acc: &mut Assists, ctx: &AssistContext) -> Option<
         .filter(|t| t != trait_token.text())
         .collect::<Vec<SmolStr>>();
     let has_more_derives = !new_attr_input.is_empty();
-    let new_attr_input = new_attr_input.iter().sep_by(", ").surround_with("(", ")").to_string();
 
     if has_more_derives {
+        let new_attr_input = format!("({})", new_attr_input.iter().format(", "));
         builder.replace(input.syntax().text_range(), new_attr_input);
     } else {
         let attr_range = attr.syntax().text_range();
@@ -41,7 +41,7 @@ pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext) -> Option<(
     let name_ref = ast::NameRef::cast(ident.parent())?;
     let def = match classify_name_ref(&ctx.sema, &name_ref)? {
         NameRefClass::Definition(def) => def,
-        NameRefClass::FieldShorthand { .. } => return None,
+        NameRefClass::ExternCrate(_) | NameRefClass::FieldShorthand { .. } => return None,
     };
     let fun = match def {
         Definition::ModuleDef(hir::ModuleDef::Function(it)) => it,
@@ -4,7 +4,7 @@ use crate::{utils::invert_boolean_expression, AssistContext, AssistId, AssistKin
 
 // Assist: apply_demorgan
 //
-// Apply [De Morgan's law](https://en.wikipedia.org/wiki/De_Morgan%27s_laws).
+// Apply https://en.wikipedia.org/wiki/De_Morgan%27s_laws[De Morgan's law].
 // This transforms expressions of the form `!l || !r` into `!(l && r)`.
 // This also works with `&&`. This assist can only be applied with the cursor
 // on either `||` or `&&`, with both operands being a negation of some kind.
@@ -121,7 +121,7 @@ fn add_vis_to_referenced_record_field(acc: &mut Assists, ctx: &AssistContext) ->
         Some(cap) => match current_visibility {
             Some(current_visibility) => builder.replace_snippet(
                 cap,
-                dbg!(current_visibility.syntax()).text_range(),
+                current_visibility.syntax().text_range(),
                 format!("$0{}", missing_visibility),
             ),
             None => builder.insert_snippet(cap, offset, format!("$0{} ", missing_visibility)),
@@ -1,5 +1,6 @@
+use itertools::Itertools;
 use ra_syntax::ast::{self, AstNode, GenericParamsOwner, NameOwner};
-use stdx::{format_to, SepBy};
+use stdx::format_to;
 
 use crate::{AssistContext, AssistId, AssistKind, Assists};
 
@@ -50,7 +51,7 @@ pub(crate) fn generate_impl(acc: &mut Assists, ctx: &AssistContext) -> Option<()
             .filter_map(|it| it.name())
             .map(|it| it.text().clone());
 
-        let generic_params = lifetime_params.chain(type_params).sep_by(", ");
+        let generic_params = lifetime_params.chain(type_params).format(", ");
         format_to!(buf, "<{}>", generic_params)
     }
     match ctx.config.snippet_cap {
@@ -1,9 +1,10 @@
 use hir::Adt;
+use itertools::Itertools;
 use ra_syntax::{
     ast::{self, AstNode, GenericParamsOwner, NameOwner, StructKind, VisibilityOwner},
     T,
 };
-use stdx::{format_to, SepBy};
+use stdx::format_to;
 
 use crate::{AssistContext, AssistId, AssistKind, Assists};
 
@@ -52,8 +53,8 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext) -> Option<()>
     let params = field_list
         .fields()
         .filter_map(|f| Some(format!("{}: {}", f.name()?.syntax(), f.ty()?.syntax())))
-        .sep_by(", ");
-    let fields = field_list.fields().filter_map(|f| f.name()).sep_by(", ");
+        .format(", ");
+    let fields = field_list.fields().filter_map(|f| f.name()).format(", ");
 
     format_to!(buf, " {}fn new({}) -> Self {{ Self {{ {} }} }}", vis, params, fields);
 
@@ -102,7 +103,7 @@ fn generate_impl_text(strukt: &ast::Struct, code: &str) -> String {
         .map(|it| it.text().clone());
     let type_params =
         type_params.type_params().filter_map(|it| it.name()).map(|it| it.text().clone());
-    format_to!(buf, "<{}>", lifetime_params.chain(type_params).sep_by(", "))
+    format_to!(buf, "<{}>", lifetime_params.chain(type_params).format(", "))
 }
 
 format_to!(buf, " {{\n{}\n}}\n", code);
 
@@ -66,13 +66,13 @@ pub struct GroupLabel(pub String);
 
 #[derive(Debug, Clone)]
 pub struct Assist {
-    pub id: AssistId,
+    id: AssistId,
     /// Short description of the assist, as shown in the UI.
-    pub label: String,
-    pub group: Option<GroupLabel>,
+    label: String,
+    group: Option<GroupLabel>,
     /// Target ranges are used to sort assists: the smaller the target range,
     /// the more specific assist is, and so it should be sorted first.
-    pub target: TextRange,
+    target: TextRange,
 }
 
 #[derive(Debug, Clone)]
@@ -120,10 +120,25 @@ impl Assist {
         group: Option<GroupLabel>,
         target: TextRange,
     ) -> Assist {
-        // FIXME: make fields private, so that this invariant can't be broken
         assert!(label.starts_with(|c: char| c.is_uppercase()));
         Assist { id, label, group, target }
     }
+
+    pub fn id(&self) -> AssistId {
+        self.id
+    }
+
+    pub fn label(&self) -> String {
+        self.label.clone()
+    }
+
+    pub fn group(&self) -> Option<GroupLabel> {
+        self.group.clone()
+    }
+
+    pub fn target(&self) -> TextRange {
+        self.target
+    }
 }
 
 mod handlers {
@@ -20,7 +20,7 @@ pub(crate) fn check_assist(assist: Handler, ra_fixture_before: &str, ra_fixture_
 
 // FIXME: instead of having a separate function here, maybe use
 // `extract_ranges` and mark the target as `<target> </target>` in the
-// fixuture?
+// fixture?
 pub(crate) fn check_assist_target(assist: Handler, ra_fixture: &str, target: &str) {
     check(assist, ra_fixture, ExpectedResult::Target(target));
 }
@@ -257,7 +257,7 @@ pub use prelude::*;
             .find(|dep| &dep.name.to_string() == std_crate)?
             .krate;
 
-        let mut module = std_crate.root_module(db)?;
+        let mut module = std_crate.root_module(db);
         for segment in path {
             module = module.children(db).find_map(|child| {
                 let name = child.name(db)?;
@@ -4,6 +4,7 @@ use std::{iter, sync::Arc};
 use arrayvec::ArrayVec;
 use either::Either;
 use hir_def::{
+    adt::ReprKind,
     adt::StructKind,
     adt::VariantData,
     builtin_type::BuiltinType,
@@ -83,9 +84,9 @@ impl Crate {
             .collect()
     }
 
-    pub fn root_module(self, db: &dyn HirDatabase) -> Option<Module> {
+    pub fn root_module(self, db: &dyn HirDatabase) -> Module {
         let module_id = db.crate_def_map(self.id).root;
-        Some(Module::new(self, module_id))
+        Module::new(self, module_id)
     }
 
     pub fn root_file(self, db: &dyn HirDatabase) -> FileId {
@@ -431,6 +432,10 @@ impl Struct {
         Type::from_def(db, self.id.lookup(db.upcast()).container.module(db.upcast()).krate, self.id)
     }
 
+    pub fn repr(self, db: &dyn HirDatabase) -> Option<ReprKind> {
+        db.struct_data(self.id).repr.clone()
+    }
+
     fn variant_data(self, db: &dyn HirDatabase) -> Arc<VariantData> {
         db.struct_data(self.id).variant_data.clone()
     }
@@ -1253,6 +1258,19 @@ impl Type {
         )
     }
 
+    pub fn is_packed(&self, db: &dyn HirDatabase) -> bool {
+        let adt_id = match self.ty.value {
+            Ty::Apply(ApplicationTy { ctor: TypeCtor::Adt(adt_id), .. }) => adt_id,
+            _ => return false,
+        };
+
+        let adt = adt_id.into();
+        match adt {
+            Adt::Struct(s) => matches!(s.repr(db), Some(ReprKind::Packed)),
+            _ => false,
+        }
+    }
+
     pub fn is_raw_ptr(&self) -> bool {
         matches!(&self.ty.value, Ty::Apply(ApplicationTy { ctor: TypeCtor::RawPtr(..), .. }))
     }
@@ -1,8 +1,6 @@
 //! FIXME: write short doc here
 pub use hir_def::diagnostics::UnresolvedModule;
-pub use hir_expand::diagnostics::{
-    AstDiagnostic, Diagnostic, DiagnosticSink, DiagnosticSinkBuilder,
-};
+pub use hir_expand::diagnostics::{Diagnostic, DiagnosticSink, DiagnosticSinkBuilder};
 pub use hir_ty::diagnostics::{
     MismatchedArgCount, MissingFields, MissingMatchArms, MissingOkInTailExpr, NoSuchField,
 };
@@ -49,7 +49,7 @@ pub use hir_def::{
     docs::Documentation,
     nameres::ModuleSource,
     path::{ModPath, Path, PathKind},
-    type_ref::Mutability,
+    type_ref::{Mutability, TypeRef},
 };
 pub use hir_expand::{
     hygiene::Hygiene, name::Name, HirFileId, InFile, MacroCallId, MacroCallLoc,
@@ -8,7 +8,7 @@ use hir_def::{
     resolver::{self, HasResolver, Resolver},
     AsMacroCall, FunctionId, TraitId, VariantId,
 };
-use hir_expand::{diagnostics::AstDiagnostic, hygiene::Hygiene, ExpansionInfo};
+use hir_expand::{hygiene::Hygiene, name::AsName, ExpansionInfo};
 use hir_ty::associated_type_shorthand_candidates;
 use itertools::Itertools;
 use ra_db::{FileId, FileRange};
@@ -24,8 +24,9 @@ use crate::{
     diagnostics::Diagnostic,
     semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx},
     source_analyzer::{resolve_hir_path, resolve_hir_path_qualifier, SourceAnalyzer},
-    AssocItem, Callable, Field, Function, HirFileId, ImplDef, InFile, Local, MacroDef, Module,
-    ModuleDef, Name, Origin, Path, ScopeDef, Trait, Type, TypeAlias, TypeParam, VariantDef,
+    AssocItem, Callable, Crate, Field, Function, HirFileId, ImplDef, InFile, Local, MacroDef,
+    Module, ModuleDef, Name, Origin, Path, ScopeDef, Trait, Type, TypeAlias, TypeParam, TypeRef,
+    VariantDef,
 };
 use resolver::TypeNs;
 
@@ -109,13 +110,6 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
         self.imp.parse(file_id)
     }
 
-    pub fn ast<T: AstDiagnostic + Diagnostic>(&self, d: &T) -> <T as AstDiagnostic>::AST {
-        let file_id = d.source().file_id;
-        let root = self.db.parse_or_expand(file_id).unwrap();
-        self.imp.cache(root, file_id);
-        d.ast(self.db.upcast())
-    }
-
     pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
         self.imp.expand(macro_call)
     }
@@ -145,8 +139,8 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
         self.imp.original_range(node)
     }
 
-    pub fn diagnostics_range(&self, diagnostics: &dyn Diagnostic) -> FileRange {
-        self.imp.diagnostics_range(diagnostics)
+    pub fn diagnostics_display_range(&self, diagnostics: &dyn Diagnostic) -> FileRange {
+        self.imp.diagnostics_display_range(diagnostics)
     }
 
     pub fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator<Item = SyntaxNode> + '_ {
@@ -228,6 +222,10 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
         self.imp.resolve_path(path)
     }
 
+    pub fn resolve_extern_crate(&self, extern_crate: &ast::ExternCrate) -> Option<Crate> {
+        self.imp.resolve_extern_crate(extern_crate)
+    }
+
     pub fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantDef> {
         self.imp.resolve_variant(record_lit).map(VariantDef::from)
     }
@@ -275,6 +273,18 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
     pub fn assert_contains_node(&self, node: &SyntaxNode) {
         self.imp.assert_contains_node(node)
     }
+
+    pub fn is_unsafe_method_call(&self, method_call_expr: ast::MethodCallExpr) -> bool {
+        self.imp.is_unsafe_method_call(method_call_expr)
+    }
+
+    pub fn is_unsafe_ref_expr(&self, ref_expr: &ast::RefExpr) -> bool {
+        self.imp.is_unsafe_ref_expr(ref_expr)
+    }
+
+    pub fn is_unsafe_ident_pat(&self, ident_pat: &ast::IdentPat) -> bool {
+        self.imp.is_unsafe_ident_pat(ident_pat)
+    }
 }
 
 impl<'db> SemanticsImpl<'db> {
@@ -372,10 +382,11 @@ impl<'db> SemanticsImpl<'db> {
         original_range(self.db, node.as_ref())
     }
 
-    fn diagnostics_range(&self, diagnostics: &dyn Diagnostic) -> FileRange {
-        let src = diagnostics.source();
+    fn diagnostics_display_range(&self, diagnostics: &dyn Diagnostic) -> FileRange {
+        let src = diagnostics.display_source();
         let root = self.db.parse_or_expand(src.file_id).unwrap();
         let node = src.value.to_node(&root);
         self.cache(root, src.file_id);
         original_range(self.db, src.with_value(&node))
     }
 
@@ -443,6 +454,17 @@ impl<'db> SemanticsImpl<'db> {
         self.analyze(path.syntax()).resolve_path(self.db, path)
     }
 
+    fn resolve_extern_crate(&self, extern_crate: &ast::ExternCrate) -> Option<Crate> {
+        let krate = self.scope(extern_crate.syntax()).krate()?;
+        krate.dependencies(self.db).into_iter().find_map(|dep| {
+            if dep.name == extern_crate.name_ref()?.as_name() {
+                Some(dep.krate)
+            } else {
+                None
+            }
+        })
+    }
+
     fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantId> {
         self.analyze(record_lit.syntax()).resolve_variant(self.db, record_lit)
     }
@@ -559,6 +581,90 @@ impl<'db> SemanticsImpl<'db> {
         });
         InFile::new(file_id, node)
     }
+
+    pub fn is_unsafe_method_call(&self, method_call_expr: ast::MethodCallExpr) -> bool {
+        method_call_expr
+            .expr()
+            .and_then(|expr| {
+                let field_expr = if let ast::Expr::FieldExpr(field_expr) = expr {
+                    field_expr
+                } else {
+                    return None;
+                };
+                let ty = self.type_of_expr(&field_expr.expr()?)?;
+                if !ty.is_packed(self.db) {
+                    return None;
+                }
+
+                let func = self.resolve_method_call(&method_call_expr).map(Function::from)?;
+                let is_unsafe = func.has_self_param(self.db)
+                    && matches!(func.params(self.db).first(), Some(TypeRef::Reference(..)));
+                Some(is_unsafe)
+            })
+            .unwrap_or(false)
+    }
+
+    pub fn is_unsafe_ref_expr(&self, ref_expr: &ast::RefExpr) -> bool {
+        ref_expr
+            .expr()
+            .and_then(|expr| {
+                let field_expr = match expr {
+                    ast::Expr::FieldExpr(field_expr) => field_expr,
+                    _ => return None,
+                };
+                let expr = field_expr.expr()?;
+                self.type_of_expr(&expr)
+            })
+            // Binding a reference to a packed type is possibly unsafe.
+            .map(|ty| ty.is_packed(self.db))
+            .unwrap_or(false)
+
+        // FIXME This needs layout computation to be correct. It will highlight
+        // more than it should with the current implementation.
+    }
+
+    pub fn is_unsafe_ident_pat(&self, ident_pat: &ast::IdentPat) -> bool {
+        if !ident_pat.ref_token().is_some() {
+            return false;
+        }
+
+        ident_pat
+            .syntax()
+            .parent()
+            .and_then(|parent| {
+                // `IdentPat` can live under `RecordPat` directly under `RecordPatField` or
+                // `RecordPatFieldList`. `RecordPatField` also lives under `RecordPatFieldList`,
+                // so this tries to lookup the `IdentPat` anywhere along that structure to the
+                // `RecordPat` so we can get the containing type.
+                let record_pat = ast::RecordPatField::cast(parent.clone())
+                    .and_then(|record_pat| record_pat.syntax().parent())
+                    .or_else(|| Some(parent.clone()))
+                    .and_then(|parent| {
+                        ast::RecordPatFieldList::cast(parent)?
+                            .syntax()
+                            .parent()
+                            .and_then(ast::RecordPat::cast)
+                    });
+
+                // If this doesn't match a `RecordPat`, fallback to a `LetStmt` to see if
+                // this is initialized from a `FieldExpr`.
+                if let Some(record_pat) = record_pat {
+                    self.type_of_pat(&ast::Pat::RecordPat(record_pat))
+                } else if let Some(let_stmt) = ast::LetStmt::cast(parent) {
+                    let field_expr = match let_stmt.initializer()? {
+                        ast::Expr::FieldExpr(field_expr) => field_expr,
+                        _ => return None,
+                    };
+
+                    self.type_of_expr(&field_expr.expr()?)
+                } else {
+                    None
+                }
+            })
+            // Binding a reference to a packed type is possibly unsafe.
+            .map(|ty| ty.is_packed(self.db))
+            .unwrap_or(false)
+    }
 }
 
 pub trait ToDef: AstNode + Clone {
@@ -612,6 +718,10 @@ impl<'a> SemanticsScope<'a> {
         Some(Module { id: self.resolver.module()? })
     }
 
+    pub fn krate(&self) -> Option<Crate> {
+        Some(Crate { id: self.resolver.krate()? })
+    }
+
     /// Note: `FxHashSet<TraitId>` should be treated as an opaque type, passed into `Type
     // FIXME: rename to visible_traits to not repeat scope?
     pub fn traits_in_scope(&self) -> FxHashSet<TraitId> {
@@ -265,8 +265,7 @@ impl SourceAnalyzer {
     }
 
         // This must be a normal source file rather than macro file.
-        let hir_path =
-            crate::Path::from_src(path.clone(), &Hygiene::new(db.upcast(), self.file_id))?;
+        let hir_path = Path::from_src(path.clone(), &Hygiene::new(db.upcast(), self.file_id))?;
 
         // Case where path is a qualifier of another path, e.g. foo::bar::Baz where we
         // trying to resolve foo::bar.
@@ -451,7 +450,7 @@ fn adjust(
 pub(crate) fn resolve_hir_path(
     db: &dyn HirDatabase,
     resolver: &Resolver,
-    path: &crate::Path,
+    path: &Path,
 ) -> Option<PathResolution> {
     let types =
         resolver.resolve_path_in_type_ns_fully(db.upcast(), path.mod_path()).map(|ty| match ty {
@@ -512,7 +511,7 @@ pub(crate) fn resolve_hir_path(
 pub(crate) fn resolve_hir_path_qualifier(
     db: &dyn HirDatabase,
     resolver: &Resolver,
-    path: &crate::Path,
+    path: &Path,
 ) -> Option<PathResolution> {
     let items = resolver
         .resolve_module_path_in_items(db.upcast(), path.mod_path())
@@ -9,11 +9,12 @@ use hir_expand::{
 };
 use ra_arena::{map::ArenaMap, Arena};
 use ra_syntax::ast::{self, NameOwner, VisibilityOwner};
+use tt::{Delimiter, DelimiterKind, Leaf, Subtree, TokenTree};
 
 use crate::{
     body::{CfgExpander, LowerCtx},
     db::DefDatabase,
-    item_tree::{Field, Fields, ItemTree},
+    item_tree::{AttrOwner, Field, Fields, ItemTree, ModItem},
     src::HasChildSource,
     src::HasSource,
     trace::Trace,
@@ -29,6 +30,7 @@ use ra_cfg::CfgOptions;
 pub struct StructData {
     pub name: Name,
     pub variant_data: Arc<VariantData>,
+    pub repr: Option<ReprKind>,
 }
 
 #[derive(Debug, Clone, PartialEq, Eq)]
@@ -58,26 +60,58 @@ pub struct FieldData {
     pub visibility: RawVisibility,
 }
 
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum ReprKind {
+    Packed,
+    Other,
+}
+
+fn repr_from_value(item_tree: &ItemTree, of: AttrOwner) -> Option<ReprKind> {
+    item_tree.attrs(of).by_key("repr").tt_values().find_map(parse_repr_tt)
+}
+
+fn parse_repr_tt(tt: &Subtree) -> Option<ReprKind> {
+    match tt.delimiter {
+        Some(Delimiter { kind: DelimiterKind::Parenthesis, .. }) => {}
+        _ => return None,
+    }
+
+    let mut it = tt.token_trees.iter();
+    match it.next()? {
+        TokenTree::Leaf(Leaf::Ident(ident)) if ident.text == "packed" => Some(ReprKind::Packed),
+        _ => Some(ReprKind::Other),
+    }
+}
+
 impl StructData {
     pub(crate) fn struct_data_query(db: &dyn DefDatabase, id: StructId) -> Arc<StructData> {
         let loc = id.lookup(db);
         let item_tree = db.item_tree(loc.id.file_id);
+        let repr = repr_from_value(&item_tree, ModItem::from(loc.id.value).into());
         let cfg_options = db.crate_graph()[loc.container.module(db).krate].cfg_options.clone();
 
         let strukt = &item_tree[loc.id.value];
         let variant_data = lower_fields(&item_tree, &cfg_options, &strukt.fields);
 
-        Arc::new(StructData { name: strukt.name.clone(), variant_data: Arc::new(variant_data) })
+        Arc::new(StructData {
+            name: strukt.name.clone(),
+            variant_data: Arc::new(variant_data),
+            repr,
+        })
     }
     pub(crate) fn union_data_query(db: &dyn DefDatabase, id: UnionId) -> Arc<StructData> {
         let loc = id.lookup(db);
         let item_tree = db.item_tree(loc.id.file_id);
+        let repr = repr_from_value(&item_tree, ModItem::from(loc.id.value).into());
         let cfg_options = db.crate_graph()[loc.container.module(db).krate].cfg_options.clone();
 
         let union = &item_tree[loc.id.value];
         let variant_data = lower_fields(&item_tree, &cfg_options, &union.fields);
 
-        Arc::new(StructData { name: union.name.clone(), variant_data: Arc::new(variant_data) })
+        Arc::new(StructData {
+            name: union.name.clone(),
+            variant_data: Arc::new(variant_data),
+            repr,
+        })
     }
 }
 
@@ -21,7 +21,7 @@ impl Diagnostic for UnresolvedModule {
     fn message(&self) -> String {
         "unresolved module".to_string()
     }
-    fn source(&self) -> InFile<SyntaxNodePtr> {
+    fn display_source(&self) -> InFile<SyntaxNodePtr> {
         InFile::new(self.file, self.decl.clone().into())
     }
     fn as_any(&self) -> &(dyn Any + Send + 'static) {
@@ -16,36 +16,21 @@
 
 use std::{any::Any, fmt};
 
-use ra_syntax::{SyntaxNode, SyntaxNodePtr};
+use ra_syntax::SyntaxNodePtr;
 
-use crate::{db::AstDatabase, InFile};
+use crate::InFile;
 
 pub trait Diagnostic: Any + Send + Sync + fmt::Debug + 'static {
     fn name(&self) -> &'static str;
     fn message(&self) -> String;
-    fn source(&self) -> InFile<SyntaxNodePtr>;
+    /// Used in highlighting and related purposes
+    fn display_source(&self) -> InFile<SyntaxNodePtr>;
     fn as_any(&self) -> &(dyn Any + Send + 'static);
     fn is_experimental(&self) -> bool {
         false
     }
 }
 
-pub trait AstDiagnostic {
-    type AST;
-    fn ast(&self, db: &dyn AstDatabase) -> Self::AST;
-}
-
 impl dyn Diagnostic {
-    pub fn syntax_node(&self, db: &impl AstDatabase) -> SyntaxNode {
-        let node = db.parse_or_expand(self.source().file_id).unwrap();
-        self.source().value.to_node(&node)
-    }
-
     pub fn downcast_ref<D: Diagnostic>(&self) -> Option<&D> {
         self.as_any().downcast_ref()
     }
 }
 
 pub struct DiagnosticSink<'a> {
     callbacks: Vec<Box<dyn FnMut(&dyn Diagnostic) -> Result<(), ()> + 'a>>,
     filters: Vec<Box<dyn FnMut(&dyn Diagnostic) -> bool + 'a>>,
@@ -90,7 +75,7 @@ impl<'a> DiagnosticSinkBuilder<'a> {
     }
 
     pub fn on<D: Diagnostic, F: FnMut(&D) + 'a>(mut self, mut cb: F) -> Self {
-        let cb = move |diag: &dyn Diagnostic| match diag.downcast_ref::<D>() {
+        let cb = move |diag: &dyn Diagnostic| match diag.as_any().downcast_ref::<D>() {
             Some(d) => {
                 cb(d);
                 Ok(())
@@ -17,7 +17,7 @@ pub struct Hygiene {
     // This is what `$crate` expands to
     def_crate: Option<CrateId>,
 
-    // Indiciate this is a local inner macro
+    // Indicate this is a local inner macro
     local_inner: bool,
 }
 
@@ -44,7 +44,8 @@ mod test_db;
 /// containing the call plus the offset of the macro call in the file. Note that
 /// this is a recursive definition! However, the size_of of `HirFileId` is
 /// finite (because everything bottoms out at the real `FileId`) and small
-/// (`MacroCallId` uses the location interner).
+/// (`MacroCallId` uses the location interning. You can check details here:
+/// https://en.wikipedia.org/wiki/String_interning).
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub struct HirFileId(HirFileIdRepr);
 
@@ -28,9 +28,9 @@ test_utils = { path = "../test_utils" }
 
 scoped-tls = "1"
 
-chalk-solve = { version = "0.19.0" }
-chalk-ir = { version = "0.19.0" }
-chalk-recursive = { version = "0.19.0" }
+chalk-solve = { version = "0.21.0" }
+chalk-ir = { version = "0.21.0" }
+chalk-recursive = { version = "0.21.0" }
 
 [dev-dependencies]
 expect = { path = "../expect" }
@@ -6,10 +6,10 @@ mod unsafe_check;
 use std::any::Any;
 
 use hir_def::DefWithBodyId;
-use hir_expand::diagnostics::{AstDiagnostic, Diagnostic, DiagnosticSink};
-use hir_expand::{db::AstDatabase, name::Name, HirFileId, InFile};
+use hir_expand::diagnostics::{Diagnostic, DiagnosticSink};
+use hir_expand::{name::Name, HirFileId, InFile};
 use ra_prof::profile;
-use ra_syntax::{ast, AstNode, AstPtr, SyntaxNodePtr};
+use ra_syntax::{ast, AstPtr, SyntaxNodePtr};
 use stdx::format_to;
 
 use crate::db::HirDatabase;
@@ -41,7 +41,7 @@ impl Diagnostic for NoSuchField {
         "no such field".to_string()
     }
 
-    fn source(&self) -> InFile<SyntaxNodePtr> {
+    fn display_source(&self) -> InFile<SyntaxNodePtr> {
         InFile::new(self.file, self.field.clone().into())
     }
 
@@ -50,20 +50,11 @@ impl Diagnostic for NoSuchField {
     }
 }
 
-impl AstDiagnostic for NoSuchField {
-    type AST = ast::RecordExprField;
-
-    fn ast(&self, db: &dyn AstDatabase) -> Self::AST {
-        let root = db.parse_or_expand(self.source().file_id).unwrap();
-        let node = self.source().value.to_node(&root);
-        ast::RecordExprField::cast(node).unwrap()
-    }
-}
-
 #[derive(Debug)]
 pub struct MissingFields {
     pub file: HirFileId,
-    pub field_list: AstPtr<ast::RecordExprFieldList>,
+    pub field_list_parent: AstPtr<ast::RecordExpr>,
+    pub field_list_parent_path: Option<AstPtr<ast::Path>>,
     pub missed_fields: Vec<Name>,
 }
 
@@ -78,28 +69,28 @@ impl Diagnostic for MissingFields {
         }
         buf
     }
-    fn source(&self) -> InFile<SyntaxNodePtr> {
-        InFile { file_id: self.file, value: self.field_list.clone().into() }
+
+    fn display_source(&self) -> InFile<SyntaxNodePtr> {
+        InFile {
+            file_id: self.file,
+            value: self
+                .field_list_parent_path
+                .clone()
+                .map(SyntaxNodePtr::from)
+                .unwrap_or_else(|| self.field_list_parent.clone().into()),
+        }
     }
+
     fn as_any(&self) -> &(dyn Any + Send + 'static) {
         self
     }
 }
 
-impl AstDiagnostic for MissingFields {
-    type AST = ast::RecordExprFieldList;
-
-    fn ast(&self, db: &dyn AstDatabase) -> Self::AST {
-        let root = db.parse_or_expand(self.source().file_id).unwrap();
-        let node = self.source().value.to_node(&root);
-        ast::RecordExprFieldList::cast(node).unwrap()
-    }
-}
-
 #[derive(Debug)]
 pub struct MissingPatFields {
     pub file: HirFileId,
-    pub field_list: AstPtr<ast::RecordPatFieldList>,
+    pub field_list_parent: AstPtr<ast::RecordPat>,
+    pub field_list_parent_path: Option<AstPtr<ast::Path>>,
     pub missed_fields: Vec<Name>,
 }
 
@@ -114,8 +105,15 @@ impl Diagnostic for MissingPatFields {
         }
         buf
     }
-    fn source(&self) -> InFile<SyntaxNodePtr> {
-        InFile { file_id: self.file, value: self.field_list.clone().into() }
+    fn display_source(&self) -> InFile<SyntaxNodePtr> {
+        InFile {
+            file_id: self.file,
+            value: self
+                .field_list_parent_path
+                .clone()
+                .map(SyntaxNodePtr::from)
+                .unwrap_or_else(|| self.field_list_parent.clone().into()),
+        }
     }
     fn as_any(&self) -> &(dyn Any + Send + 'static) {
         self
@@ -136,7 +134,7 @@ impl Diagnostic for MissingMatchArms {
     fn message(&self) -> String {
         String::from("Missing match arm")
     }
-    fn source(&self) -> InFile<SyntaxNodePtr> {
+    fn display_source(&self) -> InFile<SyntaxNodePtr> {
         InFile { file_id: self.file, value: self.match_expr.clone().into() }
     }
     fn as_any(&self) -> &(dyn Any + Send + 'static) {
@@ -157,7 +155,7 @@ impl Diagnostic for MissingOkInTailExpr {
     fn message(&self) -> String {
         "wrap return expression in Ok".to_string()
     }
-    fn source(&self) -> InFile<SyntaxNodePtr> {
+    fn display_source(&self) -> InFile<SyntaxNodePtr> {
         InFile { file_id: self.file, value: self.expr.clone().into() }
     }
     fn as_any(&self) -> &(dyn Any + Send + 'static) {
@@ -165,16 +163,6 @@ impl Diagnostic for MissingOkInTailExpr {
     }
 }
 
-impl AstDiagnostic for MissingOkInTailExpr {
-    type AST = ast::Expr;
-
-    fn ast(&self, db: &dyn AstDatabase) -> Self::AST {
-        let root = db.parse_or_expand(self.file).unwrap();
-        let node = self.source().value.to_node(&root);
-        ast::Expr::cast(node).unwrap()
-    }
-}
-
 #[derive(Debug)]
 pub struct BreakOutsideOfLoop {
     pub file: HirFileId,
@@ -188,7 +176,7 @@ impl Diagnostic for BreakOutsideOfLoop {
     fn message(&self) -> String {
         "break outside of loop".to_string()
     }
-    fn source(&self) -> InFile<SyntaxNodePtr> {
+    fn display_source(&self) -> InFile<SyntaxNodePtr> {
         InFile { file_id: self.file, value: self.expr.clone().into() }
     }
     fn as_any(&self) -> &(dyn Any + Send + 'static) {
@@ -196,16 +184,6 @@ impl Diagnostic for BreakOutsideOfLoop {
     }
 }
 
-impl AstDiagnostic for BreakOutsideOfLoop {
-    type AST = ast::Expr;
-
-    fn ast(&self, db: &dyn AstDatabase) -> Self::AST {
-        let root = db.parse_or_expand(self.file).unwrap();
-        let node = self.source().value.to_node(&root);
-        ast::Expr::cast(node).unwrap()
-    }
-}
-
 #[derive(Debug)]
 pub struct MissingUnsafe {
     pub file: HirFileId,
@@ -219,7 +197,7 @@ impl Diagnostic for MissingUnsafe {
     fn message(&self) -> String {
         format!("This operation is unsafe and requires an unsafe function or block")
     }
-    fn source(&self) -> InFile<SyntaxNodePtr> {
+    fn display_source(&self) -> InFile<SyntaxNodePtr> {
         InFile { file_id: self.file, value: self.expr.clone().into() }
     }
     fn as_any(&self) -> &(dyn Any + Send + 'static) {
@@ -227,16 +205,6 @@ impl Diagnostic for MissingUnsafe {
     }
 }
 
-impl AstDiagnostic for MissingUnsafe {
-    type AST = ast::Expr;
-
-    fn ast(&self, db: &dyn AstDatabase) -> Self::AST {
-        let root = db.parse_or_expand(self.source().file_id).unwrap();
-        let node = self.source().value.to_node(&root);
-        ast::Expr::cast(node).unwrap()
-    }
-}
-
 #[derive(Debug)]
 pub struct MismatchedArgCount {
     pub file: HirFileId,
@@ -253,7 +221,7 @@ impl Diagnostic for MismatchedArgCount {
         let s = if self.expected == 1 { "" } else { "s" };
         format!("Expected {} argument{}, found {}", self.expected, s, self.found)
     }
-    fn source(&self) -> InFile<SyntaxNodePtr> {
+    fn display_source(&self) -> InFile<SyntaxNodePtr> {
         InFile { file_id: self.file, value: self.call_expr.clone().into() }
     }
     fn as_any(&self) -> &(dyn Any + Send + 'static) {
@@ -264,19 +232,13 @@ impl Diagnostic for MismatchedArgCount {
     }
 }
 
-impl AstDiagnostic for MismatchedArgCount {
-    type AST = ast::CallExpr;
-    fn ast(&self, db: &dyn AstDatabase) -> Self::AST {
-        let root = db.parse_or_expand(self.source().file_id).unwrap();
-        let node = self.source().value.to_node(&root);
-        ast::CallExpr::cast(node).unwrap()
-    }
-}
-
 #[cfg(test)]
 mod tests {
     use hir_def::{db::DefDatabase, AssocItemId, ModuleDefId};
-    use hir_expand::diagnostics::{Diagnostic, DiagnosticSinkBuilder};
+    use hir_expand::{
+        db::AstDatabase,
+        diagnostics::{Diagnostic, DiagnosticSinkBuilder},
+    };
     use ra_db::{fixture::WithFixture, FileId, SourceDatabase, SourceDatabaseExt};
     use ra_syntax::{TextRange, TextSize};
     use rustc_hash::FxHashMap;
@@ -321,9 +283,11 @@ mod tests {
 
         let mut actual: FxHashMap<FileId, Vec<(TextRange, String)>> = FxHashMap::default();
         db.diagnostics(|d| {
-            // FXIME: macros...
-            let file_id = d.source().file_id.original_file(&db);
-            let range = d.syntax_node(&db).text_range();
+            let src = d.display_source();
+            let root = db.parse_or_expand(src.file_id).unwrap();
+            // FIXME: macros...
+            let file_id = src.file_id.original_file(&db);
+            let range = src.value.to_node(&root).text_range();
             let message = d.message().to_owned();
             actual.entry(file_id).or_default().push((range, message));
         });
@@ -351,8 +315,8 @@ struct S { foo: i32, bar: () }
 impl S {
     fn new() -> S {
         S {
-      //^... Missing structure fields:
-      //|    - bar
+      //^ Missing structure fields:
+      //|    - bar
             foo: 92,
             baz: 62,
          sic //^^^^^^^ no such field
@@ -473,8 +437,8 @@ impl Foo {
 struct S { foo: i32, bar: () }
 fn baz(s: S) {
     let S { foo: _ } = s;
-      //^^^^^^^^^^ Missing structure fields:
-      //        | - bar
+      //^ Missing structure fields:
+      //| - bar
 }
 "#,
     );
@@ -100,8 +100,8 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
 
         if let Ok(source_ptr) = source_map.expr_syntax(id) {
             let root = source_ptr.file_syntax(db.upcast());
-            if let ast::Expr::RecordExpr(record_lit) = &source_ptr.value.to_node(&root) {
-                if let Some(field_list) = record_lit.record_expr_field_list() {
+            if let ast::Expr::RecordExpr(record_expr) = &source_ptr.value.to_node(&root) {
+                if let Some(_) = record_expr.record_expr_field_list() {
                     let variant_data = variant_data(db.upcast(), variant_def);
                     let missed_fields = missed_fields
                         .into_iter()
@@ -109,7 +109,8 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
                         .collect();
                     self.sink.push(MissingFields {
                         file: source_ptr.file_id,
-                        field_list: AstPtr::new(&field_list),
+                        field_list_parent: AstPtr::new(&record_expr),
+                        field_list_parent_path: record_expr.path().map(|path| AstPtr::new(&path)),
                         missed_fields,
                     })
                 }
@@ -131,7 +132,7 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
             if let Some(expr) = source_ptr.value.as_ref().left() {
                 let root = source_ptr.file_syntax(db.upcast());
                 if let ast::Pat::RecordPat(record_pat) = expr.to_node(&root) {
-                    if let Some(field_list) = record_pat.record_pat_field_list() {
+                    if let Some(_) = record_pat.record_pat_field_list() {
                         let variant_data = variant_data(db.upcast(), variant_def);
                         let missed_fields = missed_fields
                             .into_iter()
@@ -139,7 +140,10 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
                            .collect();
                        self.sink.push(MissingPatFields {
                            file: source_ptr.file_id,
-                           field_list: AstPtr::new(&field_list),
+                           field_list_parent: AstPtr::new(&record_pat),
+                           field_list_parent_path: record_pat
+                               .path()
+                               .map(|path| AstPtr::new(&path)),
                            missed_fields,
                        })
                    }
@@ -1161,15 +1161,15 @@ fn main() {
       //^ Missing match arm
     match a {
         Either::A { } => (),
-      //^^^ Missing structure fields:
-      //  | - foo
+      //^^^^^^^^^ Missing structure fields:
+      //        | - foo
         Either::B => (),
     }
     match a {
       //^ Missing match arm
         Either::A { } => (),
-    } //^^^ Missing structure fields:
-      //  | - foo
+    } //^^^^^^^^^ Missing structure fields:
+      //        | - foo
 
     match a {
         Either::A { foo: true } => (),
@@ -6,6 +6,7 @@ use std::sync::Arc;
 use hir_def::{
     body::Body,
     expr::{Expr, ExprId, UnaryOp},
+    resolver::{resolver_for_expr, ResolveValueResult, ValueNs},
     DefWithBodyId,
 };
 use hir_expand::diagnostics::DiagnosticSink;
@@ -70,7 +71,7 @@ pub fn unsafe_expressions(
 ) -> Vec<UnsafeExpr> {
     let mut unsafe_exprs = vec![];
     let body = db.body(def);
-    walk_unsafe(&mut unsafe_exprs, db, infer, &body, body.body_expr, false);
+    walk_unsafe(&mut unsafe_exprs, db, infer, def, &body, body.body_expr, false);
 
     unsafe_exprs
 }
@@ -79,6 +80,7 @@ fn walk_unsafe(
     unsafe_exprs: &mut Vec<UnsafeExpr>,
     db: &dyn HirDatabase,
     infer: &InferenceResult,
+    def: DefWithBodyId,
     body: &Body,
     current: ExprId,
     inside_unsafe_block: bool,
@@ -97,6 +99,15 @@ fn walk_unsafe(
                 }
             }
         }
+        Expr::Path(path) => {
+            let resolver = resolver_for_expr(db.upcast(), def, current);
+            let value_or_partial = resolver.resolve_path_in_value_ns(db.upcast(), path.mod_path());
+            if let Some(ResolveValueResult::ValueNs(ValueNs::StaticId(id))) = value_or_partial {
+                if db.static_data(id).mutable {
+                    unsafe_exprs.push(UnsafeExpr { expr: current, inside_unsafe_block });
+                }
+            }
+        }
         Expr::MethodCall { .. } => {
             if infer
                 .method_resolution(current)
@@ -112,13 +123,13 @@ fn walk_unsafe(
             }
         }
         Expr::Unsafe { body: child } => {
-            return walk_unsafe(unsafe_exprs, db, infer, body, *child, true);
+            return walk_unsafe(unsafe_exprs, db, infer, def, body, *child, true);
         }
         _ => {}
     }
 
     expr.walk_child_exprs(|child| {
-        walk_unsafe(unsafe_exprs, db, infer, body, child, inside_unsafe_block);
+        walk_unsafe(unsafe_exprs, db, infer, def, body, child, inside_unsafe_block);
     });
 }
 
@@ -167,6 +178,27 @@ fn main() {
         HasUnsafe.unsafe_fn();
     }
 }
 "#,
     );
 }
+
+#[test]
+fn missing_unsafe_diagnostic_with_static_mut() {
+    check_diagnostics(
+        r#"
+struct Ty {
+    a: u8,
+}
+
+static mut static_mut: Ty = Ty { a: 0 };
+
+fn main() {
+    let x = static_mut.a;
+          //^^^^^^^^^^ This operation is unsafe and requires an unsafe function or block
+    unsafe {
+        let x = static_mut.a;
+    }
+}
+"#,
+    );
+}
 
@@ -440,6 +440,12 @@ impl<'a> InferenceContext<'a> {
                 let ty = self.insert_type_vars(ty.subst(&substs));
                 forbid_unresolved_segments((ty, Some(strukt.into())), unresolved)
             }
+            TypeNs::AdtId(AdtId::UnionId(u)) => {
+                let substs = Ty::substs_from_path(&ctx, path, u.into(), true);
+                let ty = self.db.ty(u.into());
+                let ty = self.insert_type_vars(ty.subst(&substs));
+                forbid_unresolved_segments((ty, Some(u.into())), unresolved)
+            }
             TypeNs::EnumVariantId(var) => {
                 let substs = Ty::substs_from_path(&ctx, path, var.into(), true);
                 let ty = self.db.ty(var.parent.into());
@@ -490,10 +496,7 @@ impl<'a> InferenceContext<'a> {
                 // FIXME potentially resolve assoc type
                 (Ty::Unknown, None)
             }
-            TypeNs::AdtId(AdtId::EnumId(_))
-            | TypeNs::AdtId(AdtId::UnionId(_))
-            | TypeNs::BuiltinType(_)
-            | TypeNs::TraitId(_) => {
+            TypeNs::AdtId(AdtId::EnumId(_)) | TypeNs::BuiltinType(_) | TypeNs::TraitId(_) => {
                 // FIXME diagnostic
                 (Ty::Unknown, None)
             }
@@ -518,6 +518,7 @@ impl Ty {
         let (segment, generic_def) = match resolved {
             ValueTyDefId::FunctionId(it) => (last, Some(it.into())),
             ValueTyDefId::StructId(it) => (last, Some(it.into())),
+            ValueTyDefId::UnionId(it) => (last, Some(it.into())),
             ValueTyDefId::ConstId(it) => (last, Some(it.into())),
             ValueTyDefId::StaticId(_) => (last, None),
             ValueTyDefId::EnumVariantId(var) => {
@@ -1148,11 +1149,12 @@ impl_from!(BuiltinType, AdtId(StructId, EnumId, UnionId), TypeAliasId for TyDefI
 pub enum ValueTyDefId {
     FunctionId(FunctionId),
     StructId(StructId),
+    UnionId(UnionId),
     EnumVariantId(EnumVariantId),
     ConstId(ConstId),
     StaticId(StaticId),
 }
-impl_from!(FunctionId, StructId, EnumVariantId, ConstId, StaticId for ValueTyDefId);
+impl_from!(FunctionId, StructId, UnionId, EnumVariantId, ConstId, StaticId for ValueTyDefId);
 
 /// Build the declared type of an item. This depends on the namespace; e.g. for
 /// `struct Foo(usize)`, we have two types: The type of the struct itself, and
@@ -1179,6 +1181,7 @@ pub(crate) fn value_ty_query(db: &dyn HirDatabase, def: ValueTyDefId) -> Binders
     match def {
         ValueTyDefId::FunctionId(it) => type_for_fn(db, it),
         ValueTyDefId::StructId(it) => type_for_struct_constructor(db, it),
+        ValueTyDefId::UnionId(it) => type_for_adt(db, it.into()),
         ValueTyDefId::EnumVariantId(it) => type_for_enum_variant_constructor(db, it),
         ValueTyDefId::ConstId(it) => type_for_const(db, it),
         ValueTyDefId::StaticId(it) => type_for_static(db, it),
@@ -334,16 +334,44 @@ fn infer_union() {
     bar: f32,
 }
 
+fn test() {
+    let u = MyUnion { foo: 0 };
+    unsafe { baz(u); }
+    let u = MyUnion { bar: 0.0 };
+    unsafe { baz(u); }
+}
+
 unsafe fn baz(u: MyUnion) {
     let inner = u.foo;
+    let inner = u.bar;
 }
 "#,
         expect![[r#"
-            61..62 'u': MyUnion
-            73..99 '{ ...foo; }': ()
-            83..88 'inner': u32
-            91..92 'u': MyUnion
-            91..96 'u.foo': u32
+            57..172 '{ ...); } }': ()
+            67..68 'u': MyUnion
+            71..89 'MyUnio...o: 0 }': MyUnion
+            86..87 '0': u32
+            95..113 'unsafe...(u); }': ()
+            102..113 '{ baz(u); }': ()
+            104..107 'baz': fn baz(MyUnion)
+            104..110 'baz(u)': ()
+            108..109 'u': MyUnion
+            122..123 'u': MyUnion
+            126..146 'MyUnio... 0.0 }': MyUnion
+            141..144 '0.0': f32
+            152..170 'unsafe...(u); }': ()
+            159..170 '{ baz(u); }': ()
+            161..164 'baz': fn baz(MyUnion)
+            161..167 'baz(u)': ()
+            165..166 'u': MyUnion
+            188..189 'u': MyUnion
+            200..249 '{ ...bar; }': ()
+            210..215 'inner': u32
+            218..219 'u': MyUnion
+            218..223 'u.foo': u32
+            233..238 'inner': f32
+            241..242 'u': MyUnion
+            241..246 'u.bar': f32
         "#]],
     );
 }
@@ -36,7 +36,7 @@ pub(super) fn complete_item_snippet(acc: &mut Completions, ctx: &CompletionConte
     snippet(
         ctx,
         cap,
-        "Test module",
+        "tmod (Test module)",
         "\
 #[cfg(test)]
 mod tests {
@@ -54,7 +54,7 @@ mod tests {
     snippet(
         ctx,
         cap,
-        "Test function",
+        "tfn (Test function)",
         "\
 #[test]
 fn ${1:feature}() {
@@ -106,10 +106,10 @@ mod tests {
 }
 "#,
             expect![[r#"
-                sn Test function
-                sn Test module
                 sn macro_rules
                 sn pub(crate)
+                sn tfn (Test function)
+                sn tmod (Test module)
             "#]],
         )
     }
@@ -27,7 +27,7 @@ pub(crate) struct CompletionContext<'a> {
     pub(super) scope: SemanticsScope<'a>,
     pub(super) db: &'a RootDatabase,
     pub(super) config: &'a CompletionConfig,
-    pub(super) offset: TextSize,
+    pub(super) position: FilePosition,
     /// The token before the cursor, in the original file.
     pub(super) original_token: SyntaxToken,
     /// The token before the cursor, in the macro-expanded file.
@@ -117,7 +117,7 @@ impl<'a> CompletionContext<'a> {
             config,
             original_token,
             token,
-            offset: position.offset,
+            position,
             krate,
             expected_type: None,
             name_ref_syntax: None,
@@ -209,7 +209,7 @@ impl<'a> CompletionContext<'a> {
             mark::hit!(completes_if_prefix_is_keyword);
             self.original_token.text_range()
         } else {
-            TextRange::empty(self.offset)
+            TextRange::empty(self.position.offset)
         }
     }
 
@@ -379,8 +379,8 @@ impl<'a> CompletionContext<'a> {
         self.is_path_type = path.syntax().parent().and_then(ast::PathType::cast).is_some();
         self.has_type_args = segment.generic_arg_list().is_some();
 
-        #[allow(deprecated)]
-        if let Some(path) = hir::Path::from_ast(path.clone()) {
+        let hygiene = hir::Hygiene::new(self.db, self.position.file_id.into());
+        if let Some(path) = hir::Path::from_src(path.clone(), &hygiene) {
             if let Some(path_prefix) = path.qualifier() {
                 self.path_prefix = Some(path_prefix);
                 return;
@@ -2,8 +2,8 @@
 //! It also handles scoring (sorting) completions.
 
 use hir::{Docs, HasAttrs, HasSource, HirDisplay, ModPath, ScopeDef, StructKind, Type};
+use itertools::Itertools;
 use ra_syntax::ast::NameOwner;
-use stdx::SepBy;
 use test_utils::mark;
 
 use crate::{
@@ -289,16 +289,16 @@ impl Completions {
             .map(|field| (field.name(ctx.db), field.signature_ty(ctx.db)));
         let variant_kind = variant.kind(ctx.db);
         let detail = match variant_kind {
-            StructKind::Tuple | StructKind::Unit => detail_types
-                .map(|(_, t)| t.display(ctx.db).to_string())
-                .sep_by(", ")
-                .surround_with("(", ")")
-                .to_string(),
-            StructKind::Record => detail_types
-                .map(|(n, t)| format!("{}: {}", n, t.display(ctx.db).to_string()))
-                .sep_by(", ")
-                .surround_with("{ ", " }")
-                .to_string(),
+            StructKind::Tuple | StructKind::Unit => format!(
+                "({})",
+                detail_types.map(|(_, t)| t.display(ctx.db).to_string()).format(", ")
+            ),
+            StructKind::Record => format!(
+                "{{ {} }}",
+                detail_types
+                    .map(|(n, t)| format!("{}: {}", n, t.display(ctx.db).to_string()))
+                    .format(", ")
+            ),
         };
         let mut res = CompletionItem::new(
             CompletionKind::Reference,
@@ -412,11 +412,10 @@ impl Builder {
         self = self.trigger_call_info();
         let snippet = match (ctx.config.add_call_argument_snippets, params) {
             (true, Params::Named(params)) => {
-                let function_params_snippet = params
-                    .iter()
-                    .enumerate()
-                    .map(|(index, param_name)| format!("${{{}:{}}}", index + 1, param_name))
-                    .sep_by(", ");
+                let function_params_snippet =
+                    params.iter().enumerate().format_with(", ", |(index, param_name), f| {
+                        f(&format_args!("${{{}:{}}}", index + 1, param_name))
+                    });
                 format!("{}({})$0", name, function_params_snippet)
             }
             _ => {
@@ -6,22 +6,21 @@
 
 use std::cell::RefCell;
 
-use hir::{
-    diagnostics::{AstDiagnostic, Diagnostic as _, DiagnosticSinkBuilder},
-    HasSource, HirDisplay, Semantics, VariantDef,
-};
+use hir::{diagnostics::DiagnosticSinkBuilder, Semantics};
 use itertools::Itertools;
 use ra_db::SourceDatabase;
 use ra_ide_db::RootDatabase;
 use ra_prof::profile;
 use ra_syntax::{
     algo,
-    ast::{self, edit::IndentLevel, make, AstNode},
+    ast::{self, AstNode},
     SyntaxNode, TextRange, T,
 };
 use ra_text_edit::{TextEdit, TextEditBuilder};
 
-use crate::{AnalysisConfig, Diagnostic, FileId, FileSystemEdit, Fix, SourceFileEdit};
+use crate::{AnalysisConfig, Diagnostic, FileId, Fix, SourceFileEdit};
+
+mod diagnostics_with_fix;
+use diagnostics_with_fix::DiagnosticWithFix;
 
 #[derive(Debug, Copy, Clone)]
 pub enum Severity {
@@ -56,77 +55,16 @@ pub(crate) fn diagnostics(
     let res = RefCell::new(res);
     let mut sink_builder = DiagnosticSinkBuilder::new()
         .on::<hir::diagnostics::UnresolvedModule, _>(|d| {
-            let original_file = d.source().file_id.original_file(db);
-            let fix = Fix::new(
-                "Create module",
-                FileSystemEdit::CreateFile { anchor: original_file, dst: d.candidate.clone() }
-                    .into(),
-            );
-            res.borrow_mut().push(Diagnostic {
-                name: Some(d.name().into()),
-                range: sema.diagnostics_range(d).range,
-                message: d.message(),
-                severity: Severity::Error,
-                fix: Some(fix),
-            })
+            res.borrow_mut().push(diagnostic_with_fix(d, &sema));
         })
         .on::<hir::diagnostics::MissingFields, _>(|d| {
-            // Note that although we could add a diagnostics to
-            // fill the missing tuple field, e.g :
-            // `struct A(usize);`
-            // `let a = A { 0: () }`
-            // but it is uncommon usage and it should not be encouraged.
-            let fix = if d.missed_fields.iter().any(|it| it.as_tuple_index().is_some()) {
-                None
-            } else {
-                let mut field_list = d.ast(db);
-                for f in d.missed_fields.iter() {
-                    let field = make::record_expr_field(
-                        make::name_ref(&f.to_string()),
-                        Some(make::expr_unit()),
-                    );
-                    field_list = field_list.append_field(&field);
-                }
-
-                let edit = {
-                    let mut builder = TextEditBuilder::default();
-                    algo::diff(&d.ast(db).syntax(), &field_list.syntax())
-                        .into_text_edit(&mut builder);
-                    builder.finish()
-                };
-                Some(Fix::new("Fill struct fields", SourceFileEdit { file_id, edit }.into()))
-            };
-
-            res.borrow_mut().push(Diagnostic {
-                name: Some(d.name().into()),
-                range: sema.diagnostics_range(d).range,
-                message: d.message(),
-                severity: Severity::Error,
-                fix,
-            })
+            res.borrow_mut().push(diagnostic_with_fix(d, &sema));
        })
         .on::<hir::diagnostics::MissingOkInTailExpr, _>(|d| {
-            let node = d.ast(db);
-            let replacement = format!("Ok({})", node.syntax());
-            let edit = TextEdit::replace(node.syntax().text_range(), replacement);
-            let source_change = SourceFileEdit { file_id, edit }.into();
-            let fix = Fix::new("Wrap with ok", source_change);
-            res.borrow_mut().push(Diagnostic {
-                name: Some(d.name().into()),
-                range: sema.diagnostics_range(d).range,
-                message: d.message(),
-                severity: Severity::Error,
-                fix: Some(fix),
-            })
+            res.borrow_mut().push(diagnostic_with_fix(d, &sema));
        })
         .on::<hir::diagnostics::NoSuchField, _>(|d| {
-            res.borrow_mut().push(Diagnostic {
-                name: Some(d.name().into()),
-                range: sema.diagnostics_range(d).range,
-                message: d.message(),
-                severity: Severity::Error,
-                fix: missing_struct_field_fix(&sema, file_id, d),
-            })
+            res.borrow_mut().push(diagnostic_with_fix(d, &sema));
        })
         // Only collect experimental diagnostics when they're enabled.
         .filter(|diag| !diag.is_experimental() || enable_experimental);
@@ -144,7 +82,7 @@ pub(crate) fn diagnostics(
         res.borrow_mut().push(Diagnostic {
             name: Some(d.name().into()),
             message: d.message(),
-            range: sema.diagnostics_range(d).range,
+            range: sema.diagnostics_display_range(d).range,
             severity: Severity::Error,
             fix: None,
         })
@@ -157,77 +95,13 @@ pub(crate) fn diagnostics(
     res.into_inner()
|
||||
}
|
||||
|
||||
fn missing_struct_field_fix(
|
||||
sema: &Semantics<RootDatabase>,
|
||||
usage_file_id: FileId,
|
||||
d: &hir::diagnostics::NoSuchField,
|
||||
) -> Option<Fix> {
|
||||
let record_expr = sema.ast(d);
|
||||
|
||||
let record_lit = ast::RecordExpr::cast(record_expr.syntax().parent()?.parent()?)?;
|
||||
let def_id = sema.resolve_variant(record_lit)?;
|
||||
let module;
|
||||
let def_file_id;
|
||||
let record_fields = match VariantDef::from(def_id) {
|
||||
VariantDef::Struct(s) => {
|
||||
module = s.module(sema.db);
|
||||
let source = s.source(sema.db);
|
||||
def_file_id = source.file_id;
|
||||
let fields = source.value.field_list()?;
|
||||
record_field_list(fields)?
|
||||
}
|
||||
VariantDef::Union(u) => {
|
||||
module = u.module(sema.db);
|
||||
let source = u.source(sema.db);
|
||||
def_file_id = source.file_id;
|
||||
source.value.record_field_list()?
|
||||
}
|
||||
VariantDef::EnumVariant(e) => {
|
||||
module = e.module(sema.db);
|
||||
let source = e.source(sema.db);
|
||||
def_file_id = source.file_id;
|
||||
let fields = source.value.field_list()?;
|
||||
record_field_list(fields)?
|
||||
}
|
||||
};
|
||||
let def_file_id = def_file_id.original_file(sema.db);
|
||||
|
||||
let new_field_type = sema.type_of_expr(&record_expr.expr()?)?;
|
||||
if new_field_type.is_unknown() {
|
||||
return None;
|
||||
}
|
||||
let new_field = make::record_field(
|
||||
record_expr.field_name()?,
|
||||
make::ty(&new_field_type.display_source_code(sema.db, module.into()).ok()?),
|
||||
);
|
||||
|
||||
let last_field = record_fields.fields().last()?;
|
||||
let last_field_syntax = last_field.syntax();
|
||||
let indent = IndentLevel::from_node(last_field_syntax);
|
||||
|
||||
let mut new_field = new_field.to_string();
|
||||
if usage_file_id != def_file_id {
|
||||
new_field = format!("pub(crate) {}", new_field);
|
||||
}
|
||||
new_field = format!("\n{}{}", indent, new_field);
|
||||
|
||||
let needs_comma = !last_field_syntax.to_string().ends_with(',');
|
||||
if needs_comma {
|
||||
new_field = format!(",{}", new_field);
|
||||
}
|
||||
|
||||
let source_change = SourceFileEdit {
|
||||
file_id: def_file_id,
|
||||
edit: TextEdit::insert(last_field_syntax.text_range().end(), new_field),
|
||||
};
|
||||
let fix = Fix::new("Create field", source_change.into());
|
||||
return Some(fix);
|
||||
|
||||
fn record_field_list(field_def_list: ast::FieldList) -> Option<ast::RecordFieldList> {
|
||||
match field_def_list {
|
||||
ast::FieldList::RecordFieldList(it) => Some(it),
|
||||
ast::FieldList::TupleFieldList(_) => None,
|
||||
}
|
||||
fn diagnostic_with_fix<D: DiagnosticWithFix>(d: &D, sema: &Semantics<RootDatabase>) -> Diagnostic {
|
||||
Diagnostic {
|
||||
name: Some(d.name().into()),
|
||||
range: sema.diagnostics_display_range(d).range,
|
||||
message: d.message(),
|
||||
severity: Severity::Error,
|
||||
fix: d.fix(&sema),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -238,25 +112,26 @@ fn check_unnecessary_braces_in_use_statement(
|
|||
) -> Option<()> {
|
||||
let use_tree_list = ast::UseTreeList::cast(node.clone())?;
|
||||
if let Some((single_use_tree,)) = use_tree_list.use_trees().collect_tuple() {
|
||||
let range = use_tree_list.syntax().text_range();
|
||||
let use_range = use_tree_list.syntax().text_range();
|
||||
let edit =
|
||||
text_edit_for_remove_unnecessary_braces_with_self_in_use_statement(&single_use_tree)
|
||||
.unwrap_or_else(|| {
|
||||
let to_replace = single_use_tree.syntax().text().to_string();
|
||||
let mut edit_builder = TextEditBuilder::default();
|
||||
edit_builder.delete(range);
|
||||
edit_builder.insert(range.start(), to_replace);
|
||||
edit_builder.delete(use_range);
|
||||
edit_builder.insert(use_range.start(), to_replace);
|
||||
edit_builder.finish()
|
||||
});
|
||||
|
||||
acc.push(Diagnostic {
|
||||
name: None,
|
||||
range,
|
||||
range: use_range,
|
||||
message: "Unnecessary braces in use statement".to_string(),
|
||||
severity: Severity::WeakWarning,
|
||||
fix: Some(Fix::new(
|
||||
"Remove unnecessary braces",
|
||||
SourceFileEdit { file_id, edit }.into(),
|
||||
use_range,
|
||||
)),
|
||||
});
|
||||
}
|
||||
|
@ -271,8 +146,7 @@ fn text_edit_for_remove_unnecessary_braces_with_self_in_use_statement(
|
|||
if single_use_tree.path()?.segment()?.syntax().first_child_or_token()?.kind() == T![self] {
|
||||
let start = use_tree_list_node.prev_sibling_or_token()?.text_range().start();
|
||||
let end = use_tree_list_node.text_range().end();
|
||||
let range = TextRange::new(start, end);
|
||||
return Some(TextEdit::delete(range));
|
||||
return Some(TextEdit::delete(TextRange::new(start, end)));
|
||||
}
|
||||
None
|
||||
}
|
||||
|
@ -295,14 +169,16 @@ fn check_struct_shorthand_initialization(
|
|||
edit_builder.insert(record_field.syntax().text_range().start(), field_name);
|
||||
let edit = edit_builder.finish();
|
||||
|
||||
let field_range = record_field.syntax().text_range();
|
||||
acc.push(Diagnostic {
|
||||
name: None,
|
||||
range: record_field.syntax().text_range(),
|
||||
range: field_range,
|
||||
message: "Shorthand struct initialization".to_string(),
|
||||
severity: Severity::WeakWarning,
|
||||
fix: Some(Fix::new(
|
||||
"Use struct shorthand initialization",
|
||||
SourceFileEdit { file_id, edit }.into(),
|
||||
field_range,
|
||||
)),
|
||||
});
|
||||
}
|
||||
|
@ -326,7 +202,7 @@ mod tests {
|
|||
/// Takes a multi-file input fixture with annotated cursor positions,
|
||||
/// and checks that:
|
||||
/// * a diagnostic is produced
|
||||
/// * this diagnostic touches the input cursor position
|
||||
/// * this diagnostic fix trigger range touches the input cursor position
|
||||
/// * that the contents of the file containing the cursor match `after` after the diagnostic fix is applied
|
||||
fn check_fix(ra_fixture_before: &str, ra_fixture_after: &str) {
|
||||
let after = trim_indent(ra_fixture_after);
|
||||
|
@ -344,10 +220,10 @@ mod tests {
|
|||
|
||||
assert_eq_text!(&after, &actual);
|
||||
assert!(
|
||||
diagnostic.range.start() <= file_position.offset
|
||||
&& diagnostic.range.end() >= file_position.offset,
|
||||
"diagnostic range {:?} does not touch cursor position {:?}",
|
||||
diagnostic.range,
|
||||
fix.fix_trigger_range.start() <= file_position.offset
|
||||
&& fix.fix_trigger_range.end() >= file_position.offset,
|
||||
"diagnostic fix range {:?} does not touch cursor position {:?}",
|
||||
fix.fix_trigger_range,
|
||||
file_position.offset
|
||||
);
|
||||
}
|
||||
|
@ -712,6 +588,7 @@ fn test_fn() {
|
|||
],
|
||||
is_snippet: false,
|
||||
},
|
||||
fix_trigger_range: 0..8,
|
||||
},
|
||||
),
|
||||
},
|
||||
|
|
171
crates/ra_ide/src/diagnostics/diagnostics_with_fix.rs
Normal file
|
@ -0,0 +1,171 @@
|
|||
//! Provides a way to attach fixes to the diagnostics.
|
||||
//! The same module also has all current custom fixes for the diagnostics implemented.
|
||||
use crate::Fix;
|
||||
use ast::{edit::IndentLevel, make};
|
||||
use hir::{
|
||||
db::AstDatabase,
|
||||
diagnostics::{Diagnostic, MissingFields, MissingOkInTailExpr, NoSuchField, UnresolvedModule},
|
||||
HasSource, HirDisplay, Semantics, VariantDef,
|
||||
};
|
||||
use ra_db::FileId;
|
||||
use ra_ide_db::{
|
||||
source_change::{FileSystemEdit, SourceFileEdit},
|
||||
RootDatabase,
|
||||
};
|
||||
use ra_syntax::{algo, ast, AstNode};
|
||||
use ra_text_edit::{TextEdit, TextEditBuilder};
|
||||
|
||||
/// A [Diagnostic] that potentially has a fix available.
|
||||
///
|
||||
/// [Diagnostic]: hir::diagnostics::Diagnostic
|
||||
pub trait DiagnosticWithFix: Diagnostic {
|
||||
fn fix(&self, sema: &Semantics<RootDatabase>) -> Option<Fix>;
|
||||
}
|
||||
|
||||
impl DiagnosticWithFix for UnresolvedModule {
|
||||
fn fix(&self, sema: &Semantics<RootDatabase>) -> Option<Fix> {
|
||||
let root = sema.db.parse_or_expand(self.file)?;
|
||||
let unresolved_module = self.decl.to_node(&root);
|
||||
Some(Fix::new(
|
||||
"Create module",
|
||||
FileSystemEdit::CreateFile {
|
||||
anchor: self.file.original_file(sema.db),
|
||||
dst: self.candidate.clone(),
|
||||
}
|
||||
.into(),
|
||||
unresolved_module.syntax().text_range(),
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
impl DiagnosticWithFix for NoSuchField {
|
||||
fn fix(&self, sema: &Semantics<RootDatabase>) -> Option<Fix> {
|
||||
let root = sema.db.parse_or_expand(self.file)?;
|
||||
missing_record_expr_field_fix(
|
||||
&sema,
|
||||
self.file.original_file(sema.db),
|
||||
&self.field.to_node(&root),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl DiagnosticWithFix for MissingFields {
|
||||
fn fix(&self, sema: &Semantics<RootDatabase>) -> Option<Fix> {
|
||||
// Note that although we could add a diagnostic to
|
||||
// fill the missing tuple field, e.g.:
|
||||
// `struct A(usize);`
|
||||
// `let a = A { 0: () }`
|
||||
// it is uncommon usage and should not be encouraged.
|
||||
if self.missed_fields.iter().any(|it| it.as_tuple_index().is_some()) {
|
||||
return None;
|
||||
}
|
||||
|
||||
let root = sema.db.parse_or_expand(self.file)?;
|
||||
let old_field_list = self.field_list_parent.to_node(&root).record_expr_field_list()?;
|
||||
let mut new_field_list = old_field_list.clone();
|
||||
for f in self.missed_fields.iter() {
|
||||
let field =
|
||||
make::record_expr_field(make::name_ref(&f.to_string()), Some(make::expr_unit()));
|
||||
new_field_list = new_field_list.append_field(&field);
|
||||
}
|
||||
|
||||
let edit = {
|
||||
let mut builder = TextEditBuilder::default();
|
||||
algo::diff(&old_field_list.syntax(), &new_field_list.syntax())
|
||||
.into_text_edit(&mut builder);
|
||||
builder.finish()
|
||||
};
|
||||
Some(Fix::new(
|
||||
"Fill struct fields",
|
||||
SourceFileEdit { file_id: self.file.original_file(sema.db), edit }.into(),
|
||||
sema.original_range(&old_field_list.syntax()).range,
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
impl DiagnosticWithFix for MissingOkInTailExpr {
|
||||
fn fix(&self, sema: &Semantics<RootDatabase>) -> Option<Fix> {
|
||||
let root = sema.db.parse_or_expand(self.file)?;
|
||||
let tail_expr = self.expr.to_node(&root);
|
||||
let tail_expr_range = tail_expr.syntax().text_range();
|
||||
let edit = TextEdit::replace(tail_expr_range, format!("Ok({})", tail_expr.syntax()));
|
||||
let source_change =
|
||||
SourceFileEdit { file_id: self.file.original_file(sema.db), edit }.into();
|
||||
Some(Fix::new("Wrap with ok", source_change, tail_expr_range))
|
||||
}
|
||||
}
|
||||
|
||||
fn missing_record_expr_field_fix(
|
||||
sema: &Semantics<RootDatabase>,
|
||||
usage_file_id: FileId,
|
||||
record_expr_field: &ast::RecordExprField,
|
||||
) -> Option<Fix> {
|
||||
let record_lit = ast::RecordExpr::cast(record_expr_field.syntax().parent()?.parent()?)?;
|
||||
let def_id = sema.resolve_variant(record_lit)?;
|
||||
let module;
|
||||
let def_file_id;
|
||||
let record_fields = match VariantDef::from(def_id) {
|
||||
VariantDef::Struct(s) => {
|
||||
module = s.module(sema.db);
|
||||
let source = s.source(sema.db);
|
||||
def_file_id = source.file_id;
|
||||
let fields = source.value.field_list()?;
|
||||
record_field_list(fields)?
|
||||
}
|
||||
VariantDef::Union(u) => {
|
||||
module = u.module(sema.db);
|
||||
let source = u.source(sema.db);
|
||||
def_file_id = source.file_id;
|
||||
source.value.record_field_list()?
|
||||
}
|
||||
VariantDef::EnumVariant(e) => {
|
||||
module = e.module(sema.db);
|
||||
let source = e.source(sema.db);
|
||||
def_file_id = source.file_id;
|
||||
let fields = source.value.field_list()?;
|
||||
record_field_list(fields)?
|
||||
}
|
||||
};
|
||||
let def_file_id = def_file_id.original_file(sema.db);
|
||||
|
||||
let new_field_type = sema.type_of_expr(&record_expr_field.expr()?)?;
|
||||
if new_field_type.is_unknown() {
|
||||
return None;
|
||||
}
|
||||
let new_field = make::record_field(
|
||||
record_expr_field.field_name()?,
|
||||
make::ty(&new_field_type.display_source_code(sema.db, module.into()).ok()?),
|
||||
);
|
||||
|
||||
let last_field = record_fields.fields().last()?;
|
||||
let last_field_syntax = last_field.syntax();
|
||||
let indent = IndentLevel::from_node(last_field_syntax);
|
||||
|
||||
let mut new_field = new_field.to_string();
|
||||
if usage_file_id != def_file_id {
|
||||
new_field = format!("pub(crate) {}", new_field);
|
||||
}
|
||||
new_field = format!("\n{}{}", indent, new_field);
|
||||
|
||||
let needs_comma = !last_field_syntax.to_string().ends_with(',');
|
||||
if needs_comma {
|
||||
new_field = format!(",{}", new_field);
|
||||
}
|
||||
|
||||
let source_change = SourceFileEdit {
|
||||
file_id: def_file_id,
|
||||
edit: TextEdit::insert(last_field_syntax.text_range().end(), new_field),
|
||||
};
|
||||
return Some(Fix::new(
|
||||
"Create field",
|
||||
source_change.into(),
|
||||
record_expr_field.syntax().text_range(),
|
||||
));
|
||||
|
||||
fn record_field_list(field_def_list: ast::FieldList) -> Option<ast::RecordFieldList> {
|
||||
match field_def_list {
|
||||
ast::FieldList::RecordFieldList(it) => Some(it),
|
||||
ast::FieldList::TupleFieldList(_) => None,
|
||||
}
|
||||
}
|
||||
}
|
|
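A minimal, self-contained sketch (not part of the diff above) of the DiagnosticWithFix pattern the new file introduces: each diagnostic type decides for itself whether a fix applies. Analysis, Fix, and UnresolvedModuleDiag are hypothetical stand-ins for the real rust-analyzer types, not the actual API.

struct Analysis;
struct Fix { label: String }

// Simplified analogue of the trait added in diagnostics_with_fix.rs.
trait DiagnosticWithFix {
    fn fix(&self, analysis: &Analysis) -> Option<Fix>;
}

// A stand-in diagnostic: it only offers a fix when it has a candidate path.
struct UnresolvedModuleDiag { candidate: Option<String> }

impl DiagnosticWithFix for UnresolvedModuleDiag {
    fn fix(&self, _analysis: &Analysis) -> Option<Fix> {
        self.candidate.as_ref().map(|_| Fix { label: "Create module".to_string() })
    }
}

fn main() {
    let d = UnresolvedModuleDiag { candidate: Some("foo.rs".to_string()) };
    assert_eq!(d.fix(&Analysis).unwrap().label, "Create module");
}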
@ -47,6 +47,12 @@ impl ShortLabel for ast::Module {
|
|||
}
|
||||
}
|
||||
|
||||
impl ShortLabel for ast::SourceFile {
|
||||
fn short_label(&self) -> Option<String> {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
impl ShortLabel for ast::TypeAlias {
|
||||
fn short_label(&self) -> Option<String> {
|
||||
short_label_from_node(self, "type ")
|
||||
|
@ -55,7 +61,11 @@ impl ShortLabel for ast::TypeAlias {
|
|||
|
||||
impl ShortLabel for ast::Const {
|
||||
fn short_label(&self) -> Option<String> {
|
||||
short_label_from_ty(self, self.ty(), "const ")
|
||||
let mut new_buf = short_label_from_ty(self, self.ty(), "const ")?;
|
||||
if let Some(expr) = self.body() {
|
||||
format_to!(new_buf, " = {}", expr.syntax());
|
||||
}
|
||||
Some(new_buf)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
use hir::Semantics;
|
||||
use ra_ide_db::{
|
||||
defs::{classify_name, classify_name_ref, NameClass},
|
||||
defs::{classify_name, classify_name_ref},
|
||||
symbol_index, RootDatabase,
|
||||
};
|
||||
use ra_syntax::{
|
||||
|
@ -40,10 +40,7 @@ pub(crate) fn goto_definition(
|
|||
reference_definition(&sema, &name_ref).to_vec()
|
||||
},
|
||||
ast::Name(name) => {
|
||||
let def = match classify_name(&sema, &name)? {
|
||||
NameClass::Definition(def) | NameClass::ConstReference(def) => def,
|
||||
NameClass::FieldShorthand { local: _, field } => field,
|
||||
};
|
||||
let def = classify_name(&sema, &name)?.definition(sema.db);
|
||||
let nav = def.try_to_nav(sema.db)?;
|
||||
vec![nav]
|
||||
},
|
||||
|
@ -86,8 +83,7 @@ pub(crate) fn reference_definition(
|
|||
) -> ReferenceResult {
|
||||
let name_kind = classify_name_ref(sema, name_ref);
|
||||
if let Some(def) = name_kind {
|
||||
let def = def.definition();
|
||||
|
||||
let def = def.definition(sema.db);
|
||||
return match def.try_to_nav(sema.db) {
|
||||
Some(nav) => ReferenceResult::Exact(nav),
|
||||
None => ReferenceResult::Approximate(Vec::new()),
|
||||
|
@ -133,6 +129,32 @@ mod tests {
|
|||
assert_eq!(expected, FileRange { file_id: nav.file_id, range: nav.focus_or_full_range() });
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn goto_def_for_extern_crate() {
|
||||
check(
|
||||
r#"
|
||||
//- /main.rs
|
||||
extern crate std<|>;
|
||||
//- /std/lib.rs
|
||||
// empty
|
||||
//^ file
|
||||
"#,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn goto_def_for_renamed_extern_crate() {
|
||||
check(
|
||||
r#"
|
||||
//- /main.rs
|
||||
extern crate std as abc<|>;
|
||||
//- /std/lib.rs
|
||||
// empty
|
||||
//^ file
|
||||
"#,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn goto_def_in_items() {
|
||||
check(
|
||||
|
|
|
@ -85,8 +85,8 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn
|
|||
let node = token.parent();
|
||||
let definition = match_ast! {
|
||||
match node {
|
||||
ast::NameRef(name_ref) => classify_name_ref(&sema, &name_ref).map(|d| d.definition()),
|
||||
ast::Name(name) => classify_name(&sema, &name).map(|d| d.definition()),
|
||||
ast::NameRef(name_ref) => classify_name_ref(&sema, &name_ref).map(|d| d.definition(sema.db)),
|
||||
ast::Name(name) => classify_name(&sema, &name).map(|d| d.definition(sema.db)),
|
||||
_ => None,
|
||||
}
|
||||
};
|
||||
|
@ -304,7 +304,10 @@ fn hover_for_definition(db: &RootDatabase, def: Definition) -> Option<Markup> {
|
|||
let docs = Documentation::from_ast(&it).map(Into::into);
|
||||
hover_markup(docs, it.short_label(), mod_path)
|
||||
}
|
||||
_ => None,
|
||||
ModuleSource::SourceFile(it) => {
|
||||
let docs = Documentation::from_ast(&it).map(Into::into);
|
||||
hover_markup(docs, it.short_label(), mod_path)
|
||||
}
|
||||
},
|
||||
ModuleDef::Function(it) => from_def_source(db, it, mod_path),
|
||||
ModuleDef::Adt(Adt::Struct(it)) => from_def_source(db, it, mod_path),
|
||||
|
@ -508,6 +511,37 @@ fn main() { }
|
|||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hover_shows_fn_doc() {
|
||||
check(
|
||||
r#"
|
||||
/// # Example
|
||||
/// ```
|
||||
/// # use std::path::Path;
|
||||
/// #
|
||||
/// foo(Path::new("hello, world!"))
|
||||
/// ```
|
||||
pub fn foo<|>(_: &Path) {}
|
||||
|
||||
fn main() { }
|
||||
"#,
|
||||
expect![[r#"
|
||||
*foo*
|
||||
```rust
|
||||
pub fn foo(_: &Path)
|
||||
```
|
||||
___
|
||||
|
||||
# Example
|
||||
```
|
||||
# use std::path::Path;
|
||||
#
|
||||
foo(Path::new("hello, world!"))
|
||||
```
|
||||
"#]],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hover_shows_struct_field_info() {
|
||||
// Hovering over the field when instantiating
|
||||
|
@ -556,16 +590,16 @@ fn main() {
|
|||
#[test]
|
||||
fn hover_const_static() {
|
||||
check(
|
||||
r#"const foo<|>: u32 = 0;"#,
|
||||
r#"const foo<|>: u32 = 123;"#,
|
||||
expect![[r#"
|
||||
*foo*
|
||||
```rust
|
||||
const foo: u32
|
||||
const foo: u32 = 123
|
||||
```
|
||||
"#]],
|
||||
);
|
||||
check(
|
||||
r#"static foo<|>: u32 = 0;"#,
|
||||
r#"static foo<|>: u32 = 456;"#,
|
||||
expect![[r#"
|
||||
*foo*
|
||||
```rust
|
||||
|
@ -800,7 +834,7 @@ fn main() {
|
|||
expect![[r#"
|
||||
*C*
|
||||
```rust
|
||||
const C: u32
|
||||
const C: u32 = 1
|
||||
```
|
||||
"#]],
|
||||
)
|
||||
|
@ -1106,6 +1140,46 @@ fn bar() { fo<|>o(); }
|
|||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_hover_extern_crate() {
|
||||
check(
|
||||
r#"
|
||||
//- /main.rs
|
||||
extern crate st<|>d;
|
||||
//- /std/lib.rs
|
||||
//! Standard library for this test
|
||||
//!
|
||||
//! Printed?
|
||||
//! abc123
|
||||
"#,
|
||||
expect![[r#"
|
||||
*std*
|
||||
Standard library for this test
|
||||
|
||||
Printed?
|
||||
abc123
|
||||
"#]],
|
||||
);
|
||||
check(
|
||||
r#"
|
||||
//- /main.rs
|
||||
extern crate std as ab<|>c;
|
||||
//- /std/lib.rs
|
||||
//! Standard library for this test
|
||||
//!
|
||||
//! Printed?
|
||||
//! abc123
|
||||
"#,
|
||||
expect![[r#"
|
||||
*abc*
|
||||
Standard library for this test
|
||||
|
||||
Printed?
|
||||
abc123
|
||||
"#]],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_hover_mod_with_same_name_as_function() {
|
||||
check(
|
||||
|
|
|
@ -119,13 +119,19 @@ pub struct Diagnostic {
|
|||
pub struct Fix {
|
||||
pub label: String,
|
||||
pub source_change: SourceChange,
|
||||
/// Allows the fix to be triggered only when the caret is in the given range
|
||||
pub fix_trigger_range: TextRange,
|
||||
}
|
||||
|
||||
impl Fix {
|
||||
pub fn new(label: impl Into<String>, source_change: SourceChange) -> Self {
|
||||
pub fn new(
|
||||
label: impl Into<String>,
|
||||
source_change: SourceChange,
|
||||
fix_trigger_range: TextRange,
|
||||
) -> Self {
|
||||
let label = label.into();
|
||||
assert!(label.starts_with(char::is_uppercase) && !label.ends_with('.'));
|
||||
Self { label, source_change }
|
||||
Self { label, source_change, fix_trigger_range }
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
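A minimal, self-contained sketch (not part of the diff above) of what the new fix_trigger_range field is for: a fix is offered only while the caret sits inside that range. TextRange, Fix, and applicable_fixes below are simplified stand-ins, not the real ra_text_edit or ra_ide types.

#[derive(Clone, Copy)]
struct TextRange { start: u32, end: u32 }

impl TextRange {
    // Inclusive containment check, enough for this illustration.
    fn contains(&self, offset: u32) -> bool { self.start <= offset && offset <= self.end }
}

struct Fix { label: String, fix_trigger_range: TextRange }

// Keep only the fixes whose trigger range covers the caret position.
fn applicable_fixes(fixes: &[Fix], caret: u32) -> Vec<&Fix> {
    fixes.iter().filter(|f| f.fix_trigger_range.contains(caret)).collect()
}

fn main() {
    let fixes = vec![Fix {
        label: "Create field".to_string(),
        fix_trigger_range: TextRange { start: 0, end: 8 },
    }];
    assert_eq!(applicable_fixes(&fixes, 4).len(), 1);  // caret inside the range: offered
    assert!(applicable_fixes(&fixes, 20).is_empty());  // caret elsewhere: not offered
    assert_eq!(fixes[0].label, "Create field");
}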
@ -130,13 +130,13 @@ fn find_name(
|
|||
opt_name: Option<ast::Name>,
|
||||
) -> Option<RangeInfo<Definition>> {
|
||||
if let Some(name) = opt_name {
|
||||
let def = classify_name(sema, &name)?.definition();
|
||||
let def = classify_name(sema, &name)?.definition(sema.db);
|
||||
let range = name.syntax().text_range();
|
||||
return Some(RangeInfo::new(range, def));
|
||||
}
|
||||
let name_ref =
|
||||
sema.find_node_at_offset_with_descend::<ast::NameRef>(&syntax, position.offset)?;
|
||||
let def = classify_name_ref(sema, &name_ref)?.definition();
|
||||
let def = classify_name_ref(sema, &name_ref)?.definition(sema.db);
|
||||
let range = name_ref.syntax().text_range();
|
||||
Some(RangeInfo::new(range, def))
|
||||
}
|
||||
|
|
|
@ -21,8 +21,8 @@ use ra_ssr::{MatchFinder, SsrError, SsrRule};
|
|||
// replacement occurs. For example if our replacement template is `foo::Bar` and we match some
|
||||
// code in the `foo` module, we'll insert just `Bar`.
|
||||
//
|
||||
// Method calls should generally be written in UFCS form. e.g. `foo::Bar::baz($s, $a)` will match
|
||||
// `$s.baz($a)`, provided the method call `baz` resolves to the method `foo::Bar::baz`.
|
||||
// Inherent method calls should generally be written in UFCS form. e.g. `foo::Bar::baz($s, $a)` will
|
||||
// match `$s.baz($a)`, provided the method call `baz` resolves to the method `foo::Bar::baz`.
|
||||
//
|
||||
// The scope of the search / replace will be restricted to the current selection if any, otherwise
|
||||
// it will apply to the whole workspace.
|
||||
|
|
|
@ -4,7 +4,7 @@ mod injection;
|
|||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
use hir::{Name, Semantics};
|
||||
use hir::{Name, Semantics, VariantDef};
|
||||
use ra_ide_db::{
|
||||
defs::{classify_name, classify_name_ref, Definition, NameClass, NameRefClass},
|
||||
RootDatabase,
|
||||
|
@ -455,6 +455,18 @@ fn macro_call_range(macro_call: &ast::MacroCall) -> Option<TextRange> {
|
|||
Some(TextRange::new(range_start, range_end))
|
||||
}
|
||||
|
||||
fn is_possibly_unsafe(name_ref: &ast::NameRef) -> bool {
|
||||
name_ref
|
||||
.syntax()
|
||||
.parent()
|
||||
.and_then(|parent| {
|
||||
ast::FieldExpr::cast(parent.clone())
|
||||
.map(|_| true)
|
||||
.or_else(|| ast::RecordPatField::cast(parent).map(|_| true))
|
||||
})
|
||||
.unwrap_or(false)
|
||||
}
|
||||
|
||||
fn highlight_element(
|
||||
sema: &Semantics<RootDatabase>,
|
||||
bindings_shadow_count: &mut FxHashMap<Name, u32>,
|
||||
|
@ -483,11 +495,21 @@ fn highlight_element(
|
|||
};
|
||||
|
||||
match name_kind {
|
||||
Some(NameClass::ExternCrate(_)) => HighlightTag::Module.into(),
|
||||
Some(NameClass::Definition(def)) => {
|
||||
highlight_name(db, def) | HighlightModifier::Definition
|
||||
highlight_name(sema, db, def, None, false) | HighlightModifier::Definition
|
||||
}
|
||||
Some(NameClass::ConstReference(def)) => highlight_name(sema, db, def, None, false),
|
||||
Some(NameClass::FieldShorthand { field, .. }) => {
|
||||
let mut h = HighlightTag::Field.into();
|
||||
if let Definition::Field(field) = field {
|
||||
if let VariantDef::Union(_) = field.parent_def(db) {
|
||||
h |= HighlightModifier::Unsafe;
|
||||
}
|
||||
}
|
||||
|
||||
h
|
||||
}
|
||||
Some(NameClass::ConstReference(def)) => highlight_name(db, def),
|
||||
Some(NameClass::FieldShorthand { .. }) => HighlightTag::Field.into(),
|
||||
None => highlight_name_by_syntax(name) | HighlightModifier::Definition,
|
||||
}
|
||||
}
|
||||
|
@ -498,8 +520,10 @@ fn highlight_element(
|
|||
}
|
||||
NAME_REF => {
|
||||
let name_ref = element.into_node().and_then(ast::NameRef::cast).unwrap();
|
||||
let possibly_unsafe = is_possibly_unsafe(&name_ref);
|
||||
match classify_name_ref(sema, &name_ref) {
|
||||
Some(name_kind) => match name_kind {
|
||||
NameRefClass::ExternCrate(_) => HighlightTag::Module.into(),
|
||||
NameRefClass::Definition(def) => {
|
||||
if let Definition::Local(local) = &def {
|
||||
if let Some(name) = local.name(db) {
|
||||
|
@ -508,11 +532,13 @@ fn highlight_element(
|
|||
binding_hash = Some(calc_binding_hash(&name, *shadow_count))
|
||||
}
|
||||
};
|
||||
highlight_name(db, def)
|
||||
highlight_name(sema, db, def, Some(name_ref), possibly_unsafe)
|
||||
}
|
||||
NameRefClass::FieldShorthand { .. } => HighlightTag::Field.into(),
|
||||
},
|
||||
None if syntactic_name_ref_highlighting => highlight_name_ref_by_syntax(name_ref),
|
||||
None if syntactic_name_ref_highlighting => {
|
||||
highlight_name_ref_by_syntax(name_ref, sema)
|
||||
}
|
||||
None => HighlightTag::UnresolvedReference.into(),
|
||||
}
|
||||
}
|
||||
|
@ -540,9 +566,20 @@ fn highlight_element(
|
|||
}
|
||||
}
|
||||
p if p.is_punct() => match p {
|
||||
T![::] | T![->] | T![=>] | T![&] | T![..] | T![=] | T![@] => {
|
||||
HighlightTag::Operator.into()
|
||||
T![&] => {
|
||||
let h = HighlightTag::Operator.into();
|
||||
let is_unsafe = element
|
||||
.parent()
|
||||
.and_then(ast::RefExpr::cast)
|
||||
.map(|ref_expr| sema.is_unsafe_ref_expr(&ref_expr))
|
||||
.unwrap_or(false);
|
||||
if is_unsafe {
|
||||
h | HighlightModifier::Unsafe
|
||||
} else {
|
||||
h
|
||||
}
|
||||
}
|
||||
T![::] | T![->] | T![=>] | T![..] | T![=] | T![@] => HighlightTag::Operator.into(),
|
||||
T![!] if element.parent().and_then(ast::MacroCall::cast).is_some() => {
|
||||
HighlightTag::Macro.into()
|
||||
}
|
||||
|
@ -623,6 +660,18 @@ fn highlight_element(
|
|||
HighlightTag::SelfKeyword.into()
|
||||
}
|
||||
}
|
||||
T![ref] => element
|
||||
.parent()
|
||||
.and_then(ast::IdentPat::cast)
|
||||
.and_then(|ident_pat| {
|
||||
if sema.is_unsafe_ident_pat(&ident_pat) {
|
||||
Some(HighlightModifier::Unsafe)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.map(|modifier| h | modifier)
|
||||
.unwrap_or(h),
|
||||
_ => h,
|
||||
}
|
||||
}
|
||||
|
@ -652,16 +701,40 @@ fn is_child_of_impl(element: &SyntaxElement) -> bool {
|
|||
}
|
||||
}
|
||||
|
||||
fn highlight_name(db: &RootDatabase, def: Definition) -> Highlight {
|
||||
fn highlight_name(
|
||||
sema: &Semantics<RootDatabase>,
|
||||
db: &RootDatabase,
|
||||
def: Definition,
|
||||
name_ref: Option<ast::NameRef>,
|
||||
possibly_unsafe: bool,
|
||||
) -> Highlight {
|
||||
match def {
|
||||
Definition::Macro(_) => HighlightTag::Macro,
|
||||
Definition::Field(_) => HighlightTag::Field,
|
||||
Definition::Field(field) => {
|
||||
let mut h = HighlightTag::Field.into();
|
||||
if possibly_unsafe {
|
||||
if let VariantDef::Union(_) = field.parent_def(db) {
|
||||
h |= HighlightModifier::Unsafe;
|
||||
}
|
||||
}
|
||||
|
||||
return h;
|
||||
}
|
||||
Definition::ModuleDef(def) => match def {
|
||||
hir::ModuleDef::Module(_) => HighlightTag::Module,
|
||||
hir::ModuleDef::Function(func) => {
|
||||
let mut h = HighlightTag::Function.into();
|
||||
if func.is_unsafe(db) {
|
||||
h |= HighlightModifier::Unsafe;
|
||||
} else {
|
||||
let is_unsafe = name_ref
|
||||
.and_then(|name_ref| name_ref.syntax().parent())
|
||||
.and_then(ast::MethodCallExpr::cast)
|
||||
.map(|method_call_expr| sema.is_unsafe_method_call(method_call_expr))
|
||||
.unwrap_or(false);
|
||||
if is_unsafe {
|
||||
h |= HighlightModifier::Unsafe;
|
||||
}
|
||||
}
|
||||
return h;
|
||||
}
|
||||
|
@ -677,6 +750,7 @@ fn highlight_name(db: &RootDatabase, def: Definition) -> Highlight {
|
|||
let mut h = Highlight::new(HighlightTag::Static);
|
||||
if s.is_mut(db) {
|
||||
h |= HighlightModifier::Mutable;
|
||||
h |= HighlightModifier::Unsafe;
|
||||
}
|
||||
return h;
|
||||
}
|
||||
|
@ -724,7 +798,7 @@ fn highlight_name_by_syntax(name: ast::Name) -> Highlight {
|
|||
tag.into()
|
||||
}
|
||||
|
||||
fn highlight_name_ref_by_syntax(name: ast::NameRef) -> Highlight {
|
||||
fn highlight_name_ref_by_syntax(name: ast::NameRef, sema: &Semantics<RootDatabase>) -> Highlight {
|
||||
let default = HighlightTag::UnresolvedReference;
|
||||
|
||||
let parent = match name.syntax().parent() {
|
||||
|
@ -732,9 +806,36 @@ fn highlight_name_ref_by_syntax(name: ast::NameRef) -> Highlight {
|
|||
_ => return default.into(),
|
||||
};
|
||||
|
||||
let tag = match parent.kind() {
|
||||
METHOD_CALL_EXPR => HighlightTag::Function,
|
||||
FIELD_EXPR => HighlightTag::Field,
|
||||
match parent.kind() {
|
||||
METHOD_CALL_EXPR => {
|
||||
let mut h = Highlight::new(HighlightTag::Function);
|
||||
let is_unsafe = ast::MethodCallExpr::cast(parent)
|
||||
.map(|method_call_expr| sema.is_unsafe_method_call(method_call_expr))
|
||||
.unwrap_or(false);
|
||||
if is_unsafe {
|
||||
h |= HighlightModifier::Unsafe;
|
||||
}
|
||||
|
||||
h
|
||||
}
|
||||
FIELD_EXPR => {
|
||||
let h = HighlightTag::Field;
|
||||
let is_union = ast::FieldExpr::cast(parent)
|
||||
.and_then(|field_expr| {
|
||||
let field = sema.resolve_field(&field_expr)?;
|
||||
Some(if let VariantDef::Union(_) = field.parent_def(sema.db) {
|
||||
true
|
||||
} else {
|
||||
false
|
||||
})
|
||||
})
|
||||
.unwrap_or(false);
|
||||
if is_union {
|
||||
h | HighlightModifier::Unsafe
|
||||
} else {
|
||||
h.into()
|
||||
}
|
||||
}
|
||||
PATH_SEGMENT => {
|
||||
let path = match parent.parent().and_then(ast::Path::cast) {
|
||||
Some(it) => it,
|
||||
|
@ -758,18 +859,15 @@ fn highlight_name_ref_by_syntax(name: ast::NameRef) -> Highlight {
|
|||
};
|
||||
|
||||
match parent.kind() {
|
||||
CALL_EXPR => HighlightTag::Function,
|
||||
_ => {
|
||||
if name.text().chars().next().unwrap_or_default().is_uppercase() {
|
||||
HighlightTag::Struct
|
||||
} else {
|
||||
HighlightTag::Constant
|
||||
}
|
||||
CALL_EXPR => HighlightTag::Function.into(),
|
||||
_ => if name.text().chars().next().unwrap_or_default().is_uppercase() {
|
||||
HighlightTag::Struct.into()
|
||||
} else {
|
||||
HighlightTag::Constant
|
||||
}
|
||||
.into(),
|
||||
}
|
||||
}
|
||||
_ => default,
|
||||
};
|
||||
|
||||
tag.into()
|
||||
_ => default.into(),
|
||||
}
|
||||
}
|
||||
|
|
|
@ -4,8 +4,8 @@ use std::{collections::BTreeMap, convert::TryFrom};
|
|||
|
||||
use ast::{HasQuotes, HasStringValue};
|
||||
use hir::Semantics;
|
||||
use itertools::Itertools;
|
||||
use ra_syntax::{ast, AstToken, SyntaxNode, SyntaxToken, TextRange, TextSize};
|
||||
use stdx::SepBy;
|
||||
|
||||
use crate::{
|
||||
call_info::ActiveParameter, Analysis, Highlight, HighlightModifier, HighlightTag,
|
||||
|
@ -129,8 +129,7 @@ pub(super) fn extract_doc_comments(
|
|||
|
||||
line[pos..].to_owned()
|
||||
})
|
||||
.sep_by("\n")
|
||||
.to_string();
|
||||
.join("\n");
|
||||
|
||||
if doctest.is_empty() {
|
||||
return None;
|
||||
|
|
|
@ -275,19 +275,64 @@ fn test_unsafe_highlighting() {
|
|||
r#"
|
||||
unsafe fn unsafe_fn() {}
|
||||
|
||||
union Union {
|
||||
a: u32,
|
||||
b: f32,
|
||||
}
|
||||
|
||||
struct HasUnsafeFn;
|
||||
|
||||
impl HasUnsafeFn {
|
||||
unsafe fn unsafe_method(&self) {}
|
||||
}
|
||||
|
||||
struct TypeForStaticMut {
|
||||
a: u8
|
||||
}
|
||||
|
||||
static mut global_mut: TypeForStaticMut = TypeForStaticMut { a: 0 };
|
||||
|
||||
#[repr(packed)]
|
||||
struct Packed {
|
||||
a: u16,
|
||||
}
|
||||
|
||||
trait DoTheAutoref {
|
||||
fn calls_autoref(&self);
|
||||
}
|
||||
|
||||
impl DoTheAutoref for u16 {
|
||||
fn calls_autoref(&self) {}
|
||||
}
|
||||
|
||||
fn main() {
|
||||
let x = &5 as *const usize;
|
||||
let x = &5 as *const _ as *const usize;
|
||||
let u = Union { b: 0 };
|
||||
unsafe {
|
||||
// unsafe fn and method calls
|
||||
unsafe_fn();
|
||||
let b = u.b;
|
||||
match u {
|
||||
Union { b: 0 } => (),
|
||||
Union { a } => (),
|
||||
}
|
||||
HasUnsafeFn.unsafe_method();
|
||||
let y = *(x);
|
||||
let z = -x;
|
||||
|
||||
// unsafe deref
|
||||
let y = *x;
|
||||
|
||||
// unsafe access to a static mut
|
||||
let a = global_mut.a;
|
||||
|
||||
// unsafe ref of packed fields
|
||||
let packed = Packed { a: 0 };
|
||||
let a = &packed.a;
|
||||
let ref a = packed.a;
|
||||
let Packed { ref a } = packed;
|
||||
let Packed { a: ref _a } = packed;
|
||||
|
||||
// unsafe auto ref of packed field
|
||||
packed.a.calls_autoref();
|
||||
}
|
||||
}
|
||||
"#
|
||||
|
@ -373,6 +418,23 @@ macro_rules! noop {
|
|||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_extern_crate() {
|
||||
check_highlighting(
|
||||
r#"
|
||||
//- /main.rs
|
||||
extern crate std;
|
||||
extern crate alloc as abc;
|
||||
//- /std/lib.rs
|
||||
pub struct S;
|
||||
//- /alloc/lib.rs
|
||||
pub struct A
|
||||
"#,
|
||||
expect_file!["crates/ra_ide/test_data/highlight_extern_crate.html"],
|
||||
false,
|
||||
);
|
||||
}
|
||||
|
||||
/// Highlights the code given by the `ra_fixture` argument, renders the
|
||||
/// result as HTML, and compares it with the HTML file given as `snapshot`.
|
||||
/// Note that the `snapshot` file is overwritten by the rendered HTML.
|
||||
|
|
40
crates/ra_ide/test_data/highlight_extern_crate.html
Normal file
|
@ -0,0 +1,40 @@
|
|||
|
||||
<style>
|
||||
body { margin: 0; }
|
||||
pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; }
|
||||
|
||||
.lifetime { color: #DFAF8F; font-style: italic; }
|
||||
.comment { color: #7F9F7F; }
|
||||
.documentation { color: #629755; }
|
||||
.injected { opacity: 0.65 ; }
|
||||
.struct, .enum { color: #7CB8BB; }
|
||||
.enum_variant { color: #BDE0F3; }
|
||||
.string_literal { color: #CC9393; }
|
||||
.field { color: #94BFF3; }
|
||||
.function { color: #93E0E3; }
|
||||
.function.unsafe { color: #BC8383; }
|
||||
.operator.unsafe { color: #BC8383; }
|
||||
.parameter { color: #94BFF3; }
|
||||
.text { color: #DCDCCC; }
|
||||
.type { color: #7CB8BB; }
|
||||
.builtin_type { color: #8CD0D3; }
|
||||
.type_param { color: #DFAF8F; }
|
||||
.attribute { color: #94BFF3; }
|
||||
.numeric_literal { color: #BFEBBF; }
|
||||
.bool_literal { color: #BFE6EB; }
|
||||
.macro { color: #94BFF3; }
|
||||
.module { color: #AFD8AF; }
|
||||
.value_param { color: #DCDCCC; }
|
||||
.variable { color: #DCDCCC; }
|
||||
.format_specifier { color: #CC696B; }
|
||||
.mutable { text-decoration: underline; }
|
||||
.escape_sequence { color: #94BFF3; }
|
||||
.keyword { color: #F0DFAF; font-weight: bold; }
|
||||
.keyword.unsafe { color: #BC8383; font-weight: bold; }
|
||||
.control { font-style: italic; }
|
||||
|
||||
.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
|
||||
</style>
|
||||
<pre><code><span class="keyword">extern</span> <span class="keyword">crate</span> <span class="module">std</span><span class="punctuation">;</span>
|
||||
<span class="keyword">extern</span> <span class="keyword">crate</span> <span class="module">alloc</span> <span class="keyword">as</span> <span class="module">abc</span><span class="punctuation">;</span>
|
||||
</code></pre>
|
|
@ -37,18 +37,63 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
|
|||
</style>
|
||||
<pre><code><span class="keyword unsafe">unsafe</span> <span class="keyword">fn</span> <span class="function declaration unsafe">unsafe_fn</span><span class="punctuation">(</span><span class="punctuation">)</span> <span class="punctuation">{</span><span class="punctuation">}</span>
|
||||
|
||||
<span class="keyword">union</span> <span class="union declaration">Union</span> <span class="punctuation">{</span>
|
||||
<span class="field declaration">a</span><span class="punctuation">:</span> <span class="builtin_type">u32</span><span class="punctuation">,</span>
|
||||
<span class="field declaration">b</span><span class="punctuation">:</span> <span class="builtin_type">f32</span><span class="punctuation">,</span>
|
||||
<span class="punctuation">}</span>
|
||||
|
||||
<span class="keyword">struct</span> <span class="struct declaration">HasUnsafeFn</span><span class="punctuation">;</span>
|
||||
|
||||
<span class="keyword">impl</span> <span class="struct">HasUnsafeFn</span> <span class="punctuation">{</span>
|
||||
<span class="keyword unsafe">unsafe</span> <span class="keyword">fn</span> <span class="function declaration unsafe">unsafe_method</span><span class="punctuation">(</span><span class="operator">&</span><span class="self_keyword">self</span><span class="punctuation">)</span> <span class="punctuation">{</span><span class="punctuation">}</span>
|
||||
<span class="punctuation">}</span>
|
||||
|
||||
<span class="keyword">struct</span> <span class="struct declaration">TypeForStaticMut</span> <span class="punctuation">{</span>
|
||||
<span class="field declaration">a</span><span class="punctuation">:</span> <span class="builtin_type">u8</span>
|
||||
<span class="punctuation">}</span>
|
||||
|
||||
<span class="keyword">static</span> <span class="keyword">mut</span> <span class="static declaration mutable unsafe">global_mut</span><span class="punctuation">:</span> <span class="struct">TypeForStaticMut</span> <span class="operator">=</span> <span class="struct">TypeForStaticMut</span> <span class="punctuation">{</span> <span class="field">a</span><span class="punctuation">:</span> <span class="numeric_literal">0</span> <span class="punctuation">}</span><span class="punctuation">;</span>
|
||||
|
||||
<span class="attribute">#</span><span class="attribute">[</span><span class="function attribute">repr</span><span class="punctuation">(</span><span class="attribute">packed</span><span class="punctuation">)</span><span class="attribute">]</span>
|
||||
<span class="keyword">struct</span> <span class="struct declaration">Packed</span> <span class="punctuation">{</span>
|
||||
<span class="field declaration">a</span><span class="punctuation">:</span> <span class="builtin_type">u16</span><span class="punctuation">,</span>
|
||||
<span class="punctuation">}</span>
|
||||
|
||||
<span class="keyword">trait</span> <span class="trait declaration">DoTheAutoref</span> <span class="punctuation">{</span>
|
||||
<span class="keyword">fn</span> <span class="function declaration">calls_autoref</span><span class="punctuation">(</span><span class="operator">&</span><span class="self_keyword">self</span><span class="punctuation">)</span><span class="punctuation">;</span>
|
||||
<span class="punctuation">}</span>
|
||||
|
||||
<span class="keyword">impl</span> <span class="trait">DoTheAutoref</span> <span class="keyword">for</span> <span class="builtin_type">u16</span> <span class="punctuation">{</span>
|
||||
<span class="keyword">fn</span> <span class="function declaration">calls_autoref</span><span class="punctuation">(</span><span class="operator">&</span><span class="self_keyword">self</span><span class="punctuation">)</span> <span class="punctuation">{</span><span class="punctuation">}</span>
|
||||
<span class="punctuation">}</span>
|
||||
|
||||
<span class="keyword">fn</span> <span class="function declaration">main</span><span class="punctuation">(</span><span class="punctuation">)</span> <span class="punctuation">{</span>
|
||||
<span class="keyword">let</span> <span class="variable declaration">x</span> <span class="operator">=</span> <span class="operator">&</span><span class="numeric_literal">5</span> <span class="keyword">as</span> <span class="keyword">*</span><span class="keyword">const</span> <span class="builtin_type">usize</span><span class="punctuation">;</span>
|
||||
<span class="keyword">let</span> <span class="variable declaration">x</span> <span class="operator">=</span> <span class="operator">&</span><span class="numeric_literal">5</span> <span class="keyword">as</span> <span class="keyword">*</span><span class="keyword">const</span> <span class="punctuation">_</span> <span class="keyword">as</span> <span class="keyword">*</span><span class="keyword">const</span> <span class="builtin_type">usize</span><span class="punctuation">;</span>
|
||||
<span class="keyword">let</span> <span class="variable declaration">u</span> <span class="operator">=</span> <span class="union">Union</span> <span class="punctuation">{</span> <span class="field">b</span><span class="punctuation">:</span> <span class="numeric_literal">0</span> <span class="punctuation">}</span><span class="punctuation">;</span>
|
||||
<span class="keyword unsafe">unsafe</span> <span class="punctuation">{</span>
|
||||
<span class="comment">// unsafe fn and method calls</span>
|
||||
<span class="function unsafe">unsafe_fn</span><span class="punctuation">(</span><span class="punctuation">)</span><span class="punctuation">;</span>
|
||||
<span class="keyword">let</span> <span class="variable declaration">b</span> <span class="operator">=</span> <span class="variable">u</span><span class="punctuation">.</span><span class="field unsafe">b</span><span class="punctuation">;</span>
|
||||
<span class="keyword control">match</span> <span class="variable">u</span> <span class="punctuation">{</span>
|
||||
<span class="union">Union</span> <span class="punctuation">{</span> <span class="field unsafe">b</span><span class="punctuation">:</span> <span class="numeric_literal">0</span> <span class="punctuation">}</span> <span class="operator">=></span> <span class="punctuation">(</span><span class="punctuation">)</span><span class="punctuation">,</span>
|
||||
<span class="union">Union</span> <span class="punctuation">{</span> <span class="field unsafe">a</span> <span class="punctuation">}</span> <span class="operator">=></span> <span class="punctuation">(</span><span class="punctuation">)</span><span class="punctuation">,</span>
|
||||
<span class="punctuation">}</span>
|
||||
<span class="struct">HasUnsafeFn</span><span class="punctuation">.</span><span class="function unsafe">unsafe_method</span><span class="punctuation">(</span><span class="punctuation">)</span><span class="punctuation">;</span>
|
||||
<span class="keyword">let</span> <span class="variable declaration">y</span> <span class="operator">=</span> <span class="operator unsafe">*</span><span class="punctuation">(</span><span class="variable">x</span><span class="punctuation">)</span><span class="punctuation">;</span>
|
||||
<span class="keyword">let</span> <span class="variable declaration">z</span> <span class="operator">=</span> <span class="numeric_literal">-</span><span class="variable">x</span><span class="punctuation">;</span>
|
||||
|
||||
<span class="comment">// unsafe deref</span>
|
||||
<span class="keyword">let</span> <span class="variable declaration">y</span> <span class="operator">=</span> <span class="operator unsafe">*</span><span class="variable">x</span><span class="punctuation">;</span>
|
||||
|
||||
<span class="comment">// unsafe access to a static mut</span>
|
||||
<span class="keyword">let</span> <span class="variable declaration">a</span> <span class="operator">=</span> <span class="static mutable unsafe">global_mut</span><span class="punctuation">.</span><span class="field">a</span><span class="punctuation">;</span>
|
||||
|
||||
<span class="comment">// unsafe ref of packed fields</span>
|
||||
<span class="keyword">let</span> <span class="variable declaration">packed</span> <span class="operator">=</span> <span class="struct">Packed</span> <span class="punctuation">{</span> <span class="field">a</span><span class="punctuation">:</span> <span class="numeric_literal">0</span> <span class="punctuation">}</span><span class="punctuation">;</span>
|
||||
<span class="keyword">let</span> <span class="variable declaration">a</span> <span class="operator">=</span> <span class="operator unsafe">&</span><span class="variable">packed</span><span class="punctuation">.</span><span class="field">a</span><span class="punctuation">;</span>
|
||||
<span class="keyword">let</span> <span class="keyword unsafe">ref</span> <span class="variable declaration">a</span> <span class="operator">=</span> <span class="variable">packed</span><span class="punctuation">.</span><span class="field">a</span><span class="punctuation">;</span>
|
||||
<span class="keyword">let</span> <span class="struct">Packed</span> <span class="punctuation">{</span> <span class="keyword unsafe">ref</span> <span class="field">a</span> <span class="punctuation">}</span> <span class="operator">=</span> <span class="variable">packed</span><span class="punctuation">;</span>
|
||||
<span class="keyword">let</span> <span class="struct">Packed</span> <span class="punctuation">{</span> <span class="field">a</span><span class="punctuation">:</span> <span class="keyword unsafe">ref</span> <span class="variable declaration">_a</span> <span class="punctuation">}</span> <span class="operator">=</span> <span class="variable">packed</span><span class="punctuation">;</span>
|
||||
|
||||
<span class="comment">// unsafe auto ref of packed field</span>
|
||||
<span class="variable">packed</span><span class="punctuation">.</span><span class="field">a</span><span class="punctuation">.</span><span class="function unsafe">calls_autoref</span><span class="punctuation">(</span><span class="punctuation">)</span><span class="punctuation">;</span>
|
||||
<span class="punctuation">}</span>
|
||||
<span class="punctuation">}</span></code></pre>
|
|
@ -64,7 +64,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
|
|||
<span class="punctuation">}</span>
|
||||
<span class="punctuation">}</span>
|
||||
|
||||
<span class="keyword">static</span> <span class="keyword">mut</span> <span class="static declaration mutable">STATIC_MUT</span><span class="punctuation">:</span> <span class="builtin_type">i32</span> <span class="operator">=</span> <span class="numeric_literal">0</span><span class="punctuation">;</span>
|
||||
<span class="keyword">static</span> <span class="keyword">mut</span> <span class="static declaration mutable unsafe">STATIC_MUT</span><span class="punctuation">:</span> <span class="builtin_type">i32</span> <span class="operator">=</span> <span class="numeric_literal">0</span><span class="punctuation">;</span>
|
||||
|
||||
<span class="keyword">fn</span> <span class="function declaration">foo</span><span class="punctuation"><</span><span class="lifetime declaration">'a</span><span class="punctuation">,</span> <span class="type_param declaration">T</span><span class="punctuation">></span><span class="punctuation">(</span><span class="punctuation">)</span> <span class="operator">-></span> <span class="type_param">T</span> <span class="punctuation">{</span>
|
||||
<span class="function">foo</span><span class="operator">::</span><span class="punctuation"><</span><span class="lifetime">'a</span><span class="punctuation">,</span> <span class="builtin_type">i32</span><span class="punctuation">></span><span class="punctuation">(</span><span class="punctuation">)</span>
|
||||
|
@ -97,7 +97,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
|
|||
<span class="punctuation">}</span>
|
||||
<span class="keyword unsafe">unsafe</span> <span class="punctuation">{</span>
|
||||
<span class="variable mutable">vec</span><span class="punctuation">.</span><span class="unresolved_reference">set_len</span><span class="punctuation">(</span><span class="numeric_literal">0</span><span class="punctuation">)</span><span class="punctuation">;</span>
|
||||
<span class="static mutable">STATIC_MUT</span> <span class="operator">=</span> <span class="numeric_literal">1</span><span class="punctuation">;</span>
|
||||
<span class="static mutable unsafe">STATIC_MUT</span> <span class="operator">=</span> <span class="numeric_literal">1</span><span class="punctuation">;</span>
|
||||
<span class="punctuation">}</span>
|
||||
|
||||
<span class="keyword control">for</span> <span class="variable declaration">e</span> <span class="keyword control">in</span> <span class="variable mutable">vec</span> <span class="punctuation">{</span>
|
||||
|
|
|
@ -6,8 +6,8 @@
|
|||
// FIXME: this badly needs rename/rewrite (matklad, 2020-02-06).
|
||||
|
||||
use hir::{
|
||||
Field, HasVisibility, ImplDef, Local, MacroDef, Module, ModuleDef, Name, PathResolution,
|
||||
Semantics, TypeParam, Visibility,
|
||||
db::HirDatabase, Crate, Field, HasVisibility, ImplDef, Local, MacroDef, Module, ModuleDef,
|
||||
Name, PathResolution, Semantics, TypeParam, Visibility,
|
||||
};
|
||||
use ra_prof::profile;
|
||||
use ra_syntax::{
|
||||
|
@ -80,6 +80,7 @@ impl Definition {
|
|||
|
||||
#[derive(Debug)]
|
||||
pub enum NameClass {
|
||||
ExternCrate(Crate),
|
||||
Definition(Definition),
|
||||
/// `None` in `if let None = Some(82) {}`
|
||||
ConstReference(Definition),
|
||||
|
@ -90,16 +91,18 @@ pub enum NameClass {
|
|||
}
|
||||
|
||||
impl NameClass {
|
||||
pub fn into_definition(self) -> Option<Definition> {
|
||||
match self {
|
||||
NameClass::Definition(it) => Some(it),
|
||||
NameClass::ConstReference(_) => None,
|
||||
NameClass::FieldShorthand { local, field: _ } => Some(Definition::Local(local)),
|
||||
}
|
||||
pub fn into_definition(self, db: &dyn HirDatabase) -> Option<Definition> {
|
||||
Some(match self {
|
||||
NameClass::ExternCrate(krate) => Definition::ModuleDef(krate.root_module(db).into()),
|
||||
NameClass::Definition(it) => it,
|
||||
NameClass::ConstReference(_) => return None,
|
||||
NameClass::FieldShorthand { local, field: _ } => Definition::Local(local),
|
||||
})
|
||||
}
|
||||
|
||||
pub fn definition(self) -> Definition {
|
||||
pub fn definition(self, db: &dyn HirDatabase) -> Definition {
|
||||
match self {
|
||||
NameClass::ExternCrate(krate) => Definition::ModuleDef(krate.root_module(db).into()),
|
||||
NameClass::Definition(it) | NameClass::ConstReference(it) => it,
|
||||
NameClass::FieldShorthand { local: _, field } => field,
|
||||
}
|
||||
|
@ -120,32 +123,37 @@ pub fn classify_name(sema: &Semantics<RootDatabase>, name: &ast::Name) -> Option
|
|||
match_ast! {
|
||||
match parent {
|
||||
ast::Rename(it) => {
|
||||
let use_tree = it.syntax().parent().and_then(ast::UseTree::cast)?;
|
||||
let path = use_tree.path()?;
|
||||
let path_segment = path.segment()?;
|
||||
let name_ref_class = path_segment
|
||||
.name_ref()
|
||||
// The rename might be from a `self` token, so fallback to the name higher
|
||||
// in the use tree.
|
||||
.or_else(||{
|
||||
if path_segment.self_token().is_none() {
|
||||
return None;
|
||||
}
|
||||
if let Some(use_tree) = it.syntax().parent().and_then(ast::UseTree::cast) {
|
||||
let path = use_tree.path()?;
|
||||
let path_segment = path.segment()?;
|
||||
let name_ref_class = path_segment
|
||||
.name_ref()
|
||||
// The rename might be from a `self` token, so fallback to the name higher
|
||||
// in the use tree.
|
||||
.or_else(||{
|
||||
if path_segment.self_token().is_none() {
|
||||
return None;
|
||||
}
|
||||
|
||||
let use_tree = use_tree
|
||||
.syntax()
|
||||
.parent()
|
||||
.as_ref()
|
||||
// Skip over UseTreeList
|
||||
.and_then(SyntaxNode::parent)
|
||||
.and_then(ast::UseTree::cast)?;
|
||||
let path = use_tree.path()?;
|
||||
let path_segment = path.segment()?;
|
||||
path_segment.name_ref()
|
||||
})
|
||||
.and_then(|name_ref| classify_name_ref(sema, &name_ref))?;
|
||||
let use_tree = use_tree
|
||||
.syntax()
|
||||
.parent()
|
||||
.as_ref()
|
||||
// Skip over UseTreeList
|
||||
.and_then(SyntaxNode::parent)
|
||||
.and_then(ast::UseTree::cast)?;
|
||||
let path = use_tree.path()?;
|
||||
let path_segment = path.segment()?;
|
||||
path_segment.name_ref()
|
||||
})
|
||||
.and_then(|name_ref| classify_name_ref(sema, &name_ref))?;
|
||||
|
||||
Some(NameClass::Definition(name_ref_class.definition()))
|
||||
Some(NameClass::Definition(name_ref_class.definition(sema.db)))
|
||||
} else {
|
||||
let extern_crate = it.syntax().parent().and_then(ast::ExternCrate::cast)?;
|
||||
let resolved = sema.resolve_extern_crate(&extern_crate)?;
|
||||
Some(NameClass::ExternCrate(resolved))
|
||||
}
|
||||
},
|
||||
ast::IdentPat(it) => {
|
||||
let local = sema.to_def(&it)?;
|
||||
|
@ -220,13 +228,15 @@ pub fn classify_name(sema: &Semantics<RootDatabase>, name: &ast::Name) -> Option
|
|||
|
||||
#[derive(Debug)]
|
||||
pub enum NameRefClass {
|
||||
ExternCrate(Crate),
|
||||
Definition(Definition),
|
||||
FieldShorthand { local: Local, field: Definition },
|
||||
}
|
||||
|
||||
impl NameRefClass {
|
||||
pub fn definition(self) -> Definition {
|
||||
pub fn definition(self, db: &dyn HirDatabase) -> Definition {
|
||||
match self {
|
||||
NameRefClass::ExternCrate(krate) => Definition::ModuleDef(krate.root_module(db).into()),
|
||||
NameRefClass::Definition(def) => def,
|
||||
NameRefClass::FieldShorthand { local, field: _ } => Definition::Local(local),
|
||||
}
|
||||
|
@ -307,9 +317,15 @@ pub fn classify_name_ref(
|
|||
}
|
||||
}
|
||||
|
||||
let path = name_ref.syntax().ancestors().find_map(ast::Path::cast)?;
|
||||
let resolved = sema.resolve_path(&path)?;
|
||||
Some(NameRefClass::Definition(resolved.into()))
|
||||
if let Some(path) = name_ref.syntax().ancestors().find_map(ast::Path::cast) {
|
||||
if let Some(resolved) = sema.resolve_path(&path) {
|
||||
return Some(NameRefClass::Definition(resolved.into()));
|
||||
}
|
||||
}
|
||||
|
||||
let extern_crate = ast::ExternCrate::cast(parent)?;
|
||||
let resolved = sema.resolve_extern_crate(&extern_crate)?;
|
||||
Some(NameRefClass::ExternCrate(resolved))
|
||||
}
|
||||
|
||||
impl From<PathResolution> for Definition {
|
||||
|
|
|
@ -61,5 +61,5 @@ fn get_name_definition<'a>(
|
|||
candidate_node
|
||||
};
|
||||
let name = ast::Name::cast(candidate_name_node)?;
|
||||
classify_name(sema, &name)?.into_definition()
|
||||
classify_name(sema, &name)?.into_definition(sema.db)
|
||||
}
|
||||
|
|
|
@@ -276,7 +276,7 @@ impl<'a> TtIter<'a> {
Ok(tt::Subtree {
delimiter: None,
token_trees: vec![
tt::Leaf::Punct(punct.clone()).into(),
tt::Leaf::Punct(*punct).into(),
tt::Leaf::Ident(ident.clone()).into(),
],
}

@@ -110,7 +110,7 @@ pub(crate) mod fragments {
}

pub(crate) fn item(p: &mut Parser) {
items::item_or_macro(p, true, items::ItemFlavor::Mod)
items::item_or_macro(p, true)
}

pub(crate) fn macro_items(p: &mut Parser) {
@@ -73,7 +73,7 @@ pub(super) fn stmt(p: &mut Parser, with_semi: StmtWithSemi) {

// test block_items
// fn a() { fn b() {} }
let m = match items::maybe_item(p, m, items::ItemFlavor::Mod) {
let m = match items::maybe_item(p, m) {
Ok(()) => return,
Err(m) => m,
};
@ -509,7 +509,6 @@ fn method_call_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker {
// x.1i32;
// x.0x01;
// }
#[allow(clippy::if_same_then_else)]
fn field_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker {
assert!(p.at(T![.]));
let m = lhs.precede(p);
@@ -22,24 +22,19 @@ use super::*;
pub(super) fn mod_contents(p: &mut Parser, stop_on_r_curly: bool) {
attributes::inner_attributes(p);
while !(stop_on_r_curly && p.at(T!['}']) || p.at(EOF)) {
item_or_macro(p, stop_on_r_curly, ItemFlavor::Mod)
item_or_macro(p, stop_on_r_curly)
}
}

pub(super) enum ItemFlavor {
Mod,
Trait,
}

pub(super) const ITEM_RECOVERY_SET: TokenSet = token_set![
FN_KW, STRUCT_KW, ENUM_KW, IMPL_KW, TRAIT_KW, CONST_KW, STATIC_KW, LET_KW, MOD_KW, PUB_KW,
CRATE_KW, USE_KW, MACRO_KW
];

pub(super) fn item_or_macro(p: &mut Parser, stop_on_r_curly: bool, flavor: ItemFlavor) {
pub(super) fn item_or_macro(p: &mut Parser, stop_on_r_curly: bool) {
let m = p.start();
attributes::outer_attributes(p);
let m = match maybe_item(p, m, flavor) {
let m = match maybe_item(p, m) {
Ok(()) => {
if p.at(T![;]) {
p.err_and_bump(

@@ -76,7 +71,7 @@ pub(super) fn item_or_macro(p: &mut Parser, stop_on_r_curly: bool, flavor: ItemF
}
}

pub(super) fn maybe_item(p: &mut Parser, m: Marker, flavor: ItemFlavor) -> Result<(), Marker> {
pub(super) fn maybe_item(p: &mut Parser, m: Marker) -> Result<(), Marker> {
// test_err pub_expr
// fn foo() { pub 92; }
let has_visibility = opt_visibility(p);

@@ -114,38 +109,31 @@ pub(super) fn maybe_item(p: &mut Parser, m: Marker, flavor: ItemFlavor) -> Resul
has_mods = true;
}

if p.at(IDENT)
&& p.at_contextual_kw("default")
&& (match p.nth(1) {
T![impl] => true,
// test default_item
// default impl T for Foo {}
if p.at(IDENT) && p.at_contextual_kw("default") {
match p.nth(1) {
T![fn] | T![type] | T![const] | T![impl] => {
p.bump_remap(T![default]);
has_mods = true;
}
T![unsafe] => {
// test default_unsafe_impl
// default unsafe impl Foo {}

// test default_unsafe_fn
// impl T for Foo {
// test default_unsafe_item
// default unsafe impl T for Foo {
// default unsafe fn foo() {}
// }
if p.nth(2) == T![impl] || p.nth(2) == T![fn] {
if matches!(p.nth(2), T![impl] | T![fn]) {
p.bump_remap(T![default]);
p.bump(T![unsafe]);
has_mods = true;
}
false
}
T![fn] | T![type] | T![const] => {
if let ItemFlavor::Mod = flavor {
true
} else {
false
}
}
_ => false,
})
{
p.bump_remap(T![default]);
has_mods = true;
_ => (),
}
}

// test existential_type
// existential type Foo: Fn() -> usize;
if p.at(IDENT) && p.at_contextual_kw("existential") && p.nth(1) == T![type] {
p.bump_remap(T![existential]);
has_mods = true;
@@ -153,79 +141,31 @@ pub(super) fn maybe_item(p: &mut Parser, m: Marker, flavor: ItemFlavor) -> Resul

// items
match p.current() {
// test async_fn
// async fn foo() {}

// test extern_fn
// extern fn foo() {}

// test const_fn
// const fn foo() {}

// test const_unsafe_fn
// const unsafe fn foo() {}

// test unsafe_extern_fn
// unsafe extern "C" fn foo() {}

// test unsafe_fn
// unsafe fn foo() {}

// test combined_fns
// async unsafe fn foo() {}
// const unsafe fn bar() {}

// test_err wrong_order_fns
// unsafe async fn foo() {}
// unsafe const fn bar() {}
// test fn
// fn foo() {}
T![fn] => {
fn_def(p);
m.complete(p, FN);
}

// test unsafe_trait
// unsafe trait T {}

// test auto_trait
// auto trait T {}

// test unsafe_auto_trait
// unsafe auto trait T {}
// test trait
// trait T {}
T![trait] => {
traits::trait_def(p);
m.complete(p, TRAIT);
}

// test unsafe_impl
// unsafe impl Foo {}

// test default_impl
// default impl Foo {}

// test_err default_fn_type
// trait T {
// default type T = Bar;
// default fn foo() {}
// }

// test default_fn_type
// impl T for Foo {
// default type T = Bar;
// default fn foo() {}
// }
T![const] => {
consts::const_def(p, m);
}

// test unsafe_default_impl
// unsafe default impl Foo {}
// test impl
// impl T for S {}
T![impl] => {
traits::impl_def(p);
m.complete(p, IMPL);
}

// test existential_type
// existential type Foo: Fn() -> usize;
T![type] => {
type_def(p, m);
}
@@ -47,7 +47,7 @@ pub(crate) fn trait_item_list(p: &mut Parser) {
error_block(p, "expected an item");
continue;
}
item_or_macro(p, true, ItemFlavor::Trait);
item_or_macro(p, true);
}
p.expect(T!['}']);
m.complete(p, ASSOC_ITEM_LIST);

@@ -104,7 +104,7 @@ pub(crate) fn impl_item_list(p: &mut Parser) {
error_block(p, "expected an item");
continue;
}
item_or_macro(p, true, ItemFlavor::Mod);
item_or_macro(p, true);
}
p.expect(T!['}']);
m.complete(p, ASSOC_ITEM_LIST);
@@ -269,8 +269,8 @@ impl Marker {
pub(crate) fn complete(mut self, p: &mut Parser, kind: SyntaxKind) -> CompletedMarker {
self.bomb.defuse();
let idx = self.pos as usize;
match p.events[idx] {
Event::Start { kind: ref mut slot, .. } => {
match &mut p.events[idx] {
Event::Start { kind: slot, .. } => {
*slot = kind;
}
_ => unreachable!(),

@@ -320,8 +320,8 @@ impl CompletedMarker {
pub(crate) fn precede(self, p: &mut Parser) -> Marker {
let new_pos = p.start();
let idx = self.start_pos as usize;
match p.events[idx] {
Event::Start { ref mut forward_parent, .. } => {
match &mut p.events[idx] {
Event::Start { forward_parent, .. } => {
*forward_parent = Some(new_pos.pos - self.start_pos);
}
_ => unreachable!(),

@@ -333,12 +333,12 @@ impl CompletedMarker {
pub(crate) fn undo_completion(self, p: &mut Parser) -> Marker {
let start_idx = self.start_pos as usize;
let finish_idx = self.finish_pos as usize;
match p.events[start_idx] {
Event::Start { ref mut kind, forward_parent: None } => *kind = TOMBSTONE,
match &mut p.events[start_idx] {
Event::Start { kind, forward_parent: None } => *kind = TOMBSTONE,
_ => unreachable!(),
}
match p.events[finish_idx] {
ref mut slot @ Event::Finish => *slot = Event::tombstone(),
match &mut p.events[finish_idx] {
slot @ Event::Finish => *slot = Event::tombstone(),
_ => unreachable!(),
}
Marker::new(self.start_pos)
@@ -90,7 +90,7 @@ impl ProcMacroProcessSrv {
}
Some(it) => it,
};
sender.send(Task { req: req.into(), result_tx }).unwrap();
sender.send(Task { req, result_tx }).unwrap();
let res = result_rx
.recv()
.map_err(|_| ra_tt::ExpansionError::Unknown("Proc macro thread is closed.".into()))?;
@@ -24,7 +24,7 @@ impl std::ops::Sub for MemoryUsage {
impl MemoryUsage {
pub fn current() -> MemoryUsage {
cfg_if! {
if #[cfg(target_os = "linux")] {
if #[cfg(all(target_os = "linux", target_env = "gnu"))] {
// Note: This is incredibly slow.
let alloc = unsafe { libc::mallinfo() }.uordblks as isize;
MemoryUsage { allocated: Bytes(alloc) }
@@ -5,7 +5,7 @@ use crate::{parsing, SsrError};
use parsing::Placeholder;
use ra_db::FilePosition;
use ra_syntax::{ast, SmolStr, SyntaxKind, SyntaxNode, SyntaxToken};
use rustc_hash::{FxHashMap, FxHashSet};
use rustc_hash::FxHashMap;
use test_utils::mark;

pub(crate) struct ResolutionScope<'db> {

@@ -124,8 +124,10 @@ impl Resolver<'_, '_> {
.resolution_scope
.resolve_path(&path)
.ok_or_else(|| error!("Failed to resolve path `{}`", node.text()))?;
resolved_paths.insert(node, ResolvedPath { resolution, depth });
return Ok(());
if self.ok_to_use_path_resolution(&resolution) {
resolved_paths.insert(node, ResolvedPath { resolution, depth });
return Ok(());
}
}
}
for node in node.children() {

@@ -149,6 +151,27 @@ impl Resolver<'_, '_> {
}
false
}

fn ok_to_use_path_resolution(&self, resolution: &hir::PathResolution) -> bool {
match resolution {
hir::PathResolution::AssocItem(hir::AssocItem::Function(function)) => {
if function.has_self_param(self.resolution_scope.scope.db) {
// If we don't use this path resolution, then we won't be able to match method
// calls. e.g. `Foo::bar($s)` should match `x.bar()`.
true
} else {
mark::hit!(replace_associated_trait_default_function_call);
false
}
}
hir::PathResolution::AssocItem(_) => {
// Not a function. Could be a constant or an associated type.
mark::hit!(replace_associated_trait_constant);
false
}
_ => true,
}
}
}

impl<'db> ResolutionScope<'db> {

@@ -195,7 +218,7 @@ impl<'db> ResolutionScope<'db> {
adt.ty(self.scope.db).iterate_path_candidates(
self.scope.db,
self.scope.module()?.krate(),
&FxHashSet::default(),
&self.scope.traits_in_scope(),
Some(hir_path.segments().last()?.name),
|_ty, assoc_item| Some(hir::PathResolution::AssocItem(assoc_item)),
)
@@ -549,6 +549,70 @@ fn replace_associated_function_call() {
);
}

#[test]
fn replace_associated_trait_default_function_call() {
mark::check!(replace_associated_trait_default_function_call);
assert_ssr_transform(
"Bar2::foo() ==>> Bar2::foo2()",
r#"
trait Foo { fn foo() {} }
pub struct Bar {}
impl Foo for Bar {}
pub struct Bar2 {}
impl Foo for Bar2 {}
impl Bar2 { fn foo2() {} }
fn main() {
Bar::foo();
Bar2::foo();
}
"#,
expect![[r#"
trait Foo { fn foo() {} }
pub struct Bar {}
impl Foo for Bar {}
pub struct Bar2 {}
impl Foo for Bar2 {}
impl Bar2 { fn foo2() {} }
fn main() {
Bar::foo();
Bar2::foo2();
}
"#]],
);
}

#[test]
fn replace_associated_trait_constant() {
mark::check!(replace_associated_trait_constant);
assert_ssr_transform(
"Bar2::VALUE ==>> Bar2::VALUE_2222",
r#"
trait Foo { const VALUE: i32; const VALUE_2222: i32; }
pub struct Bar {}
impl Foo for Bar { const VALUE: i32 = 1; const VALUE_2222: i32 = 2; }
pub struct Bar2 {}
impl Foo for Bar2 { const VALUE: i32 = 1; const VALUE_2222: i32 = 2; }
impl Bar2 { fn foo2() {} }
fn main() {
Bar::VALUE;
Bar2::VALUE;
}
"#,
expect![[r#"
trait Foo { const VALUE: i32; const VALUE_2222: i32; }
pub struct Bar {}
impl Foo for Bar { const VALUE: i32 = 1; const VALUE_2222: i32 = 2; }
pub struct Bar2 {}
impl Foo for Bar2 { const VALUE: i32 = 1; const VALUE_2222: i32 = 2; }
impl Bar2 { fn foo2() {} }
fn main() {
Bar::VALUE;
Bar2::VALUE_2222;
}
"#]],
);
}

#[test]
fn replace_path_in_different_contexts() {
// Note the <|> inside module a::b which marks the point where the rule is interpreted. We
@@ -1,7 +1,7 @@
//! Various traits that are implemented by ast nodes.
//!
//! The implementations are usually trivial, and live in generated.rs
use stdx::SepBy;
use itertools::Itertools;

use crate::{
ast::{self, support, AstChildren, AstNode, AstToken},

@@ -119,8 +119,7 @@ impl CommentIter {
// of a line in markdown.
line[pos..end].to_owned()
})
.sep_by("\n")
.to_string();
.join("\n");

if has_comments {
Some(docs)
@@ -1,40 +0,0 @@
SOURCE_FILE@0..39
TRAIT@0..38
TRAIT_KW@0..5 "trait"
WHITESPACE@5..6 " "
NAME@6..7
IDENT@6..7 "T"
WHITESPACE@7..8 " "
ASSOC_ITEM_LIST@8..38
L_CURLY@8..9 "{"
WHITESPACE@9..12 "\n "
MACRO_CALL@12..19
PATH@12..19
PATH_SEGMENT@12..19
NAME_REF@12..19
IDENT@12..19 "default"
WHITESPACE@19..20 " "
CONST@20..36
CONST_KW@20..25 "const"
WHITESPACE@25..26 " "
NAME@26..27
IDENT@26..27 "f"
COLON@27..28 ":"
WHITESPACE@28..29 " "
PATH_TYPE@29..31
PATH@29..31
PATH_SEGMENT@29..31
NAME_REF@29..31
IDENT@29..31 "u8"
WHITESPACE@31..32 " "
EQ@32..33 "="
WHITESPACE@33..34 " "
LITERAL@34..35
INT_NUMBER@34..35 "0"
SEMICOLON@35..36 ";"
WHITESPACE@36..37 "\n"
R_CURLY@37..38 "}"
WHITESPACE@38..39 "\n"
error 19..19: expected BANG
error 19..19: expected `{`, `[`, `(`
error 19..19: expected SEMICOLON

@@ -1,3 +0,0 @@
trait T {
default const f: u8 = 0;
}
@@ -1,58 +0,0 @@
SOURCE_FILE@0..62
TRAIT@0..61
TRAIT_KW@0..5 "trait"
WHITESPACE@5..6 " "
NAME@6..7
IDENT@6..7 "T"
WHITESPACE@7..8 " "
ASSOC_ITEM_LIST@8..61
L_CURLY@8..9 "{"
WHITESPACE@9..14 "\n "
MACRO_CALL@14..21
PATH@14..21
PATH_SEGMENT@14..21
NAME_REF@14..21
IDENT@14..21 "default"
WHITESPACE@21..22 " "
TYPE_ALIAS@22..35
TYPE_KW@22..26 "type"
WHITESPACE@26..27 " "
NAME@27..28
IDENT@27..28 "T"
WHITESPACE@28..29 " "
EQ@29..30 "="
WHITESPACE@30..31 " "
PATH_TYPE@31..34
PATH@31..34
PATH_SEGMENT@31..34
NAME_REF@31..34
IDENT@31..34 "Bar"
SEMICOLON@34..35 ";"
WHITESPACE@35..40 "\n "
MACRO_CALL@40..47
PATH@40..47
PATH_SEGMENT@40..47
NAME_REF@40..47
IDENT@40..47 "default"
WHITESPACE@47..48 " "
FN@48..59
FN_KW@48..50 "fn"
WHITESPACE@50..51 " "
NAME@51..54
IDENT@51..54 "foo"
PARAM_LIST@54..56
L_PAREN@54..55 "("
R_PAREN@55..56 ")"
WHITESPACE@56..57 " "
BLOCK_EXPR@57..59
L_CURLY@57..58 "{"
R_CURLY@58..59 "}"
WHITESPACE@59..60 "\n"
R_CURLY@60..61 "}"
WHITESPACE@61..62 "\n"
error 21..21: expected BANG
error 21..21: expected `{`, `[`, `(`
error 21..21: expected SEMICOLON
error 47..47: expected BANG
error 47..47: expected `{`, `[`, `(`
error 47..47: expected SEMICOLON

@@ -1,4 +0,0 @@
trait T {
default type T = Bar;
default fn foo() {}
}
@@ -1,13 +0,0 @@
SOURCE_FILE@0..18
TRAIT@0..17
UNSAFE_KW@0..6 "unsafe"
WHITESPACE@6..7 " "
TRAIT_KW@7..12 "trait"
WHITESPACE@12..13 " "
NAME@13..14
IDENT@13..14 "T"
WHITESPACE@14..15 " "
ASSOC_ITEM_LIST@15..17
L_CURLY@15..16 "{"
R_CURLY@16..17 "}"
WHITESPACE@17..18 "\n"

@@ -1 +0,0 @@
unsafe trait T {}

@@ -1,21 +0,0 @@
SOURCE_FILE@0..30
FN@0..29
UNSAFE_KW@0..6 "unsafe"
WHITESPACE@6..7 " "
ABI@7..17
EXTERN_KW@7..13 "extern"
WHITESPACE@13..14 " "
STRING@14..17 "\"C\""
WHITESPACE@17..18 " "
FN_KW@18..20 "fn"
WHITESPACE@20..21 " "
NAME@21..24
IDENT@21..24 "foo"
PARAM_LIST@24..26
L_PAREN@24..25 "("
R_PAREN@25..26 ")"
WHITESPACE@26..27 " "
BLOCK_EXPR@27..29
L_CURLY@27..28 "{"
R_CURLY@28..29 "}"
WHITESPACE@29..30 "\n"

@@ -1 +0,0 @@
unsafe extern "C" fn foo() {}
@@ -1,18 +0,0 @@
SOURCE_FILE@0..27
IMPL@0..26
UNSAFE_KW@0..6 "unsafe"
WHITESPACE@6..7 " "
DEFAULT_KW@7..14 "default"
WHITESPACE@14..15 " "
IMPL_KW@15..19 "impl"
WHITESPACE@19..20 " "
PATH_TYPE@20..23
PATH@20..23
PATH_SEGMENT@20..23
NAME_REF@20..23
IDENT@20..23 "Foo"
WHITESPACE@23..24 " "
ASSOC_ITEM_LIST@24..26
L_CURLY@24..25 "{"
R_CURLY@25..26 "}"
WHITESPACE@26..27 "\n"

@@ -1 +0,0 @@
unsafe default impl Foo {}

@@ -1,16 +0,0 @@
SOURCE_FILE@0..18
FN@0..17
CONST_KW@0..5 "const"
WHITESPACE@5..6 " "
FN_KW@6..8 "fn"
WHITESPACE@8..9 " "
NAME@9..12
IDENT@9..12 "foo"
PARAM_LIST@12..14
L_PAREN@12..13 "("
R_PAREN@13..14 ")"
WHITESPACE@14..15 " "
BLOCK_EXPR@15..17
L_CURLY@15..16 "{"
R_CURLY@16..17 "}"
WHITESPACE@17..18 "\n"

@@ -1 +0,0 @@
const fn foo() {}
@@ -1,16 +0,0 @@
SOURCE_FILE@0..19
IMPL@0..18
UNSAFE_KW@0..6 "unsafe"
WHITESPACE@6..7 " "
IMPL_KW@7..11 "impl"
WHITESPACE@11..12 " "
PATH_TYPE@12..15
PATH@12..15
PATH_SEGMENT@12..15
NAME_REF@12..15
IDENT@12..15 "Foo"
WHITESPACE@15..16 " "
ASSOC_ITEM_LIST@16..18
L_CURLY@16..17 "{"
R_CURLY@17..18 "}"
WHITESPACE@18..19 "\n"

@@ -1 +0,0 @@
unsafe impl Foo {}

@@ -1,17 +0,0 @@
SOURCE_FILE@0..19
FN@0..18
ABI@0..6
EXTERN_KW@0..6 "extern"
WHITESPACE@6..7 " "
FN_KW@7..9 "fn"
WHITESPACE@9..10 " "
NAME@10..13
IDENT@10..13 "foo"
PARAM_LIST@13..15
L_PAREN@13..14 "("
R_PAREN@14..15 ")"
WHITESPACE@15..16 " "
BLOCK_EXPR@16..18
L_CURLY@16..17 "{"
R_CURLY@17..18 "}"
WHITESPACE@18..19 "\n"

@@ -1 +0,0 @@
extern fn foo() {}
@@ -1,13 +0,0 @@
SOURCE_FILE@0..16
TRAIT@0..15
AUTO_KW@0..4 "auto"
WHITESPACE@4..5 " "
TRAIT_KW@5..10 "trait"
WHITESPACE@10..11 " "
NAME@11..12
IDENT@11..12 "T"
WHITESPACE@12..13 " "
ASSOC_ITEM_LIST@13..15
L_CURLY@13..14 "{"
R_CURLY@14..15 "}"
WHITESPACE@15..16 "\n"

@@ -1 +0,0 @@
auto trait T {}

@@ -1,15 +0,0 @@
SOURCE_FILE@0..23
TRAIT@0..22
UNSAFE_KW@0..6 "unsafe"
WHITESPACE@6..7 " "
AUTO_KW@7..11 "auto"
WHITESPACE@11..12 " "
TRAIT_KW@12..17 "trait"
WHITESPACE@17..18 " "
NAME@18..19
IDENT@18..19 "T"
WHITESPACE@19..20 " "
ASSOC_ITEM_LIST@20..22
L_CURLY@20..21 "{"
R_CURLY@21..22 "}"
WHITESPACE@22..23 "\n"

@@ -1 +0,0 @@
unsafe auto trait T {}
@@ -1,16 +0,0 @@
SOURCE_FILE@0..20
IMPL@0..19
DEFAULT_KW@0..7 "default"
WHITESPACE@7..8 " "
IMPL_KW@8..12 "impl"
WHITESPACE@12..13 " "
PATH_TYPE@13..16
PATH@13..16
PATH_SEGMENT@13..16
NAME_REF@13..16
IDENT@13..16 "Foo"
WHITESPACE@16..17 " "
ASSOC_ITEM_LIST@17..19
L_CURLY@17..18 "{"
R_CURLY@18..19 "}"
WHITESPACE@19..20 "\n"

@@ -1 +0,0 @@
default impl Foo {}

@@ -1,18 +0,0 @@
SOURCE_FILE@0..25
FN@0..24
CONST_KW@0..5 "const"
WHITESPACE@5..6 " "
UNSAFE_KW@6..12 "unsafe"
WHITESPACE@12..13 " "
FN_KW@13..15 "fn"
WHITESPACE@15..16 " "
NAME@16..19
IDENT@16..19 "foo"
PARAM_LIST@19..21
L_PAREN@19..20 "("
R_PAREN@20..21 ")"
WHITESPACE@21..22 " "
BLOCK_EXPR@22..24
L_CURLY@22..23 "{"
R_CURLY@23..24 "}"
WHITESPACE@24..25 "\n"

@@ -1 +0,0 @@
const unsafe fn foo() {}
@@ -1,16 +0,0 @@
SOURCE_FILE@0..19
FN@0..18
UNSAFE_KW@0..6 "unsafe"
WHITESPACE@6..7 " "
FN_KW@7..9 "fn"
WHITESPACE@9..10 " "
NAME@10..13
IDENT@10..13 "foo"
PARAM_LIST@13..15
L_PAREN@13..14 "("
R_PAREN@14..15 ")"
WHITESPACE@15..16 " "
BLOCK_EXPR@16..18
L_CURLY@16..17 "{"
R_CURLY@17..18 "}"
WHITESPACE@18..19 "\n"

@@ -1 +0,0 @@
unsafe fn foo() {}

@@ -1,16 +0,0 @@
SOURCE_FILE@0..18
FN@0..17
ASYNC_KW@0..5 "async"
WHITESPACE@5..6 " "
FN_KW@6..8 "fn"
WHITESPACE@8..9 " "
NAME@9..12
IDENT@9..12 "foo"
PARAM_LIST@12..14
L_PAREN@12..13 "("
R_PAREN@13..14 ")"
WHITESPACE@14..15 " "
BLOCK_EXPR@15..17
L_CURLY@15..16 "{"
R_CURLY@16..17 "}"
WHITESPACE@17..18 "\n"

@@ -1 +0,0 @@
async fn foo() {}
@@ -1,35 +0,0 @@
SOURCE_FILE@0..50
FN@0..24
ASYNC_KW@0..5 "async"
WHITESPACE@5..6 " "
UNSAFE_KW@6..12 "unsafe"
WHITESPACE@12..13 " "
FN_KW@13..15 "fn"
WHITESPACE@15..16 " "
NAME@16..19
IDENT@16..19 "foo"
PARAM_LIST@19..21
L_PAREN@19..20 "("
R_PAREN@20..21 ")"
WHITESPACE@21..22 " "
BLOCK_EXPR@22..24
L_CURLY@22..23 "{"
R_CURLY@23..24 "}"
WHITESPACE@24..25 "\n"
FN@25..49
CONST_KW@25..30 "const"
WHITESPACE@30..31 " "
UNSAFE_KW@31..37 "unsafe"
WHITESPACE@37..38 " "
FN_KW@38..40 "fn"
WHITESPACE@40..41 " "
NAME@41..44
IDENT@41..44 "bar"
PARAM_LIST@44..46
L_PAREN@44..45 "("
R_PAREN@45..46 ")"
WHITESPACE@46..47 " "
BLOCK_EXPR@47..49
L_CURLY@47..48 "{"
R_CURLY@48..49 "}"
WHITESPACE@49..50 "\n"

@@ -1,2 +0,0 @@
async unsafe fn foo() {}
const unsafe fn bar() {}
@@ -1,55 +0,0 @@
SOURCE_FILE@0..69
IMPL@0..68
IMPL_KW@0..4 "impl"
WHITESPACE@4..5 " "
PATH_TYPE@5..6
PATH@5..6
PATH_SEGMENT@5..6
NAME_REF@5..6
IDENT@5..6 "T"
WHITESPACE@6..7 " "
FOR_KW@7..10 "for"
WHITESPACE@10..11 " "
PATH_TYPE@11..14
PATH@11..14
PATH_SEGMENT@11..14
NAME_REF@11..14
IDENT@11..14 "Foo"
WHITESPACE@14..15 " "
ASSOC_ITEM_LIST@15..68
L_CURLY@15..16 "{"
WHITESPACE@16..21 "\n "
TYPE_ALIAS@21..42
DEFAULT_KW@21..28 "default"
WHITESPACE@28..29 " "
TYPE_KW@29..33 "type"
WHITESPACE@33..34 " "
NAME@34..35
IDENT@34..35 "T"
WHITESPACE@35..36 " "
EQ@36..37 "="
WHITESPACE@37..38 " "
PATH_TYPE@38..41
PATH@38..41
PATH_SEGMENT@38..41
NAME_REF@38..41
IDENT@38..41 "Bar"
SEMICOLON@41..42 ";"
WHITESPACE@42..47 "\n "
FN@47..66
DEFAULT_KW@47..54 "default"
WHITESPACE@54..55 " "
FN_KW@55..57 "fn"
WHITESPACE@57..58 " "
NAME@58..61
IDENT@58..61 "foo"
PARAM_LIST@61..63
L_PAREN@61..62 "("
R_PAREN@62..63 ")"
WHITESPACE@63..64 " "
BLOCK_EXPR@64..66
L_CURLY@64..65 "{"
R_CURLY@65..66 "}"
WHITESPACE@66..67 "\n"
R_CURLY@67..68 "}"
WHITESPACE@68..69 "\n"

@@ -1,4 +0,0 @@
impl T for Foo {
default type T = Bar;
default fn foo() {}
}
14
crates/ra_syntax/test_data/parser/inline/ok/0151_fn.rast
Normal file

@@ -0,0 +1,14 @@
SOURCE_FILE@0..12
FN@0..11
FN_KW@0..2 "fn"
WHITESPACE@2..3 " "
NAME@3..6
IDENT@3..6 "foo"
PARAM_LIST@6..8
L_PAREN@6..7 "("
R_PAREN@7..8 ")"
WHITESPACE@8..9 " "
BLOCK_EXPR@9..11
L_CURLY@9..10 "{"
R_CURLY@10..11 "}"
WHITESPACE@11..12 "\n"

1
crates/ra_syntax/test_data/parser/inline/ok/0151_fn.rs
Normal file

@@ -0,0 +1 @@
fn foo() {}
Some files were not shown because too many files have changed in this diff.