mirror of https://github.com/rust-lang/rust-analyzer
synced 2024-12-26 21:13:37 +00:00

⬆️ rust-analyzer

This commit is contained in:
parent bc45c7659a
commit 7e711da2f0

98 changed files with 1801 additions and 943 deletions
Cargo.lock (generated), 6 lines changed

@@ -711,6 +711,7 @@ dependencies = [
  "limit",
  "memchr",
  "once_cell",
  "oorandom",
  "parser",
  "profile",
  "rayon",

@@ -932,9 +933,9 @@ dependencies = [

 [[package]]
 name = "lsp-types"
-version = "0.93.2"
+version = "0.94.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9be6e9c7e2d18f651974370d7aff703f9513e0df6e464fd795660edc77e6ca51"
+checksum = "0b63735a13a1f9cd4f4835223d828ed9c2e35c8c5e61837774399f558b6a1237"
 dependencies = [
  "bitflags",
  "serde",

@@ -1173,6 +1174,7 @@ dependencies = [
  "limit",
  "rustc-ap-rustc_lexer",
  "sourcegen",
  "stdx",
 ]

 [[package]]
@@ -74,3 +74,5 @@ toolchain = { path = "./crates/toolchain", version = "0.0.0" }
 tt = { path = "./crates/tt", version = "0.0.0" }
 vfs-notify = { path = "./crates/vfs-notify", version = "0.0.0" }
 vfs = { path = "./crates/vfs", version = "0.0.0" }
+# non-local crates
+smallvec = { version = "1.10.0", features = ["const_new", "union", "const_generics"] }
@@ -27,7 +27,7 @@ itertools = "0.10.5"
 la-arena = { version = "0.3.0", path = "../../lib/la-arena" }
 once_cell = "1.17.0"
 rustc-hash = "1.1.0"
-smallvec = "1.10.0"
+smallvec.workspace = true
 tracing = "0.1.35"

 rustc_abi = { version = "0.0.20221221", package = "hkalbasi-rustc-ap-rustc_abi", default-features = false }
@@ -2,9 +2,10 @@
 use std::sync::Arc;

 use crate::tt::{Delimiter, DelimiterKind, Leaf, Subtree, TokenTree};
 use base_db::CrateId;
 use cfg::CfgOptions;
 use either::Either;

 use hir_expand::{
     name::{AsName, Name},
     HirFileId, InFile,

@@ -24,12 +25,12 @@ use crate::{
     src::HasChildSource,
     src::HasSource,
     trace::Trace,
     tt::{Delimiter, DelimiterKind, Leaf, Subtree, TokenTree},
     type_ref::TypeRef,
     visibility::RawVisibility,
     EnumId, LocalEnumVariantId, LocalFieldId, LocalModuleId, Lookup, ModuleId, StructId, UnionId,
     VariantId,
 };
 use cfg::CfgOptions;

 /// Note that we use `StructData` for unions as well!
 #[derive(Debug, Clone, PartialEq, Eq)]
@@ -19,7 +19,7 @@ use la_arena::{Arena, ArenaMap};
 use limit::Limit;
 use profile::Count;
 use rustc_hash::FxHashMap;
-use syntax::{ast, AstPtr, SyntaxNodePtr};
+use syntax::{ast, AstPtr, SyntaxNode, SyntaxNodePtr};

 use crate::{
     attr::Attrs,

@@ -51,7 +51,8 @@ pub struct Expander {
     def_map: Arc<DefMap>,
     current_file_id: HirFileId,
     module: LocalModuleId,
-    recursion_limit: usize,
+    /// `recursion_depth == usize::MAX` indicates that the recursion limit has been reached.
+    recursion_depth: usize,
 }

 impl CfgExpander {

@@ -84,7 +85,7 @@ impl Expander {
             def_map,
             current_file_id,
             module: module.local_id,
-            recursion_limit: 0,
+            recursion_depth: 0,
         }
     }

@@ -93,31 +94,37 @@ impl Expander {
         db: &dyn DefDatabase,
         macro_call: ast::MacroCall,
     ) -> Result<ExpandResult<Option<(Mark, T)>>, UnresolvedMacro> {
-        if self.recursion_limit(db).check(self.recursion_limit + 1).is_err() {
-            cov_mark::hit!(your_stack_belongs_to_me);
-            return Ok(ExpandResult::only_err(ExpandError::Other(
-                "reached recursion limit during macro expansion".into(),
-            )));
-        }
-
-        let macro_call = InFile::new(self.current_file_id, &macro_call);
-
-        let resolver =
-            |path| self.resolve_path_as_macro(db, &path).map(|it| macro_id_to_def_id(db, it));
-
-        let mut err = None;
-        let call_id =
-            macro_call.as_call_id_with_errors(db, self.def_map.krate(), resolver, &mut |e| {
-                err.get_or_insert(e);
-            })?;
-        let call_id = match call_id {
-            Ok(it) => it,
-            Err(_) => {
-                return Ok(ExpandResult { value: None, err });
-            }
-        };
-
-        Ok(self.enter_expand_inner(db, call_id, err))
+        let mut unresolved_macro_err = None;
+
+        let result = self.within_limit(db, |this| {
+            let macro_call = InFile::new(this.current_file_id, &macro_call);
+
+            let resolver =
+                |path| this.resolve_path_as_macro(db, &path).map(|it| macro_id_to_def_id(db, it));
+
+            let mut err = None;
+            let call_id = match macro_call.as_call_id_with_errors(
+                db,
+                this.def_map.krate(),
+                resolver,
+                &mut |e| {
+                    err.get_or_insert(e);
+                },
+            ) {
+                Ok(call_id) => call_id,
+                Err(resolve_err) => {
+                    unresolved_macro_err = Some(resolve_err);
+                    return ExpandResult { value: None, err: None };
+                }
+            };
+            ExpandResult { value: call_id.ok(), err }
+        });
+
+        if let Some(err) = unresolved_macro_err {
+            Err(err)
+        } else {
+            Ok(result)
+        }
     }

     pub fn enter_expand_id<T: ast::AstNode>(

@@ -125,15 +132,14 @@ impl Expander {
         db: &dyn DefDatabase,
         call_id: MacroCallId,
     ) -> ExpandResult<Option<(Mark, T)>> {
-        self.enter_expand_inner(db, call_id, None)
+        self.within_limit(db, |_this| ExpandResult::ok(Some(call_id)))
     }

-    fn enter_expand_inner<T: ast::AstNode>(
-        &mut self,
+    fn enter_expand_inner(
         db: &dyn DefDatabase,
         call_id: MacroCallId,
         mut err: Option<ExpandError>,
-    ) -> ExpandResult<Option<(Mark, T)>> {
+    ) -> ExpandResult<Option<(HirFileId, SyntaxNode)>> {
         if err.is_none() {
             err = db.macro_expand_error(call_id);
         }

@@ -154,29 +160,21 @@ impl Expander {
             }
         };

-        let node = match T::cast(raw_node) {
-            Some(it) => it,
-            None => {
-                // This can happen without being an error, so only forward previous errors.
-                return ExpandResult { value: None, err };
-            }
-        };
-
-        tracing::debug!("macro expansion {:#?}", node.syntax());
-
-        self.recursion_limit += 1;
-        let mark =
-            Mark { file_id: self.current_file_id, bomb: DropBomb::new("expansion mark dropped") };
-        self.cfg_expander.hygiene = Hygiene::new(db.upcast(), file_id);
-        self.current_file_id = file_id;
-
-        ExpandResult { value: Some((mark, node)), err }
+        ExpandResult { value: Some((file_id, raw_node)), err }
     }

     pub fn exit(&mut self, db: &dyn DefDatabase, mut mark: Mark) {
         self.cfg_expander.hygiene = Hygiene::new(db.upcast(), mark.file_id);
         self.current_file_id = mark.file_id;
-        self.recursion_limit -= 1;
+        if self.recursion_depth == usize::MAX {
+            // Recursion limit has been reached somewhere in the macro expansion tree. Reset the
+            // depth only when we get out of the tree.
+            if !self.current_file_id.is_macro() {
+                self.recursion_depth = 0;
+            }
+        } else {
+            self.recursion_depth -= 1;
+        }
         mark.bomb.defuse();
     }

@@ -215,6 +213,50 @@ impl Expander {
         #[cfg(test)]
         return Limit::new(std::cmp::min(32, limit));
     }

+    fn within_limit<F, T: ast::AstNode>(
+        &mut self,
+        db: &dyn DefDatabase,
+        op: F,
+    ) -> ExpandResult<Option<(Mark, T)>>
+    where
+        F: FnOnce(&mut Self) -> ExpandResult<Option<MacroCallId>>,
+    {
+        if self.recursion_depth == usize::MAX {
+            // Recursion limit has been reached somewhere in the macro expansion tree. We should
+            // stop expanding other macro calls in this tree, or else this may result in
+            // exponential number of macro expansions, leading to a hang.
+            //
+            // The overflow error should have been reported when it occurred (see the next branch),
+            // so don't return overflow error here to avoid diagnostics duplication.
+            cov_mark::hit!(overflow_but_not_me);
+            return ExpandResult::only_err(ExpandError::RecursionOverflowPosioned);
+        } else if self.recursion_limit(db).check(self.recursion_depth + 1).is_err() {
+            self.recursion_depth = usize::MAX;
+            cov_mark::hit!(your_stack_belongs_to_me);
+            return ExpandResult::only_err(ExpandError::Other(
+                "reached recursion limit during macro expansion".into(),
+            ));
+        }
+
+        let ExpandResult { value, err } = op(self);
+        let Some(call_id) = value else {
+            return ExpandResult { value: None, err };
+        };
+
+        Self::enter_expand_inner(db, call_id, err).map(|value| {
+            value.and_then(|(new_file_id, node)| {
+                let node = T::cast(node)?;
+
+                self.recursion_depth += 1;
+                self.cfg_expander.hygiene = Hygiene::new(db.upcast(), new_file_id);
+                let old_file_id = std::mem::replace(&mut self.current_file_id, new_file_id);
+                let mark =
+                    Mark { file_id: old_file_id, bomb: DropBomb::new("expansion mark dropped") };
+                Some((mark, node))
+            })
+        })
+    }
 }

 #[derive(Debug)]
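Aside: the `usize::MAX` "poisoning" above is the heart of this change. Once one expansion overflows, every other pending macro call in the same tree is cut off immediately instead of being re-expanded toward the limit again. A minimal standalone sketch of the pattern, with illustrative names and a fixed limit in place of rust-analyzer's query-driven `Limit`:

struct DepthGuard {
    depth: usize, // usize::MAX = poisoned: some expansion in this tree already overflowed
}

enum Overflow {
    Hit,      // this call crossed the limit; report a diagnostic
    Poisoned, // an earlier call crossed it; stay silent to avoid duplicate diagnostics
}

impl DepthGuard {
    const LIMIT: usize = 128; // assumed fixed limit for the sketch

    fn enter(&mut self) -> Result<(), Overflow> {
        if self.depth == usize::MAX {
            return Err(Overflow::Poisoned);
        }
        if self.depth + 1 > Self::LIMIT {
            self.depth = usize::MAX; // poison the whole tree
            return Err(Overflow::Hit);
        }
        self.depth += 1;
        Ok(())
    }

    fn exit(&mut self, left_macro_tree: bool) {
        if self.depth == usize::MAX {
            // reset the poison only once we are fully out of the macro tree
            if left_macro_tree {
                self.depth = 0;
            }
        } else {
            self.depth -= 1;
        }
    }
}

fn main() {
    let mut guard = DepthGuard { depth: 0 };
    assert!(guard.enter().is_ok());
    guard.exit(false);
}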
@@ -624,6 +624,10 @@ impl ExprCollector<'_> {
                     krate: *krate,
                 });
             }
+            Some(ExpandError::RecursionOverflowPosioned) => {
+                // Recursion limit has been reached in the macro expansion tree, but not in
+                // this very macro call. Don't add diagnostics to avoid duplication.
+            }
             Some(err) => {
                 self.source_map.diagnostics.push(BodyDiagnostic::MacroError {
                     node: InFile::new(outer_file, syntax_ptr),

@@ -636,6 +640,8 @@ impl ExprCollector<'_> {

         match res.value {
             Some((mark, expansion)) => {
+                // Keep collecting even with expansion errors so we can provide completions and
+                // other services in incomplete macro expressions.
                 self.source_map.expansions.insert(macro_call_ptr, self.expander.current_file_id);
                 let prev_ast_id_map = mem::replace(
                     &mut self.ast_id_map,
@@ -61,6 +61,19 @@ fn main() { n_nuple!(1,2,3); }
     );
 }

+#[test]
+fn your_stack_belongs_to_me2() {
+    cov_mark::check!(overflow_but_not_me);
+    lower(
+        r#"
+macro_rules! foo {
+    () => {{ foo!(); foo!(); }}
+}
+fn main() { foo!(); }
+"#,
+    );
+}
+
 #[test]
 fn recursion_limit() {
     cov_mark::check!(your_stack_belongs_to_me);
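The new your_stack_belongs_to_me2 fixture is exactly the shape that motivates the poison check: each foo!() expands to two more foo!() calls, so the expansion tree doubles at every level. Rough arithmetic, assuming the test-capped limit of 32 from the hunk above:

fn main() {
    let calls_at_depth = |d: u32| 2u64.pow(d);
    assert_eq!(calls_at_depth(4), 16);
    // Without poisoning, sibling calls keep expanding to the full depth,
    // so the tree at the capped limit already has about 4 billion nodes:
    assert_eq!(calls_at_depth(32), 4_294_967_296);
}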
@@ -659,15 +659,16 @@ fn desugar_future_path(orig: TypeRef) -> Path {
     let path = path![core::future::Future];
     let mut generic_args: Vec<_> =
         std::iter::repeat(None).take(path.segments().len() - 1).collect();
-    let mut last = GenericArgs::empty();
     let binding = AssociatedTypeBinding {
         name: name![Output],
         args: None,
         type_ref: Some(orig),
-        bounds: Vec::new(),
+        bounds: Box::default(),
     };
-    last.bindings.push(binding);
-    generic_args.push(Some(Interned::new(last)));
+    generic_args.push(Some(Interned::new(GenericArgs {
+        bindings: Box::new([binding]),
+        ..GenericArgs::empty()
+    })));

     Path::from_known_path(path, generic_args)
 }
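The rewrite replaces the mutate-then-push dance with a single functional-update expression. A tiny sketch of the same idiom with a simplified stand-in type (the real GenericArgs uses GenericArgs::empty() rather than Default):

#[derive(Default)]
struct Args {
    args: Box<[u32]>,
    has_self_type: bool,
    bindings: Box<[&'static str]>,
}

fn main() {
    // set only the field we care about; everything else stays at its empty default
    let last = Args { bindings: Box::new(["Output"]), ..Args::default() };
    assert_eq!(last.bindings.len(), 1);
    assert_eq!(last.bindings[0], "Output");
    assert!(last.args.is_empty() && !last.has_self_type);
}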
@@ -1476,7 +1476,7 @@ macro_rules! m {
 /* parse error: expected identifier */
 /* parse error: expected SEMICOLON */
 /* parse error: expected SEMICOLON */
-/* parse error: expected expression */
+/* parse error: expected expression, item or let statement */
 fn f() {
     K::(C("0"));
 }
@@ -830,8 +830,7 @@ macro_rules! rgb_color {
 /* parse error: expected COMMA */
 /* parse error: expected R_ANGLE */
 /* parse error: expected SEMICOLON */
-/* parse error: expected SEMICOLON */
-/* parse error: expected expression */
+/* parse error: expected expression, item or let statement */
 pub fn new() {
     let _ = 0as u32<<(8+8);
 }

@@ -848,21 +847,21 @@ pub fn new() {
 // BLOCK_EXPR@10..31
 // STMT_LIST@10..31
 // L_CURLY@10..11 "{"
-// LET_STMT@11..27
+// LET_STMT@11..28
 // LET_KW@11..14 "let"
 // WILDCARD_PAT@14..15
 // UNDERSCORE@14..15 "_"
 // EQ@15..16 "="
-// CAST_EXPR@16..27
+// CAST_EXPR@16..28
 // LITERAL@16..17
 // INT_NUMBER@16..17 "0"
 // AS_KW@17..19 "as"
-// PATH_TYPE@19..27
-// PATH@19..27
-// PATH_SEGMENT@19..27
+// PATH_TYPE@19..28
+// PATH@19..28
+// PATH_SEGMENT@19..28
 // NAME_REF@19..22
 // IDENT@19..22 "u32"
-// GENERIC_ARG_LIST@22..27
+// GENERIC_ARG_LIST@22..28
 // L_ANGLE@22..23 "<"
 // TYPE_ARG@23..27
 // DYN_TRAIT_TYPE@23..27

@@ -877,9 +876,9 @@ pub fn new() {
 // ERROR@25..26
 // INT_NUMBER@25..26 "8"
 // PLUS@26..27 "+"
-// EXPR_STMT@27..28
-// LITERAL@27..28
-// INT_NUMBER@27..28 "8"
+// CONST_ARG@27..28
+// LITERAL@27..28
+// INT_NUMBER@27..28 "8"
 // ERROR@28..29
 // R_PAREN@28..29 ")"
 // SEMICOLON@29..30 ";"
@@ -38,18 +38,18 @@ impl Display for ImportAlias {
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct Path {
     /// Type based path like `<T>::foo`.
-    /// Note that paths like `<Type as Trait>::foo` are desugard to `Trait::<Self=Type>::foo`.
+    /// Note that paths like `<Type as Trait>::foo` are desugared to `Trait::<Self=Type>::foo`.
     type_anchor: Option<Interned<TypeRef>>,
     mod_path: Interned<ModPath>,
-    /// Invariant: the same len as `self.mod_path.segments`
-    generic_args: Box<[Option<Interned<GenericArgs>>]>,
+    /// Invariant: the same len as `self.mod_path.segments` or `None` if all segments are `None`.
+    generic_args: Option<Box<[Option<Interned<GenericArgs>>]>>,
 }

 /// Generic arguments to a path segment (e.g. the `i32` in `Option<i32>`). This
 /// also includes bindings of associated types, like in `Iterator<Item = Foo>`.
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct GenericArgs {
-    pub args: Vec<GenericArg>,
+    pub args: Box<[GenericArg]>,
     /// This specifies whether the args contain a Self type as the first
     /// element. This is the case for path segments like `<T as Trait>`, where
     /// `T` is actually a type parameter for the path `Trait` specifying the

@@ -57,7 +57,7 @@ pub struct GenericArgs {
     /// is left out.
     pub has_self_type: bool,
     /// Associated type bindings like in `Iterator<Item = T>`.
-    pub bindings: Vec<AssociatedTypeBinding>,
+    pub bindings: Box<[AssociatedTypeBinding]>,
     /// Whether these generic args were desugared from `Trait(Arg) -> Output`
     /// parenthesis notation typically used for the `Fn` traits.
     pub desugared_from_fn: bool,

@@ -77,7 +77,7 @@ pub struct AssociatedTypeBinding {
     /// Bounds for the associated type, like in `Iterator<Item:
     /// SomeOtherTrait>`. (This is the unstable `associated_type_bounds`
     /// feature.)
-    pub bounds: Vec<Interned<TypeBound>>,
+    pub bounds: Box<[Interned<TypeBound>]>,
 }

 /// A single generic argument.

@@ -102,7 +102,7 @@ impl Path {
     ) -> Path {
         let generic_args = generic_args.into();
         assert_eq!(path.len(), generic_args.len());
-        Path { type_anchor: None, mod_path: Interned::new(path), generic_args }
+        Path { type_anchor: None, mod_path: Interned::new(path), generic_args: Some(generic_args) }
     }

     pub fn kind(&self) -> &PathKind {

@@ -114,7 +114,14 @@ impl Path {
     }

     pub fn segments(&self) -> PathSegments<'_> {
-        PathSegments { segments: self.mod_path.segments(), generic_args: &self.generic_args }
+        let s = PathSegments {
+            segments: self.mod_path.segments(),
+            generic_args: self.generic_args.as_deref(),
+        };
+        if let Some(generic_args) = s.generic_args {
+            assert_eq!(s.segments.len(), generic_args.len());
+        }
+        s
     }

     pub fn mod_path(&self) -> &ModPath {

@@ -131,13 +138,15 @@ impl Path {
                 self.mod_path.kind,
                 self.mod_path.segments()[..self.mod_path.segments().len() - 1].iter().cloned(),
             )),
-            generic_args: self.generic_args[..self.generic_args.len() - 1].to_vec().into(),
+            generic_args: self.generic_args.as_ref().map(|it| it[..it.len() - 1].to_vec().into()),
         };
         Some(res)
     }

     pub fn is_self_type(&self) -> bool {
-        self.type_anchor.is_none() && *self.generic_args == [None] && self.mod_path.is_Self()
+        self.type_anchor.is_none()
+            && self.generic_args.as_deref().is_none()
+            && self.mod_path.is_Self()
     }
 }

@@ -149,11 +158,11 @@ pub struct PathSegment<'a> {

 pub struct PathSegments<'a> {
     segments: &'a [Name],
-    generic_args: &'a [Option<Interned<GenericArgs>>],
+    generic_args: Option<&'a [Option<Interned<GenericArgs>>]>,
 }

 impl<'a> PathSegments<'a> {
-    pub const EMPTY: PathSegments<'static> = PathSegments { segments: &[], generic_args: &[] };
+    pub const EMPTY: PathSegments<'static> = PathSegments { segments: &[], generic_args: None };
     pub fn is_empty(&self) -> bool {
         self.len() == 0
     }

@@ -167,26 +176,29 @@ impl<'a> PathSegments<'a> {
         self.get(self.len().checked_sub(1)?)
     }
     pub fn get(&self, idx: usize) -> Option<PathSegment<'a>> {
-        assert_eq!(self.segments.len(), self.generic_args.len());
         let res = PathSegment {
             name: self.segments.get(idx)?,
-            args_and_bindings: self.generic_args.get(idx).unwrap().as_ref().map(|it| &**it),
+            args_and_bindings: self.generic_args.and_then(|it| it.get(idx)?.as_deref()),
         };
         Some(res)
     }
     pub fn skip(&self, len: usize) -> PathSegments<'a> {
-        assert_eq!(self.segments.len(), self.generic_args.len());
-        PathSegments { segments: &self.segments[len..], generic_args: &self.generic_args[len..] }
+        PathSegments {
+            segments: &self.segments.get(len..).unwrap_or(&[]),
+            generic_args: self.generic_args.and_then(|it| it.get(len..)),
+        }
     }
     pub fn take(&self, len: usize) -> PathSegments<'a> {
-        assert_eq!(self.segments.len(), self.generic_args.len());
-        PathSegments { segments: &self.segments[..len], generic_args: &self.generic_args[..len] }
+        PathSegments {
+            segments: &self.segments.get(..len).unwrap_or(&self.segments),
+            generic_args: self.generic_args.map(|it| it.get(..len).unwrap_or(it)),
+        }
     }
     pub fn iter(&self) -> impl Iterator<Item = PathSegment<'a>> {
-        self.segments.iter().zip(self.generic_args.iter()).map(|(name, args)| PathSegment {
-            name,
-            args_and_bindings: args.as_ref().map(|it| &**it),
-        })
+        self.segments
+            .iter()
+            .zip(self.generic_args.into_iter().flatten().chain(iter::repeat(&None)))
+            .map(|(name, args)| PathSegment { name, args_and_bindings: args.as_deref() })
     }
 }

@@ -200,9 +212,9 @@ impl GenericArgs {

     pub(crate) fn empty() -> GenericArgs {
         GenericArgs {
-            args: Vec::new(),
+            args: Box::default(),
             has_self_type: false,
-            bindings: Vec::new(),
+            bindings: Box::default(),
             desugared_from_fn: false,
         }
     }

@@ -213,7 +225,7 @@ impl From<Name> for Path {
         Path {
             type_anchor: None,
             mod_path: Interned::new(ModPath::from_segments(PathKind::Plain, iter::once(name))),
-            generic_args: Box::new([None]),
+            generic_args: None,
         }
     }
 }
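The representational point of this file's changes: most paths carry no generic arguments at all, so Path now stores None instead of a heap-allocated slice of Nones, and the Vec fields of GenericArgs become boxed slices (pointer plus length, no spare capacity). The Option wrapper is free on typical 64-bit targets thanks to the null-pointer niche. A quick check of both claims, using a simplified element type:

use std::mem::size_of;

fn main() {
    type Args = Box<[Option<u64>]>; // stand-in for Box<[Option<Interned<GenericArgs>>]>
    // Option adds no size: None is encoded in the pointer's null niche.
    assert_eq!(size_of::<Option<Args>>(), size_of::<Args>());
    // And a boxed slice is two words, versus three for a Vec.
    assert_eq!(size_of::<Args>(), 2 * size_of::<usize>());
    assert_eq!(size_of::<Vec<Option<u64>>>(), 3 * size_of::<usize>());
}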
@@ -1,5 +1,7 @@
 //! Transforms syntax into `Path` objects, ideally with accounting for hygiene

+use std::iter;
+
 use crate::type_ref::ConstScalarOrPath;

 use either::Either;

@@ -45,8 +47,11 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path
                     )
                 })
                 .map(Interned::new);
+                if let Some(_) = args {
+                    generic_args.resize(segments.len(), None);
+                    generic_args.push(args);
+                }
                 segments.push(name);
-                generic_args.push(args)
             }
             Either::Right(crate_id) => {
                 kind = PathKind::DollarCrate(crate_id);

@@ -56,7 +61,6 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path
             }
             ast::PathSegmentKind::SelfTypeKw => {
                 segments.push(name![Self]);
-                generic_args.push(None)
             }
             ast::PathSegmentKind::Type { type_ref, trait_ref } => {
                 assert!(path.qualifier().is_none()); // this can only occur at the first segment

@@ -77,18 +81,33 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path
                     kind = mod_path.kind;

                     segments.extend(mod_path.segments().iter().cloned().rev());
-                    generic_args.extend(Vec::from(path_generic_args).into_iter().rev());
+                    if let Some(path_generic_args) = path_generic_args {
+                        generic_args.resize(segments.len() - num_segments, None);
+                        generic_args.extend(Vec::from(path_generic_args).into_iter().rev());
+                    } else {
+                        generic_args.resize(segments.len(), None);
+                    }
+
+                    let self_type = GenericArg::Type(self_type);

                     // Insert the type reference (T in the above example) as Self parameter for the trait
-                    let last_segment =
-                        generic_args.iter_mut().rev().nth(num_segments.saturating_sub(1))?;
-                    let mut args_inner = match last_segment {
-                        Some(it) => it.as_ref().clone(),
-                        None => GenericArgs::empty(),
-                    };
-                    args_inner.has_self_type = true;
-                    args_inner.args.insert(0, GenericArg::Type(self_type));
-                    *last_segment = Some(Interned::new(args_inner));
+                    let last_segment = generic_args.get_mut(segments.len() - num_segments)?;
+                    *last_segment = Some(Interned::new(match last_segment.take() {
+                        Some(it) => GenericArgs {
+                            args: iter::once(self_type)
+                                .chain(it.args.iter().cloned())
+                                .collect(),
+                            has_self_type: true,
+                            bindings: it.bindings.clone(),
+                            desugared_from_fn: it.desugared_from_fn,
+                        },
+                        None => GenericArgs {
+                            args: Box::new([self_type]),
+                            has_self_type: true,
+                            ..GenericArgs::empty()
+                        },
+                    }));
                 }
             }
         }

@@ -115,7 +134,10 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path
         };
     }
     segments.reverse();
-    generic_args.reverse();
+    if !generic_args.is_empty() {
+        generic_args.resize(segments.len(), None);
+        generic_args.reverse();
+    }

     if segments.is_empty() && kind == PathKind::Plain && type_anchor.is_none() {
         // plain empty paths don't exist, this means we got a single `self` segment as our path

@@ -135,7 +157,11 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path
     }

     let mod_path = Interned::new(ModPath::from_segments(kind, segments));
-    return Some(Path { type_anchor, mod_path, generic_args: generic_args.into() });
+    return Some(Path {
+        type_anchor,
+        mod_path,
+        generic_args: if generic_args.is_empty() { None } else { Some(generic_args.into()) },
+    });

     fn qualifier(path: &ast::Path) -> Option<ast::Path> {
         if let Some(q) = path.qualifier() {

@@ -174,7 +200,7 @@ pub(super) fn lower_generic_args(
                     .map(|it| Interned::new(TypeBound::from_ast(lower_ctx, it)))
                     .collect()
             } else {
-                Vec::new()
+                Box::default()
             };
             bindings.push(AssociatedTypeBinding { name, args, type_ref, bounds });
         }

@@ -195,7 +221,12 @@ pub(super) fn lower_generic_args(
     if args.is_empty() && bindings.is_empty() {
         return None;
     }
-    Some(GenericArgs { args, has_self_type: false, bindings, desugared_from_fn: false })
+    Some(GenericArgs {
+        args: args.into_boxed_slice(),
+        has_self_type: false,
+        bindings: bindings.into_boxed_slice(),
+        desugared_from_fn: false,
+    })
 }

 /// Collect `GenericArgs` from the parts of a fn-like path, i.e. `Fn(X, Y)

@@ -205,33 +236,30 @@ fn lower_generic_args_from_fn_path(
     params: Option<ast::ParamList>,
     ret_type: Option<ast::RetType>,
 ) -> Option<GenericArgs> {
-    let mut args = Vec::new();
-    let mut bindings = Vec::new();
     let params = params?;
     let mut param_types = Vec::new();
     for param in params.params() {
         let type_ref = TypeRef::from_ast_opt(ctx, param.ty());
         param_types.push(type_ref);
     }
-    let arg = GenericArg::Type(TypeRef::Tuple(param_types));
-    args.push(arg);
-    if let Some(ret_type) = ret_type {
+    let args = Box::new([GenericArg::Type(TypeRef::Tuple(param_types))]);
+    let bindings = if let Some(ret_type) = ret_type {
         let type_ref = TypeRef::from_ast_opt(ctx, ret_type.ty());
-        bindings.push(AssociatedTypeBinding {
+        Box::new([AssociatedTypeBinding {
             name: name![Output],
             args: None,
             type_ref: Some(type_ref),
-            bounds: Vec::new(),
-        });
+            bounds: Box::default(),
+        }])
     } else {
         // -> ()
         let type_ref = TypeRef::Tuple(Vec::new());
-        bindings.push(AssociatedTypeBinding {
+        Box::new([AssociatedTypeBinding {
             name: name![Output],
             args: None,
             type_ref: Some(type_ref),
-            bounds: Vec::new(),
-        });
-    }
+            bounds: Box::default(),
+        }])
+    };
     Some(GenericArgs { args, has_self_type: false, bindings, desugared_from_fn: true })
 }
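A related micro-optimization runs through the lowering above: generic_args stays completely empty while every segment is plain, and is padded with Nones only once a segment actually carries arguments (the final resize/reverse then squares things up). A self-contained sketch of that lazy-fill idea, with simplified types:

fn push_segment(
    segments: &mut Vec<&'static str>,
    generic_args: &mut Vec<Option<String>>,
    name: &'static str,
    args: Option<String>,
) {
    if args.is_some() {
        // pad entries for the earlier argument-less segments, then record this one
        generic_args.resize(segments.len(), None);
        generic_args.push(args);
    }
    segments.push(name);
}

fn main() {
    let (mut segs, mut args) = (Vec::new(), Vec::new());
    push_segment(&mut segs, &mut args, "std", None);
    push_segment(&mut segs, &mut args, "vec", None);
    assert!(args.is_empty()); // plain path: no per-segment bookkeeping at all
    push_segment(&mut segs, &mut args, "Vec", Some("<u8>".to_owned()));
    assert_eq!(args, [None, None, Some("<u8>".to_owned())]);
}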
@@ -71,7 +71,7 @@ pub(crate) fn print_generic_args(generics: &GenericArgs, buf: &mut dyn Write) ->
         first = false;
         print_generic_arg(arg, buf)?;
     }
-    for binding in &generics.bindings {
+    for binding in generics.bindings.iter() {
         if !first {
             write!(buf, ", ")?;
         }
@@ -292,7 +292,7 @@ impl TypeRef {
             }
             for segment in path.segments().iter() {
                 if let Some(args_and_bindings) = segment.args_and_bindings {
-                    for arg in &args_and_bindings.args {
+                    for arg in args_and_bindings.args.iter() {
                         match arg {
                             crate::path::GenericArg::Type(type_ref) => {
                                 go(type_ref, f);

@@ -301,11 +301,11 @@ impl TypeRef {
                             | crate::path::GenericArg::Lifetime(_) => {}
                         }
                     }
-                    for binding in &args_and_bindings.bindings {
+                    for binding in args_and_bindings.bindings.iter() {
                         if let Some(type_ref) = &binding.type_ref {
                             go(type_ref, f);
                         }
-                        for bound in &binding.bounds {
+                        for bound in binding.bounds.iter() {
                             match bound.as_ref() {
                                 TypeBound::Path(path, _) | TypeBound::ForLifetime(_, path) => {
                                     go_path(path, f)
@@ -21,7 +21,7 @@ itertools = "0.10.5"
 hashbrown = { version = "0.12.1", features = [
     "inline-more",
 ], default-features = false }
-smallvec = { version = "1.10.0", features = ["const_new"] }
+smallvec.workspace = true

 # local deps
 stdx.workspace = true
@@ -55,6 +55,7 @@ pub type ExpandResult<T> = ValueResult<T, ExpandError>;
 pub enum ExpandError {
     UnresolvedProcMacro(CrateId),
     Mbe(mbe::ExpandError),
+    RecursionOverflowPosioned,
     Other(Box<str>),
 }

@@ -69,6 +70,9 @@ impl fmt::Display for ExpandError {
         match self {
             ExpandError::UnresolvedProcMacro(_) => f.write_str("unresolved proc-macro"),
             ExpandError::Mbe(it) => it.fmt(f),
+            ExpandError::RecursionOverflowPosioned => {
+                f.write_str("overflow expanding the original macro")
+            }
             ExpandError::Other(it) => f.write_str(it),
         }
     }
@@ -16,7 +16,7 @@ cov-mark = "2.0.0-pre.1"
 itertools = "0.10.5"
 arrayvec = "0.7.2"
 bitflags = "1.3.2"
-smallvec = "1.10.0"
+smallvec.workspace = true
 ena = "0.14.0"
 tracing = "0.1.35"
 rustc-hash = "1.1.0"
@@ -1419,7 +1419,7 @@ impl HirDisplay for Path {

         write!(f, "<")?;
         let mut first = true;
-        for arg in &generic_args.args {
+        for arg in generic_args.args.iter() {
             if first {
                 first = false;
                 if generic_args.has_self_type {

@@ -1431,7 +1431,7 @@ impl HirDisplay for Path {
             }
             arg.hir_fmt(f)?;
         }
-        for binding in &generic_args.bindings {
+        for binding in generic_args.bindings.iter() {
             if first {
                 first = false;
             } else {

@@ -1445,7 +1445,7 @@ impl HirDisplay for Path {
                 }
                 None => {
                     write!(f, ": ")?;
-                    f.write_joined(&binding.bounds, " + ")?;
+                    f.write_joined(binding.bounds.iter(), " + ")?;
                 }
             }
         }
@@ -1025,7 +1025,7 @@ impl<'a> TyLoweringContext<'a> {
         last_segment
             .into_iter()
             .filter_map(|segment| segment.args_and_bindings)
-            .flat_map(|args_and_bindings| &args_and_bindings.bindings)
+            .flat_map(|args_and_bindings| args_and_bindings.bindings.iter())
             .flat_map(move |binding| {
                 let found = associated_type_by_name_including_super_traits(
                     self.db,

@@ -1068,7 +1068,7 @@ impl<'a> TyLoweringContext<'a> {
                     AliasEq { alias: AliasTy::Projection(projection_ty.clone()), ty };
                 preds.push(crate::wrap_empty_binders(WhereClause::AliasEq(alias_eq)));
             }
-            for bound in &binding.bounds {
+            for bound in binding.bounds.iter() {
                 preds.extend(self.lower_type_bound(
                     bound,
                     TyKind::Alias(AliasTy::Projection(projection_ty.clone())).intern(Interner),
@@ -16,7 +16,7 @@ rustc-hash = "1.1.0"
 either = "1.7.0"
 arrayvec = "0.7.2"
 itertools = "0.10.5"
-smallvec = "1.10.0"
+smallvec.workspace = true
 once_cell = "1.17.0"

 # local deps
@@ -16,7 +16,7 @@ cov-mark = "2.0.0-pre.1"

 itertools = "0.10.5"
 either = "1.7.0"
-smallvec = "1.10.0"
+smallvec.workspace = true

 # local deps
 stdx.workspace = true
@@ -180,7 +180,9 @@ pub(crate) fn generate_getter_impl(

         // Insert `$0` only for last getter we generate
         if i == record_fields_count - 1 {
-            getter_buf = getter_buf.replacen("fn ", "fn $0", 1);
+            if ctx.config.snippet_cap.is_some() {
+                getter_buf = getter_buf.replacen("fn ", "fn $0", 1);
+            }
         }

         // For first element we do not merge with '\n', as

@@ -330,7 +332,7 @@ fn parse_record_field(record_field: ast::RecordField, mutable: bool) -> Option<R

 #[cfg(test)]
 mod tests {
-    use crate::tests::{check_assist, check_assist_not_applicable};
+    use crate::tests::{check_assist, check_assist_no_snippet_cap, check_assist_not_applicable};

     use super::*;

@@ -377,6 +379,49 @@ impl Context {
         );
     }

+    #[test]
+    fn test_generate_getter_from_field_no_snippet_cap() {
+        check_assist_no_snippet_cap(
+            generate_getter,
+            r#"
+struct Context {
+    dat$0a: Data,
+}
+"#,
+            r#"
+struct Context {
+    data: Data,
+}
+
+impl Context {
+    fn data(&self) -> &Data {
+        &self.data
+    }
+}
+"#,
+        );
+
+        check_assist_no_snippet_cap(
+            generate_getter_mut,
+            r#"
+struct Context {
+    dat$0a: Data,
+}
+"#,
+            r#"
+struct Context {
+    data: Data,
+}
+
+impl Context {
+    fn data_mut(&mut self) -> &mut Data {
+        &mut self.data
+    }
+}
+"#,
+        );
+    }
+
     #[test]
     fn test_generate_getter_already_implemented() {
         check_assist_not_applicable(

@@ -433,6 +478,29 @@ impl Context {
         );
     }

+    #[test]
+    fn test_generate_getter_from_field_with_visibility_marker_no_snippet_cap() {
+        check_assist_no_snippet_cap(
+            generate_getter,
+            r#"
+pub(crate) struct Context {
+    dat$0a: Data,
+}
+"#,
+            r#"
+pub(crate) struct Context {
+    data: Data,
+}
+
+impl Context {
+    pub(crate) fn data(&self) -> &Data {
+        &self.data
+    }
+}
+"#,
+        );
+    }
+
     #[test]
     fn test_multiple_generate_getter() {
         check_assist(

@@ -468,6 +536,41 @@ impl Context {
         );
     }

+    #[test]
+    fn test_multiple_generate_getter_no_snippet_cap() {
+        check_assist_no_snippet_cap(
+            generate_getter,
+            r#"
+struct Context {
+    data: Data,
+    cou$0nt: usize,
+}
+
+impl Context {
+    fn data(&self) -> &Data {
+        &self.data
+    }
+}
+"#,
+            r#"
+struct Context {
+    data: Data,
+    count: usize,
+}
+
+impl Context {
+    fn data(&self) -> &Data {
+        &self.data
+    }
+
+    fn count(&self) -> &usize {
+        &self.count
+    }
+}
+"#,
+        );
+    }
+
     #[test]
     fn test_not_a_special_case() {
         cov_mark::check_count!(convert_reference_type, 0);
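The assist change above is a capability gate: $0 is an LSP snippet tab stop, and emitting it to a client that never advertised snippet support would leak a literal "$0" into the buffer. A sketch of the gating, with SnippetCap reduced to a plain proof token (the real one lives in ide-db and can only be constructed when the client opts in):

struct SnippetCap(()); // can only exist if the editor supports snippets

struct Config {
    snippet_cap: Option<SnippetCap>,
}

fn render_getter(config: &Config, is_last: bool) -> String {
    let mut buf = String::from("fn data(&self) -> &Data { &self.data }");
    // Insert the tab stop only for the last getter, and only when allowed.
    if is_last && config.snippet_cap.is_some() {
        buf = buf.replacen("fn ", "fn $0", 1);
    }
    buf
}

fn main() {
    let plain = Config { snippet_cap: None };
    assert!(!render_getter(&plain, true).contains("$0"));
    let snippets = Config { snippet_cap: Some(SnippetCap(())) };
    assert!(render_getter(&snippets, true).contains("$0"));
}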
@@ -33,6 +33,20 @@ pub(crate) const TEST_CONFIG: AssistConfig = AssistConfig {
     assist_emit_must_use: false,
 };

+pub(crate) const TEST_CONFIG_NO_SNIPPET_CAP: AssistConfig = AssistConfig {
+    snippet_cap: None,
+    allowed: None,
+    insert_use: InsertUseConfig {
+        granularity: ImportGranularity::Crate,
+        prefix_kind: hir::PrefixKind::Plain,
+        enforce_granularity: true,
+        group: true,
+        skip_glob_imports: true,
+    },
+    prefer_no_std: false,
+    assist_emit_must_use: false,
+};
+
 pub(crate) fn with_single_file(text: &str) -> (RootDatabase, FileId) {
     RootDatabase::with_single_file(text)
 }

@@ -43,6 +57,22 @@ pub(crate) fn check_assist(assist: Handler, ra_fixture_before: &str, ra_fixture_
     check(assist, ra_fixture_before, ExpectedResult::After(&ra_fixture_after), None);
 }

+#[track_caller]
+pub(crate) fn check_assist_no_snippet_cap(
+    assist: Handler,
+    ra_fixture_before: &str,
+    ra_fixture_after: &str,
+) {
+    let ra_fixture_after = trim_indent(ra_fixture_after);
+    check_with_config(
+        TEST_CONFIG_NO_SNIPPET_CAP,
+        assist,
+        ra_fixture_before,
+        ExpectedResult::After(&ra_fixture_after),
+        None,
+    );
+}
+
 // There is no way to choose what assist within a group you want to test against,
 // so this is here to allow you choose.
 pub(crate) fn check_assist_by_label(

@@ -119,6 +149,17 @@ enum ExpectedResult<'a> {

 #[track_caller]
 fn check(handler: Handler, before: &str, expected: ExpectedResult<'_>, assist_label: Option<&str>) {
+    check_with_config(TEST_CONFIG, handler, before, expected, assist_label);
+}
+
+#[track_caller]
+fn check_with_config(
+    config: AssistConfig,
+    handler: Handler,
+    before: &str,
+    expected: ExpectedResult<'_>,
+    assist_label: Option<&str>,
+) {
     let (mut db, file_with_caret_id, range_or_offset) = RootDatabase::with_range_or_offset(before);
     db.set_enable_proc_attr_macros(true);
     let text_without_caret = db.file_text(file_with_caret_id).to_string();

@@ -126,7 +167,6 @@ fn check(handler: Handler, before: &str, expected: ExpectedResult<'_>, assist_la
     let frange = FileRange { file_id: file_with_caret_id, range: range_or_offset.into() };

     let sema = Semantics::new(&db);
-    let config = TEST_CONFIG;
     let ctx = AssistContext::new(sema, &config, frange);
     let resolve = match expected {
         ExpectedResult::Unresolved => AssistResolveStrategy::None,
@@ -16,7 +16,7 @@ cov-mark = "2.0.0-pre.1"
 itertools = "0.10.5"

 once_cell = "1.17.0"
-smallvec = "1.10.0"
+smallvec.workspace = true


 # local deps
@@ -747,4 +747,16 @@ fn main() {
 "#,
         );
     }
+
+    #[test]
+    fn no_postfix_completions_in_if_block_that_has_an_else() {
+        check(
+            r#"
+fn test() {
+    if true {}.$0 else {}
+}
+"#,
+            expect![[r#""#]],
+        );
+    }
 }
@@ -571,28 +571,25 @@ impl<'a> CompletionContext<'a> {

         // try to skip completions on path with invalid colons
         // this approach works in normal path and inside token tree
-        match original_token.kind() {
-            T![:] => {
-                // return if no prev token before colon
-                let prev_token = original_token.prev_token()?;
-
-                // only has a single colon
-                if prev_token.kind() != T![:] {
-                    return None;
-                }
-
-                // has 3 colon or 2 coloncolon in a row
-                // special casing this as per discussion in https://github.com/rust-lang/rust-analyzer/pull/13611#discussion_r1031845205
-                // and https://github.com/rust-lang/rust-analyzer/pull/13611#discussion_r1032812751
-                if prev_token
-                    .prev_token()
-                    .map(|t| t.kind() == T![:] || t.kind() == T![::])
-                    .unwrap_or(false)
-                {
-                    return None;
-                }
-            }
-            _ => {}
+        if original_token.kind() == T![:] {
+            // return if no prev token before colon
+            let prev_token = original_token.prev_token()?;
+
+            // only has a single colon
+            if prev_token.kind() != T![:] {
+                return None;
+            }
+
+            // has 3 colon or 2 coloncolon in a row
+            // special casing this as per discussion in https://github.com/rust-lang/rust-analyzer/pull/13611#discussion_r1031845205
+            // and https://github.com/rust-lang/rust-analyzer/pull/13611#discussion_r1032812751
+            if prev_token
+                .prev_token()
+                .map(|t| t.kind() == T![:] || t.kind() == T![::])
+                .unwrap_or(false)
+            {
+                return None;
+            }
         }

         let AnalysisResult {
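The rule the rewritten block enforces, stated on plain characters rather than syntax tokens: path completions should fire only when the caret sits after exactly one `::`; a single `:` (ascription, struct fields) and runs of three or more colons (broken code) both bail out. A hypothetical string-level rendering of the same check:

fn should_complete_after_colons(before_caret: &str) -> bool {
    let trailing_colons = before_caret.chars().rev().take_while(|&c| c == ':').count();
    trailing_colons == 2
}

fn main() {
    assert!(should_complete_after_colons("foo::"));
    assert!(!should_complete_after_colons("foo:"));   // only has a single colon
    assert!(!should_complete_after_colons("foo:::")); // 3 colons in a row
}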
@@ -29,6 +29,7 @@ pub(super) struct AnalysisResult {
     pub(super) analysis: CompletionAnalysis,
     pub(super) expected: (Option<Type>, Option<ast::NameOrNameRef>),
     pub(super) qualifier_ctx: QualifierCtx,
+    /// the original token of the expanded file
     pub(super) token: SyntaxToken,
     pub(super) offset: TextSize,
 }

@@ -213,15 +214,6 @@ fn analyze(
     let _p = profile::span("CompletionContext::analyze");
     let ExpansionResult { original_file, speculative_file, offset, fake_ident_token, derive_ctx } =
         expansion_result;
-    let syntax_element = NodeOrToken::Token(fake_ident_token);
-    if is_in_token_of_for_loop(syntax_element.clone()) {
-        // for pat $0
-        // there is nothing to complete here except `in` keyword
-        // don't bother populating the context
-        // FIXME: the completion calculations should end up good enough
-        // such that this special case becomes unnecessary
-        return None;
-    }

     // Overwrite the path kind for derives
     if let Some((original_file, file_with_fake_ident, offset, origin_attr)) = derive_ctx {

@@ -249,37 +241,35 @@ fn analyze(
         return None;
     }

-    let name_like = match find_node_at_offset(&speculative_file, offset) {
-        Some(it) => it,
-        None => {
-            let analysis = if let Some(original) = ast::String::cast(original_token.clone()) {
-                CompletionAnalysis::String {
-                    original,
-                    expanded: ast::String::cast(self_token.clone()),
-                }
-            } else {
-                // Fix up trailing whitespace problem
-                // #[attr(foo = $0
-                let token = syntax::algo::skip_trivia_token(self_token.clone(), Direction::Prev)?;
-                let p = token.parent()?;
-                if p.kind() == SyntaxKind::TOKEN_TREE
-                    && p.ancestors().any(|it| it.kind() == SyntaxKind::META)
-                {
-                    let colon_prefix = previous_non_trivia_token(self_token.clone())
-                        .map_or(false, |it| T![:] == it.kind());
-                    CompletionAnalysis::UnexpandedAttrTT {
-                        fake_attribute_under_caret: syntax_element
-                            .ancestors()
-                            .find_map(ast::Attr::cast),
-                        colon_prefix,
-                    }
-                } else {
-                    return None;
-                }
-            };
-            return Some((analysis, (None, None), QualifierCtx::default()));
-        }
-    };
+    let Some(name_like) = find_node_at_offset(&speculative_file, offset) else {
+        let analysis = if let Some(original) = ast::String::cast(original_token.clone()) {
+            CompletionAnalysis::String {
+                original,
+                expanded: ast::String::cast(self_token.clone()),
+            }
+        } else {
+            // Fix up trailing whitespace problem
+            // #[attr(foo = $0
+            let token = syntax::algo::skip_trivia_token(self_token.clone(), Direction::Prev)?;
+            let p = token.parent()?;
+            if p.kind() == SyntaxKind::TOKEN_TREE
+                && p.ancestors().any(|it| it.kind() == SyntaxKind::META)
+            {
+                let colon_prefix = previous_non_trivia_token(self_token.clone())
+                    .map_or(false, |it| T![:] == it.kind());
+                CompletionAnalysis::UnexpandedAttrTT {
+                    fake_attribute_under_caret: fake_ident_token
+                        .parent_ancestors()
+                        .find_map(ast::Attr::cast),
+                    colon_prefix,
+                }
+            } else {
+                return None;
+            }
+        };
+        return Some((analysis, (None, None), QualifierCtx::default()));
+    };

     let expected = expected_type_and_name(sema, self_token, &name_like);
     let mut qual_ctx = QualifierCtx::default();
     let analysis = match name_like {

@@ -290,6 +280,22 @@ fn analyze(
             let parent = name_ref.syntax().parent()?;
             let (nameref_ctx, qualifier_ctx) =
                 classify_name_ref(sema, &original_file, name_ref, parent)?;

+            if let NameRefContext {
+                kind:
+                    NameRefKind::Path(PathCompletionCtx { kind: PathKind::Expr { .. }, path, .. }, ..),
+                ..
+            } = &nameref_ctx
+            {
+                if is_in_token_of_for_loop(path) {
+                    // for pat $0
+                    // there is nothing to complete here except `in` keyword
+                    // don't bother populating the context
+                    // Ideally this special casing wouldn't be needed, but the parser recovers
+                    return None;
+                }
+            }
+
             qual_ctx = qualifier_ctx;
             CompletionAnalysis::NameRef(nameref_ctx)
         }

@@ -323,16 +329,14 @@ fn expected_type_and_name(
             ast::FieldExpr(e) => e
                 .syntax()
                 .ancestors()
-                .map_while(ast::FieldExpr::cast)
-                .last()
-                .map(|it| it.syntax().clone()),
+                .take_while(|it| ast::FieldExpr::can_cast(it.kind()))
+                .last(),
             ast::PathSegment(e) => e
                 .syntax()
                 .ancestors()
                 .skip(1)
                 .take_while(|it| ast::Path::can_cast(it.kind()) || ast::PathExpr::can_cast(it.kind()))
-                .find_map(ast::PathExpr::cast)
-                .map(|it| it.syntax().clone()),
+                .find(|it| ast::PathExpr::can_cast(it.kind())),
             _ => None
         }
     };

@@ -605,6 +609,18 @@ fn classify_name_ref(
         },
         _ => false,
     };

+    let reciever_is_part_of_indivisible_expression = match &receiver {
+        Some(ast::Expr::IfExpr(_)) => {
+            let next_token_kind = next_non_trivia_token(name_ref.syntax().clone()).map(|t| t.kind());
+            next_token_kind == Some(SyntaxKind::ELSE_KW)
+        },
+        _ => false
+    };
+    if reciever_is_part_of_indivisible_expression {
+        return None;
+    }
+
     let kind = NameRefKind::DotAccess(DotAccess {
         receiver_ty: receiver.as_ref().and_then(|it| sema.type_of_expr(it)),
         kind: DotAccessKind::Field { receiver_is_ambiguous_float_literal },

@@ -656,8 +672,15 @@ fn classify_name_ref(
     };
     let after_if_expr = |node: SyntaxNode| {
         let prev_expr = (|| {
+            let node = match node.parent().and_then(ast::ExprStmt::cast) {
+                Some(stmt) => stmt.syntax().clone(),
+                None => node,
+            };
             let prev_sibling = non_trivia_sibling(node.into(), Direction::Prev)?.into_node()?;
-            ast::ExprStmt::cast(prev_sibling)?.expr()
+
+            ast::ExprStmt::cast(prev_sibling.clone())
+                .and_then(|it| it.expr())
+                .or_else(|| ast::Expr::cast(prev_sibling))
         })();
         matches!(prev_expr, Some(ast::Expr::IfExpr(_)))
     };

@@ -1251,40 +1274,29 @@ fn path_or_use_tree_qualifier(path: &ast::Path) -> Option<(ast::Path, bool)> {
     Some((use_tree.path()?, true))
 }

-pub(crate) fn is_in_token_of_for_loop(element: SyntaxElement) -> bool {
+fn is_in_token_of_for_loop(path: &ast::Path) -> bool {
     // oh my ...
     (|| {
-        let syntax_token = element.into_token()?;
-        let range = syntax_token.text_range();
-        let for_expr = syntax_token.parent_ancestors().find_map(ast::ForExpr::cast)?;
-
-        // check if the current token is the `in` token of a for loop
-        if let Some(token) = for_expr.in_token() {
-            return Some(syntax_token == token);
+        let expr = path.syntax().parent().and_then(ast::PathExpr::cast)?;
+        let for_expr = expr.syntax().parent().and_then(ast::ForExpr::cast)?;
+        if for_expr.in_token().is_some() {
+            return Some(false);
         }
         let pat = for_expr.pat()?;
-        if range.end() < pat.syntax().text_range().end() {
-            // if we are inside or before the pattern we can't be at the `in` token position
-            return None;
-        }
         let next_sibl = next_non_trivia_sibling(pat.syntax().clone().into())?;
         Some(match next_sibl {
-            // the loop body is some node, if our token is at the start we are at the `in` position,
-            // otherwise we could be in a recovered expression, we don't wanna ruin completions there
-            syntax::NodeOrToken::Node(n) => n.text_range().start() == range.start(),
-            // the loop body consists of a single token, if we are this we are certainly at the `in` token position
-            syntax::NodeOrToken::Token(t) => t == syntax_token,
+            syntax::NodeOrToken::Node(n) => {
+                n.text_range().start() == path.syntax().text_range().start()
+            }
+            syntax::NodeOrToken::Token(t) => {
+                t.text_range().start() == path.syntax().text_range().start()
+            }
         })
     })()
    .unwrap_or(false)
 }

-#[test]
-fn test_for_is_prev2() {
-    crate::tests::check_pattern_is_applicable(r"fn __() { for i i$0 }", is_in_token_of_for_loop);
-}
-
-pub(crate) fn is_in_loop_body(node: &SyntaxNode) -> bool {
+fn is_in_loop_body(node: &SyntaxNode) -> bool {
     node.ancestors()
         .take_while(|it| it.kind() != SyntaxKind::FN && it.kind() != SyntaxKind::CLOSURE_EXPR)
         .find_map(|it| {

@@ -1317,6 +1329,22 @@ fn previous_non_trivia_token(e: impl Into<SyntaxElement>) -> Option<SyntaxToken>
     None
 }

+fn next_non_trivia_token(e: impl Into<SyntaxElement>) -> Option<SyntaxToken> {
+    let mut token = match e.into() {
+        SyntaxElement::Node(n) => n.last_token()?,
+        SyntaxElement::Token(t) => t,
+    }
+    .next_token();
+    while let Some(inner) = token {
+        if !inner.kind().is_trivia() {
+            return Some(inner);
+        } else {
+            token = inner.next_token();
+        }
+    }
+    None
+}
+
 fn next_non_trivia_sibling(ele: SyntaxElement) -> Option<SyntaxElement> {
     let mut e = ele.next_sibling_or_token();
     while let Some(inner) = e {
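Both additions above (the else-detection and next_non_trivia_token) are forward scans that must skip trivia, that is whitespace and comment tokens, before deciding anything. The same loop over a plain token list, with an assumed trivia classification in place of rowan's SyntaxKind::is_trivia:

#[derive(Debug, PartialEq)]
enum Tok {
    Whitespace,
    Comment,
    ElseKw,
    Ident,
}

fn is_trivia(t: &Tok) -> bool {
    matches!(t, Tok::Whitespace | Tok::Comment)
}

fn next_non_trivia(tokens: &[Tok]) -> Option<&Tok> {
    tokens.iter().find(|t| !is_trivia(t))
}

fn main() {
    // `if true {}.$0 else {}`: after the caret come whitespace and `else`;
    // seeing the else keyword first marks the dot-receiver as indivisible,
    // so the postfix completion is suppressed (the new test covers this).
    let after_caret = [Tok::Whitespace, Tok::Comment, Tok::ElseKw];
    assert_eq!(next_non_trivia(&after_caret), Some(&Tok::ElseKw));
    let _ = Tok::Ident; // unused variant kept for illustration
}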
@@ -14,13 +14,14 @@ use crate::{
     render::{render_path_resolution, RenderContext},
 };

-/// `CompletionItem` describes a single completion variant in the editor pop-up.
-/// It is basically a POD with various properties. To construct a
-/// `CompletionItem`, use `new` method and the `Builder` struct.
+/// `CompletionItem` describes a single completion entity which expands to 1 or more entries in the
+/// editor pop-up. It is basically a POD with various properties. To construct a
+/// [`CompletionItem`], use [`Builder::new`] method and the [`Builder`] struct.
 #[derive(Clone)]
+#[non_exhaustive]
 pub struct CompletionItem {
     /// Label in the completion pop up which identifies completion.
-    label: SmolStr,
+    pub label: SmolStr,
     /// Range of identifier that is being completed.
     ///
     /// It should be used primarily for UI, but we also use this to convert

@@ -29,33 +30,33 @@ pub struct CompletionItem {
     /// `source_range` must contain the completion offset. `text_edit` should
     /// start with what `source_range` points to, or VSCode will filter out the
     /// completion silently.
-    source_range: TextRange,
+    pub source_range: TextRange,
     /// What happens when user selects this item.
     ///
     /// Typically, replaces `source_range` with new identifier.
-    text_edit: TextEdit,
-    is_snippet: bool,
+    pub text_edit: TextEdit,
+    pub is_snippet: bool,

     /// What item (struct, function, etc) are we completing.
-    kind: CompletionItemKind,
+    pub kind: CompletionItemKind,

     /// Lookup is used to check if completion item indeed can complete current
     /// ident.
     ///
     /// That is, in `foo.bar$0` lookup of `abracadabra` will be accepted (it
     /// contains `bar` sub sequence), and `quux` will rejected.
-    lookup: Option<SmolStr>,
+    pub lookup: Option<SmolStr>,

     /// Additional info to show in the UI pop up.
-    detail: Option<String>,
-    documentation: Option<Documentation>,
+    pub detail: Option<String>,
+    pub documentation: Option<Documentation>,

     /// Whether this item is marked as deprecated
-    deprecated: bool,
+    pub deprecated: bool,

     /// If completing a function call, ask the editor to show parameter popup
     /// after completion.
-    trigger_call_info: bool,
+    pub trigger_call_info: bool,

     /// We use this to sort completion. Relevance records facts like "do the
     /// types align precisely?". We can't sort by relevances directly, they are

@@ -64,36 +65,39 @@ pub struct CompletionItem {
     /// Note that Relevance ignores fuzzy match score. We compute Relevance for
     /// all possible items, and then separately build an ordered completion list
     /// based on relevance and fuzzy matching with the already typed identifier.
-    relevance: CompletionRelevance,
+    pub relevance: CompletionRelevance,

     /// Indicates that a reference or mutable reference to this variable is a
     /// possible match.
-    ref_match: Option<(Mutability, TextSize)>,
+    // FIXME: We shouldn't expose Mutability here (that is HIR types at all), its fine for now though
+    // until we have more splitting completions in which case we should think about
+    // generalizing this. See https://github.com/rust-lang/rust-analyzer/issues/12571
+    pub ref_match: Option<(Mutability, TextSize)>,

     /// The import data to add to completion's edits.
-    import_to_add: SmallVec<[LocatedImport; 1]>,
+    pub import_to_add: SmallVec<[LocatedImport; 1]>,
 }

 // We use custom debug for CompletionItem to make snapshot tests more readable.
 impl fmt::Debug for CompletionItem {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         let mut s = f.debug_struct("CompletionItem");
-        s.field("label", &self.label()).field("source_range", &self.source_range());
-        if self.text_edit().len() == 1 {
-            let atom = &self.text_edit().iter().next().unwrap();
+        s.field("label", &self.label).field("source_range", &self.source_range);
+        if self.text_edit.len() == 1 {
+            let atom = &self.text_edit.iter().next().unwrap();
             s.field("delete", &atom.delete);
             s.field("insert", &atom.insert);
         } else {
             s.field("text_edit", &self.text_edit);
         }
-        s.field("kind", &self.kind());
-        if self.lookup() != self.label() {
+        s.field("kind", &self.kind);
+        if self.lookup() != self.label {
             s.field("lookup", &self.lookup());
         }
-        if let Some(detail) = self.detail() {
+        if let Some(detail) = &self.detail {
             s.field("detail", &detail);
         }
-        if let Some(documentation) = self.documentation() {
+        if let Some(documentation) = &self.documentation {
             s.field("documentation", &documentation);
         }
         if self.deprecated {

@@ -351,63 +355,25 @@ impl CompletionItem {
         }
     }

-    /// What user sees in pop-up in the UI.
-    pub fn label(&self) -> &str {
-        &self.label
-    }
-    pub fn source_range(&self) -> TextRange {
-        self.source_range
-    }
-
-    pub fn text_edit(&self) -> &TextEdit {
-        &self.text_edit
-    }
-    /// Whether `text_edit` is a snippet (contains `$0` markers).
-    pub fn is_snippet(&self) -> bool {
-        self.is_snippet
-    }
-
-    /// Short one-line additional information, like a type
-    pub fn detail(&self) -> Option<&str> {
-        self.detail.as_deref()
-    }
-    /// A doc-comment
-    pub fn documentation(&self) -> Option<Documentation> {
-        self.documentation.clone()
-    }
     /// What string is used for filtering.
     pub fn lookup(&self) -> &str {
         self.lookup.as_deref().unwrap_or(&self.label)
     }

-    pub fn kind(&self) -> CompletionItemKind {
-        self.kind
-    }
-
-    pub fn deprecated(&self) -> bool {
-        self.deprecated
-    }
-
-    pub fn relevance(&self) -> CompletionRelevance {
-        self.relevance
-    }
-
-    pub fn trigger_call_info(&self) -> bool {
-        self.trigger_call_info
-    }
-
-    pub fn ref_match(&self) -> Option<(Mutability, TextSize, CompletionRelevance)> {
+    pub fn ref_match(&self) -> Option<(String, text_edit::Indel, CompletionRelevance)> {
         // Relevance of the ref match should be the same as the original
         // match, but with exact type match set because self.ref_match
         // is only set if there is an exact type match.
         let mut relevance = self.relevance;
         relevance.type_match = Some(CompletionRelevanceTypeMatch::Exact);

-        self.ref_match.map(|(mutability, offset)| (mutability, offset, relevance))
-    }
-
-    pub fn imports_to_add(&self) -> &[LocatedImport] {
-        &self.import_to_add
+        self.ref_match.map(|(mutability, offset)| {
+            (
+                format!("&{}{}", mutability.as_keyword_for_ref(), self.label),
+                text_edit::Indel::insert(offset, format!("&{}", mutability.as_keyword_for_ref())),
+                relevance,
+            )
+        })
     }
 }
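Consumers used to assemble the `&`/`&mut` variant themselves from the Mutability; ref_match now hands back the finished label plus the exact text edit. A sketch of the new shape with simplified stand-ins (Indel here is an assumed two-field struct, not the real text_edit type):

#[derive(Clone, Copy)]
enum Mutability {
    Shared,
    Mut,
}

impl Mutability {
    fn as_keyword_for_ref(self) -> &'static str {
        match self {
            Mutability::Shared => "",
            Mutability::Mut => "mut ",
        }
    }
}

struct Indel {
    insert: String,
    offset: u32,
}

fn ref_match(label: &str, mutability: Mutability, offset: u32) -> (String, Indel) {
    // hand back both the display label and the edit that prepends the borrow
    let prefix = format!("&{}", mutability.as_keyword_for_ref());
    (format!("{prefix}{label}"), Indel { insert: prefix, offset })
}

fn main() {
    let (label, indel) = ref_match("spam", Mutability::Mut, 10);
    assert_eq!(label, "&mut spam");
    assert_eq!(indel.insert, "&mut ");
    assert_eq!(indel.offset, 10);
}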
@@ -156,13 +156,15 @@ pub fn completions(

     // prevent `(` from triggering unwanted completion noise
     if trigger_character == Some('(') {
-        if let CompletionAnalysis::NameRef(NameRefContext { kind, .. }) = &analysis {
-            if let NameRefKind::Path(
-                path_ctx @ PathCompletionCtx { kind: PathKind::Vis { has_in_token }, .. },
-            ) = kind
-            {
-                completions::vis::complete_vis_path(&mut completions, ctx, path_ctx, has_in_token);
-            }
+        if let CompletionAnalysis::NameRef(NameRefContext {
+            kind:
+                NameRefKind::Path(
+                    path_ctx @ PathCompletionCtx { kind: PathKind::Vis { has_in_token }, .. },
+                ),
+            ..
+        }) = analysis
+        {
+            completions::vis::complete_vis_path(&mut completions, ctx, path_ctx, has_in_token);
         }
         return Some(completions.into());
     }

@@ -170,7 +172,7 @@ pub fn completions(
     {
         let acc = &mut completions;

-        match &analysis {
+        match analysis {
             CompletionAnalysis::Name(name_ctx) => completions::complete_name(acc, ctx, name_ctx),
             CompletionAnalysis::NameRef(name_ref_ctx) => {
                 completions::complete_name_ref(acc, ctx, name_ref_ctx)
@@ -503,18 +503,18 @@ mod tests {
     #[track_caller]
     fn check_relevance_for_kinds(ra_fixture: &str, kinds: &[CompletionItemKind], expect: Expect) {
         let mut actual = get_all_items(TEST_CONFIG, ra_fixture, None);
-        actual.retain(|it| kinds.contains(&it.kind()));
-        actual.sort_by_key(|it| cmp::Reverse(it.relevance().score()));
+        actual.retain(|it| kinds.contains(&it.kind));
+        actual.sort_by_key(|it| cmp::Reverse(it.relevance.score()));
         check_relevance_(actual, expect);
     }

     #[track_caller]
     fn check_relevance(ra_fixture: &str, expect: Expect) {
         let mut actual = get_all_items(TEST_CONFIG, ra_fixture, None);
-        actual.retain(|it| it.kind() != CompletionItemKind::Snippet);
-        actual.retain(|it| it.kind() != CompletionItemKind::Keyword);
-        actual.retain(|it| it.kind() != CompletionItemKind::BuiltinType);
-        actual.sort_by_key(|it| cmp::Reverse(it.relevance().score()));
+        actual.retain(|it| it.kind != CompletionItemKind::Snippet);
+        actual.retain(|it| it.kind != CompletionItemKind::Keyword);
+        actual.retain(|it| it.kind != CompletionItemKind::BuiltinType);
+        actual.sort_by_key(|it| cmp::Reverse(it.relevance.score()));
         check_relevance_(actual, expect);
     }

@@ -525,12 +525,11 @@ mod tests {
             .flat_map(|it| {
                 let mut items = vec![];

-                let tag = it.kind().tag();
-                let relevance = display_relevance(it.relevance());
-                items.push(format!("{tag} {} {relevance}\n", it.label()));
+                let tag = it.kind.tag();
+                let relevance = display_relevance(it.relevance);
+                items.push(format!("{tag} {} {relevance}\n", it.label));

-                if let Some((mutability, _offset, relevance)) = it.ref_match() {
-                    let label = format!("&{}{}", mutability.as_keyword_for_ref(), it.label());
+                if let Some((label, _indel, relevance)) = it.ref_match() {
                     let relevance = display_relevance(relevance);

                     items.push(format!("{tag} {label} {relevance}\n"));

@@ -587,6 +586,7 @@ fn main() { Foo::Fo$0 }
                 ),
                 lookup: "Foo{}",
                 detail: "Foo { x: i32, y: i32 }",
+                trigger_call_info: true,
             },
         ]
     "#]],

@@ -614,6 +614,7 @@ fn main() { Foo::Fo$0 }
                 ),
                 lookup: "Foo()",
                 detail: "Foo(i32, i32)",
+                trigger_call_info: true,
             },
         ]
     "#]],

@@ -679,6 +680,7 @@ fn main() { Foo::Fo$0 }
                     Variant,
                 ),
                 detail: "Foo",
+                trigger_call_info: true,
             },
         ]
     "#]],

@@ -745,6 +747,7 @@ fn main() { let _: m::Spam = S$0 }
                     postfix_match: None,
                     is_definite: false,
                 },
+                trigger_call_info: true,
             },
             CompletionItem {
                 label: "m::Spam::Foo",

@@ -770,6 +773,7 @@ fn main() { let _: m::Spam = S$0 }
                     postfix_match: None,
                     is_definite: false,
                 },
+                trigger_call_info: true,
             },
         ]
     "#]],

@@ -942,6 +946,7 @@ use self::E::*;
                 documentation: Documentation(
                     "variant docs",
                 ),
+                trigger_call_info: true,
             },
             CompletionItem {
                 label: "E",
@ -113,7 +113,7 @@ fn render(
    item.detail(rendered.detail);

    match snippet_cap {
        Some(snippet_cap) => item.insert_snippet(snippet_cap, rendered.literal),
        Some(snippet_cap) => item.insert_snippet(snippet_cap, rendered.literal).trigger_call_info(),
        None => item.insert_text(rendered.literal),
    };
@ -72,7 +72,7 @@ pub(crate) fn render_union_literal(
    .set_relevance(ctx.completion_relevance());

    match ctx.snippet_cap() {
        Some(snippet_cap) => item.insert_snippet(snippet_cap, literal),
        Some(snippet_cap) => item.insert_snippet(snippet_cap, literal).trigger_call_info(),
        None => item.insert_text(literal),
    };
@ -23,7 +23,7 @@ mod type_pos;
mod use_tree;
mod visibility;

use hir::{db::DefDatabase, PrefixKind, Semantics};
use hir::{db::DefDatabase, PrefixKind};
use ide_db::{
    base_db::{fixture::ChangeFixture, FileLoader, FilePosition},
    imports::insert_use::{ImportGranularity, InsertUseConfig},

@ -31,7 +31,6 @@ use ide_db::{
};
use itertools::Itertools;
use stdx::{format_to, trim_indent};
use syntax::{AstNode, NodeOrToken, SyntaxElement};
use test_utils::assert_eq_text;

use crate::{

@ -109,10 +108,10 @@ fn completion_list_with_config(
    let items = get_all_items(config, ra_fixture, trigger_character);
    let items = items
        .into_iter()
        .filter(|it| it.kind() != CompletionItemKind::BuiltinType || it.label() == "u32")
        .filter(|it| include_keywords || it.kind() != CompletionItemKind::Keyword)
        .filter(|it| include_keywords || it.kind() != CompletionItemKind::Snippet)
        .sorted_by_key(|it| (it.kind(), it.label().to_owned(), it.detail().map(ToOwned::to_owned)))
        .filter(|it| it.kind != CompletionItemKind::BuiltinType || it.label == "u32")
        .filter(|it| include_keywords || it.kind != CompletionItemKind::Keyword)
        .filter(|it| include_keywords || it.kind != CompletionItemKind::Snippet)
        .sorted_by_key(|it| (it.kind, it.label.clone(), it.detail.as_ref().map(ToOwned::to_owned)))
        .collect();
    render_completion_list(items)
}

@ -139,8 +138,8 @@ pub(crate) fn do_completion_with_config(
) -> Vec<CompletionItem> {
    get_all_items(config, code, None)
        .into_iter()
        .filter(|c| c.kind() == kind)
        .sorted_by(|l, r| l.label().cmp(r.label()))
        .filter(|c| c.kind == kind)
        .sorted_by(|l, r| l.label.cmp(&r.label))
        .collect()
}

@ -149,18 +148,18 @@ fn render_completion_list(completions: Vec<CompletionItem>) -> String {
        s.chars().count()
    }
    let label_width =
        completions.iter().map(|it| monospace_width(it.label())).max().unwrap_or_default().min(22);
        completions.iter().map(|it| monospace_width(&it.label)).max().unwrap_or_default().min(22);
    completions
        .into_iter()
        .map(|it| {
            let tag = it.kind().tag();
            let var_name = format!("{tag} {}", it.label());
            let tag = it.kind.tag();
            let var_name = format!("{tag} {}", it.label);
            let mut buf = var_name;
            if let Some(detail) = it.detail() {
                let width = label_width.saturating_sub(monospace_width(it.label()));
            if let Some(detail) = it.detail {
                let width = label_width.saturating_sub(monospace_width(&it.label));
                format_to!(buf, "{:width$} {}", "", detail, width = width);
            }
            if it.deprecated() {
            if it.deprecated {
                format_to!(buf, " DEPRECATED");
            }
            format_to!(buf, "\n");

@ -192,13 +191,13 @@ pub(crate) fn check_edit_with_config(
        .unwrap_or_else(|| panic!("can't find {what:?} completion in {completions:#?}"));
    let mut actual = db.file_text(position.file_id).to_string();

    let mut combined_edit = completion.text_edit().to_owned();
    let mut combined_edit = completion.text_edit.clone();

    resolve_completion_edits(
        &db,
        &config,
        position,
        completion.imports_to_add().iter().filter_map(|import_edit| {
        completion.import_to_add.iter().filter_map(|import_edit| {
            let import_path = &import_edit.import_path;
            let import_name = import_path.segments().last()?;
            Some((import_path.to_string(), import_name.to_string()))

@ -216,15 +215,6 @@ pub(crate) fn check_edit_with_config(
    assert_eq_text!(&ra_fixture_after, &actual)
}

pub(crate) fn check_pattern_is_applicable(code: &str, check: impl FnOnce(SyntaxElement) -> bool) {
    let (db, pos) = position(code);

    let sema = Semantics::new(&db);
    let original_file = sema.parse(pos.file_id);
    let token = original_file.syntax().token_at_offset(pos.offset).left_biased().unwrap();
    assert!(check(NodeOrToken::Token(token)));
}

pub(crate) fn get_all_items(
    config: CompletionConfig,
    code: &str,

@ -235,7 +225,7 @@ pub(crate) fn get_all_items(
        .map_or_else(Vec::default, Into::into);
    // validate
    res.iter().for_each(|it| {
        let sr = it.source_range();
        let sr = it.source_range;
        assert!(
            sr.contains_inclusive(position.offset),
            "source range {sr:?} does not contain the offset {:?} of the completion request: {it:?}",

@ -246,8 +236,9 @@ pub(crate) fn get_all_items(
}

#[test]
fn test_no_completions_required() {
fn test_no_completions_in_for_loop_in_kw_pos() {
    assert_eq!(completion_list(r#"fn foo() { for i i$0 }"#), String::new());
    assert_eq!(completion_list(r#"fn foo() { for i in$0 }"#), String::new());
}

#[test]
@ -745,3 +745,255 @@ fn return_value_no_block() {
        r#"fn f() -> i32 { match () { () => return $0 } }"#,
    );
}

#[test]
fn else_completion_after_if() {
    check_empty(
        r#"
fn foo() { if foo {} $0 }
"#,
        expect![[r#"
            fn foo() fn()
            bt u32
            kw const
            kw crate::
            kw else
            kw else if
            kw enum
            kw extern
            kw false
            kw fn
            kw for
            kw if
            kw if let
            kw impl
            kw let
            kw loop
            kw match
            kw mod
            kw return
            kw self::
            kw static
            kw struct
            kw trait
            kw true
            kw type
            kw union
            kw unsafe
            kw use
            kw while
            kw while let
            sn macro_rules
            sn pd
            sn ppd
        "#]],
    );
    check_empty(
        r#"
fn foo() { if foo {} el$0 }
"#,
        expect![[r#"
            fn foo() fn()
            bt u32
            kw const
            kw crate::
            kw else
            kw else if
            kw enum
            kw extern
            kw false
            kw fn
            kw for
            kw if
            kw if let
            kw impl
            kw let
            kw loop
            kw match
            kw mod
            kw return
            kw self::
            kw static
            kw struct
            kw trait
            kw true
            kw type
            kw union
            kw unsafe
            kw use
            kw while
            kw while let
            sn macro_rules
            sn pd
            sn ppd
        "#]],
    );
    check_empty(
        r#"
fn foo() { bar(if foo {} $0) }
"#,
        expect![[r#"
            fn foo() fn()
            bt u32
            kw crate::
            kw else
            kw else if
            kw false
            kw for
            kw if
            kw if let
            kw loop
            kw match
            kw return
            kw self::
            kw true
            kw unsafe
            kw while
            kw while let
        "#]],
    );
    check_empty(
        r#"
fn foo() { bar(if foo {} el$0) }
"#,
        expect![[r#"
            fn foo() fn()
            bt u32
            kw crate::
            kw else
            kw else if
            kw false
            kw for
            kw if
            kw if let
            kw loop
            kw match
            kw return
            kw self::
            kw true
            kw unsafe
            kw while
            kw while let
        "#]],
    );
    check_empty(
        r#"
fn foo() { if foo {} $0 let x = 92; }
"#,
        expect![[r#"
            fn foo() fn()
            bt u32
            kw const
            kw crate::
            kw else
            kw else if
            kw enum
            kw extern
            kw false
            kw fn
            kw for
            kw if
            kw if let
            kw impl
            kw let
            kw loop
            kw match
            kw mod
            kw return
            kw self::
            kw static
            kw struct
            kw trait
            kw true
            kw type
            kw union
            kw unsafe
            kw use
            kw while
            kw while let
            sn macro_rules
            sn pd
            sn ppd
        "#]],
    );
    check_empty(
        r#"
fn foo() { if foo {} el$0 let x = 92; }
"#,
        expect![[r#"
            fn foo() fn()
            bt u32
            kw const
            kw crate::
            kw else
            kw else if
            kw enum
            kw extern
            kw false
            kw fn
            kw for
            kw if
            kw if let
            kw impl
            kw let
            kw loop
            kw match
            kw mod
            kw return
            kw self::
            kw static
            kw struct
            kw trait
            kw true
            kw type
            kw union
            kw unsafe
            kw use
            kw while
            kw while let
            sn macro_rules
            sn pd
            sn ppd
        "#]],
    );
    check_empty(
        r#"
fn foo() { if foo {} el$0 { let x = 92; } }
"#,
        expect![[r#"
            fn foo() fn()
            bt u32
            kw const
            kw crate::
            kw else
            kw else if
            kw enum
            kw extern
            kw false
            kw fn
            kw for
            kw if
            kw if let
            kw impl
            kw let
            kw loop
            kw match
            kw mod
            kw return
            kw self::
            kw static
            kw struct
            kw trait
            kw true
            kw type
            kw union
            kw unsafe
            kw use
            kw while
            kw while let
            sn macro_rules
            sn pd
            sn ppd
        "#]],
    );
}
@ -2,10 +2,17 @@

use expect_test::{expect, Expect};

use crate::tests::{check_edit, completion_list_no_kw, completion_list_with_trigger_character};
use crate::tests::{
    check_edit, completion_list, completion_list_no_kw, completion_list_with_trigger_character,
};

fn check_no_kw(ra_fixture: &str, expect: Expect) {
    let actual = completion_list_no_kw(ra_fixture);
    expect.assert_eq(&actual)
}

fn check(ra_fixture: &str, expect: Expect) {
    let actual = completion_list_no_kw(ra_fixture);
    let actual = completion_list(ra_fixture);
    expect.assert_eq(&actual)
}

@ -59,7 +66,7 @@ fn _alpha() {}

#[test]
fn completes_prelude() {
    check(
    check_no_kw(
        r#"
//- /main.rs edition:2018 crate:main deps:std
fn foo() { let x: $0 }

@ -81,7 +88,7 @@ pub mod prelude {

#[test]
fn completes_prelude_macros() {
    check(
    check_no_kw(
        r#"
//- /main.rs edition:2018 crate:main deps:std
fn f() {$0}

@ -110,7 +117,7 @@ mod macros {

#[test]
fn completes_std_prelude_if_core_is_defined() {
    check(
    check_no_kw(
        r#"
//- /main.rs crate:main deps:core,std
fn foo() { let x: $0 }

@ -140,7 +147,7 @@ pub mod prelude {

#[test]
fn respects_doc_hidden() {
    check(
    check_no_kw(
        r#"
//- /lib.rs crate:lib deps:std
fn f() {

@ -168,7 +175,7 @@ pub mod prelude {

#[test]
fn respects_doc_hidden_in_assoc_item_list() {
    check(
    check_no_kw(
        r#"
//- /lib.rs crate:lib deps:std
struct S;

@ -195,7 +202,7 @@ pub mod prelude {

#[test]
fn associated_item_visibility() {
    check(
    check_no_kw(
        r#"
//- /lib.rs crate:lib new_source_root:library
pub struct S;

@ -222,7 +229,7 @@ fn foo() { let _ = lib::S::$0 }

#[test]
fn completes_union_associated_method() {
    check(
    check_no_kw(
        r#"
union U {};
impl U { fn m() { } }

@ -237,7 +244,7 @@ fn foo() { let _ = U::$0 }

#[test]
fn completes_trait_associated_method_1() {
    check(
    check_no_kw(
        r#"
trait Trait { fn m(); }

@ -251,7 +258,7 @@ fn foo() { let _ = Trait::$0 }

#[test]
fn completes_trait_associated_method_2() {
    check(
    check_no_kw(
        r#"
trait Trait { fn m(); }

@ -268,7 +275,7 @@ fn foo() { let _ = S::$0 }

#[test]
fn completes_trait_associated_method_3() {
    check(
    check_no_kw(
        r#"
trait Trait { fn m(); }

@ -285,7 +292,7 @@ fn foo() { let _ = <S as Trait>::$0 }

#[test]
fn completes_ty_param_assoc_ty() {
    check(
    check_no_kw(
        r#"
trait Super {
    type Ty;

@ -318,7 +325,7 @@ fn foo<T: Sub>() { T::$0 }

#[test]
fn completes_self_param_assoc_ty() {
    check(
    check_no_kw(
        r#"
trait Super {
    type Ty;

@ -358,7 +365,7 @@ impl<T> Sub for Wrap<T> {

#[test]
fn completes_type_alias() {
    check(
    check_no_kw(
        r#"
struct S;
impl S { fn foo() {} }

@ -376,7 +383,7 @@ fn main() { T::$0; }

#[test]
fn completes_qualified_macros() {
    check(
    check_no_kw(
        r#"
#[macro_export]
macro_rules! foo { () => {} }

@ -392,7 +399,7 @@ fn main() { let _ = crate::$0 }

#[test]
fn does_not_complete_non_fn_macros() {
    check(
    check_no_kw(
        r#"
mod m {
    #[rustc_builtin_macro]

@ -403,7 +410,7 @@ fn f() {m::$0}
"#,
        expect![[r#""#]],
    );
    check(
    check_no_kw(
        r#"
mod m {
    #[rustc_builtin_macro]

@ -418,7 +425,7 @@ fn f() {m::$0}

#[test]
fn completes_reexported_items_under_correct_name() {
    check(
    check_no_kw(
        r#"
fn foo() { self::m::$0 }

@ -475,7 +482,7 @@ mod p {

#[test]
fn completes_in_simple_macro_call() {
    check(
    check_no_kw(
        r#"
macro_rules! m { ($e:expr) => { $e } }
fn main() { m!(self::f$0); }

@ -490,7 +497,7 @@ fn foo() {}

#[test]
fn function_mod_share_name() {
    check(
    check_no_kw(
        r#"
fn foo() { self::m::$0 }

@ -508,7 +515,7 @@ mod m {

#[test]
fn completes_hashmap_new() {
    check(
    check_no_kw(
        r#"
struct RandomState;
struct HashMap<K, V, S = RandomState> {}

@ -529,7 +536,7 @@ fn foo() {
#[test]
fn completes_variant_through_self() {
    cov_mark::check!(completes_variant_through_self);
    check(
    check_no_kw(
        r#"
enum Foo {
    Bar,

@ -552,7 +559,7 @@ impl Foo {

#[test]
fn completes_non_exhaustive_variant_within_the_defining_crate() {
    check(
    check_no_kw(
        r#"
enum Foo {
    #[non_exhaustive]

@ -570,7 +577,7 @@ fn foo(self) {
"#]],
    );

    check(
    check_no_kw(
        r#"
//- /main.rs crate:main deps:e
fn foo(self) {

@ -593,7 +600,7 @@ enum Foo {
#[test]
fn completes_primitive_assoc_const() {
    cov_mark::check!(completes_primitive_assoc_const);
    check(
    check_no_kw(
        r#"
//- /lib.rs crate:lib deps:core
fn f() {

@ -618,7 +625,7 @@ impl u8 {
#[test]
fn completes_variant_through_alias() {
    cov_mark::check!(completes_variant_through_alias);
    check(
    check_no_kw(
        r#"
enum Foo {
    Bar

@ -636,7 +643,7 @@ fn main() {

#[test]
fn respects_doc_hidden2() {
    check(
    check_no_kw(
        r#"
//- /lib.rs crate:lib deps:dep
fn f() {

@ -665,7 +672,7 @@ pub mod m {}

#[test]
fn type_anchor_empty() {
    check(
    check_no_kw(
        r#"
trait Foo {
    fn foo() -> Self;

@ -688,7 +695,7 @@ fn bar() -> Bar {

#[test]
fn type_anchor_type() {
    check(
    check_no_kw(
        r#"
trait Foo {
    fn foo() -> Self;

@ -715,7 +722,7 @@ fn bar() -> Bar {

#[test]
fn type_anchor_type_trait() {
    check(
    check_no_kw(
        r#"
trait Foo {
    fn foo() -> Self;

@ -741,7 +748,7 @@ fn bar() -> Bar {

#[test]
fn completes_fn_in_pub_trait_generated_by_macro() {
    check(
    check_no_kw(
        r#"
mod other_mod {
    macro_rules! make_method {

@ -775,7 +782,7 @@ fn main() {

#[test]
fn completes_fn_in_pub_trait_generated_by_recursive_macro() {
    check(
    check_no_kw(
        r#"
mod other_mod {
    macro_rules! make_method {

@ -815,7 +822,7 @@ fn main() {

#[test]
fn completes_const_in_pub_trait_generated_by_macro() {
    check(
    check_no_kw(
        r#"
mod other_mod {
    macro_rules! make_const {

@ -847,7 +854,7 @@ fn main() {

#[test]
fn completes_locals_from_macros() {
    check(
    check_no_kw(
        r#"

macro_rules! x {

@ -875,7 +882,7 @@ fn main() {

#[test]
fn regression_12644() {
    check(
    check_no_kw(
        r#"
macro_rules! __rust_force_expr {
    ($e:expr) => {

@ -974,7 +981,7 @@ fn foo { crate:::$0 }
"#,
        expect![""],
    );
    check(
    check_no_kw(
        r#"
fn foo { crate::::$0 }
"#,
@ -37,8 +37,9 @@ text-edit.workspace = true
hir.workspace = true

[dev-dependencies]
xshell = "0.2.2"
expect-test = "1.4.0"
oorandom = "11.1.3"
xshell = "0.2.2"

# local deps
test-utils.workspace = true
@ -2,9 +2,10 @@

use either::Either;
use hir::{Semantics, Type};
use parser::T;
use syntax::{
    ast::{self, HasArgList, HasName},
    AstNode, SyntaxToken,
    match_ast, AstNode, NodeOrToken, SyntaxToken,
};

use crate::RootDatabase;

@ -58,7 +59,7 @@ pub fn callable_for_node(
    calling_node: &ast::CallableExpr,
    token: &SyntaxToken,
) -> Option<(hir::Callable, Option<usize>)> {
    let callable = match &calling_node {
    let callable = match calling_node {
        ast::CallableExpr::Call(call) => {
            let expr = call.expr()?;
            sema.type_of_expr(&expr)?.adjusted().as_callable(sema.db)

@ -66,13 +67,78 @@ pub fn callable_for_node(
        ast::CallableExpr::MethodCall(call) => sema.resolve_method_call_as_callable(call),
    }?;
    let active_param = if let Some(arg_list) = calling_node.arg_list() {
        let param = arg_list
            .args()
            .take_while(|arg| arg.syntax().text_range().end() <= token.text_range().start())
            .count();
        Some(param)
        Some(
            arg_list
                .syntax()
                .children_with_tokens()
                .filter_map(NodeOrToken::into_token)
                .filter(|t| t.kind() == T![,])
                .take_while(|t| t.text_range().start() <= token.text_range().start())
                .count(),
        )
    } else {
        None
    };
    Some((callable, active_param))
}

pub fn generic_def_for_node(
    sema: &Semantics<'_, RootDatabase>,
    generic_arg_list: &ast::GenericArgList,
    token: &SyntaxToken,
) -> Option<(hir::GenericDef, usize, bool)> {
    let parent = generic_arg_list.syntax().parent()?;
    let def = match_ast! {
        match parent {
            ast::PathSegment(ps) => {
                let res = sema.resolve_path(&ps.parent_path())?;
                let generic_def: hir::GenericDef = match res {
                    hir::PathResolution::Def(hir::ModuleDef::Adt(it)) => it.into(),
                    hir::PathResolution::Def(hir::ModuleDef::Function(it)) => it.into(),
                    hir::PathResolution::Def(hir::ModuleDef::Trait(it)) => it.into(),
                    hir::PathResolution::Def(hir::ModuleDef::TypeAlias(it)) => it.into(),
                    hir::PathResolution::Def(hir::ModuleDef::Variant(it)) => it.into(),
                    hir::PathResolution::Def(hir::ModuleDef::BuiltinType(_))
                    | hir::PathResolution::Def(hir::ModuleDef::Const(_))
                    | hir::PathResolution::Def(hir::ModuleDef::Macro(_))
                    | hir::PathResolution::Def(hir::ModuleDef::Module(_))
                    | hir::PathResolution::Def(hir::ModuleDef::Static(_)) => return None,
                    hir::PathResolution::BuiltinAttr(_)
                    | hir::PathResolution::ToolModule(_)
                    | hir::PathResolution::Local(_)
                    | hir::PathResolution::TypeParam(_)
                    | hir::PathResolution::ConstParam(_)
                    | hir::PathResolution::SelfType(_)
                    | hir::PathResolution::DeriveHelper(_) => return None,
                };

                generic_def
            },
            ast::AssocTypeArg(_) => {
                // FIXME: We don't record the resolutions for this anywhere atm
                return None;
            },
            ast::MethodCallExpr(mcall) => {
                // recv.method::<$0>()
                let method = sema.resolve_method_call(&mcall)?;
                method.into()
            },
            _ => return None,
        }
    };

    let active_param = generic_arg_list
        .syntax()
        .children_with_tokens()
        .filter_map(NodeOrToken::into_token)
        .filter(|t| t.kind() == T![,])
        .take_while(|t| t.text_range().start() <= token.text_range().start())
        .count();

    let first_arg_is_non_lifetime = generic_arg_list
        .generic_args()
        .next()
        .map_or(false, |arg| !matches!(arg, ast::GenericArg::LifetimeArg(_)));

    Some((def, active_param, first_arg_is_non_lifetime))
}
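Both `callable_for_node` and `generic_def_for_node` now derive the active parameter by counting `,` tokens before the cursor, instead of counting complete argument nodes, which stays correct while an argument is only half-typed. A rough standalone illustration of the counting rule, on a plain string rather than syntax tokens (the helper name is made up):

// Count commas before the cursor to get the active parameter index.
// This mirrors the token-based counting above, simplified for brevity.
fn active_param(args: &str, cursor: usize) -> usize {
    args.char_indices()
        .filter(|&(_, c)| c == ',')
        .take_while(|&(i, _)| i < cursor)
        .count()
}

fn main() {
    // foo(a, b|, c) -- cursor after "b" lands on parameter index 1.
    assert_eq!(active_param("a, b, c", 4), 1);
    // foo(a, |) -- a trailing, still-empty argument counts as parameter 1.
    assert_eq!(active_param("a, ", 3), 1);
}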
@ -7,20 +7,13 @@ use syntax::{TextRange, TextSize};

#[derive(Clone, Debug, PartialEq, Eq)]
pub struct LineIndex {
    /// Offset the the beginning of each line, zero-based
    /// Offset the beginning of each line, zero-based.
    pub(crate) newlines: Vec<TextSize>,
    /// List of non-ASCII characters on each line
    pub(crate) utf16_lines: NoHashHashMap<u32, Vec<Utf16Char>>,
}

#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct LineColUtf16 {
    /// Zero-based
    pub line: u32,
    /// Zero-based
    pub col: u32,
    /// List of non-ASCII characters on each line.
    pub(crate) line_wide_chars: NoHashHashMap<u32, Vec<WideChar>>,
}

/// Line/Column information in native, utf8 format.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct LineCol {
    /// Zero-based

@ -29,34 +22,57 @@ pub struct LineCol {
    pub col: u32,
}

#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum WideEncoding {
    Utf16,
    Utf32,
}

/// Line/Column information in legacy encodings.
///
/// Deliberately not a generic type and different from `LineCol`.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct WideLineCol {
    /// Zero-based
    pub line: u32,
    /// Zero-based
    pub col: u32,
}

#[derive(Clone, Debug, Hash, PartialEq, Eq)]
pub(crate) struct Utf16Char {
pub(crate) struct WideChar {
    /// Start offset of a character inside a line, zero-based
    pub(crate) start: TextSize,
    /// End offset of a character inside a line, zero-based
    pub(crate) end: TextSize,
}

impl Utf16Char {
impl WideChar {
    /// Returns the length in 8-bit UTF-8 code units.
    fn len(&self) -> TextSize {
        self.end - self.start
    }

    /// Returns the length in 16-bit UTF-16 code units.
    fn len_utf16(&self) -> usize {
        if self.len() == TextSize::from(4) {
            2
        } else {
            1
    /// Returns the length in UTF-16 or UTF-32 code units.
    fn wide_len(&self, enc: WideEncoding) -> usize {
        match enc {
            WideEncoding::Utf16 => {
                if self.len() == TextSize::from(4) {
                    2
                } else {
                    1
                }
            }

            WideEncoding::Utf32 => 1,
        }
    }
}

impl LineIndex {
    pub fn new(text: &str) -> LineIndex {
        let mut utf16_lines = NoHashHashMap::default();
        let mut utf16_chars = Vec::new();
        let mut line_wide_chars = NoHashHashMap::default();
        let mut wide_chars = Vec::new();

        let mut newlines = Vec::with_capacity(16);
        newlines.push(TextSize::from(0));

@ -71,8 +87,8 @@ impl LineIndex {
        newlines.push(curr_row);

        // Save any utf-16 characters seen in the previous line
        if !utf16_chars.is_empty() {
            utf16_lines.insert(line, mem::take(&mut utf16_chars));
        if !wide_chars.is_empty() {
            line_wide_chars.insert(line, mem::take(&mut wide_chars));
        }

        // Prepare for processing the next line

@ -82,18 +98,18 @@ impl LineIndex {
        }

        if !c.is_ascii() {
            utf16_chars.push(Utf16Char { start: curr_col, end: curr_col + c_len });
            wide_chars.push(WideChar { start: curr_col, end: curr_col + c_len });
        }

        curr_col += c_len;
    }

    // Save any utf-16 characters seen in the last line
    if !utf16_chars.is_empty() {
        utf16_lines.insert(line, utf16_chars);
    if !wide_chars.is_empty() {
        line_wide_chars.insert(line, wide_chars);
    }

    LineIndex { newlines, utf16_lines }
    LineIndex { newlines, line_wide_chars }
}

pub fn line_col(&self, offset: TextSize) -> LineCol {

@ -109,13 +125,13 @@ impl LineIndex {
        .map(|offset| offset + TextSize::from(line_col.col))
}

pub fn to_utf16(&self, line_col: LineCol) -> LineColUtf16 {
    let col = self.utf8_to_utf16_col(line_col.line, line_col.col.into());
    LineColUtf16 { line: line_col.line, col: col as u32 }
pub fn to_wide(&self, enc: WideEncoding, line_col: LineCol) -> WideLineCol {
    let col = self.utf8_to_wide_col(enc, line_col.line, line_col.col.into());
    WideLineCol { line: line_col.line, col: col as u32 }
}

pub fn to_utf8(&self, line_col: LineColUtf16) -> LineCol {
    let col = self.utf16_to_utf8_col(line_col.line, line_col.col);
pub fn to_utf8(&self, enc: WideEncoding, line_col: WideLineCol) -> LineCol {
    let col = self.wide_to_utf8_col(enc, line_col.line, line_col.col);
    LineCol { line: line_col.line, col: col.into() }
}

@ -132,12 +148,12 @@ impl LineIndex {
        .filter(|it| !it.is_empty())
}

fn utf8_to_utf16_col(&self, line: u32, col: TextSize) -> usize {
fn utf8_to_wide_col(&self, enc: WideEncoding, line: u32, col: TextSize) -> usize {
    let mut res: usize = col.into();
    if let Some(utf16_chars) = self.utf16_lines.get(&line) {
        for c in utf16_chars {
    if let Some(wide_chars) = self.line_wide_chars.get(&line) {
        for c in wide_chars {
            if c.end <= col {
                res -= usize::from(c.len()) - c.len_utf16();
                res -= usize::from(c.len()) - c.wide_len(enc);
            } else {
                // From here on, all utf16 characters come *after* the character we are mapping,
                // so we don't need to take them into account

@ -148,11 +164,11 @@ impl LineIndex {
    res
}

fn utf16_to_utf8_col(&self, line: u32, mut col: u32) -> TextSize {
    if let Some(utf16_chars) = self.utf16_lines.get(&line) {
        for c in utf16_chars {
fn wide_to_utf8_col(&self, enc: WideEncoding, line: u32, mut col: u32) -> TextSize {
    if let Some(wide_chars) = self.line_wide_chars.get(&line) {
        for c in wide_chars {
            if col > u32::from(c.start) {
                col += u32::from(c.len()) - c.len_utf16() as u32;
                col += u32::from(c.len()) - c.wide_len(enc) as u32;
            } else {
                // From here on, all utf16 characters come *after* the character we are mapping,
                // so we don't need to take them into account

@ -167,6 +183,9 @@ impl LineIndex {

#[cfg(test)]
mod tests {
    use test_utils::skip_slow_tests;

    use super::WideEncoding::{Utf16, Utf32};
    use super::*;

    #[test]

@ -210,67 +229,59 @@ mod tests {
const C: char = 'x';
",
    );
    assert_eq!(col_index.utf16_lines.len(), 0);
    assert_eq!(col_index.line_wide_chars.len(), 0);
}

#[test]
fn test_single_char() {
    let col_index = LineIndex::new(
        "
const C: char = 'メ';
",
    );
fn test_every_chars() {
    if skip_slow_tests() {
        return;
    }

    assert_eq!(col_index.utf16_lines.len(), 1);
    assert_eq!(col_index.utf16_lines[&1].len(), 1);
    assert_eq!(col_index.utf16_lines[&1][0], Utf16Char { start: 17.into(), end: 20.into() });
    let text: String = {
        let mut chars: Vec<char> = ((0 as char)..char::MAX).collect(); // Neat!
        chars.extend("\n".repeat(chars.len() / 16).chars());
        let mut rng = oorandom::Rand32::new(stdx::rand::seed());
        stdx::rand::shuffle(&mut chars, |i| rng.rand_range(0..i as u32) as usize);
        chars.into_iter().collect()
    };
    assert!(text.contains('💩')); // Sanity check.

    // UTF-8 to UTF-16, no changes
    assert_eq!(col_index.utf8_to_utf16_col(1, 15.into()), 15);
    let line_index = LineIndex::new(&text);

    // UTF-8 to UTF-16
    assert_eq!(col_index.utf8_to_utf16_col(1, 22.into()), 20);
    let mut lin_col = LineCol { line: 0, col: 0 };
    let mut col_utf16 = 0;
    let mut col_utf32 = 0;
    for (offset, c) in text.char_indices() {
        let got_offset = line_index.offset(lin_col).unwrap();
        assert_eq!(usize::from(got_offset), offset);

    // UTF-16 to UTF-8, no changes
    assert_eq!(col_index.utf16_to_utf8_col(1, 15), TextSize::from(15));
        let got_lin_col = line_index.line_col(got_offset);
        assert_eq!(got_lin_col, lin_col);

    // UTF-16 to UTF-8
    assert_eq!(col_index.utf16_to_utf8_col(1, 19), TextSize::from(21));
        for enc in [Utf16, Utf32] {
            let wide_lin_col = line_index.to_wide(enc, lin_col);
            let got_lin_col = line_index.to_utf8(enc, wide_lin_col);
            assert_eq!(got_lin_col, lin_col);

    let col_index = LineIndex::new("a𐐏b");
    assert_eq!(col_index.utf16_to_utf8_col(0, 3), TextSize::from(5));
}
            let want_col = match enc {
                Utf16 => col_utf16,
                Utf32 => col_utf32,
            };
            assert_eq!(wide_lin_col.col, want_col)
        }

#[test]
fn test_string() {
    let col_index = LineIndex::new(
        "
const C: char = \"メ メ\";
",
    );

    assert_eq!(col_index.utf16_lines.len(), 1);
    assert_eq!(col_index.utf16_lines[&1].len(), 2);
    assert_eq!(col_index.utf16_lines[&1][0], Utf16Char { start: 17.into(), end: 20.into() });
    assert_eq!(col_index.utf16_lines[&1][1], Utf16Char { start: 21.into(), end: 24.into() });

    // UTF-8 to UTF-16
    assert_eq!(col_index.utf8_to_utf16_col(1, 15.into()), 15);

    assert_eq!(col_index.utf8_to_utf16_col(1, 21.into()), 19);
    assert_eq!(col_index.utf8_to_utf16_col(1, 25.into()), 21);

    assert!(col_index.utf8_to_utf16_col(2, 15.into()) == 15);

    // UTF-16 to UTF-8
    assert_eq!(col_index.utf16_to_utf8_col(1, 15), TextSize::from(15));

    // メ UTF-8: 0xE3 0x83 0xA1, UTF-16: 0x30E1
    assert_eq!(col_index.utf16_to_utf8_col(1, 17), TextSize::from(17)); // first メ at 17..20
    assert_eq!(col_index.utf16_to_utf8_col(1, 18), TextSize::from(20)); // space
    assert_eq!(col_index.utf16_to_utf8_col(1, 19), TextSize::from(21)); // second メ at 21..24

    assert_eq!(col_index.utf16_to_utf8_col(2, 15), TextSize::from(15));
        if c == '\n' {
            lin_col.line += 1;
            lin_col.col = 0;
            col_utf16 = 0;
            col_utf32 = 0;
        } else {
            lin_col.col += c.len_utf8() as u32;
            col_utf16 += c.len_utf16() as u32;
            col_utf32 += 1;
        }
    }
}

#[test]
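The `WideChar::wide_len` logic rests on a standard-library fact: a scalar value takes 1..=4 UTF-8 bytes, 1 or 2 UTF-16 units, and always exactly 1 UTF-32 unit, and only 4-byte UTF-8 characters need a surrogate pair in UTF-16. A quick check of that invariant using std alone:

// Verify the encoding-width rule that `wide_len` relies on:
// a char is 2 UTF-16 units exactly when it is 4 UTF-8 bytes.
fn main() {
    for c in ['x', 'メ', '💩'] {
        let utf8 = c.len_utf8();
        let utf16 = c.len_utf16();
        assert_eq!(utf16, if utf8 == 4 { 2 } else { 1 });
        // UTF-32 is always one code unit per char.
        let utf32 = 1;
        println!("{c:?}: utf8={utf8} utf16={utf16} utf32={utf32}");
    }
}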
@ -455,15 +455,21 @@ impl<'a> FindUsages<'a> {
        }

        let find_nodes = move |name: &str, node: &syntax::SyntaxNode, offset: TextSize| {
            node.token_at_offset(offset).find(|it| it.text() == name).map(|token| {
                // FIXME: There should be optimization potential here
                // Currently we try to descend everything we find which
                // means we call `Semantics::descend_into_macros` on
                // every textual hit. That function is notoriously
                // expensive even for things that do not get down mapped
                // into macros.
                sema.descend_into_macros(token).into_iter().filter_map(|it| it.parent())
            })
            node.token_at_offset(offset)
                .find(|it| {
                    // `name` is stripped of raw ident prefix. See the comment on name retrieval above.
                    it.text().trim_start_matches("r#") == name
                })
                .into_iter()
                .flat_map(|token| {
                    // FIXME: There should be optimization potential here
                    // Currently we try to descend everything we find which
                    // means we call `Semantics::descend_into_macros` on
                    // every textual hit. That function is notoriously
                    // expensive even for things that do not get down mapped
                    // into macros.
                    sema.descend_into_macros(token).into_iter().filter_map(|it| it.parent())
                })
        };

        for (text, file_id, search_range) in scope_files(sema, &search_scope) {

@ -471,30 +477,23 @@ impl<'a> FindUsages<'a> {

            // Search for occurrences of the items name
            for offset in match_indices(&text, finder, search_range) {
                if let Some(iter) = find_nodes(name, &tree, offset) {
                    for name in iter.filter_map(ast::NameLike::cast) {
                        if match name {
                            ast::NameLike::NameRef(name_ref) => {
                                self.found_name_ref(&name_ref, sink)
                            }
                            ast::NameLike::Name(name) => self.found_name(&name, sink),
                            ast::NameLike::Lifetime(lifetime) => {
                                self.found_lifetime(&lifetime, sink)
                            }
                        } {
                            return;
                        }
                for name in find_nodes(name, &tree, offset).filter_map(ast::NameLike::cast) {
                    if match name {
                        ast::NameLike::NameRef(name_ref) => self.found_name_ref(&name_ref, sink),
                        ast::NameLike::Name(name) => self.found_name(&name, sink),
                        ast::NameLike::Lifetime(lifetime) => self.found_lifetime(&lifetime, sink),
                    } {
                        return;
                    }
                }
            }
            // Search for occurrences of the `Self` referring to our type
            if let Some((self_ty, finder)) = &include_self_kw_refs {
                for offset in match_indices(&text, finder, search_range) {
                    if let Some(iter) = find_nodes("Self", &tree, offset) {
                        for name_ref in iter.filter_map(ast::NameRef::cast) {
                            if self.found_self_ty_name_ref(self_ty, &name_ref, sink) {
                                return;
                            }
                    for name_ref in find_nodes("Self", &tree, offset).filter_map(ast::NameRef::cast)
                    {
                        if self.found_self_ty_name_ref(self_ty, &name_ref, sink) {
                            return;
                        }
                    }
                }

@ -513,21 +512,21 @@ impl<'a> FindUsages<'a> {
            let tree = Lazy::new(move || sema.parse(file_id).syntax().clone());

            for offset in match_indices(&text, finder, search_range) {
                if let Some(iter) = find_nodes("super", &tree, offset) {
                    for name_ref in iter.filter_map(ast::NameRef::cast) {
                        if self.found_name_ref(&name_ref, sink) {
                            return;
                        }
                for name_ref in
                    find_nodes("super", &tree, offset).filter_map(ast::NameRef::cast)
                {
                    if self.found_name_ref(&name_ref, sink) {
                        return;
                    }
                }
            }
            if let Some(finder) = &is_crate_root {
                for offset in match_indices(&text, finder, search_range) {
                    if let Some(iter) = find_nodes("crate", &tree, offset) {
                        for name_ref in iter.filter_map(ast::NameRef::cast) {
                            if self.found_name_ref(&name_ref, sink) {
                                return;
                            }
                    for name_ref in
                        find_nodes("crate", &tree, offset).filter_map(ast::NameRef::cast)
                    {
                        if self.found_name_ref(&name_ref, sink) {
                            return;
                        }
                    }
                }

@ -566,11 +565,10 @@ impl<'a> FindUsages<'a> {
            let finder = &Finder::new("self");

            for offset in match_indices(&text, finder, search_range) {
                if let Some(iter) = find_nodes("self", &tree, offset) {
                    for name_ref in iter.filter_map(ast::NameRef::cast) {
                        if self.found_self_module_name_ref(&name_ref, sink) {
                            return;
                        }
                for name_ref in find_nodes("self", &tree, offset).filter_map(ast::NameRef::cast)
                {
                    if self.found_self_module_name_ref(&name_ref, sink) {
                        return;
                    }
                }
            }
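The raw-identifier fix compares token text with the `r#` prefix stripped, so a search for `fn` also hits `r#fn`. The stripping itself is just std's `trim_start_matches`:

// `r#name` and `name` refer to the same identifier; compare them prefix-free.
fn main() {
    let name = "fn"; // the searched-for name, already stripped of `r#`
    for token_text in ["r#fn", "fn"] {
        assert_eq!(token_text.trim_start_matches("r#"), name);
    }
}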
@ -22,7 +22,7 @@ pulldown-cmark-to-cmark = "10.0.4"
pulldown-cmark = { version = "0.9.1", default-features = false }
url = "2.3.1"
dot = "0.1.4"
smallvec = "1.10.0"
smallvec.workspace = true

# local deps
cfg.workspace = true
@ -201,6 +201,23 @@ fn hover_simple(

                Some(render::struct_rest_pat(sema, config, &record_pat))
            })
        })
        // try () call hovers
        .or_else(|| {
            descended().find_map(|token| {
                if token.kind() != T!['('] && token.kind() != T![')'] {
                    return None;
                }
                let arg_list = token.parent().and_then(ast::ArgList::cast)?.syntax().parent()?;
                let call_expr = syntax::match_ast! {
                    match arg_list {
                        ast::CallExpr(expr) => expr.into(),
                        ast::MethodCallExpr(expr) => expr.into(),
                        _ => return None,
                    }
                };
                render::type_info_of(sema, config, &Either::Left(call_expr))
            })
        });

    result.map(|mut res: HoverResult| {
@ -5612,3 +5612,38 @@ fn main() {
"#,
    );
}

#[test]
fn hover_call_parens() {
    check(
        r#"
fn foo() -> i32 {}
fn main() {
    foo($0);
}
"#,
        expect![[r#"
            *)*
            ```rust
            i32
            ```
        "#]],
    );
    check(
        r#"
struct S;
impl S {
    fn foo(self) -> i32 {}
}
fn main() {
    S.foo($0);
}
"#,
        expect![[r#"
            *)*
            ```rust
            i32
            ```
        "#]],
    );
}
@ -29,8 +29,17 @@ pub(super) fn hints(
            _ => None,
        })
        .last();
    let range =
        outer_paren_pat.as_ref().map_or_else(|| pat.syntax(), |it| it.syntax()).text_range();
    let range = outer_paren_pat.as_ref().map_or_else(
        || match pat {
            // for ident patterns that @ bind a name, render the un-ref patterns in front of the inner pattern
            // instead of the name as that makes it more clear and doesn't really change the outcome
            ast::Pat::IdentPat(it) => {
                it.pat().map_or_else(|| it.syntax().text_range(), |it| it.syntax().text_range())
            }
            it => it.syntax().text_range(),
        },
        |it| it.syntax().text_range(),
    );
    let pattern_adjustments = sema.pattern_adjustments(pat);
    pattern_adjustments.iter().for_each(|ty| {
        let reference = ty.is_reference();

@ -123,4 +132,20 @@ fn __(
}"#,
        );
    }

    #[test]
    fn hints_binding_modes_complex_ident_pat() {
        check_with_config(
            InlayHintsConfig { binding_mode_hints: true, ..DISABLED_CONFIG },
            r#"
struct Struct {
    field: &'static str,
}
fn foo(s @ Struct { field, .. }: &Struct) {}
 //^^^^^^^^^^^^^^^^^^^^^^^^ref
     //^^^^^^^^^^^^^^^^^^^^&
      //^^^^^ref
"#,
        );
    }
}
@ -115,7 +115,7 @@ pub use ide_db::{
        SourceRoot, SourceRootId,
    },
    label::Label,
    line_index::{LineCol, LineColUtf16, LineIndex},
    line_index::{LineCol, LineIndex},
    search::{ReferenceCategory, SearchScope},
    source_change::{FileSystemEdit, SourceChange},
    symbol_index::Query,
@ -2016,4 +2016,19 @@ fn method$0() {}
        "#]],
        );
    }

    #[test]
    fn raw_identifier() {
        check(
            r#"
fn r#fn$0() {}
fn main() { r#fn(); }
"#,
            expect![[r#"
                r#fn Function FileId(0) 0..12 3..7

                FileId(0) 25..29
            "#]],
        );
    }
}
@ -1371,7 +1371,6 @@ pub fn baz() {}

#[test]
fn test_rename_mod_from_raw_ident() {
    // FIXME: `r#fn` in path expression is not renamed.
    check_expect(
        "foo",
        r#"

@ -1397,6 +1396,10 @@ pub fn baz() {}
                        insert: "foo",
                        delete: 4..8,
                    },
                    Indel {
                        insert: "foo",
                        delete: 23..27,
                    },
                ],
            },
        },
@ -18,7 +18,9 @@ pub(crate) fn shuffle_crate_graph(db: &mut RootDatabase) {
    let crate_graph = db.crate_graph();

    let mut shuffled_ids = crate_graph.iter().collect::<Vec<_>>();
    shuffle(&mut shuffled_ids);

    let mut rng = oorandom::Rand32::new(stdx::rand::seed());
    stdx::rand::shuffle(&mut shuffled_ids, |i| rng.rand_range(0..i as u32) as usize);

    let mut new_graph = CrateGraph::default();

@ -52,21 +54,3 @@ pub(crate) fn shuffle_crate_graph(db: &mut RootDatabase) {

    db.set_crate_graph_with_durability(Arc::new(new_graph), Durability::HIGH);
}

fn shuffle<T>(slice: &mut [T]) {
    let mut rng = oorandom::Rand32::new(seed());

    let mut remaining = slice.len() - 1;
    while remaining > 0 {
        let index = rng.rand_range(0..remaining as u32);
        slice.swap(remaining, index as usize);
        remaining -= 1;
    }
}

fn seed() -> u64 {
    use std::collections::hash_map::RandomState;
    use std::hash::{BuildHasher, Hasher};

    RandomState::new().build_hasher().finish()
}
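The local `shuffle`/`seed` helpers move into `stdx::rand`, which takes the random index as a closure so the RNG stays on the caller's side. The walk itself is the same Fisher-Yates-style pass; a self-contained sketch with a toy linear-congruential generator standing in for `oorandom` (the LCG constants are illustrative only):

// Shuffle parameterized over "pick an index below i",
// matching the `stdx::rand::shuffle(&mut v, |i| ...)` shape above.
fn shuffle<T>(slice: &mut [T], mut rand_index: impl FnMut(usize) -> usize) {
    for i in (1..slice.len()).rev() {
        slice.swap(i, rand_index(i));
    }
}

fn main() {
    // Toy LCG instead of oorandom::Rand32, just to keep this self-contained.
    let mut state: u64 = 0x5eed;
    let mut lcg = move |bound: usize| {
        state = state.wrapping_mul(6364136223846793005).wrapping_add(1442695040888963407);
        ((state >> 33) as usize) % bound.max(1)
    };

    let mut v: Vec<u32> = (0..10).collect();
    shuffle(&mut v, |i| lcg(i));
    println!("{v:?}"); // some permutation of 0..10
}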
@ -7,12 +7,16 @@ use either::Either;
use hir::{
    AssocItem, GenericParam, HasAttrs, HirDisplay, ModuleDef, PathResolution, Semantics, Trait,
};
use ide_db::{active_parameter::callable_for_node, base_db::FilePosition, FxIndexMap};
use ide_db::{
    active_parameter::{callable_for_node, generic_def_for_node},
    base_db::FilePosition,
    FxIndexMap,
};
use stdx::format_to;
use syntax::{
    algo,
    ast::{self, HasArgList},
    match_ast, AstNode, Direction, SyntaxKind, SyntaxToken, TextRange, TextSize,
    match_ast, AstNode, Direction, SyntaxToken, TextRange, TextSize,
};

use crate::RootDatabase;

@ -105,10 +109,10 @@ pub(crate) fn signature_help(db: &RootDatabase, position: FilePosition) -> Optio
        // Stop at multi-line expressions, since the signature of the outer call is not very
        // helpful inside them.
        if let Some(expr) = ast::Expr::cast(node.clone()) {
            if expr.syntax().text().contains_char('\n')
                && expr.syntax().kind() != SyntaxKind::RECORD_EXPR
            if !matches!(expr, ast::Expr::RecordExpr(..))
                && expr.syntax().text().contains_char('\n')
            {
                return None;
                break;
            }
        }
    }

@ -122,18 +126,16 @@ fn signature_help_for_call(
    token: SyntaxToken,
) -> Option<SignatureHelp> {
    // Find the calling expression and its NameRef
    let mut node = arg_list.syntax().parent()?;
    let mut nodes = arg_list.syntax().ancestors().skip(1);
    let calling_node = loop {
        if let Some(callable) = ast::CallableExpr::cast(node.clone()) {
            if callable
        if let Some(callable) = ast::CallableExpr::cast(nodes.next()?) {
            let inside_callable = callable
                .arg_list()
                .map_or(false, |it| it.syntax().text_range().contains(token.text_range().start()))
            {
                .map_or(false, |it| it.syntax().text_range().contains(token.text_range().start()));
            if inside_callable {
                break callable;
            }
        }

        node = node.parent()?;
    };

    let (callable, active_parameter) = callable_for_node(sema, &calling_node, &token)?;

@ -216,59 +218,11 @@ fn signature_help_for_call(

fn signature_help_for_generics(
    sema: &Semantics<'_, RootDatabase>,
    garg_list: ast::GenericArgList,
    arg_list: ast::GenericArgList,
    token: SyntaxToken,
) -> Option<SignatureHelp> {
    let arg_list = garg_list
        .syntax()
        .ancestors()
        .filter_map(ast::GenericArgList::cast)
        .find(|list| list.syntax().text_range().contains(token.text_range().start()))?;

    let mut active_parameter = arg_list
        .generic_args()
        .take_while(|arg| arg.syntax().text_range().end() <= token.text_range().start())
        .count();

    let first_arg_is_non_lifetime = arg_list
        .generic_args()
        .next()
        .map_or(false, |arg| !matches!(arg, ast::GenericArg::LifetimeArg(_)));

    let mut generics_def = if let Some(path) =
        arg_list.syntax().ancestors().find_map(ast::Path::cast)
    {
        let res = sema.resolve_path(&path)?;
        let generic_def: hir::GenericDef = match res {
            hir::PathResolution::Def(hir::ModuleDef::Adt(it)) => it.into(),
            hir::PathResolution::Def(hir::ModuleDef::Function(it)) => it.into(),
            hir::PathResolution::Def(hir::ModuleDef::Trait(it)) => it.into(),
            hir::PathResolution::Def(hir::ModuleDef::TypeAlias(it)) => it.into(),
            hir::PathResolution::Def(hir::ModuleDef::Variant(it)) => it.into(),
            hir::PathResolution::Def(hir::ModuleDef::BuiltinType(_))
            | hir::PathResolution::Def(hir::ModuleDef::Const(_))
            | hir::PathResolution::Def(hir::ModuleDef::Macro(_))
            | hir::PathResolution::Def(hir::ModuleDef::Module(_))
            | hir::PathResolution::Def(hir::ModuleDef::Static(_)) => return None,
            hir::PathResolution::BuiltinAttr(_)
            | hir::PathResolution::ToolModule(_)
            | hir::PathResolution::Local(_)
            | hir::PathResolution::TypeParam(_)
            | hir::PathResolution::ConstParam(_)
            | hir::PathResolution::SelfType(_)
            | hir::PathResolution::DeriveHelper(_) => return None,
        };

        generic_def
    } else if let Some(method_call) = arg_list.syntax().parent().and_then(ast::MethodCallExpr::cast)
    {
        // recv.method::<$0>()
        let method = sema.resolve_method_call(&method_call)?;
        method.into()
    } else {
        return None;
    };

    let (mut generics_def, mut active_parameter, first_arg_is_non_lifetime) =
        generic_def_for_node(sema, &arg_list, &token)?;
    let mut res = SignatureHelp {
        doc: None,
        signature: String::new(),

@ -307,9 +261,9 @@ fn signature_help_for_generics(
            // eg. `None::<u8>`
            // We'll use the signature of the enum, but include the docs of the variant.
            res.doc = it.docs(db).map(|it| it.into());
            let it = it.parent_enum(db);
            format_to!(res.signature, "enum {}", it.name(db));
            generics_def = it.into();
            let enum_ = it.parent_enum(db);
            format_to!(res.signature, "enum {}", enum_.name(db));
            generics_def = enum_.into();
        }
        // These don't have generic args that can be specified
        hir::GenericDef::Impl(_) | hir::GenericDef::Const(_) => return None,

@ -388,16 +342,13 @@ fn signature_help_for_record_lit(
    record: ast::RecordExpr,
    token: SyntaxToken,
) -> Option<SignatureHelp> {
    let arg_list = record
        .syntax()
        .ancestors()
        .filter_map(ast::RecordExpr::cast)
        .find(|list| list.syntax().text_range().contains(token.text_range().start()))?;

    let active_parameter = arg_list
    let active_parameter = record
        .record_expr_field_list()?
        .fields()
        .take_while(|arg| arg.syntax().text_range().end() <= token.text_range().start())
        .syntax()
        .children_with_tokens()
        .filter_map(syntax::NodeOrToken::into_token)
        .filter(|t| t.kind() == syntax::T![,])
        .take_while(|t| t.text_range().start() <= token.text_range().start())
        .count();

    let mut res = SignatureHelp {

@ -1594,4 +1545,27 @@ impl S {
        "#]],
        );
    }

    #[test]
    fn test_enum_in_nested_method_in_lambda() {
        check(
            r#"
enum A {
    A,
    B
}

fn bar(_: A) { }

fn main() {
    let foo = Foo;
    std::thread::spawn(move || { bar(A:$0) } );
}
"#,
            expect![[r#"
                fn bar(_: A)
                       ^^^^
            "#]],
        );
    }
}
@ -1126,5 +1126,5 @@ fn benchmark_syntax_highlighting_parser() {
            .filter(|it| it.highlight.tag == HlTag::Symbol(SymbolKind::Function))
            .count()
    };
    assert_eq!(hash, 1609);
    assert_eq!(hash, 1608);
}
@ -14,7 +14,7 @@ doctest = false
[dependencies]
cov-mark = "2.0.0-pre.1"
rustc-hash = "1.1.0"
smallvec = "1.10.0"
smallvec.workspace = true
tracing = "0.1.35"

# local deps
|
@ -20,4 +20,5 @@ limit.workspace = true
|
|||
[dev-dependencies]
|
||||
expect-test = "1.4.0"
|
||||
|
||||
stdx.workspace = true
|
||||
sourcegen.workspace = true
|
||||
|
|
|
@ -200,6 +200,8 @@ impl BlockLike {
    }
}

const VISIBILITY_FIRST: TokenSet = TokenSet::new(&[T![pub], T![crate]]);

fn opt_visibility(p: &mut Parser<'_>, in_tuple_field: bool) -> bool {
    match p.current() {
        T![pub] => {

@ -340,3 +342,31 @@ fn error_block(p: &mut Parser<'_>, message: &str) {
    p.eat(T!['}']);
    m.complete(p, ERROR);
}

/// The `parser` passed this is required to at least consume one token if it returns `true`.
/// If the `parser` returns false, parsing will stop.
fn delimited(
    p: &mut Parser<'_>,
    bra: SyntaxKind,
    ket: SyntaxKind,
    delim: SyntaxKind,
    first_set: TokenSet,
    mut parser: impl FnMut(&mut Parser<'_>) -> bool,
) {
    p.bump(bra);
    while !p.at(ket) && !p.at(EOF) {
        if !parser(p) {
            break;
        }
        if !p.at(delim) {
            if p.at_ts(first_set) {
                p.error(format!("expected {:?}", delim));
            } else {
                break;
            }
        } else {
            p.bump(delim);
        }
    }
    p.expect(ket);
}
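The new `delimited` helper centralizes "bracketed, comma-separated, with recovery" parsing: after each element it either eats the delimiter, reports a missing delimiter when the next token could start another element, or bails out. A toy model of the same control flow over a plain token slice (every type here is a simplified stand-in for the real parser):

// Toy model of the `delimited` recovery loop: parse "(a, a)"-style lists,
// flagging a missing comma when the next token can start a new element.
#[derive(Clone, Copy, PartialEq, Debug)]
enum Tok { Open, Close, Comma, Elem, Eof }

fn delimited(toks: &[Tok], first_set: &[Tok]) -> Vec<String> {
    let mut errors = Vec::new();
    let mut pos = 1; // caller guarantees toks[0] == Tok::Open
    while toks[pos] != Tok::Close && toks[pos] != Tok::Eof {
        if toks[pos] != Tok::Elem {
            break; // the element parser made no progress; stop
        }
        pos += 1; // "parse" one element
        if toks[pos] == Tok::Comma {
            pos += 1;
        } else if first_set.contains(&toks[pos]) {
            errors.push(format!("expected Comma at {pos}")); // recover, keep going
        } else {
            break;
        }
    }
    if toks[pos] != Tok::Close {
        errors.push(format!("expected Close at {pos}"));
    }
    errors
}

fn main() {
    use Tok::*;
    // "(a, a)" parses cleanly.
    assert!(delimited(&[Open, Elem, Comma, Elem, Close, Eof], &[Elem]).is_empty());
    // "(a a)" recovers with a missing-comma error instead of abandoning the list.
    assert_eq!(delimited(&[Open, Elem, Elem, Close, Eof], &[Elem]).len(), 1);
}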
@ -1,5 +1,7 @@
use super::*;

pub(super) const ATTRIBUTE_FIRST: TokenSet = TokenSet::new(&[T![#]]);

pub(super) fn inner_attrs(p: &mut Parser<'_>) {
    while p.at(T![#]) && p.nth(1) == T![!] {
        attr(p, true);
@ -1,5 +1,7 @@
mod atom;

use crate::grammar::attributes::ATTRIBUTE_FIRST;

use super::*;

pub(crate) use self::atom::{block_expr, match_arm_list};

@ -68,6 +70,12 @@ pub(super) fn stmt(p: &mut Parser<'_>, semicolon: Semicolon) {
        Err(m) => m,
    };

    if !p.at_ts(EXPR_FIRST) {
        p.err_and_bump("expected expression, item or let statement");
        m.abandon(p);
        return;
    }

    if let Some((cm, blocklike)) = expr_stmt(p, Some(m)) {
        if !(p.at(T!['}']) || (semicolon != Semicolon::Required && p.at(EOF))) {
            // test no_semi_after_block

@ -227,6 +235,12 @@ fn expr_bp(
        attributes::outer_attrs(p);
        m
    });

    if !p.at_ts(EXPR_FIRST) {
        p.err_recover("expected expression", atom::EXPR_RECOVERY_SET);
        m.abandon(p);
        return None;
    }
    let mut lhs = match lhs(p, r) {
        Some((lhs, blocklike)) => {
            let lhs = lhs.extend_to(p, m);

@ -551,23 +565,20 @@ fn cast_expr(p: &mut Parser<'_>, lhs: CompletedMarker) -> CompletedMarker {
    m.complete(p, CAST_EXPR)
}

// test_err arg_list_recovery
// fn main() {
//     foo(bar::);
//     foo(bar:);
//     foo(bar+);
// }
fn arg_list(p: &mut Parser<'_>) {
    assert!(p.at(T!['(']));
    let m = p.start();
    p.bump(T!['(']);
    while !p.at(T![')']) && !p.at(EOF) {
        // test arg_with_attr
        // fn main() {
        //     foo(#[attr] 92)
        // }
        if !expr(p) {
            break;
        }
        if !p.at(T![')']) && !p.expect(T![,]) {
            break;
        }
    }
    p.eat(T![')']);
    // test arg_with_attr
    // fn main() {
    //     foo(#[attr] 92)
    // }
    delimited(p, T!['('], T![')'], T![,], EXPR_FIRST.union(ATTRIBUTE_FIRST), expr);
    m.complete(p, ARG_LIST);
}
@ -40,26 +40,28 @@ pub(super) const ATOM_EXPR_FIRST: TokenSet =
|
|||
T!['{'],
|
||||
T!['['],
|
||||
T![|],
|
||||
T![move],
|
||||
T![box],
|
||||
T![if],
|
||||
T![while],
|
||||
T![match],
|
||||
T![unsafe],
|
||||
T![return],
|
||||
T![yield],
|
||||
T![do],
|
||||
T![break],
|
||||
T![continue],
|
||||
T![async],
|
||||
T![try],
|
||||
T![box],
|
||||
T![break],
|
||||
T![const],
|
||||
T![loop],
|
||||
T![continue],
|
||||
T![do],
|
||||
T![for],
|
||||
T![if],
|
||||
T![let],
|
||||
T![loop],
|
||||
T![match],
|
||||
T![move],
|
||||
T![return],
|
||||
T![static],
|
||||
T![try],
|
||||
T![unsafe],
|
||||
T![while],
|
||||
T![yield],
|
||||
LIFETIME_IDENT,
|
||||
]));
|
||||
|
||||
const EXPR_RECOVERY_SET: TokenSet = TokenSet::new(&[T![let]]);
|
||||
pub(super) const EXPR_RECOVERY_SET: TokenSet = TokenSet::new(&[T![')'], T![']']]);
|
||||
|
||||
pub(super) fn atom_expr(
|
||||
p: &mut Parser<'_>,
|
||||
|
@ -116,7 +118,7 @@ pub(super) fn atom_expr(
|
|||
// fn main() {
|
||||
// 'loop: impl
|
||||
// }
|
||||
p.error("expected a loop");
|
||||
p.error("expected a loop or block");
|
||||
m.complete(p, ERROR);
|
||||
return None;
|
||||
}
|
||||
|
@ -157,7 +159,7 @@ pub(super) fn atom_expr(
|
|||
T![for] => for_expr(p, None),
|
||||
|
||||
_ => {
|
||||
p.err_recover("expected expression", EXPR_RECOVERY_SET);
|
||||
p.err_and_bump("expected expression");
|
||||
return None;
|
||||
}
|
||||
};
|
||||
|
|
|
@ -5,27 +5,35 @@ pub(super) fn opt_generic_arg_list(p: &mut Parser<'_>, colon_colon_required: boo
|
|||
if p.at(T![::]) && p.nth(2) == T![<] {
|
||||
m = p.start();
|
||||
p.bump(T![::]);
|
||||
p.bump(T![<]);
|
||||
} else if !colon_colon_required && p.at(T![<]) && p.nth(1) != T![=] {
|
||||
m = p.start();
|
||||
p.bump(T![<]);
|
||||
} else {
|
||||
return;
|
||||
}
|
||||
|
||||
while !p.at(EOF) && !p.at(T![>]) {
|
||||
generic_arg(p);
|
||||
if !p.at(T![>]) && !p.expect(T![,]) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
p.expect(T![>]);
|
||||
delimited(p, T![<], T![>], T![,], GENERIC_ARG_FIRST, generic_arg);
|
||||
m.complete(p, GENERIC_ARG_LIST);
|
||||
}
|
||||
|
||||
const GENERIC_ARG_FIRST: TokenSet = TokenSet::new(&[
|
||||
LIFETIME_IDENT,
|
||||
IDENT,
|
||||
T!['{'],
|
||||
T![true],
|
||||
T![false],
|
||||
T![-],
|
||||
INT_NUMBER,
|
||||
FLOAT_NUMBER,
|
||||
CHAR,
|
||||
BYTE,
|
||||
STRING,
|
||||
BYTE_STRING,
|
||||
])
|
||||
.union(types::TYPE_FIRST);
|
||||
|
||||
// test generic_arg
|
||||
// type T = S<i32>;
|
||||
fn generic_arg(p: &mut Parser<'_>) {
|
||||
fn generic_arg(p: &mut Parser<'_>) -> bool {
|
||||
match p.current() {
|
||||
LIFETIME_IDENT => lifetime_arg(p),
|
||||
T!['{'] | T![true] | T![false] | T![-] => const_arg(p),
|
||||
|
@ -68,8 +76,10 @@ fn generic_arg(p: &mut Parser<'_>) {
|
|||
}
|
||||
}
|
||||
}
|
||||
_ => type_arg(p),
|
||||
_ if p.at_ts(types::TYPE_FIRST) => type_arg(p),
|
||||
_ => return false,
|
||||
}
|
||||
true
|
||||
}
|
||||
|
||||
// test lifetime_arg
|
||||
|
|
|
@ -1,3 +1,5 @@
|
|||
use crate::grammar::attributes::ATTRIBUTE_FIRST;
|
||||
|
||||
use super::*;
|
||||
|
||||
pub(super) fn opt_generic_param_list(p: &mut Parser<'_>) {
|
||||
|
@ -11,32 +13,31 @@ pub(super) fn opt_generic_param_list(p: &mut Parser<'_>) {
|
|||
fn generic_param_list(p: &mut Parser<'_>) {
|
||||
assert!(p.at(T![<]));
|
||||
let m = p.start();
|
||||
p.bump(T![<]);
|
||||
delimited(p, T![<], T![>], T![,], GENERIC_PARAM_FIRST.union(ATTRIBUTE_FIRST), |p| {
|
||||
// test generic_param_attribute
|
||||
// fn foo<#[lt_attr] 'a, #[t_attr] T>() {}
|
||||
let m = p.start();
|
||||
attributes::outer_attrs(p);
|
||||
generic_param(p, m)
|
||||
});
|
||||
|
||||
while !p.at(EOF) && !p.at(T![>]) {
|
||||
generic_param(p);
|
||||
if !p.at(T![>]) && !p.expect(T![,]) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
p.expect(T![>]);
|
||||
m.complete(p, GENERIC_PARAM_LIST);
|
||||
}
|
||||
|
||||
fn generic_param(p: &mut Parser<'_>) {
|
||||
let m = p.start();
|
||||
// test generic_param_attribute
|
||||
// fn foo<#[lt_attr] 'a, #[t_attr] T>() {}
|
||||
attributes::outer_attrs(p);
|
||||
const GENERIC_PARAM_FIRST: TokenSet = TokenSet::new(&[IDENT, LIFETIME_IDENT, T![const]]);
|
||||
|
||||
fn generic_param(p: &mut Parser<'_>, m: Marker) -> bool {
|
||||
match p.current() {
|
||||
LIFETIME_IDENT => lifetime_param(p, m),
|
||||
IDENT => type_param(p, m),
|
||||
T![const] => const_param(p, m),
|
||||
_ => {
|
||||
m.abandon(p);
|
||||
p.err_and_bump("expected type parameter");
|
||||
p.err_and_bump("expected generic parameter");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
true
|
||||
}
|
||||
|
||||
// test lifetime_param
|
||||
|
|
|
@ -1,3 +1,5 @@
|
|||
use crate::grammar::attributes::ATTRIBUTE_FIRST;
|
||||
|
||||
use super::*;
|
||||
|
||||
// test struct_item
|
||||
|
@ -141,28 +143,31 @@ pub(crate) fn record_field_list(p: &mut Parser<'_>) {
|
|||
}
|
||||
}
|
||||
|
||||
const TUPLE_FIELD_FIRST: TokenSet =
|
||||
types::TYPE_FIRST.union(ATTRIBUTE_FIRST).union(VISIBILITY_FIRST);
|
||||
|
||||
fn tuple_field_list(p: &mut Parser<'_>) {
|
||||
assert!(p.at(T!['(']));
|
||||
let m = p.start();
|
||||
p.bump(T!['(']);
|
||||
while !p.at(T![')']) && !p.at(EOF) {
|
||||
delimited(p, T!['('], T![')'], T![,], TUPLE_FIELD_FIRST, |p| {
|
||||
let m = p.start();
|
||||
// test tuple_field_attrs
|
||||
// struct S (#[attr] f32);
|
||||
attributes::outer_attrs(p);
|
||||
opt_visibility(p, true);
|
||||
let has_vis = opt_visibility(p, true);
|
||||
if !p.at_ts(types::TYPE_FIRST) {
|
||||
p.error("expected a type");
|
||||
m.complete(p, ERROR);
|
||||
break;
|
||||
if has_vis {
|
||||
m.complete(p, ERROR);
|
||||
} else {
|
||||
m.abandon(p);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
types::type_(p);
|
||||
m.complete(p, TUPLE_FIELD);
|
||||
true
|
||||
});
|
||||
|
||||
if !p.at(T![')']) {
|
||||
p.expect(T![,]);
|
||||
}
|
||||
}
|
||||
p.expect(T![')']);
|
||||
m.complete(p, TUPLE_FIELD_LIST);
|
||||
}
|
||||
|
|
|
@ -1,3 +1,5 @@
|
|||
use crate::grammar::attributes::ATTRIBUTE_FIRST;
|
||||
|
||||
use super::*;
|
||||
|
||||
// test param_list
|
||||
|
@ -66,14 +68,20 @@ fn list_(p: &mut Parser<'_>, flavor: Flavor) {
|
|||
}
|
||||
};
|
||||
|
||||
if !p.at_ts(PARAM_FIRST) {
|
||||
if !p.at_ts(PARAM_FIRST.union(ATTRIBUTE_FIRST)) {
|
||||
p.error("expected value parameter");
|
||||
m.abandon(p);
|
||||
break;
|
||||
}
|
||||
param(p, m, flavor);
|
||||
if !p.at(ket) {
|
||||
p.expect(T![,]);
|
||||
if !p.at(T![,]) {
|
||||
if p.at_ts(PARAM_FIRST.union(ATTRIBUTE_FIRST)) {
|
||||
p.error("expected `,`");
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
} else {
|
||||
p.bump(T![,]);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -67,6 +67,10 @@ fn path_for_qualifier(
|
|||
}
|
||||
}
|
||||
|
||||
const EXPR_PATH_SEGMENT_RECOVERY_SET: TokenSet =
|
||||
items::ITEM_RECOVERY_SET.union(TokenSet::new(&[T![')'], T![,], T![let]]));
|
||||
const TYPE_PATH_SEGMENT_RECOVERY_SET: TokenSet = types::TYPE_RECOVERY_SET;
|
||||
|
||||
fn path_segment(p: &mut Parser<'_>, mode: Mode, first: bool) {
|
||||
let m = p.start();
|
||||
// test qual_paths
|
||||
|
@ -102,7 +106,12 @@ fn path_segment(p: &mut Parser<'_>, mode: Mode, first: bool) {
|
|||
m.complete(p, NAME_REF);
|
||||
}
|
||||
_ => {
|
||||
p.err_recover("expected identifier", items::ITEM_RECOVERY_SET);
|
||||
let recover_set = match mode {
|
||||
Mode::Use => items::ITEM_RECOVERY_SET,
|
||||
Mode::Type => TYPE_PATH_SEGMENT_RECOVERY_SET,
|
||||
Mode::Expr => EXPR_PATH_SEGMENT_RECOVERY_SET,
|
||||
};
|
||||
p.err_recover("expected identifier", recover_set);
|
||||
if empty {
|
||||
// test_err empty_segment
|
||||
// use crate::;
|
||||
|
|
|
@ -17,8 +17,9 @@ pub(super) const TYPE_FIRST: TokenSet = paths::PATH_FIRST.union(TokenSet::new(&[
|
|||
T![Self],
|
||||
]));
|
||||
|
||||
const TYPE_RECOVERY_SET: TokenSet = TokenSet::new(&[
|
||||
pub(super) const TYPE_RECOVERY_SET: TokenSet = TokenSet::new(&[
|
||||
T![')'],
|
||||
T![>],
|
||||
T![,],
|
||||
// test_err struct_field_recover
|
||||
// struct S { f pub g: () }
|
||||
|
|
|
@ -15,6 +15,7 @@ use crate::{LexedStr, TopEntryPoint};
|
|||
#[test]
|
||||
fn lex_ok() {
|
||||
for case in TestCase::list("lexer/ok") {
|
||||
let _guard = stdx::panic_context::enter(format!("{:?}", case.rs));
|
||||
let actual = lex(&case.text);
|
||||
expect_file![case.rast].assert_eq(&actual)
|
||||
}
|
||||
|
@ -23,6 +24,7 @@ fn lex_ok() {
|
|||
#[test]
|
||||
fn lex_err() {
|
||||
for case in TestCase::list("lexer/err") {
|
||||
let _guard = stdx::panic_context::enter(format!("{:?}", case.rs));
|
||||
let actual = lex(&case.text);
|
||||
expect_file![case.rast].assert_eq(&actual)
|
||||
}
|
||||
|
@ -46,6 +48,7 @@ fn lex(text: &str) -> String {
|
|||
#[test]
|
||||
fn parse_ok() {
|
||||
for case in TestCase::list("parser/ok") {
|
||||
let _guard = stdx::panic_context::enter(format!("{:?}", case.rs));
|
||||
let (actual, errors) = parse(TopEntryPoint::SourceFile, &case.text);
|
||||
assert!(!errors, "errors in an OK file {}:\n{actual}", case.rs.display());
|
||||
expect_file![case.rast].assert_eq(&actual);
|
||||
|
@ -55,6 +58,7 @@ fn parse_ok() {
|
|||
#[test]
|
||||
fn parse_inline_ok() {
|
||||
for case in TestCase::list("parser/inline/ok") {
|
||||
let _guard = stdx::panic_context::enter(format!("{:?}", case.rs));
|
||||
let (actual, errors) = parse(TopEntryPoint::SourceFile, &case.text);
|
||||
assert!(!errors, "errors in an OK file {}:\n{actual}", case.rs.display());
|
||||
expect_file![case.rast].assert_eq(&actual);
|
||||
|
@ -64,6 +68,7 @@ fn parse_inline_ok() {
|
|||
#[test]
|
||||
fn parse_err() {
|
||||
for case in TestCase::list("parser/err") {
|
||||
let _guard = stdx::panic_context::enter(format!("{:?}", case.rs));
|
||||
let (actual, errors) = parse(TopEntryPoint::SourceFile, &case.text);
|
||||
assert!(errors, "no errors in an ERR file {}:\n{actual}", case.rs.display());
|
||||
expect_file![case.rast].assert_eq(&actual)
|
||||
|
@ -73,6 +78,7 @@ fn parse_err() {
|
|||
#[test]
|
||||
fn parse_inline_err() {
|
||||
for case in TestCase::list("parser/inline/err") {
|
||||
let _guard = stdx::panic_context::enter(format!("{:?}", case.rs));
|
||||
let (actual, errors) = parse(TopEntryPoint::SourceFile, &case.text);
|
||||
assert!(errors, "no errors in an ERR file {}:\n{actual}", case.rs.display());
|
||||
expect_file![case.rast].assert_eq(&actual)
|
||||
|
|
|
@ -65,7 +65,7 @@ fn macro_stmt() {
|
|||
MACRO_STMTS
|
||||
ERROR
|
||||
SHEBANG "#!/usr/bin/rust"
|
||||
error 0: expected expression
|
||||
error 0: expected expression, item or let statement
|
||||
"##]],
|
||||
);
|
||||
check(
|
||||
|
|
|
@ -44,8 +44,7 @@ SOURCE_FILE
|
|||
IDENT "T"
|
||||
SEMICOLON ";"
|
||||
WHITESPACE "\n"
|
||||
error 9: expected type parameter
|
||||
error 11: expected COMMA
|
||||
error 9: expected generic parameter
|
||||
error 11: expected R_ANGLE
|
||||
error 11: expected `;`, `{`, or `(`
|
||||
error 12: expected an item
|
||||
|
|
|
@ -43,17 +43,14 @@ SOURCE_FILE
|
|||
IDENT "Box"
|
||||
GENERIC_ARG_LIST
|
||||
L_ANGLE "<"
|
||||
TYPE_ARG
|
||||
ERROR
|
||||
AT "@"
|
||||
WHITESPACE " "
|
||||
TUPLE_FIELD
|
||||
PATH_TYPE
|
||||
PATH
|
||||
PATH_SEGMENT
|
||||
NAME_REF
|
||||
IDENT "Any"
|
||||
ERROR
|
||||
ERROR
|
||||
AT "@"
|
||||
WHITESPACE " "
|
||||
MACRO_CALL
|
||||
PATH
|
||||
PATH_SEGMENT
|
||||
NAME_REF
|
||||
IDENT "Any"
|
||||
ERROR
|
||||
R_ANGLE ">"
|
||||
ERROR
|
||||
|
@ -69,17 +66,14 @@ SOURCE_FILE
|
|||
ERROR
|
||||
SEMICOLON ";"
|
||||
WHITESPACE "\n\n"
|
||||
error 67: expected type
|
||||
error 68: expected COMMA
|
||||
error 68: expected R_ANGLE
|
||||
error 68: expected COMMA
|
||||
error 68: expected R_ANGLE
|
||||
error 68: expected COMMA
|
||||
error 68: expected R_ANGLE
|
||||
error 68: expected COMMA
|
||||
error 72: expected COMMA
|
||||
error 72: expected a type
|
||||
error 72: expected R_PAREN
|
||||
error 67: expected R_ANGLE
|
||||
error 67: expected R_ANGLE
|
||||
error 67: expected R_ANGLE
|
||||
error 67: expected R_PAREN
|
||||
error 67: expected SEMICOLON
|
||||
error 67: expected an item
|
||||
error 72: expected BANG
|
||||
error 72: expected `{`, `[`, `(`
|
||||
error 72: expected SEMICOLON
|
||||
error 72: expected an item
|
||||
error 73: expected an item
|
||||
|
|
|
@ -145,27 +145,29 @@ SOURCE_FILE
|
|||
error 16: expected expression
|
||||
error 17: expected R_BRACK
|
||||
error 17: expected SEMICOLON
|
||||
error 17: expected expression
|
||||
error 17: expected expression, item or let statement
|
||||
error 25: expected a name
|
||||
error 26: expected `;`, `{`, or `(`
|
||||
error 30: expected pattern
|
||||
error 31: expected SEMICOLON
|
||||
error 53: expected expression
|
||||
error 54: expected R_PAREN
|
||||
error 54: expected SEMICOLON
|
||||
error 54: expected expression
|
||||
error 54: expected expression, item or let statement
|
||||
error 60: expected type
|
||||
error 60: expected `{`
|
||||
error 60: expected expression
|
||||
error 60: expected expression, item or let statement
|
||||
error 65: expected pattern
|
||||
error 65: expected SEMICOLON
|
||||
error 65: expected expression
|
||||
error 65: expected expression, item or let statement
|
||||
error 92: expected expression
|
||||
error 93: expected R_PAREN
|
||||
error 93: expected SEMICOLON
|
||||
error 93: expected expression
|
||||
error 95: expected expression
|
||||
error 96: expected expression
|
||||
error 93: expected expression, item or let statement
|
||||
error 95: expected expression, item or let statement
|
||||
error 96: expected expression, item or let statement
|
||||
error 103: expected a name
|
||||
error 104: expected `{`
|
||||
error 108: expected pattern
|
||||
error 108: expected SEMICOLON
|
||||
error 108: expected expression
|
||||
error 108: expected expression, item or let statement
|
||||
|
|
|
@ -168,44 +168,44 @@ SOURCE_FILE
|
|||
L_PAREN "("
|
||||
ERROR
|
||||
QUESTION "?"
|
||||
EXPR_STMT
|
||||
PATH_EXPR
|
||||
PATH
|
||||
PATH_SEGMENT
|
||||
NAME_REF
|
||||
IDENT "Sized"
|
||||
TYPE_ARG
|
||||
PATH_TYPE
|
||||
PATH
|
||||
PATH_SEGMENT
|
||||
NAME_REF
|
||||
IDENT "Sized"
|
||||
ERROR
|
||||
R_PAREN ")"
|
||||
WHITESPACE " "
|
||||
ERROR
|
||||
PLUS "+"
|
||||
WHITESPACE " "
|
||||
TUPLE_EXPR
|
||||
L_PAREN "("
|
||||
CLOSURE_EXPR
|
||||
FOR_KW "for"
|
||||
GENERIC_PARAM_LIST
|
||||
L_ANGLE "<"
|
||||
LIFETIME_PARAM
|
||||
LIFETIME
|
||||
LIFETIME_IDENT "'a"
|
||||
R_ANGLE ">"
|
||||
WHITESPACE " "
|
||||
EXPR_STMT
|
||||
BIN_EXPR
|
||||
BIN_EXPR
|
||||
BIN_EXPR
|
||||
TUPLE_EXPR
|
||||
L_PAREN "("
|
||||
CLOSURE_EXPR
|
||||
FOR_KW "for"
|
||||
GENERIC_PARAM_LIST
|
||||
L_ANGLE "<"
|
||||
LIFETIME_PARAM
|
||||
LIFETIME
|
||||
LIFETIME_IDENT "'a"
|
||||
R_ANGLE ">"
|
||||
WHITESPACE " "
|
||||
BIN_EXPR
|
||||
PATH_EXPR
|
||||
PATH
|
||||
PATH_SEGMENT
|
||||
NAME_REF
|
||||
IDENT "Trait"
|
||||
L_ANGLE "<"
|
||||
ERROR
|
||||
LIFETIME_IDENT "'a"
|
||||
R_ANGLE ">"
|
||||
ERROR
|
||||
R_PAREN ")"
|
||||
BIN_EXPR
|
||||
PATH_EXPR
|
||||
PATH
|
||||
PATH_SEGMENT
|
||||
NAME_REF
|
||||
IDENT "Trait"
|
||||
L_ANGLE "<"
|
||||
ERROR
|
||||
LIFETIME_IDENT "'a"
|
||||
R_ANGLE ">"
|
||||
R_PAREN ")"
|
||||
WHITESPACE " "
|
||||
PLUS "+"
|
||||
WHITESPACE " "
|
||||
|
@ -220,108 +220,93 @@ SOURCE_FILE
|
|||
R_ANGLE ">"
|
||||
ERROR
|
||||
SEMICOLON ";"
|
||||
WHITESPACE "\n "
|
||||
LET_EXPR
|
||||
LET_KW "let"
|
||||
WHITESPACE " "
|
||||
WILDCARD_PAT
|
||||
UNDERSCORE "_"
|
||||
ERROR
|
||||
COLON ":"
|
||||
WHITESPACE "\n "
|
||||
LET_STMT
|
||||
LET_KW "let"
|
||||
WHITESPACE " "
|
||||
BIN_EXPR
|
||||
BIN_EXPR
|
||||
PATH_EXPR
|
||||
PATH
|
||||
PATH_SEGMENT
|
||||
NAME_REF
|
||||
IDENT "Box"
|
||||
L_ANGLE "<"
|
||||
TUPLE_EXPR
|
||||
L_PAREN "("
|
||||
CLOSURE_EXPR
|
||||
FOR_KW "for"
|
||||
GENERIC_PARAM_LIST
|
||||
L_ANGLE "<"
|
||||
LIFETIME_PARAM
|
||||
LIFETIME
|
||||
LIFETIME_IDENT "'a"
|
||||
R_ANGLE ">"
|
||||
WHITESPACE " "
|
||||
BIN_EXPR
|
||||
BIN_EXPR
|
||||
BIN_EXPR
|
||||
BIN_EXPR
|
||||
PATH_EXPR
|
||||
PATH
|
||||
PATH_SEGMENT
|
||||
NAME_REF
|
||||
IDENT "Trait"
|
||||
WILDCARD_PAT
|
||||
UNDERSCORE "_"
|
||||
COLON ":"
|
||||
WHITESPACE " "
|
||||
DYN_TRAIT_TYPE
|
||||
TYPE_BOUND_LIST
|
||||
TYPE_BOUND
|
||||
PATH_TYPE
|
||||
PATH
|
||||
PATH_SEGMENT
|
||||
NAME_REF
|
||||
IDENT "Box"
|
||||
GENERIC_ARG_LIST
|
||||
L_ANGLE "<"
|
||||
ERROR
|
||||
LIFETIME_IDENT "'a"
|
||||
R_ANGLE ">"
|
||||
ERROR
|
||||
R_PAREN ")"
|
||||
WHITESPACE " "
|
||||
PLUS "+"
|
||||
WHITESPACE " "
|
||||
PAREN_EXPR
|
||||
L_PAREN "("
|
||||
PATH_EXPR
|
||||
PATH
|
||||
PATH_SEGMENT
|
||||
NAME_REF
|
||||
IDENT "Copy"
|
||||
R_PAREN ")"
|
||||
WHITESPACE " "
|
||||
PLUS "+"
|
||||
WHITESPACE " "
|
||||
PAREN_EXPR
|
||||
L_PAREN "("
|
||||
ERROR
|
||||
QUESTION "?"
|
||||
PATH_EXPR
|
||||
TYPE_ARG
|
||||
PAREN_TYPE
|
||||
L_PAREN "("
|
||||
FOR_TYPE
|
||||
FOR_KW "for"
|
||||
GENERIC_PARAM_LIST
|
||||
L_ANGLE "<"
|
||||
LIFETIME_PARAM
|
||||
LIFETIME
|
||||
LIFETIME_IDENT "'a"
|
||||
R_ANGLE ">"
|
||||
WHITESPACE " "
|
||||
PATH_TYPE
|
||||
PATH
|
||||
PATH_SEGMENT
|
||||
NAME_REF
|
||||
IDENT "Trait"
|
||||
GENERIC_ARG_LIST
|
||||
L_ANGLE "<"
|
||||
LIFETIME_ARG
|
||||
LIFETIME
|
||||
LIFETIME_IDENT "'a"
|
||||
R_ANGLE ">"
|
||||
R_PAREN ")"
|
||||
WHITESPACE " "
|
||||
PLUS "+"
|
||||
WHITESPACE " "
|
||||
TYPE_BOUND
|
||||
L_PAREN "("
|
||||
PATH_TYPE
|
||||
PATH
|
||||
PATH_SEGMENT
|
||||
NAME_REF
|
||||
IDENT "Copy"
|
||||
R_PAREN ")"
|
||||
WHITESPACE " "
|
||||
PLUS "+"
|
||||
WHITESPACE " "
|
||||
TYPE_BOUND
|
||||
L_PAREN "("
|
||||
QUESTION "?"
|
||||
PATH_TYPE
|
||||
PATH
|
||||
PATH_SEGMENT
|
||||
NAME_REF
|
||||
IDENT "Sized"
|
||||
R_PAREN ")"
|
||||
R_ANGLE ">"
|
||||
ERROR
|
||||
SEMICOLON ";"
|
||||
ERROR
|
||||
R_ANGLE ">"
|
||||
SEMICOLON ";"
|
||||
WHITESPACE "\n"
|
||||
R_CURLY "}"
|
||||
WHITESPACE "\n"
|
||||
error 88: expected COMMA
|
||||
error 88: expected R_ANGLE
|
||||
error 121: expected SEMICOLON
|
||||
error 121: expected expression
|
||||
error 121: expected expression, item or let statement
|
||||
error 140: expected type
|
||||
error 141: expected R_PAREN
|
||||
error 141: expected COMMA
|
||||
error 141: expected R_ANGLE
|
||||
error 141: expected SEMICOLON
|
||||
error 146: expected R_ANGLE
|
||||
error 146: expected SEMICOLON
|
||||
error 146: expected expression
|
||||
error 148: expected expression
|
||||
error 146: expected expression, item or let statement
|
||||
error 148: expected expression, item or let statement
|
||||
error 158: expected `|`
|
||||
error 158: expected COMMA
|
||||
error 165: expected expression
|
||||
error 168: expected expression
|
||||
error 179: expected expression
|
||||
error 180: expected COMMA
|
||||
error 190: expected EQ
|
||||
error 190: expected expression
|
||||
error 191: expected COMMA
|
||||
error 204: expected `|`
|
||||
error 204: expected COMMA
|
||||
error 211: expected expression
|
||||
error 214: expected expression
|
||||
error 228: expected expression
|
||||
error 229: expected R_PAREN
|
||||
error 229: expected COMMA
|
||||
error 236: expected expression
|
||||
error 237: expected COMMA
|
||||
error 237: expected expression
|
||||
error 237: expected R_PAREN
|
||||
error 180: expected SEMICOLON
|
||||
error 215: expected R_ANGLE
|
||||
error 235: expected SEMICOLON
|
||||
error 235: expected expression, item or let statement
|
||||
|
|
|
@ -156,8 +156,7 @@ SOURCE_FILE
|
|||
PATH_SEGMENT
|
||||
NAME_REF
|
||||
IDENT "i32"
|
||||
WHITESPACE " "
|
||||
ERROR
|
||||
WHITESPACE " "
|
||||
ERROR
|
||||
L_CURLY "{"
|
||||
R_CURLY "}"
|
||||
|
@ -199,10 +198,8 @@ error 95: expected type
|
|||
error 95: expected COMMA
|
||||
error 96: expected field
|
||||
error 98: expected field declaration
|
||||
error 371: expected R_PAREN
|
||||
error 371: expected COMMA
|
||||
error 372: expected a type
|
||||
error 372: expected R_PAREN
|
||||
error 372: expected COMMA
|
||||
error 372: expected enum variant
|
||||
error 374: expected enum variant
|
||||
error 494: expected pattern
|
||||
|
|
|
@ -72,4 +72,4 @@ SOURCE_FILE
|
|||
error 24: expected existential, fn, trait or impl
|
||||
error 41: expected existential, fn, trait or impl
|
||||
error 56: expected a block
|
||||
error 75: expected a loop
|
||||
error 75: expected a loop or block
|
||||
|
|
|
@ -12,7 +12,7 @@ SOURCE_FILE
|
|||
STMT_LIST
|
||||
L_CURLY "{"
|
||||
WHITESPACE "\n "
|
||||
EXPR_STMT
|
||||
BIN_EXPR
|
||||
PATH_EXPR
|
||||
PATH
|
||||
PATH_SEGMENT
|
||||
|
@ -41,13 +41,14 @@ SOURCE_FILE
|
|||
COLON2 "::"
|
||||
ERROR
|
||||
L_ANGLE "<"
|
||||
BIN_EXPR
|
||||
PATH_EXPR
|
||||
PATH
|
||||
PATH_SEGMENT
|
||||
NAME_REF
|
||||
IDENT "nope"
|
||||
SHR ">>"
|
||||
TYPE_ARG
|
||||
PATH_TYPE
|
||||
PATH
|
||||
PATH_SEGMENT
|
||||
NAME_REF
|
||||
IDENT "nope"
|
||||
R_ANGLE ">"
|
||||
R_ANGLE ">"
|
||||
ERROR
|
||||
SEMICOLON ";"
|
||||
WHITESPACE "\n"
|
||||
|
@ -114,8 +115,6 @@ SOURCE_FILE
|
|||
WHITESPACE "\n"
|
||||
error 30: expected identifier
|
||||
error 31: expected COMMA
|
||||
error 31: expected R_ANGLE
|
||||
error 31: expected SEMICOLON
|
||||
error 37: expected expression
|
||||
error 75: expected identifier
|
||||
error 76: expected SEMICOLON
|
||||
|
|
|
@ -23,6 +23,6 @@ SOURCE_FILE
|
|||
WHITESPACE "\n"
|
||||
R_CURLY "}"
|
||||
WHITESPACE "\n"
|
||||
error 22: expected a loop
|
||||
error 22: expected a loop or block
|
||||
error 27: expected type
|
||||
error 27: expected `{`
|
||||
|
|
|
@ -0,0 +1,77 @@
|
|||
SOURCE_FILE
|
||||
FN
|
||||
FN_KW "fn"
|
||||
WHITESPACE " "
|
||||
NAME
|
||||
IDENT "main"
|
||||
PARAM_LIST
|
||||
L_PAREN "("
|
||||
R_PAREN ")"
|
||||
WHITESPACE " "
|
||||
BLOCK_EXPR
|
||||
STMT_LIST
|
||||
L_CURLY "{"
|
||||
WHITESPACE "\n "
|
||||
EXPR_STMT
|
||||
CALL_EXPR
|
||||
PATH_EXPR
|
||||
PATH
|
||||
PATH_SEGMENT
|
||||
NAME_REF
|
||||
IDENT "foo"
|
||||
ARG_LIST
|
||||
L_PAREN "("
|
||||
PATH_EXPR
|
||||
PATH
|
||||
PATH
|
||||
PATH_SEGMENT
|
||||
NAME_REF
|
||||
IDENT "bar"
|
||||
COLON2 "::"
|
||||
R_PAREN ")"
|
||||
SEMICOLON ";"
|
||||
WHITESPACE "\n "
|
||||
EXPR_STMT
|
||||
CALL_EXPR
|
||||
PATH_EXPR
|
||||
PATH
|
||||
PATH_SEGMENT
|
||||
NAME_REF
|
||||
IDENT "foo"
|
||||
ARG_LIST
|
||||
L_PAREN "("
|
||||
PATH_EXPR
|
||||
PATH
|
||||
PATH_SEGMENT
|
||||
NAME_REF
|
||||
IDENT "bar"
|
||||
ERROR
|
||||
COLON ":"
|
||||
R_PAREN ")"
|
||||
SEMICOLON ";"
|
||||
WHITESPACE "\n "
|
||||
EXPR_STMT
|
||||
CALL_EXPR
|
||||
PATH_EXPR
|
||||
PATH
|
||||
PATH_SEGMENT
|
||||
NAME_REF
|
||||
IDENT "foo"
|
||||
ARG_LIST
|
||||
L_PAREN "("
|
||||
BIN_EXPR
|
||||
PATH_EXPR
|
||||
PATH
|
||||
PATH_SEGMENT
|
||||
NAME_REF
|
||||
IDENT "bar"
|
||||
PLUS "+"
|
||||
R_PAREN ")"
|
||||
SEMICOLON ";"
|
||||
WHITESPACE "\n"
|
||||
R_CURLY "}"
|
||||
WHITESPACE "\n"
|
||||
error 25: expected identifier
|
||||
error 39: expected COMMA
|
||||
error 39: expected expression
|
||||
error 55: expected expression
|
|
@ -0,0 +1,5 @@
|
|||
fn main() {
|
||||
foo(bar::);
|
||||
foo(bar:);
|
||||
foo(bar+);
|
||||
}
|
|
@ -49,5 +49,5 @@ SOURCE_FILE
|
|||
R_CURLY "}"
|
||||
WHITESPACE "\n"
|
||||
error 6: missing type for function parameter
|
||||
error 6: expected COMMA
|
||||
error 6: expected `,`
|
||||
error 16: missing type for function parameter
|
||||
|
|
|
@ -120,17 +120,20 @@ pub fn read_version(dylib_path: &AbsPath) -> io::Result<String> {
|
|||
let version = u32::from_be_bytes([dot_rustc[4], dot_rustc[5], dot_rustc[6], dot_rustc[7]]);
|
||||
// Last supported version is:
|
||||
// https://github.com/rust-lang/rust/commit/0696e79f2740ad89309269b460579e548a5cd632
|
||||
match version {
|
||||
5 | 6 => {}
|
||||
let snappy_portion = match version {
|
||||
5 | 6 => &dot_rustc[8..],
|
||||
7 => {
|
||||
let len_bytes = &dot_rustc[8..12];
|
||||
let data_len = u32::from_be_bytes(len_bytes.try_into().unwrap()) as usize;
|
||||
&dot_rustc[12..data_len + 12]
|
||||
}
|
||||
_ => {
|
||||
return Err(io::Error::new(
|
||||
io::ErrorKind::InvalidData,
|
||||
format!("unsupported metadata version {version}"),
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
let snappy_portion = &dot_rustc[8..];
|
||||
};
|
||||
|
||||
let mut snappy_decoder = SnapDecoder::new(snappy_portion);
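For orientation, an illustration of the v7 layout the new match arm handles (not code from the commit): bytes 0..4 hold a magic number, 4..8 a big-endian format version, and for version 7 bytes 8..12 hold a big-endian payload length followed by that many bytes of snappy-compressed metadata. A bounds-checked sketch, with a hypothetical helper name:

fn v7_snappy_portion(dot_rustc: &[u8]) -> Option<&[u8]> {
    // [0..4) magic, [4..8) version (already validated by the caller).
    let len_bytes = dot_rustc.get(8..12)?;
    let data_len = u32::from_be_bytes(len_bytes.try_into().ok()?) as usize;
    // The compressed payload starts right after the length prefix.
    dot_rustc.get(12..12 + data_len)
}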

@ -24,7 +24,7 @@ crossbeam-channel = "0.5.5"
dissimilar = "1.0.4"
itertools = "0.10.5"
scip = "0.1.1"
lsp-types = { version = "=0.93.2", features = ["proposed"] }
lsp-types = { version = "=0.94", features = ["proposed"] }
parking_lot = "0.12.1"
xflags = "0.3.0"
oorandom = "11.1.3"

crates/rust-analyzer/default_12483297303756020505_0.profraw (new binary file, not shown)

@ -1,4 +1,5 @@
//! Advertises the capabilities of the LSP Server.
use ide_db::line_index::WideEncoding;
use lsp_types::{
CallHierarchyServerCapability, ClientCapabilities, CodeActionKind, CodeActionOptions,
CodeActionProviderCapability, CodeLensOptions, CompletionOptions,

@ -16,16 +17,19 @@ use lsp_types::{
use serde_json::json;

use crate::config::{Config, RustfmtConfig};
use crate::lsp_ext::supports_utf8;
use crate::line_index::PositionEncoding;
use crate::lsp_ext::negotiated_encoding;
use crate::semantic_tokens;

pub fn server_capabilities(config: &Config) -> ServerCapabilities {
ServerCapabilities {
position_encoding: if supports_utf8(config.caps()) {
Some(PositionEncodingKind::UTF8)
} else {
None
},
position_encoding: Some(match negotiated_encoding(config.caps()) {
PositionEncoding::Utf8 => PositionEncodingKind::UTF8,
PositionEncoding::Wide(wide) => match wide {
WideEncoding::Utf16 => PositionEncodingKind::UTF16,
WideEncoding::Utf32 => PositionEncodingKind::UTF32,
},
}),
text_document_sync: Some(TextDocumentSyncCapability::Options(TextDocumentSyncOptions {
open_close: Some(true),
change: Some(TextDocumentSyncKind::INCREMENTAL),

@ -134,6 +138,7 @@ pub fn server_capabilities(config: &Config) -> ServerCapabilities {
resolve_provider: Some(true),
},
))),
inline_value_provider: None,
experimental: Some(json!({
"externalDocs": true,
"hoverRange": true,

@ -11,6 +11,7 @@ use ide::{
use ide_db::LineIndexDatabase;

use ide_db::base_db::salsa::{self, ParallelDatabase};
use ide_db::line_index::WideEncoding;
use lsp_types::{self, lsif};
use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace};
use vfs::{AbsPathBuf, Vfs};

@ -127,7 +128,7 @@ impl LsifManager<'_> {
let line_index = self.db.line_index(file_id);
let line_index = LineIndex {
index: line_index,
encoding: PositionEncoding::Utf16,
encoding: PositionEncoding::Wide(WideEncoding::Utf16),
endings: LineEndings::Unix,
};
let range_id = self.add_vertex(lsif::Vertex::Range {

@ -249,7 +250,7 @@ impl LsifManager<'_> {
let line_index = self.db.line_index(file_id);
let line_index = LineIndex {
index: line_index,
encoding: PositionEncoding::Utf16,
encoding: PositionEncoding::Wide(WideEncoding::Utf16),
endings: LineEndings::Unix,
};
let result = folds

@ -33,7 +33,7 @@ use crate::{
caps::completion_item_edit_resolve,
diagnostics::DiagnosticsMapConfig,
line_index::PositionEncoding,
lsp_ext::{self, supports_utf8, WorkspaceSymbolSearchKind, WorkspaceSymbolSearchScope},
lsp_ext::{self, negotiated_encoding, WorkspaceSymbolSearchKind, WorkspaceSymbolSearchScope},
};

mod patch_old_style;

@ -999,11 +999,7 @@ impl Config {
}

pub fn position_encoding(&self) -> PositionEncoding {
if supports_utf8(&self.caps) {
PositionEncoding::Utf8
} else {
PositionEncoding::Utf16
}
negotiated_encoding(&self.caps)
}

fn experimental(&self, index: &'static str) -> bool {

@ -3,6 +3,7 @@
use std::collections::HashMap;

use flycheck::{Applicability, DiagnosticLevel, DiagnosticSpan};
use ide_db::line_index::WideEncoding;
use itertools::Itertools;
use stdx::format_to;
use vfs::{AbsPath, AbsPathBuf};

@ -95,7 +96,8 @@ fn position(
let mut char_offset = 0;
let len_func = match position_encoding {
PositionEncoding::Utf8 => char::len_utf8,
PositionEncoding::Utf16 => char::len_utf16,
PositionEncoding::Wide(WideEncoding::Utf16) => char::len_utf16,
PositionEncoding::Wide(WideEncoding::Utf32) => |_| 1,
};
for c in line.text.chars() {
char_offset += 1;
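A quick standalone illustration (not from the commit) of why the encoding now matters in this column arithmetic: the same character spans a different number of code units per encoding, and UTF-32 is always one unit per `char`, hence the `|_| 1` arm above.

fn main() {
    let c = '❤'; // U+2764
    assert_eq!(c.len_utf8(), 3);  // three UTF-8 code units (bytes)
    assert_eq!(c.len_utf16(), 1); // one UTF-16 code unit
    // UTF-32: exactly one code unit per char by definition.
}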

@ -1,7 +1,10 @@
//! Conversion lsp_types types to rust-analyzer specific ones.
use anyhow::format_err;
use ide::{Annotation, AnnotationKind, AssistKind, LineCol, LineColUtf16};
use ide_db::base_db::{FileId, FilePosition, FileRange};
use ide::{Annotation, AnnotationKind, AssistKind, LineCol};
use ide_db::{
base_db::{FileId, FilePosition, FileRange},
line_index::WideLineCol,
};
use syntax::{TextRange, TextSize};
use vfs::AbsPathBuf;

@ -26,9 +29,9 @@ pub(crate) fn vfs_path(url: &lsp_types::Url) -> Result<vfs::VfsPath> {
pub(crate) fn offset(line_index: &LineIndex, position: lsp_types::Position) -> Result<TextSize> {
let line_col = match line_index.encoding {
PositionEncoding::Utf8 => LineCol { line: position.line, col: position.character },
PositionEncoding::Utf16 => {
let line_col = LineColUtf16 { line: position.line, col: position.character };
line_index.index.to_utf8(line_col)
PositionEncoding::Wide(enc) => {
let line_col = WideLineCol { line: position.line, col: position.character };
line_index.index.to_utf8(enc, line_col)
}
};
let text_size =

@ -7,9 +7,12 @@

use std::sync::Arc;

use ide_db::line_index::WideEncoding;

#[derive(Clone, Copy)]
pub enum PositionEncoding {
Utf8,
Utf16,
Wide(WideEncoding),
}

pub(crate) struct LineIndex {

@ -2,6 +2,7 @@

use std::{collections::HashMap, path::PathBuf};

use ide_db::line_index::WideEncoding;
use lsp_types::request::Request;
use lsp_types::PositionEncodingKind;
use lsp_types::{

@ -10,6 +11,8 @@ use lsp_types::{
};
use serde::{Deserialize, Serialize};

use crate::line_index::PositionEncoding;

pub enum AnalyzerStatus {}

impl Request for AnalyzerStatus {

@ -481,16 +484,22 @@ pub(crate) enum CodeLensResolveData {
References(lsp_types::TextDocumentPositionParams),
}

pub fn supports_utf8(caps: &lsp_types::ClientCapabilities) -> bool {
match &caps.general {
Some(general) => general
.position_encodings
.as_deref()
.unwrap_or_default()
.iter()
.any(|it| it == &PositionEncodingKind::UTF8),
_ => false,
pub fn negotiated_encoding(caps: &lsp_types::ClientCapabilities) -> PositionEncoding {
let client_encodings = match &caps.general {
Some(general) => general.position_encodings.as_deref().unwrap_or_default(),
None => &[],
};

for enc in client_encodings {
if enc == &PositionEncodingKind::UTF8 {
return PositionEncoding::Utf8;
} else if enc == &PositionEncodingKind::UTF32 {
return PositionEncoding::Wide(WideEncoding::Utf32);
}
// NB: intentionally prefer just about anything else to utf-16.
}

PositionEncoding::Wide(WideEncoding::Utf16)
}
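A test-style sketch of how the negotiation behaves, assuming the lsp-types 0.94 capability shapes; this is an illustration written against the function above, not a test from the commit:

use lsp_types::{ClientCapabilities, GeneralClientCapabilities, PositionEncodingKind};

#[test]
fn prefers_utf8_when_advertised() {
    let caps = ClientCapabilities {
        general: Some(GeneralClientCapabilities {
            position_encodings: Some(vec![
                PositionEncodingKind::UTF16,
                PositionEncodingKind::UTF8,
            ]),
            ..Default::default()
        }),
        ..Default::default()
    };
    // UTF-8 is advertised, so it wins even though UTF-16 is listed first.
    assert!(matches!(negotiated_encoding(&caps), PositionEncoding::Utf8));
    // With no `general` capabilities at all, the fallback is wide UTF-16.
    assert!(matches!(
        negotiated_encoding(&ClientCapabilities::default()),
        PositionEncoding::Wide(WideEncoding::Utf16)
    ));
}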

pub enum MoveItem {}

@ -161,6 +161,7 @@ impl GlobalState {
}

pub(crate) fn apply_document_changes(
encoding: PositionEncoding,
file_contents: impl FnOnce() -> String,
mut content_changes: Vec<lsp_types::TextDocumentContentChangeEvent>,
) -> String {

@ -192,9 +193,9 @@ pub(crate) fn apply_document_changes(
let mut line_index = LineIndex {
// the index will be overwritten in the bottom loop's first iteration
index: Arc::new(ide::LineIndex::new(&text)),
// We don't care about line endings or offset encoding here.
// We don't care about line endings here.
endings: LineEndings::Unix,
encoding: PositionEncoding::Utf16,
encoding,
};

// The changes we got must be applied sequentially, but can cross lines so we

@ -256,6 +257,7 @@ pub(crate) fn all_edits_are_disjoint(

#[cfg(test)]
mod tests {
use ide_db::line_index::WideEncoding;
use lsp_types::{
CompletionItem, CompletionTextEdit, InsertReplaceEdit, Position, Range,
TextDocumentContentChangeEvent,

@ -278,9 +280,11 @@ mod tests {
};
}

let text = apply_document_changes(|| String::new(), vec![]);
let encoding = PositionEncoding::Wide(WideEncoding::Utf16);
let text = apply_document_changes(encoding, || String::new(), vec![]);
assert_eq!(text, "");
let text = apply_document_changes(
encoding,
|| text,
vec![TextDocumentContentChangeEvent {
range: None,

@ -289,39 +293,49 @@ mod tests {
}],
);
assert_eq!(text, "the");
let text = apply_document_changes(|| text, c![0, 3; 0, 3 => " quick"]);
let text = apply_document_changes(encoding, || text, c![0, 3; 0, 3 => " quick"]);
assert_eq!(text, "the quick");
let text = apply_document_changes(|| text, c![0, 0; 0, 4 => "", 0, 5; 0, 5 => " foxes"]);
let text =
apply_document_changes(encoding, || text, c![0, 0; 0, 4 => "", 0, 5; 0, 5 => " foxes"]);
assert_eq!(text, "quick foxes");
let text = apply_document_changes(|| text, c![0, 11; 0, 11 => "\ndream"]);
let text = apply_document_changes(encoding, || text, c![0, 11; 0, 11 => "\ndream"]);
assert_eq!(text, "quick foxes\ndream");
let text = apply_document_changes(|| text, c![1, 0; 1, 0 => "have "]);
let text = apply_document_changes(encoding, || text, c![1, 0; 1, 0 => "have "]);
assert_eq!(text, "quick foxes\nhave dream");
let text = apply_document_changes(
encoding,
|| text,
c![0, 0; 0, 0 => "the ", 1, 4; 1, 4 => " quiet", 1, 16; 1, 16 => "s\n"],
);
assert_eq!(text, "the quick foxes\nhave quiet dreams\n");
let text = apply_document_changes(|| text, c![0, 15; 0, 15 => "\n", 2, 17; 2, 17 => "\n"]);
let text = apply_document_changes(
encoding,
|| text,
c![0, 15; 0, 15 => "\n", 2, 17; 2, 17 => "\n"],
);
assert_eq!(text, "the quick foxes\n\nhave quiet dreams\n\n");
let text = apply_document_changes(
encoding,
|| text,
c![1, 0; 1, 0 => "DREAM", 2, 0; 2, 0 => "they ", 3, 0; 3, 0 => "DON'T THEY?"],
);
assert_eq!(text, "the quick foxes\nDREAM\nthey have quiet dreams\nDON'T THEY?\n");
let text = apply_document_changes(|| text, c![0, 10; 1, 5 => "", 2, 0; 2, 12 => ""]);
let text =
apply_document_changes(encoding, || text, c![0, 10; 1, 5 => "", 2, 0; 2, 12 => ""]);
assert_eq!(text, "the quick \nthey have quiet dreams\n");

let text = String::from("❤️");
let text = apply_document_changes(|| text, c![0, 0; 0, 0 => "a"]);
let text = apply_document_changes(encoding, || text, c![0, 0; 0, 0 => "a"]);
assert_eq!(text, "a❤️");

let text = String::from("a\nb");
let text = apply_document_changes(|| text, c![0, 1; 1, 0 => "\nțc", 0, 1; 1, 1 => "d"]);
let text =
apply_document_changes(encoding, || text, c![0, 1; 1, 0 => "\nțc", 0, 1; 1, 1 => "d"]);
assert_eq!(text, "adcb");

let text = String::from("a\nb");
let text = apply_document_changes(|| text, c![0, 1; 1, 0 => "ț\nc", 0, 2; 0, 2 => "c"]);
let text =
apply_document_changes(encoding, || text, c![0, 1; 1, 0 => "ț\nc", 0, 2; 0, 2 => "c"]);
assert_eq!(text, "ațc\ncb");
}

@ -831,6 +831,7 @@ impl GlobalState {
let vfs = &mut this.vfs.write().0;
let file_id = vfs.file_id(&path).unwrap();
let text = apply_document_changes(
this.config.position_encoding(),
|| std::str::from_utf8(vfs.file_contents(file_id)).unwrap().into(),
params.content_changes,
);

@ -268,7 +268,10 @@ impl GlobalState {
]
})
})
.map(|glob_pattern| lsp_types::FileSystemWatcher { glob_pattern, kind: None })
.map(|glob_pattern| lsp_types::FileSystemWatcher {
glob_pattern: lsp_types::GlobPattern::String(glob_pattern),
kind: None,
})
.collect(),
};
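Context for the hunk above: lsp-types 0.94 wraps watcher globs in a `GlobPattern` enum (a plain string or a relative pattern) instead of a bare string, hence the explicit constructor. A minimal sketch assuming the 0.94 API:

use lsp_types::{FileSystemWatcher, GlobPattern};

fn watcher_for(glob: String) -> FileSystemWatcher {
    FileSystemWatcher {
        // 0.93 took a plain String here; 0.94 requires the enum wrapper.
        glob_pattern: GlobPattern::String(glob),
        kind: None,
    }
}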

let registration = lsp_types::Registration {

@ -31,8 +31,8 @@ pub(crate) fn position(line_index: &LineIndex, offset: TextSize) -> lsp_types::P
let line_col = line_index.index.line_col(offset);
match line_index.encoding {
PositionEncoding::Utf8 => lsp_types::Position::new(line_col.line, line_col.col),
PositionEncoding::Utf16 => {
let line_col = line_index.index.to_utf16(line_col);
PositionEncoding::Wide(enc) => {
let line_col = line_index.index.to_wide(enc, line_col);
lsp_types::Position::new(line_col.line, line_col.col)
}
}

@ -212,7 +212,7 @@ pub(crate) fn completion_items(
tdpp: lsp_types::TextDocumentPositionParams,
items: Vec<CompletionItem>,
) -> Vec<lsp_types::CompletionItem> {
let max_relevance = items.iter().map(|it| it.relevance().score()).max().unwrap_or_default();
let max_relevance = items.iter().map(|it| it.relevance.score()).max().unwrap_or_default();
let mut res = Vec::with_capacity(items.len());
for item in items {
completion_item(&mut res, config, line_index, &tdpp, max_relevance, item);

@ -235,22 +235,26 @@ fn completion_item(
item: CompletionItem,
) {
let insert_replace_support = config.insert_replace_support().then_some(tdpp.position);
let ref_match = item.ref_match();
let lookup = item.lookup().to_string();

let mut additional_text_edits = Vec::new();

// LSP does not allow arbitrary edits in completion, so we have to do a
// non-trivial mapping here.
let text_edit = {
let mut text_edit = None;
let source_range = item.source_range();
for indel in item.text_edit().iter() {
let source_range = item.source_range;
for indel in item.text_edit {
if indel.delete.contains_range(source_range) {
// Extract this indel as the main edit
text_edit = Some(if indel.delete == source_range {
self::completion_text_edit(line_index, insert_replace_support, indel.clone())
} else {
assert!(source_range.end() == indel.delete.end());
let range1 = TextRange::new(indel.delete.start(), source_range.start());
let range2 = source_range;
let indel1 = Indel::replace(range1, String::new());
let indel1 = Indel::delete(range1);
let indel2 = Indel::replace(range2, indel.insert.clone());
additional_text_edits.push(self::text_edit(line_index, indel1));
self::completion_text_edit(line_index, insert_replace_support, indel2)

@ -264,23 +268,23 @@ fn completion_item(
text_edit.unwrap()
};

let insert_text_format = item.is_snippet().then_some(lsp_types::InsertTextFormat::SNIPPET);
let tags = item.deprecated().then(|| vec![lsp_types::CompletionItemTag::DEPRECATED]);
let command = if item.trigger_call_info() && config.client_commands().trigger_parameter_hints {
let insert_text_format = item.is_snippet.then_some(lsp_types::InsertTextFormat::SNIPPET);
let tags = item.deprecated.then(|| vec![lsp_types::CompletionItemTag::DEPRECATED]);
let command = if item.trigger_call_info && config.client_commands().trigger_parameter_hints {
Some(command::trigger_parameter_hints())
} else {
None
};

let mut lsp_item = lsp_types::CompletionItem {
label: item.label().to_string(),
detail: item.detail().map(|it| it.to_string()),
filter_text: Some(item.lookup().to_string()),
kind: Some(completion_item_kind(item.kind())),
label: item.label.to_string(),
detail: item.detail.map(|it| it.to_string()),
filter_text: Some(lookup),
kind: Some(completion_item_kind(item.kind)),
text_edit: Some(text_edit),
additional_text_edits: Some(additional_text_edits),
documentation: item.documentation().map(documentation),
deprecated: Some(item.deprecated()),
documentation: item.documentation.map(documentation),
deprecated: Some(item.deprecated),
tags,
command,
insert_text_format,

@ -294,12 +298,13 @@ fn completion_item(
});
}

set_score(&mut lsp_item, max_relevance, item.relevance());
set_score(&mut lsp_item, max_relevance, item.relevance);

if config.completion().enable_imports_on_the_fly {
if let imports @ [_, ..] = item.imports_to_add() {
let imports: Vec<_> = imports
.iter()
if !item.import_to_add.is_empty() {
let imports: Vec<_> = item
.import_to_add
.into_iter()
.filter_map(|import_edit| {
let import_path = &import_edit.import_path;
let import_name = import_path.segments().last()?;

@ -316,18 +321,13 @@ fn completion_item(
}
}

if let Some((mutability, offset, relevance)) = item.ref_match() {
let mut lsp_item_with_ref = lsp_item.clone();
if let Some((label, indel, relevance)) = ref_match {
let mut lsp_item_with_ref = lsp_types::CompletionItem { label, ..lsp_item.clone() };
lsp_item_with_ref
.additional_text_edits
.get_or_insert_with(Default::default)
.push(self::text_edit(line_index, indel));
set_score(&mut lsp_item_with_ref, max_relevance, relevance);
lsp_item_with_ref.label =
format!("&{}{}", mutability.as_keyword_for_ref(), lsp_item_with_ref.label);
lsp_item_with_ref.additional_text_edits.get_or_insert_with(Default::default).push(
self::text_edit(
line_index,
Indel::insert(offset, format!("&{}", mutability.as_keyword_for_ref())),
),
);

acc.push(lsp_item_with_ref);
};

@ -766,6 +766,7 @@ pub(crate) fn folding_range(
end_line,
end_character: None,
kind,
collapsed_text: None,
}
} else {
lsp_types::FoldingRange {

@ -774,6 +775,7 @@ pub(crate) fn folding_range(
end_line: range.end.line,
end_character: Some(range.end.character),
kind,
collapsed_text: None,
}
}
}

@ -1360,7 +1362,7 @@ pub(crate) mod command {
pub(crate) fn trigger_parameter_hints() -> lsp_types::Command {
lsp_types::Command {
title: "triggerParameterHints".into(),
command: "editor.action.triggerParameterHints".into(),
command: "rust-analyzer.triggerParameterHints".into(),
arguments: None,
}
}

@ -1429,7 +1431,7 @@ fn main() {
let line_index = LineIndex {
index: Arc::new(ide::LineIndex::new(text)),
endings: LineEndings::Unix,
encoding: PositionEncoding::Utf16,
encoding: PositionEncoding::Utf8,
};
let converted: Vec<lsp_types::FoldingRange> =
folds.into_iter().map(|it| folding_range(text, &line_index, true, it)).collect();

@ -22,7 +22,7 @@ use lsp_types::{
notification::DidOpenTextDocument,
request::{
CodeActionRequest, Completion, Formatting, GotoTypeDefinition, HoverRequest,
WillRenameFiles, WorkspaceSymbol,
WillRenameFiles, WorkspaceSymbolRequest,
},
CodeActionContext, CodeActionParams, CompletionParams, DidOpenTextDocumentParams,
DocumentFormattingParams, FileRename, FormattingOptions, GotoDefinitionParams, HoverParams,

@ -1095,5 +1095,5 @@ pub fn bar() {}
.server()
.wait_until_workspace_is_loaded();

server.request::<WorkspaceSymbol>(Default::default(), json!([]));
server.request::<WorkspaceSymbolRequest>(Default::default(), json!([]));
}

@ -107,6 +107,7 @@ impl<'a> Project<'a> {
did_change_watched_files: Some(
lsp_types::DidChangeWatchedFilesClientCapabilities {
dynamic_registration: Some(true),
relative_pattern_support: None,
},
),
..Default::default()

@ -11,6 +11,7 @@ pub mod hash;
pub mod process;
pub mod panic_context;
pub mod non_empty_vec;
pub mod rand;

pub use always_assert::{always, never};

crates/stdx/src/rand.rs (new file, 21 lines)

@ -0,0 +1,21 @@
//! We don't use `rand`, as that's too many things for us.
//!
//! We currently use oorandom instead, but it's missing these two utilities.
//! Perhaps we should switch to `fastrand`, or our own small PRNG, it's not like
//! we need anything more complicated than xor-shift.

pub fn shuffle<T>(slice: &mut [T], mut rand_index: impl FnMut(usize) -> usize) {
let mut remaining = slice.len() - 1;
while remaining > 0 {
let index = rand_index(remaining);
slice.swap(remaining, index);
remaining -= 1;
}
}

pub fn seed() -> u64 {
use std::collections::hash_map::RandomState;
use std::hash::{BuildHasher, Hasher};

RandomState::new().build_hasher().finish()
}
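A usage sketch (not part of the commit) pairing these helpers with the oorandom dependency added to the workspace above. Reading the loop, `rand_index(n)` is expected to return a uniform index in `0..=n` for an unbiased Fisher-Yates pass, and the slice must be non-empty or `slice.len() - 1` underflows:

fn main() {
    let mut rng = oorandom::Rand32::new(seed());
    let mut items = vec![1, 2, 3, 4, 5];
    // Uniform over 0..=n, as the swap-with-remaining step requires.
    shuffle(&mut items, |n| rng.rand_range(0..n as u32 + 1) as usize);
    println!("{items:?}");
}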

@ -186,7 +186,7 @@ impl SourceFile {
/// ```
#[macro_export]
macro_rules! match_ast {
(match $node:ident { $($tt:tt)* }) => { match_ast!(match ($node) { $($tt)* }) };
(match $node:ident { $($tt:tt)* }) => { $crate::match_ast!(match ($node) { $($tt)* }) };

(match ($node:expr) {
$( $( $path:ident )::+ ($it:pat) => $res:expr, )*
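The `$crate::` prefix in the hunk above makes the recursive invocation hygienic: the shorthand arm now expands correctly even when the caller has not imported `match_ast` itself. A hedged sketch of such a call site (assuming `AstNode` still needs to be in scope for the `cast` calls in the expansion):

use syntax::{ast, AstNode, SyntaxNode};

fn describe(node: &SyntaxNode) -> &'static str {
    // No `use syntax::match_ast;` needed: the expansion's recursive call
    // resolves via `$crate`, so a fully qualified invocation just works.
    syntax::match_ast! {
        match node {
            ast::Fn(_it) => "a function",
            ast::Struct(_it) => "a struct",
            _ => "something else",
        }
    }
}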

@ -119,7 +119,7 @@ See [#93](https://github.com/rust-lang/rust-analyzer/pull/93) for an example PR
**Architecture Invariant:** `syntax` crate is completely independent from the rest of rust-analyzer. It knows nothing about salsa or LSP.
This is important because it is possible to make useful tooling using only the syntax tree.
Without semantic information, you don't need to be able to _build_ code, which makes the tooling more robust.
See also https://web.stanford.edu/~mlfbrown/paper.pdf.
See also https://mlfbrown.com/paper.pdf.
You can view the `syntax` crate as an entry point to rust-analyzer.
`syntax` crate is an **API Boundary**.

@ -1,5 +1,5 @@
<!---
lsp_ext.rs hash: ec29403e67dfd15b
lsp_ext.rs hash: d87477896dfe41d4

If you need to change the above hash to make the test pass, please check if you
need to adjust this doc as well and ping this issue:

@ -87,6 +87,12 @@ export function shuffleCrateGraph(ctx: CtxInit): Cmd {
};
}

export function triggerParameterHints(_: CtxInit): Cmd {
return async () => {
await vscode.commands.executeCommand("editor.action.triggerParameterHints");
};
}

export function matchingBrace(ctx: CtxInit): Cmd {
return async () => {
const editor = ctx.activeRustEditor;

@ -186,5 +186,6 @@ function createCommands(): Record<string, CommandFactory> {
resolveCodeAction: { enabled: commands.resolveCodeAction },
runSingle: { enabled: commands.runSingle },
showReferences: { enabled: commands.showReferences },
triggerParameterHints: { enabled: commands.triggerParameterHints },
};
}

@ -13,4 +13,4 @@ serde = { version = "1.0.144", features = ["derive"] }
crossbeam-channel = "0.5.6"

[dev-dependencies]
lsp-types = "=0.93.2"
lsp-types = "=0.94"