diff --git a/Cargo.lock b/Cargo.lock
index 8bafc38952..fe09e4afeb 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1608,6 +1608,7 @@ version = "0.0.0"
dependencies = [
"arrayvec",
"expect-test",
+ "indexmap",
"itertools",
"once_cell",
"parser",
diff --git a/crates/hir/src/code_model.rs b/crates/hir/src/code_model.rs
index 864f9c0c84..63c1a8ebf0 100644
--- a/crates/hir/src/code_model.rs
+++ b/crates/hir/src/code_model.rs
@@ -31,8 +31,7 @@ use hir_ty::{
autoderef,
display::{HirDisplayError, HirFormatter},
method_resolution,
- traits::Solution,
- traits::SolutionVariables,
+ traits::{FnTrait, Solution, SolutionVariables},
ApplicationTy, BoundVar, CallableDefId, Canonical, DebruijnIndex, FnSig, GenericPredicate,
InEnvironment, Obligation, ProjectionPredicate, ProjectionTy, Substs, TraitEnvironment, Ty,
TyDefId, TyKind, TypeCtor,
@@ -1386,6 +1385,28 @@ impl Type {
)
}
+    /// Checks that a particular type `ty` implements `std::ops::FnOnce`.
+    ///
+    /// This function can be used to check if a particular type is callable, since `FnOnce` is a
+    /// supertrait of `Fn` and `FnMut`, so all callable types implement at least `FnOnce`.
+ pub fn impls_fnonce(&self, db: &dyn HirDatabase) -> bool {
+ let krate = self.krate;
+
+ let fnonce_trait = match FnTrait::FnOnce.get_id(db, krate) {
+ Some(it) => it,
+ None => return false,
+ };
+
+ let canonical_ty = Canonical { value: self.ty.value.clone(), kinds: Arc::new([]) };
+ method_resolution::implements_trait(
+ &canonical_ty,
+ db,
+ self.ty.environment.clone(),
+ krate,
+ fnonce_trait,
+ )
+ }
+
pub fn impls_trait(&self, db: &dyn HirDatabase, trait_: Trait, args: &[Type]) -> bool {
let trait_ref = hir_ty::TraitRef {
trait_: trait_.id,
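
For readers unfamiliar with the `Fn` trait hierarchy, here is a standalone sketch (not part of the patch; the helper names are made up for illustration) of why a single `FnOnce` bound is satisfied by every callable value, which is exactly what `Type::impls_fnonce` relies on:

// Illustration only: closures, fn items and fn pointers all implement FnOnce,
// because FnOnce is a supertrait of FnMut, which in turn is a supertrait of Fn.
fn assert_callable<F: FnOnce() -> i32>(f: F) -> i32 {
    f()
}

fn forty_two() -> i32 {
    42
}

fn demo() {
    assert_eq!(assert_callable(|| 42), 42);     // closure
    assert_eq!(assert_callable(forty_two), 42); // fn item
    let ptr: fn() -> i32 = forty_two;
    assert_eq!(assert_callable(ptr), 42);       // fn pointer
}
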
diff --git a/crates/ide/Cargo.toml b/crates/ide/Cargo.toml
index 63299dc31d..76b52fa047 100644
--- a/crates/ide/Cargo.toml
+++ b/crates/ide/Cargo.toml
@@ -11,7 +11,7 @@ doctest = false
[dependencies]
either = "1.5.3"
-indexmap = "1.3.2"
+indexmap = "1.4.0"
itertools = "0.9.0"
log = "0.4.8"
rustc-hash = "1.1.0"
diff --git a/crates/ide/src/diagnostics.rs b/crates/ide/src/diagnostics.rs
index 90574cb359..232074c3df 100644
--- a/crates/ide/src/diagnostics.rs
+++ b/crates/ide/src/diagnostics.rs
@@ -613,7 +613,7 @@ fn main() {
pub struct Foo { pub a: i32, pub b: i32 }
"#,
r#"
-fn {a:42, b: ()} {}
+fn some(, b: ()} {}
fn items() {}
fn here() {}
diff --git a/crates/ide/src/syntax_highlighting.rs b/crates/ide/src/syntax_highlighting.rs
index b35c031627..7508484676 100644
--- a/crates/ide/src/syntax_highlighting.rs
+++ b/crates/ide/src/syntax_highlighting.rs
@@ -763,6 +763,9 @@ fn highlight_def(db: &RootDatabase, def: Definition) -> Highlight {
if local.is_mut(db) || local.ty(db).is_mutable_reference() {
h |= HighlightModifier::Mutable;
}
+ if local.ty(db).as_callable(db).is_some() || local.ty(db).impls_fnonce(db) {
+ h |= HighlightModifier::Callable;
+ }
return h;
}
}
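
Aside (an illustration, not part of the patch; `example` and its locals are hypothetical): the two branches above are complementary. `as_callable` covers locals whose type is itself function-like (fn items, fn pointers, closures), while `impls_fnonce` additionally catches values that are only known to be callable through a trait bound, as in the `fn baz<F: Fn() -> ()>` fixture added to the tests below.

// Roughly the kinds of bindings the new `callable` modifier targets.
fn example<F: Fn(i32) -> i32>(f: F) {
    let closure = |x: i32| x + 1;         // function-like type: closure
    let fn_ptr: fn(i32) -> i32 = closure; // function-like type: fn pointer
    let generic = f;                      // only known to be callable via its `Fn` bound (FnOnce supertrait)
    let plain = 92;                       // not callable: no modifier

    let _ = (closure(1), fn_ptr(2), generic(3), plain);
}
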
diff --git a/crates/ide/src/syntax_highlighting/tags.rs b/crates/ide/src/syntax_highlighting/tags.rs
index c1b817f06c..e8f78ad525 100644
--- a/crates/ide/src/syntax_highlighting/tags.rs
+++ b/crates/ide/src/syntax_highlighting/tags.rs
@@ -64,6 +64,7 @@ pub enum HighlightModifier {
Mutable,
Consuming,
Unsafe,
+ Callable,
}
impl HighlightTag {
@@ -122,6 +123,7 @@ impl HighlightModifier {
HighlightModifier::Mutable,
HighlightModifier::Consuming,
HighlightModifier::Unsafe,
+ HighlightModifier::Callable,
];
fn as_str(self) -> &'static str {
@@ -134,6 +136,7 @@ impl HighlightModifier {
HighlightModifier::Mutable => "mutable",
HighlightModifier::Consuming => "consuming",
HighlightModifier::Unsafe => "unsafe",
+ HighlightModifier::Callable => "callable",
}
}
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlighting.html b/crates/ide/src/syntax_highlighting/test_data/highlighting.html
index 0bb0928e44..0cb84866d6 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlighting.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlighting.html
@@ -44,6 +44,17 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
pub trait Copy {}
}
+pub mod ops {
+ #[lang = "fn_once"]
+ pub trait FnOnce<Args> {}
+
+ #[lang = "fn_mut"]
+ pub trait FnMut<Args>: FnOnce<Args> {}
+
+ #[lang = "fn"]
+ pub trait Fn<Args>: FnMut<Args> {}
+}
+
struct Foo {
pub x: i32,
@@ -99,6 +110,11 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
foo::<'a, i32>()
}
+use ops::Fn;
+fn baz<F: Fn() -> ()>(f: F) {
+ f()
+}
+
macro_rules! def_fn {
($($tt:tt)*) => {$($tt)*}
}
@@ -157,6 +173,9 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
copy.quop();
copy.qux();
copy.baz(copy);
+
+ let a = |x| x;
+ let bar = Foo::baz;
}
enum Option<T> {
diff --git a/crates/ide/src/syntax_highlighting/tests.rs b/crates/ide/src/syntax_highlighting/tests.rs
index 126363b8be..da20c300e2 100644
--- a/crates/ide/src/syntax_highlighting/tests.rs
+++ b/crates/ide/src/syntax_highlighting/tests.rs
@@ -18,6 +18,17 @@ pub mod marker {
pub trait Copy {}
}
+pub mod ops {
+ #[lang = "fn_once"]
+    pub trait FnOnce<Args> {}
+
+    #[lang = "fn_mut"]
+    pub trait FnMut<Args>: FnOnce<Args> {}
+
+    #[lang = "fn"]
+    pub trait Fn<Args>: FnMut<Args> {}
+}
+
struct Foo {
pub x: i32,
@@ -73,6 +84,11 @@ fn foo<'a, T>() -> T {
foo::<'a, i32>()
}
+use ops::Fn;
+fn baz<F: Fn() -> ()>(f: F) {
+ f()
+}
+
macro_rules! def_fn {
($($tt:tt)*) => {$($tt)*}
}
@@ -131,6 +147,9 @@ fn main() {
copy.quop();
copy.qux();
copy.baz(copy);
+
+ let a = |x| x;
+ let bar = Foo::baz;
}
enum Option<T> {
diff --git a/crates/rust-analyzer/src/semantic_tokens.rs b/crates/rust-analyzer/src/semantic_tokens.rs
index a6c4d6099b..7df28c9ddc 100644
--- a/crates/rust-analyzer/src/semantic_tokens.rs
+++ b/crates/rust-analyzer/src/semantic_tokens.rs
@@ -77,6 +77,7 @@ define_semantic_token_modifiers![
(CONSUMING, "consuming"),
(UNSAFE, "unsafe"),
(ATTRIBUTE_MODIFIER, "attribute"),
+ (CALLABLE, "callable"),
];
#[derive(Default)]
diff --git a/crates/rust-analyzer/src/to_proto.rs b/crates/rust-analyzer/src/to_proto.rs
index d0fb92f89b..0d34970bca 100644
--- a/crates/rust-analyzer/src/to_proto.rs
+++ b/crates/rust-analyzer/src/to_proto.rs
@@ -425,6 +425,7 @@ fn semantic_token_type_and_modifiers(
HighlightModifier::Mutable => semantic_tokens::MUTABLE,
HighlightModifier::Consuming => semantic_tokens::CONSUMING,
HighlightModifier::Unsafe => semantic_tokens::UNSAFE,
+ HighlightModifier::Callable => semantic_tokens::CALLABLE,
};
mods |= modifier;
}
diff --git a/crates/syntax/Cargo.toml b/crates/syntax/Cargo.toml
index c343f2f70a..aa39ce554c 100644
--- a/crates/syntax/Cargo.toml
+++ b/crates/syntax/Cargo.toml
@@ -17,6 +17,7 @@ rustc_lexer = { version = "683.0.0", package = "rustc-ap-rustc_lexer" }
rustc-hash = "1.1.0"
arrayvec = "0.5.1"
once_cell = "1.3.1"
+indexmap = "1.4.0"
# This crate transitively depends on `smol_str` via `rowan`.
# ideally, `serde` should be enabled by `rust-analyzer`, but we enable it here
# to reduce number of compilations
@@ -26,10 +27,9 @@ serde = { version = "1.0.106", features = ["derive"] }
stdx = { path = "../stdx", version = "0.0.0" }
text_edit = { path = "../text_edit", version = "0.0.0" }
parser = { path = "../parser", version = "0.0.0" }
+test_utils = { path = "../test_utils" }
[dev-dependencies]
walkdir = "2.3.1"
rayon = "1"
expect-test = "1.0"
-
-test_utils = { path = "../test_utils" }
diff --git a/crates/syntax/src/algo.rs b/crates/syntax/src/algo.rs
index ea199f9b89..4f9a7a6e89 100644
--- a/crates/syntax/src/algo.rs
+++ b/crates/syntax/src/algo.rs
@@ -2,11 +2,14 @@
use std::{
fmt,
+ hash::BuildHasherDefault,
ops::{self, RangeInclusive},
};
+use indexmap::IndexMap;
use itertools::Itertools;
use rustc_hash::FxHashMap;
+use test_utils::mark;
use text_edit::TextEditBuilder;
use crate::{
@@ -106,42 +109,56 @@ pub enum InsertPosition<T> {
After(T),
}
+type FxIndexMap<K, V> = IndexMap<K, V, BuildHasherDefault<rustc_hash::FxHasher>>;
+
+#[derive(Debug)]
pub struct TreeDiff {
    replacements: FxHashMap<SyntaxElement, SyntaxElement>,
+    deletions: Vec<SyntaxElement>,
+    // the vec as well as the indexmap are both here to preserve order
+    insertions: FxIndexMap<SyntaxElement, Vec<SyntaxElement>>,
}
impl TreeDiff {
pub fn into_text_edit(&self, builder: &mut TextEditBuilder) {
+ for (anchor, to) in self.insertions.iter() {
+ to.iter().for_each(|to| builder.insert(anchor.text_range().end(), to.to_string()));
+ }
for (from, to) in self.replacements.iter() {
builder.replace(from.text_range(), to.to_string())
}
+ for text_range in self.deletions.iter().map(SyntaxElement::text_range) {
+ builder.delete(text_range);
+ }
}
pub fn is_empty(&self) -> bool {
- self.replacements.is_empty()
+ self.replacements.is_empty() && self.deletions.is_empty() && self.insertions.is_empty()
}
}
/// Finds minimal the diff, which, applied to `from`, will result in `to`.
///
-/// Specifically, returns a map whose keys are descendants of `from` and values
-/// are descendants of `to`, such that `replace_descendants(from, map) == to`.
+/// Specifically, returns a structure that consists of replacements, insertions and deletions,
+/// such that applying it to `from` will result in `to`.
///
-/// A trivial solution is a singleton map `{ from: to }`, but this function
-/// tries to find a more fine-grained diff.
+/// This function tries to find a fine-grained diff.
pub fn diff(from: &SyntaxNode, to: &SyntaxNode) -> TreeDiff {
- let mut buf = FxHashMap::default();
- // FIXME: this is both horrible inefficient and gives larger than
- // necessary diff. I bet there's a cool algorithm to diff trees properly.
- go(&mut buf, from.clone().into(), to.clone().into());
- return TreeDiff { replacements: buf };
+ let mut diff = TreeDiff {
+ replacements: FxHashMap::default(),
+ insertions: FxIndexMap::default(),
+ deletions: Vec::new(),
+ };
+ let (from, to) = (from.clone().into(), to.clone().into());
- fn go(
-        buf: &mut FxHashMap<SyntaxElement, SyntaxElement>,
- lhs: SyntaxElement,
- rhs: SyntaxElement,
- ) {
- if lhs.kind() == rhs.kind()
+    // FIXME: this is horribly inefficient. I bet there's a cool algorithm to diff trees properly.
+ if !syntax_element_eq(&from, &to) {
+ go(&mut diff, from, to);
+ }
+ return diff;
+
+ fn syntax_element_eq(lhs: &SyntaxElement, rhs: &SyntaxElement) -> bool {
+ lhs.kind() == rhs.kind()
&& lhs.text_range().len() == rhs.text_range().len()
&& match (&lhs, &rhs) {
(NodeOrToken::Node(lhs), NodeOrToken::Node(rhs)) => {
@@ -150,18 +167,47 @@ pub fn diff(from: &SyntaxNode, to: &SyntaxNode) -> TreeDiff {
(NodeOrToken::Token(lhs), NodeOrToken::Token(rhs)) => lhs.text() == rhs.text(),
_ => false,
}
- {
- return;
- }
- if let (Some(lhs), Some(rhs)) = (lhs.as_node(), rhs.as_node()) {
- if lhs.children_with_tokens().count() == rhs.children_with_tokens().count() {
- for (lhs, rhs) in lhs.children_with_tokens().zip(rhs.children_with_tokens()) {
- go(buf, lhs, rhs)
- }
+ }
+
+ fn go(diff: &mut TreeDiff, lhs: SyntaxElement, rhs: SyntaxElement) {
+ let (lhs, rhs) = match lhs.as_node().zip(rhs.as_node()) {
+ Some((lhs, rhs)) => (lhs, rhs),
+ _ => {
+ mark::hit!(diff_node_token_replace);
+ diff.replacements.insert(lhs, rhs);
return;
}
+ };
+
+ let mut rhs_children = rhs.children_with_tokens();
+ let mut lhs_children = lhs.children_with_tokens();
+ let mut last_lhs = None;
+ loop {
+ let lhs_child = lhs_children.next();
+ match (lhs_child.clone(), rhs_children.next()) {
+ (None, None) => break,
+ (None, Some(element)) => match last_lhs.clone() {
+ Some(prev) => {
+ mark::hit!(diff_insert);
+ diff.insertions.entry(prev).or_insert_with(Vec::new).push(element);
+ }
+                    // first iteration, meaning we have no anchor element to insert after,
+                    // therefore replace the parent node instead
+ None => {
+ mark::hit!(diff_replace_parent);
+ diff.replacements.insert(lhs.clone().into(), rhs.clone().into());
+ break;
+ }
+ },
+ (Some(element), None) => {
+ mark::hit!(diff_delete);
+ diff.deletions.push(element);
+ }
+ (Some(ref lhs_ele), Some(ref rhs_ele)) if syntax_element_eq(lhs_ele, rhs_ele) => {}
+ (Some(lhs_ele), Some(rhs_ele)) => go(diff, lhs_ele, rhs_ele),
+ }
+ last_lhs = lhs_child.or(last_lhs);
}
- buf.insert(lhs, rhs);
}
}
@@ -404,3 +450,322 @@ fn to_green_element(element: SyntaxElement) -> NodeOrToken<rowan::GreenNode, rowan::GreenToken> {
         NodeOrToken::Token(it) => it.green().clone().into(),
}
}
+
+#[cfg(test)]
+mod tests {
+ use expect_test::{expect, Expect};
+ use itertools::Itertools;
+ use parser::SyntaxKind;
+ use test_utils::mark;
+ use text_edit::TextEdit;
+
+ use crate::{AstNode, SyntaxElement};
+
+ #[test]
+ fn replace_node_token() {
+ mark::check!(diff_node_token_replace);
+ check_diff(
+ r#"use node;"#,
+ r#"ident"#,
+ expect![[r#"
+ insertions:
+
+
+
+ replacements:
+
+ Line 0: Token(USE_KW@0..3 "use") -> ident
+
+ deletions:
+
+ Line 1: " "
+ Line 1: node
+ Line 1: ;
+ "#]],
+ );
+ }
+
+ #[test]
+ fn insert() {
+ mark::check!(diff_insert);
+ check_diff(
+ r#"use foo;"#,
+ r#"use foo;
+use bar;"#,
+ expect![[r#"
+ insertions:
+
+ Line 0: Node(USE@0..8)
+ -> "\n"
+ -> use bar;
+
+ replacements:
+
+
+
+ deletions:
+
+
+ "#]],
+ );
+ }
+
+ #[test]
+ fn replace_parent() {
+ mark::check!(diff_replace_parent);
+ check_diff(
+ r#""#,
+ r#"use foo::bar;"#,
+ expect![[r#"
+ insertions:
+
+
+
+ replacements:
+
+ Line 0: Node(SOURCE_FILE@0..0) -> use foo::bar;
+
+ deletions:
+
+
+ "#]],
+ );
+ }
+
+ #[test]
+ fn delete() {
+ mark::check!(diff_delete);
+ check_diff(
+ r#"use foo;
+ use bar;"#,
+ r#"use foo;"#,
+ expect![[r#"
+ insertions:
+
+
+
+ replacements:
+
+
+
+ deletions:
+
+ Line 1: "\n "
+ Line 2: use bar;
+ "#]],
+ );
+ }
+
+ #[test]
+ fn insert_use() {
+ check_diff(
+ r#"
+use expect_test::{expect, Expect};
+
+use crate::AstNode;
+"#,
+ r#"
+use expect_test::{expect, Expect};
+use text_edit::TextEdit;
+
+use crate::AstNode;
+"#,
+ expect![[r#"
+ insertions:
+
+ Line 4: Token(WHITESPACE@56..57 "\n")
+ -> use crate::AstNode;
+ -> "\n"
+
+ replacements:
+
+ Line 2: Token(WHITESPACE@35..37 "\n\n") -> "\n"
+ Line 4: Token(CRATE_KW@41..46 "crate") -> text_edit
+ Line 4: Token(IDENT@48..55 "AstNode") -> TextEdit
+ Line 4: Token(WHITESPACE@56..57 "\n") -> "\n\n"
+
+ deletions:
+
+
+ "#]],
+ )
+ }
+
+ #[test]
+ fn remove_use() {
+ check_diff(
+ r#"
+use expect_test::{expect, Expect};
+use text_edit::TextEdit;
+
+use crate::AstNode;
+"#,
+ r#"
+use expect_test::{expect, Expect};
+
+use crate::AstNode;
+"#,
+ expect![[r#"
+ insertions:
+
+
+
+ replacements:
+
+ Line 2: Token(WHITESPACE@35..36 "\n") -> "\n\n"
+ Line 3: Node(NAME_REF@40..49) -> crate
+ Line 3: Token(IDENT@51..59 "TextEdit") -> AstNode
+ Line 3: Token(WHITESPACE@60..62 "\n\n") -> "\n"
+
+ deletions:
+
+ Line 4: use crate::AstNode;
+ Line 5: "\n"
+ "#]],
+ )
+ }
+
+ #[test]
+ fn merge_use() {
+ check_diff(
+ r#"
+use std::{
+ fmt,
+ hash::BuildHasherDefault,
+ ops::{self, RangeInclusive},
+};
+"#,
+ r#"
+use std::fmt;
+use std::hash::BuildHasherDefault;
+use std::ops::{self, RangeInclusive};
+"#,
+ expect![[r#"
+ insertions:
+
+ Line 2: Node(PATH_SEGMENT@5..8)
+ -> ::
+ -> fmt
+ Line 6: Token(WHITESPACE@86..87 "\n")
+ -> use std::hash::BuildHasherDefault;
+ -> "\n"
+ -> use std::ops::{self, RangeInclusive};
+ -> "\n"
+
+ replacements:
+
+ Line 2: Token(IDENT@5..8 "std") -> std
+
+ deletions:
+
+ Line 2: ::
+ Line 2: {
+ fmt,
+ hash::BuildHasherDefault,
+ ops::{self, RangeInclusive},
+ }
+ "#]],
+ )
+ }
+
+ #[test]
+ fn early_return_assist() {
+ check_diff(
+ r#"
+fn main() {
+ if let Ok(x) = Err(92) {
+ foo(x);
+ }
+}
+ "#,
+ r#"
+fn main() {
+ let x = match Err(92) {
+ Ok(it) => it,
+ _ => return,
+ };
+ foo(x);
+}
+ "#,
+ expect![[r#"
+ insertions:
+
+ Line 3: Node(BLOCK_EXPR@40..63)
+ -> " "
+ -> match Err(92) {
+ Ok(it) => it,
+ _ => return,
+ }
+ -> ;
+ Line 5: Token(R_CURLY@64..65 "}")
+ -> "\n"
+ -> }
+
+ replacements:
+
+ Line 3: Token(IF_KW@17..19 "if") -> let
+ Line 3: Token(LET_KW@20..23 "let") -> x
+ Line 3: Node(BLOCK_EXPR@40..63) -> =
+ Line 5: Token(WHITESPACE@63..64 "\n") -> "\n "
+ Line 5: Token(R_CURLY@64..65 "}") -> foo(x);
+
+ deletions:
+
+ Line 3: " "
+ Line 3: Ok(x)
+ Line 3: " "
+ Line 3: =
+ Line 3: " "
+ Line 3: Err(92)
+ "#]],
+ )
+ }
+
+ fn check_diff(from: &str, to: &str, expected_diff: Expect) {
+ let from_node = crate::SourceFile::parse(from).tree().syntax().clone();
+ let to_node = crate::SourceFile::parse(to).tree().syntax().clone();
+ let diff = super::diff(&from_node, &to_node);
+
+ let line_number =
+ |syn: &SyntaxElement| from[..syn.text_range().start().into()].lines().count();
+
+ let fmt_syntax = |syn: &SyntaxElement| match syn.kind() {
+ SyntaxKind::WHITESPACE => format!("{:?}", syn.to_string()),
+ _ => format!("{}", syn),
+ };
+
+ let insertions = diff.insertions.iter().format_with("\n", |(k, v), f| {
+ f(&format!(
+ "Line {}: {:?}\n-> {}",
+ line_number(k),
+ k,
+ v.iter().format_with("\n-> ", |v, f| f(&fmt_syntax(v)))
+ ))
+ });
+
+ let replacements = diff
+ .replacements
+ .iter()
+ .sorted_by_key(|(syntax, _)| syntax.text_range().start())
+ .format_with("\n", |(k, v), f| {
+ f(&format!("Line {}: {:?} -> {}", line_number(k), k, fmt_syntax(v)))
+ });
+
+ let deletions = diff
+ .deletions
+ .iter()
+ .format_with("\n", |v, f| f(&format!("Line {}: {}", line_number(v), &fmt_syntax(v))));
+
+ let actual = format!(
+ "insertions:\n\n{}\n\nreplacements:\n\n{}\n\ndeletions:\n\n{}\n",
+ insertions, replacements, deletions
+ );
+ expected_diff.assert_eq(&actual);
+
+ let mut from = from.to_owned();
+ let mut text_edit = TextEdit::builder();
+ diff.into_text_edit(&mut text_edit);
+ text_edit.finish().apply(&mut from);
+ assert_eq!(&*from, to, "diff did not turn `from` to `to`");
+ }
+}
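
For orientation, a minimal usage sketch of the reworked `TreeDiff` API (not part of the patch; `rewrite` is a hypothetical helper that mirrors `check_diff` above): parse two files, compute the tree diff, and apply it to the original text as a text edit.

// Sketch assuming the `syntax` and `text_edit` crates as used in this patch.
use syntax::{algo, AstNode, SourceFile};
use text_edit::TextEdit;

fn rewrite(from: &str, to: &str) -> String {
    let from_node = SourceFile::parse(from).tree().syntax().clone();
    let to_node = SourceFile::parse(to).tree().syntax().clone();

    // The diff now records replacements, insertions and deletions
    // instead of a single replacement map.
    let diff = algo::diff(&from_node, &to_node);

    let mut builder = TextEdit::builder();
    diff.into_text_edit(&mut builder);

    let mut text = from.to_owned();
    builder.finish().apply(&mut text);
    text // equals `to` when the diff round-trips correctly
}
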
diff --git a/editors/code/package.json b/editors/code/package.json
index 4bd3117fc8..af845d7bc4 100644
--- a/editors/code/package.json
+++ b/editors/code/package.json
@@ -929,6 +929,10 @@
{
"id": "consuming",
"description": "Style for non-Copy lvalues consumed by method/function call"
+ },
+ {
+ "id": "callable",
+ "description": "Style for variables/parameters that can be used in call expressions"
}
],
"semanticTokenScopes": [