Merge remote-tracking branch 'upstream/master' into 503-hover-doc-links

Zac Pullar-Strecker 2020-08-25 16:44:36 +12:00
commit b835f06cec
46 changed files with 970 additions and 846 deletions

.github/workflows/publish.yml (new file)

@@ -0,0 +1,44 @@
name: publish
on:
  workflow_dispatch: # We can add version input when 1.0 is released and scheduled releases are removed

  schedule:
  - cron: "0 0 * * *" # midnight UTC

  push:
    branches:
    - release

jobs:
  publish:
    name: publish
    runs-on: ubuntu-16.04
    steps:
    - name: Checkout repository
      uses: actions/checkout@v2
      with:
        fetch-depth: 0

    - name: Install Rust toolchain
      uses: actions-rs/toolchain@v1
      with:
        toolchain: stable
        profile: minimal
        override: true

    - name: Install cargo-workspaces
      uses: actions-rs/install@v0.1
      with:
        crate: cargo-workspaces

    - name: Release
      env:
        CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_REGISTRY_TOKEN }}
        PATCH: ${{ github.run_number }}
      shell: bash
      run: |
        git config --global user.email "runner@gha.local"
        git config --global user.name "Github Action"
        rm Cargo.lock
        cargo workspaces rename ra_ap_%n
        cargo workspaces publish --yes --force '*' --exact --no-git-commit --allow-dirty --skip-published custom 0.0.$PATCH
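Note on the manifest changes further down: `cargo publish` rejects path-only dependencies and empty metadata, which is why every workspace Cargo.toml in this commit gains `description = "TBD"` and a `version = "0.0.0"` on its path dependencies (the `path` part is dropped at publish time and only the version requirement is recorded). A minimal sketch of the resulting shape, with an illustrative crate name; the `ra_ap_` prefix and the `0.0.<run number>` version come from the workflow's rename and publish steps above, not from the manifest itself:

```toml
# Hypothetical manifest shape after this commit (crate name is illustrative).
[package]
name = "assists"              # published as ra_ap_assists via `cargo workspaces rename ra_ap_%n`
version = "0.0.0"             # replaced by 0.0.$PATCH (the GitHub run number) at publish time
description = "TBD"           # crates.io requires a non-empty description
license = "MIT OR Apache-2.0"
edition = "2018"

[dependencies]
# `path` is ignored by crates.io; the `version` requirement is what gets recorded.
syntax = { path = "../syntax", version = "0.0.0" }
```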

Cargo.lock (generated)

@@ -1718,9 +1718,9 @@ dependencies = [
 [[package]]
 name = "ungrammar"
-version = "1.1.2"
+version = "1.1.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bab6142ac77be714b1ea78faca6efaed5478c50724786b0fe80d8528d10692b3"
+checksum = "ca4d39065b45f658d33013f7cc93ee050708cd543f6e07dd15b4293fcf217e12"
 [[package]]
 name = "unicase"


@@ -1,6 +1,7 @@
 [package]
 name = "arena"
 version = "0.0.0"
+description = "TBD"
 license = "MIT OR Apache-2.0"
 authors = ["rust-analyzer developers"]
 edition = "2018"


@@ -1,6 +1,7 @@
 [package]
 name = "assists"
 version = "0.0.0"
+description = "TBD"
 license = "MIT OR Apache-2.0"
 authors = ["rust-analyzer developers"]
 edition = "2018"

@@ -13,11 +14,11 @@ rustc-hash = "1.1.0"
 itertools = "0.9.0"
 either = "1.5.3"

-stdx = { path = "../stdx" }
-syntax = { path = "../syntax" }
-text_edit = { path = "../text_edit" }
-profile = { path = "../profile" }
-base_db = { path = "../base_db" }
-ide_db = { path = "../ide_db" }
-hir = { path = "../hir" }
-test_utils = { path = "../test_utils" }
+stdx = { path = "../stdx", version = "0.0.0" }
+syntax = { path = "../syntax", version = "0.0.0" }
+text_edit = { path = "../text_edit", version = "0.0.0" }
+profile = { path = "../profile", version = "0.0.0" }
+base_db = { path = "../base_db", version = "0.0.0" }
+ide_db = { path = "../ide_db", version = "0.0.0" }
+hir = { path = "../hir", version = "0.0.0" }
+test_utils = { path = "../test_utils", version = "0.0.0" }


@@ -73,10 +73,6 @@ impl<'a> AssistContext<'a> {
         self.sema.db
     }

-    pub(crate) fn source_file(&self) -> &SourceFile {
-        &self.source_file
-    }
-
     // NB, this ignores active selection.
     pub(crate) fn offset(&self) -> TextSize {
         self.frange.range.start()


@@ -1,10 +1,10 @@
 use either::Either;
-use hir::{AssocItem, MacroDef, ModuleDef, Name, PathResolution, ScopeDef, SemanticsScope};
+use hir::{AssocItem, MacroDef, Module, ModuleDef, Name, PathResolution, ScopeDef};
 use ide_db::{
     defs::{classify_name_ref, Definition, NameRefClass},
-    RootDatabase,
+    search::SearchScope,
 };
-use syntax::{algo, ast, match_ast, AstNode, SyntaxNode, SyntaxToken, T};
+use syntax::{algo, ast, AstNode, Direction, SyntaxNode, SyntaxToken, T};

 use crate::{
     assist_context::{AssistBuilder, AssistContext, Assists},
@ -38,140 +38,259 @@ use crate::{
// ``` // ```
pub(crate) fn expand_glob_import(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { pub(crate) fn expand_glob_import(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
let star = ctx.find_token_at_offset(T![*])?; let star = ctx.find_token_at_offset(T![*])?;
let mod_path = find_mod_path(&star)?; let (parent, mod_path) = find_parent_and_path(&star)?;
let module = match ctx.sema.resolve_path(&mod_path)? { let target_module = match ctx.sema.resolve_path(&mod_path)? {
PathResolution::Def(ModuleDef::Module(it)) => it, PathResolution::Def(ModuleDef::Module(it)) => it,
_ => return None, _ => return None,
}; };
let source_file = ctx.source_file(); let current_scope = ctx.sema.scope(&star.parent());
let scope = ctx.sema.scope_at_offset(source_file.syntax(), ctx.offset()); let current_module = current_scope.module()?;
let defs_in_mod = find_defs_in_mod(ctx, scope, module)?; let refs_in_target = find_refs_in_mod(ctx, target_module, Some(current_module))?;
let name_refs_in_source_file = let imported_defs = find_imported_defs(ctx, star)?;
source_file.syntax().descendants().filter_map(ast::NameRef::cast).collect(); let names_to_import = find_names_to_import(ctx, refs_in_target, imported_defs);
let used_names = find_used_names(ctx, defs_in_mod, name_refs_in_source_file);
let parent = star.parent().parent()?; let target = parent.clone().either(|n| n.syntax().clone(), |n| n.syntax().clone());
acc.add( acc.add(
AssistId("expand_glob_import", AssistKind::RefactorRewrite), AssistId("expand_glob_import", AssistKind::RefactorRewrite),
"Expand glob import", "Expand glob import",
parent.text_range(), target.text_range(),
|builder| { |builder| {
replace_ast(builder, &parent, mod_path, used_names); replace_ast(builder, parent, mod_path, names_to_import);
}, },
) )
} }
fn find_mod_path(star: &SyntaxToken) -> Option<ast::Path> { fn find_parent_and_path(
star.ancestors().find_map(|n| ast::UseTree::cast(n).and_then(|u| u.path())) star: &SyntaxToken,
) -> Option<(Either<ast::UseTree, ast::UseTreeList>, ast::Path)> {
return star.ancestors().find_map(|n| {
find_use_tree_list(n.clone())
.and_then(|(u, p)| Some((Either::Right(u), p)))
.or_else(|| find_use_tree(n).and_then(|(u, p)| Some((Either::Left(u), p))))
});
fn find_use_tree_list(n: SyntaxNode) -> Option<(ast::UseTreeList, ast::Path)> {
let use_tree_list = ast::UseTreeList::cast(n)?;
let path = use_tree_list.parent_use_tree().path()?;
Some((use_tree_list, path))
} }
#[derive(PartialEq)] fn find_use_tree(n: SyntaxNode) -> Option<(ast::UseTree, ast::Path)> {
let use_tree = ast::UseTree::cast(n)?;
let path = use_tree.path()?;
Some((use_tree, path))
}
}
#[derive(Debug, PartialEq, Clone)]
enum Def { enum Def {
ModuleDef(ModuleDef), ModuleDef(ModuleDef),
MacroDef(MacroDef), MacroDef(MacroDef),
} }
impl Def { impl Def {
fn name(&self, db: &RootDatabase) -> Option<Name> { fn is_referenced_in(&self, ctx: &AssistContext) -> bool {
match self { let def = match self {
Def::ModuleDef(def) => def.name(db), Def::ModuleDef(def) => Definition::ModuleDef(*def),
Def::MacroDef(def) => def.name(db), Def::MacroDef(def) => Definition::Macro(*def),
};
let search_scope = SearchScope::single_file(ctx.frange.file_id);
def.usages(&ctx.sema).in_scope(search_scope).at_least_one()
}
}
#[derive(Debug, Clone)]
struct Ref {
// could be alias
visible_name: Name,
def: Def,
}
impl Ref {
fn from_scope_def(name: Name, scope_def: ScopeDef) -> Option<Self> {
match scope_def {
ScopeDef::ModuleDef(def) => Some(Ref { visible_name: name, def: Def::ModuleDef(def) }),
ScopeDef::MacroDef(def) => Some(Ref { visible_name: name, def: Def::MacroDef(def) }),
_ => None,
} }
} }
} }
fn find_defs_in_mod( #[derive(Debug, Clone)]
struct Refs(Vec<Ref>);
impl Refs {
fn used_refs(&self, ctx: &AssistContext) -> Refs {
Refs(
self.0
.clone()
.into_iter()
.filter(|r| {
if let Def::ModuleDef(ModuleDef::Trait(tr)) = r.def {
if tr
.items(ctx.db())
.into_iter()
.find(|ai| {
if let AssocItem::Function(f) = *ai {
Def::ModuleDef(ModuleDef::Function(f)).is_referenced_in(ctx)
} else {
false
}
})
.is_some()
{
return true;
}
}
r.def.is_referenced_in(ctx)
})
.collect(),
)
}
fn filter_out_by_defs(&self, defs: Vec<Def>) -> Refs {
Refs(self.0.clone().into_iter().filter(|r| !defs.contains(&r.def)).collect())
}
}
fn find_refs_in_mod(
ctx: &AssistContext, ctx: &AssistContext,
from: SemanticsScope<'_>, module: Module,
module: hir::Module, visible_from: Option<Module>,
) -> Option<Vec<Def>> { ) -> Option<Refs> {
let module_scope = module.scope(ctx.db(), from.module()); if let Some(from) = visible_from {
if !is_mod_visible_from(ctx, module, from) {
let mut defs = vec![]; return None;
for (_, def) in module_scope {
match def {
ScopeDef::ModuleDef(def) => defs.push(Def::ModuleDef(def)),
ScopeDef::MacroDef(def) => defs.push(Def::MacroDef(def)),
_ => continue,
} }
} }
Some(defs) let module_scope = module.scope(ctx.db(), visible_from);
let refs = module_scope.into_iter().filter_map(|(n, d)| Ref::from_scope_def(n, d)).collect();
Some(Refs(refs))
} }
fn find_used_names( fn is_mod_visible_from(ctx: &AssistContext, module: Module, from: Module) -> bool {
ctx: &AssistContext, match module.parent(ctx.db()) {
defs_in_mod: Vec<Def>, Some(parent) => {
name_refs_in_source_file: Vec<ast::NameRef>, parent.visibility_of(ctx.db(), &ModuleDef::Module(module)).map_or(true, |vis| {
) -> Vec<Name> { vis.is_visible_from(ctx.db(), from.into()) && is_mod_visible_from(ctx, parent, from)
let defs_in_source_file = name_refs_in_source_file })
}
None => true,
}
}
// looks for name refs in parent use block's siblings
//
// mod bar {
// mod qux {
// struct Qux;
// }
//
// pub use qux::Qux;
// }
//
// ↓ ---------------
// use foo::*<|>;
// use baz::Baz;
// ↑ ---------------
fn find_imported_defs(ctx: &AssistContext, star: SyntaxToken) -> Option<Vec<Def>> {
let parent_use_item_syntax =
star.ancestors().find_map(|n| if ast::Use::can_cast(n.kind()) { Some(n) } else { None })?;
Some(
[Direction::Prev, Direction::Next]
.iter() .iter()
.filter_map(|r| classify_name_ref(&ctx.sema, r)) .map(|dir| {
.filter_map(|rc| match rc { parent_use_item_syntax
.siblings(dir.to_owned())
.filter(|n| ast::Use::can_cast(n.kind()))
})
.flatten()
.filter_map(|n| Some(n.descendants().filter_map(ast::NameRef::cast)))
.flatten()
.filter_map(|r| match classify_name_ref(&ctx.sema, &r)? {
NameRefClass::Definition(Definition::ModuleDef(def)) => Some(Def::ModuleDef(def)), NameRefClass::Definition(Definition::ModuleDef(def)) => Some(Def::ModuleDef(def)),
NameRefClass::Definition(Definition::Macro(def)) => Some(Def::MacroDef(def)), NameRefClass::Definition(Definition::Macro(def)) => Some(Def::MacroDef(def)),
_ => None, _ => None,
}) })
.collect::<Vec<Def>>(); .collect(),
)
defs_in_mod
.iter()
.filter(|def| {
if let Def::ModuleDef(ModuleDef::Trait(tr)) = def {
for item in tr.items(ctx.db()) {
if let AssocItem::Function(f) = item {
if defs_in_source_file.contains(&Def::ModuleDef(ModuleDef::Function(f))) {
return true;
}
}
}
} }
defs_in_source_file.contains(def) fn find_names_to_import(
}) ctx: &AssistContext,
.filter_map(|d| d.name(ctx.db())) refs_in_target: Refs,
.collect() imported_defs: Vec<Def>,
) -> Vec<Name> {
let used_refs = refs_in_target.used_refs(ctx).filter_out_by_defs(imported_defs);
used_refs.0.iter().map(|r| r.visible_name.clone()).collect()
} }
fn replace_ast( fn replace_ast(
builder: &mut AssistBuilder, builder: &mut AssistBuilder,
node: &SyntaxNode, parent: Either<ast::UseTree, ast::UseTreeList>,
path: ast::Path, path: ast::Path,
used_names: Vec<Name>, names_to_import: Vec<Name>,
) { ) {
let replacement: Either<ast::UseTree, ast::UseTreeList> = match used_names.as_slice() { let existing_use_trees = match parent.clone() {
[name] => Either::Left(ast::make::use_tree( Either::Left(_) => vec![],
ast::make::path_from_text(&format!("{}::{}", path, name)), Either::Right(u) => u
.use_trees()
.filter(|n|
// filter out star
n.star_token().is_none())
.collect(),
};
let new_use_trees: Vec<ast::UseTree> = names_to_import
.iter()
.map(|n| ast::make::use_tree(ast::make::path_from_text(&n.to_string()), None, None, false))
.collect();
let use_trees = [&existing_use_trees[..], &new_use_trees[..]].concat();
match use_trees.as_slice() {
[name] => {
if let Some(end_path) = name.path() {
let replacement = ast::make::use_tree(
ast::make::path_from_text(&format!("{}::{}", path, end_path)),
None, None,
None, None,
false, false,
)), );
names => Either::Right(ast::make::use_tree_list(names.iter().map(|n| {
ast::make::use_tree(ast::make::path_from_text(&n.to_string()), None, None, false)
}))),
};
let mut replace_node = |replacement: Either<ast::UseTree, ast::UseTreeList>| { algo::diff(
algo::diff(node, &replacement.either(|u| u.syntax().clone(), |ut| ut.syntax().clone())) &parent.either(|n| n.syntax().clone(), |n| n.syntax().clone()),
replacement.syntax(),
)
.into_text_edit(builder.text_edit_builder()); .into_text_edit(builder.text_edit_builder());
}
}
names => {
let replacement = match parent {
Either::Left(_) => ast::make::use_tree(
path,
Some(ast::make::use_tree_list(names.to_owned())),
None,
false,
)
.syntax()
.clone(),
Either::Right(_) => ast::make::use_tree_list(names.to_owned()).syntax().clone(),
}; };
match_ast! { algo::diff(
match node { &parent.either(|n| n.syntax().clone(), |n| n.syntax().clone()),
ast::UseTree(use_tree) => { &replacement,
replace_node(replacement); )
}, .into_text_edit(builder.text_edit_builder());
ast::UseTreeList(use_tree_list) => {
replace_node(replacement);
},
ast::Use(use_item) => {
builder.replace_ast(use_item, ast::make::use_(replacement.left_or_else(|ut| ast::make::use_tree(path, Some(ut), None, false))));
},
_ => {},
}
} }
};
} }
#[cfg(test)] #[cfg(test)]
@ -245,7 +364,46 @@ mod foo {
pub fn f() {} pub fn f() {}
} }
use foo::{Baz, Bar, f}; use foo::{f, Baz, Bar};
fn qux(bar: Bar, baz: Baz) {
f();
}
",
)
}
#[test]
fn expanding_glob_import_with_existing_uses_in_same_module() {
check_assist(
expand_glob_import,
r"
mod foo {
pub struct Bar;
pub struct Baz;
pub struct Qux;
pub fn f() {}
}
use foo::Bar;
use foo::{*<|>, f};
fn qux(bar: Bar, baz: Baz) {
f();
}
",
r"
mod foo {
pub struct Bar;
pub struct Baz;
pub struct Qux;
pub fn f() {}
}
use foo::Bar;
use foo::{f, Baz};
fn qux(bar: Bar, baz: Baz) { fn qux(bar: Bar, baz: Baz) {
f(); f();
@ -260,7 +418,7 @@ fn qux(bar: Bar, baz: Baz) {
expand_glob_import, expand_glob_import,
r" r"
mod foo { mod foo {
mod bar { pub mod bar {
pub struct Bar; pub struct Bar;
pub struct Baz; pub struct Baz;
pub struct Qux; pub struct Qux;
@ -268,7 +426,7 @@ mod foo {
pub fn f() {} pub fn f() {}
} }
mod baz { pub mod baz {
pub fn g() {} pub fn g() {}
} }
} }
@ -282,7 +440,7 @@ fn qux(bar: Bar, baz: Baz) {
", ",
r" r"
mod foo { mod foo {
mod bar { pub mod bar {
pub struct Bar; pub struct Bar;
pub struct Baz; pub struct Baz;
pub struct Qux; pub struct Qux;
@ -290,51 +448,330 @@ mod foo {
pub fn f() {} pub fn f() {}
} }
mod baz { pub mod baz {
pub fn g() {} pub fn g() {}
} }
} }
use foo::{bar::{Baz, Bar, f}, baz::*}; use foo::{bar::{f, Baz, Bar}, baz::*};
fn qux(bar: Bar, baz: Baz) { fn qux(bar: Bar, baz: Baz) {
f(); f();
g(); g();
} }
", ",
) );
check_assist(
expand_glob_import,
r"
mod foo {
pub mod bar {
pub struct Bar;
pub struct Baz;
pub struct Qux;
pub fn f() {}
}
pub mod baz {
pub fn g() {}
}
}
use foo::{bar::{Bar, Baz, f}, baz::*<|>};
fn qux(bar: Bar, baz: Baz) {
f();
g();
}
",
r"
mod foo {
pub mod bar {
pub struct Bar;
pub struct Baz;
pub struct Qux;
pub fn f() {}
}
pub mod baz {
pub fn g() {}
}
}
use foo::{bar::{Bar, Baz, f}, baz::g};
fn qux(bar: Bar, baz: Baz) {
f();
g();
}
",
);
check_assist(
expand_glob_import,
r"
mod foo {
pub mod bar {
pub struct Bar;
pub struct Baz;
pub struct Qux;
pub fn f() {}
}
pub mod baz {
pub fn g() {}
pub mod qux {
pub fn h() {}
pub fn m() {}
pub mod q {
pub fn j() {}
}
}
}
}
use foo::{
bar::{*, f},
baz::{g, qux::*<|>}
};
fn qux(bar: Bar, baz: Baz) {
f();
g();
h();
q::j();
}
",
r"
mod foo {
pub mod bar {
pub struct Bar;
pub struct Baz;
pub struct Qux;
pub fn f() {}
}
pub mod baz {
pub fn g() {}
pub mod qux {
pub fn h() {}
pub fn m() {}
pub mod q {
pub fn j() {}
}
}
}
}
use foo::{
bar::{*, f},
baz::{g, qux::{q, h}}
};
fn qux(bar: Bar, baz: Baz) {
f();
g();
h();
q::j();
}
",
);
check_assist(
expand_glob_import,
r"
mod foo {
pub mod bar {
pub struct Bar;
pub struct Baz;
pub struct Qux;
pub fn f() {}
}
pub mod baz {
pub fn g() {}
pub mod qux {
pub fn h() {}
pub fn m() {}
pub mod q {
pub fn j() {}
}
}
}
}
use foo::{
bar::{*, f},
baz::{g, qux::{h, q::*<|>}}
};
fn qux(bar: Bar, baz: Baz) {
f();
g();
h();
j();
}
",
r"
mod foo {
pub mod bar {
pub struct Bar;
pub struct Baz;
pub struct Qux;
pub fn f() {}
}
pub mod baz {
pub fn g() {}
pub mod qux {
pub fn h() {}
pub fn m() {}
pub mod q {
pub fn j() {}
}
}
}
}
use foo::{
bar::{*, f},
baz::{g, qux::{h, q::j}}
};
fn qux(bar: Bar, baz: Baz) {
f();
g();
h();
j();
}
",
);
check_assist(
expand_glob_import,
r"
mod foo {
pub mod bar {
pub struct Bar;
pub struct Baz;
pub struct Qux;
pub fn f() {}
}
pub mod baz {
pub fn g() {}
pub mod qux {
pub fn h() {}
pub fn m() {}
pub mod q {
pub fn j() {}
}
}
}
}
use foo::{
bar::{*, f},
baz::{g, qux::{q::j, *<|>}}
};
fn qux(bar: Bar, baz: Baz) {
f();
g();
h();
j();
}
",
r"
mod foo {
pub mod bar {
pub struct Bar;
pub struct Baz;
pub struct Qux;
pub fn f() {}
}
pub mod baz {
pub fn g() {}
pub mod qux {
pub fn h() {}
pub fn m() {}
pub mod q {
pub fn j() {}
}
}
}
}
use foo::{
bar::{*, f},
baz::{g, qux::{q::j, h}}
};
fn qux(bar: Bar, baz: Baz) {
f();
g();
h();
j();
}
",
);
} }
#[test] #[test]
fn expanding_glob_import_with_macro_defs() { fn expanding_glob_import_with_macro_defs() {
check_assist( // FIXME: this is currently fails because `Definition::find_usages` ignores macros
expand_glob_import, // https://github.com/rust-analyzer/rust-analyzer/issues/3484
r" //
//- /lib.rs crate:foo // check_assist(
#[macro_export] // expand_glob_import,
macro_rules! bar { // r"
() => () // //- /lib.rs crate:foo
} // #[macro_export]
// macro_rules! bar {
// () => ()
// }
pub fn baz() {} // pub fn baz() {}
//- /main.rs crate:main deps:foo // //- /main.rs crate:main deps:foo
use foo::*<|>; // use foo::*<|>;
fn main() { // fn main() {
bar!(); // bar!();
baz(); // baz();
} // }
", // ",
r" // r"
use foo::{bar, baz}; // use foo::{bar, baz};
fn main() { // fn main() {
bar!(); // bar!();
baz(); // baz();
} // }
", // ",
) // )
} }
#[test] #[test]
@ -362,7 +799,72 @@ fn main() {
().method(); ().method();
} }
", ",
) );
check_assist(
expand_glob_import,
r"
//- /lib.rs crate:foo
pub trait Tr {
fn method(&self) {}
}
impl Tr for () {}
pub trait Tr2 {
fn method2(&self) {}
}
impl Tr2 for () {}
//- /main.rs crate:main deps:foo
use foo::*<|>;
fn main() {
().method();
}
",
r"
use foo::Tr;
fn main() {
().method();
}
",
);
}
#[test]
fn expanding_is_not_applicable_if_target_module_is_not_accessible_from_current_scope() {
check_assist_not_applicable(
expand_glob_import,
r"
mod foo {
mod bar {
pub struct Bar;
}
}
use foo::bar::*<|>;
fn baz(bar: Bar) {}
",
);
check_assist_not_applicable(
expand_glob_import,
r"
mod foo {
mod bar {
pub mod baz {
pub struct Baz;
}
}
}
use foo::bar::baz::*<|>;
fn qux(baz: Baz) {}
",
);
} }
#[test] #[test]


@@ -106,4 +106,22 @@ mod tests {
            "fn f() { i<|>f let Some(_) = Some(1) { 1 } else { 0 } }",
        )
    }
+
+    #[test]
+    fn invert_if_option_case() {
+        check_assist(
+            invert_if,
+            "fn f() { if<|> doc_style.is_some() { Class::DocComment } else { Class::Comment } }",
+            "fn f() { if doc_style.is_none() { Class::Comment } else { Class::DocComment } }",
+        )
+    }
+
+    #[test]
+    fn invert_if_result_case() {
+        check_assist(
+            invert_if,
+            "fn f() { i<|>f doc_style.is_err() { Class::Err } else { Class::Ok } }",
+            "fn f() { if doc_style.is_ok() { Class::Ok } else { Class::Err } }",
+        )
+    }
 }


@@ -11,7 +11,7 @@ use syntax::{
     ast::{self, make, NameOwner},
     AstNode, Direction,
     SyntaxKind::*,
-    SyntaxNode, TextSize, T,
+    SyntaxNode, SyntaxText, TextSize, T,
 };

 use crate::assist_config::SnippetCap;

@@ -179,6 +179,25 @@ fn invert_special_case(expr: &ast::Expr) -> Option<ast::Expr> {
            ast::BinOp::EqualityTest => bin.replace_op(T![!=]).map(|it| it.into()),
            _ => None,
        },
+        ast::Expr::MethodCallExpr(mce) => {
+            const IS_SOME_TEXT: &str = "is_some";
+            const IS_NONE_TEXT: &str = "is_none";
+            const IS_OK_TEXT: &str = "is_ok";
+            const IS_ERR_TEXT: &str = "is_err";
+
+            let name = mce.name_ref()?;
+            let name_text = name.text();
+
+            let caller = || -> Option<SyntaxText> { Some(mce.receiver()?.syntax().text()) };
+
+            match name_text {
+                x if x == IS_SOME_TEXT => make::expr_method_call(IS_NONE_TEXT, caller),
+                x if x == IS_NONE_TEXT => make::expr_method_call(IS_SOME_TEXT, caller),
+                x if x == IS_OK_TEXT => make::expr_method_call(IS_ERR_TEXT, caller),
+                x if x == IS_ERR_TEXT => make::expr_method_call(IS_OK_TEXT, caller),
+                _ => None,
+            }
+        }
        ast::Expr::PrefixExpr(pe) if pe.op_kind()? == ast::PrefixOp::Not => pe.expr(),
        // FIXME:
        // ast::Expr::Literal(true | false )


@@ -1,6 +1,7 @@
 [package]
 name = "base_db"
 version = "0.0.0"
+description = "TBD"
 license = "MIT OR Apache-2.0"
 authors = ["rust-analyzer developers"]
 edition = "2018"

@@ -12,10 +13,10 @@ doctest = false
 salsa = "0.15.2"
 rustc-hash = "1.1.0"

-syntax = { path = "../syntax" }
-cfg = { path = "../cfg" }
-profile = { path = "../profile" }
-tt = { path = "../tt" }
-test_utils = { path = "../test_utils" }
-vfs = { path = "../vfs" }
-stdx = { path = "../stdx" }
+syntax = { path = "../syntax", version = "0.0.0" }
+cfg = { path = "../cfg", version = "0.0.0" }
+profile = { path = "../profile", version = "0.0.0" }
+tt = { path = "../tt", version = "0.0.0" }
+test_utils = { path = "../test_utils", version = "0.0.0" }
+vfs = { path = "../vfs", version = "0.0.0" }
+stdx = { path = "../stdx", version = "0.0.0" }


@@ -1,6 +1,7 @@
 [package]
 name = "cfg"
 version = "0.0.0"
+description = "TBD"
 license = "MIT OR Apache-2.0"
 authors = ["rust-analyzer developers"]
 edition = "2018"

@@ -11,7 +12,7 @@ doctest = false
 [dependencies]
 rustc-hash = "1.1.0"

-tt = { path = "../tt" }
+tt = { path = "../tt", version = "0.0.0" }

 [dev-dependencies]
 mbe = { path = "../mbe" }


@@ -1,6 +1,7 @@
 [package]
 name = "flycheck"
 version = "0.0.0"
+description = "TBD"
 license = "MIT OR Apache-2.0"
 authors = ["rust-analyzer developers"]
 edition = "2018"

@@ -15,4 +16,4 @@ cargo_metadata = "0.11.1"
 serde_json = "1.0.48"
 jod-thread = "0.1.1"

-toolchain = { path = "../toolchain" }
+toolchain = { path = "../toolchain", version = "0.0.0" }


@@ -1,6 +1,7 @@
 [package]
 name = "hir"
 version = "0.0.0"
+description = "TBD"
 license = "MIT OR Apache-2.0"
 authors = ["rust-analyzer developers"]
 edition = "2018"

@@ -16,11 +17,11 @@ arrayvec = "0.5.1"
 itertools = "0.9.0"
 url = "2.1.1"

-stdx = { path = "../stdx" }
-syntax = { path = "../syntax" }
-base_db = { path = "../base_db" }
-profile = { path = "../profile" }
-hir_expand = { path = "../hir_expand" }
-hir_def = { path = "../hir_def" }
-hir_ty = { path = "../hir_ty" }
-tt = { path = "../tt" }
+stdx = { path = "../stdx", version = "0.0.0" }
+syntax = { path = "../syntax", version = "0.0.0" }
+base_db = { path = "../base_db", version = "0.0.0" }
+profile = { path = "../profile", version = "0.0.0" }
+hir_expand = { path = "../hir_expand", version = "0.0.0" }
+hir_def = { path = "../hir_def", version = "0.0.0" }
+hir_ty = { path = "../hir_ty", version = "0.0.0" }
+tt = { path = "../tt", version = "0.0.0" }


@@ -1,6 +1,7 @@
 [package]
 name = "hir_def"
 version = "0.0.0"
+description = "TBD"
 license = "MIT OR Apache-2.0"
 authors = ["rust-analyzer developers"]
 edition = "2018"

@@ -20,16 +21,16 @@ itertools = "0.9.0"
 indexmap = "1.4.0"
 smallvec = "1.4.0"

-stdx = { path = "../stdx" }
-arena = { path = "../arena" }
-base_db = { path = "../base_db" }
-syntax = { path = "../syntax" }
-profile = { path = "../profile" }
-hir_expand = { path = "../hir_expand" }
-test_utils = { path = "../test_utils" }
-mbe = { path = "../mbe" }
-cfg = { path = "../cfg" }
-tt = { path = "../tt" }
+stdx = { path = "../stdx", version = "0.0.0" }
+arena = { path = "../arena", version = "0.0.0" }
+base_db = { path = "../base_db", version = "0.0.0" }
+syntax = { path = "../syntax", version = "0.0.0" }
+profile = { path = "../profile", version = "0.0.0" }
+hir_expand = { path = "../hir_expand", version = "0.0.0" }
+test_utils = { path = "../test_utils", version = "0.0.0" }
+mbe = { path = "../mbe", version = "0.0.0" }
+cfg = { path = "../cfg", version = "0.0.0" }
+tt = { path = "../tt", version = "0.0.0" }

 [dev-dependencies]
 expect-test = "0.1"


@@ -557,6 +557,10 @@ impl Ctx {
                let statik = self.lower_static(&ast)?;
                statik.into()
            }
+            ast::ExternItem::TypeAlias(ty) => {
+                let id = self.lower_type_alias(&ty)?;
+                id.into()
+            }
            ast::ExternItem::MacroCall(_) => return None,
        };
        self.add_attrs(id.into(), attrs);


@@ -46,6 +46,7 @@ union U { to_be: bool, not_to_be: u8 }
 enum E { V }

 extern {
+    type Ext;
     static EXT: u8;
     fn ext();
 }

@@ -65,6 +66,7 @@ extern {
            Baz: t v
            E: t
            EXT: v
+            Ext: t
            U: t
            ext: v
        "#]],


@@ -1,6 +1,7 @@
 [package]
 name = "hir_expand"
 version = "0.0.0"
+description = "TBD"
 license = "MIT OR Apache-2.0"
 authors = ["rust-analyzer developers"]
 edition = "2018"

@@ -13,11 +14,11 @@ log = "0.4.8"
 either = "1.5.3"
 rustc-hash = "1.0.0"

-arena = { path = "../arena" }
-base_db = { path = "../base_db" }
-syntax = { path = "../syntax" }
-parser = { path = "../parser" }
-profile = { path = "../profile" }
-tt = { path = "../tt" }
-mbe = { path = "../mbe" }
-test_utils = { path = "../test_utils"}
+arena = { path = "../arena", version = "0.0.0" }
+base_db = { path = "../base_db", version = "0.0.0" }
+syntax = { path = "../syntax", version = "0.0.0" }
+parser = { path = "../parser", version = "0.0.0" }
+profile = { path = "../profile", version = "0.0.0" }
+tt = { path = "../tt", version = "0.0.0" }
+mbe = { path = "../mbe", version = "0.0.0" }
+test_utils = { path = "../test_utils", version = "0.0.0" }


@@ -1,6 +1,7 @@
 [package]
 name = "hir_ty"
 version = "0.0.0"
+description = "TBD"
 license = "MIT OR Apache-2.0"
 authors = ["rust-analyzer developers"]
 edition = "2018"

@@ -20,14 +21,14 @@ chalk-solve = { version = "0.23.0" }
 chalk-ir = { version = "0.23.0" }
 chalk-recursive = { version = "0.23.0" }

-stdx = { path = "../stdx" }
-hir_def = { path = "../hir_def" }
-hir_expand = { path = "../hir_expand" }
-arena = { path = "../arena" }
-base_db = { path = "../base_db" }
-profile = { path = "../profile" }
-syntax = { path = "../syntax" }
-test_utils = { path = "../test_utils" }
+stdx = { path = "../stdx", version = "0.0.0" }
+hir_def = { path = "../hir_def", version = "0.0.0" }
+hir_expand = { path = "../hir_expand", version = "0.0.0" }
+arena = { path = "../arena", version = "0.0.0" }
+base_db = { path = "../base_db", version = "0.0.0" }
+profile = { path = "../profile", version = "0.0.0" }
+syntax = { path = "../syntax", version = "0.0.0" }
+test_utils = { path = "../test_utils", version = "0.0.0" }

 [dev-dependencies]
 expect-test = "0.1"


@@ -1,6 +1,7 @@
 [package]
 name = "ide"
 version = "0.0.0"
+description = "TBD"
 license = "MIT OR Apache-2.0"
 authors = ["rust-analyzer developers"]
 edition = "2018"

@@ -18,20 +19,20 @@ oorandom = "11.1.2"
 pulldown-cmark-to-cmark = "5.0.0"
 pulldown-cmark = {version = "0.7.2", default-features = false}

-stdx = { path = "../stdx" }
-syntax = { path = "../syntax" }
-text_edit = { path = "../text_edit" }
-base_db = { path = "../base_db" }
-ide_db = { path = "../ide_db" }
-cfg = { path = "../cfg" }
-profile = { path = "../profile" }
-test_utils = { path = "../test_utils" }
-assists = { path = "../assists" }
-ssr = { path = "../ssr" }
+stdx = { path = "../stdx", version = "0.0.0" }
+syntax = { path = "../syntax", version = "0.0.0" }
+text_edit = { path = "../text_edit", version = "0.0.0" }
+base_db = { path = "../base_db", version = "0.0.0" }
+ide_db = { path = "../ide_db", version = "0.0.0" }
+cfg = { path = "../cfg", version = "0.0.0" }
+profile = { path = "../profile", version = "0.0.0" }
+test_utils = { path = "../test_utils", version = "0.0.0" }
+assists = { path = "../assists", version = "0.0.0" }
+ssr = { path = "../ssr", version = "0.0.0" }

 # ide should depend only on the top-level `hir` package. if you need
 # something from some `hir_xxx` subpackage, reexport the API via `hir`.
-hir = { path = "../hir" }
+hir = { path = "../hir", version = "0.0.0" }

 [dev-dependencies]
 expect-test = "0.1"


@@ -92,7 +92,7 @@ pub use crate::completion::{
 /// already present, it should give all possible variants for the identifier at
 /// the caret. In other words, for
 ///
-/// ```no-run
+/// ```no_run
 /// fn f() {
 ///     let foo = 92;
 ///     let _ = bar<|>


@@ -160,7 +160,7 @@ fn runnable_fn(
        RunnableKind::Test { test_id, attr }
    } else if fn_def.has_atom_attr("bench") {
        RunnableKind::Bench { test_id }
-    } else if has_doc_test(&fn_def) {
+    } else if has_runnable_doc_test(&fn_def) {
        RunnableKind::DocTest { test_id }
    } else {
        return None;

@@ -211,8 +211,13 @@ fn has_test_related_attribute(fn_def: &ast::Fn) -> bool {
        .any(|attribute_text| attribute_text.contains("test"))
 }

-fn has_doc_test(fn_def: &ast::Fn) -> bool {
-    fn_def.doc_comment_text().map_or(false, |comment| comment.contains("```"))
+fn has_runnable_doc_test(fn_def: &ast::Fn) -> bool {
+    fn_def.doc_comment_text().map_or(false, |comments_text| {
+        comments_text.contains("```")
+            && !comments_text.contains("```ignore")
+            && !comments_text.contains("```no_run")
+            && !comments_text.contains("```compile_fail")
+    })
 }

 fn runnable_mod(

@@ -417,6 +422,21 @@ fn main() {}
 /// let x = 5;
 /// ```
 fn foo() {}
+
+/// ```no_run
+/// let z = 55;
+/// ```
+fn should_have_no_runnable() {}
+
+/// ```ignore
+/// let z = 55;
+/// ```
+fn should_have_no_runnable_2() {}
+
+/// ```compile_fail
+/// let z = 55;
+/// ```
+fn should_have_no_runnable_3() {}
 "#,
        &[&BIN, &DOCTEST],
        expect![[r#"


@@ -1,6 +1,7 @@
 [package]
 name = "ide_db"
 version = "0.0.0"
+description = "TBD"
 license = "MIT OR Apache-2.0"
 authors = ["rust-analyzer developers"]
 edition = "2018"

@@ -19,12 +20,12 @@ rustc-hash = "1.1.0"
 once_cell = "1.3.1"
 either = "1.5.3"

-stdx = { path = "../stdx" }
-syntax = { path = "../syntax" }
-text_edit = { path = "../text_edit" }
-base_db = { path = "../base_db" }
-profile = { path = "../profile" }
-test_utils = { path = "../test_utils" }
+stdx = { path = "../stdx", version = "0.0.0" }
+syntax = { path = "../syntax", version = "0.0.0" }
+text_edit = { path = "../text_edit", version = "0.0.0" }
+base_db = { path = "../base_db", version = "0.0.0" }
+profile = { path = "../profile", version = "0.0.0" }
+test_utils = { path = "../test_utils", version = "0.0.0" }
 # ide should depend only on the top-level `hir` package. if you need
 # something from some `hir_xxx` subpackage, reexport the API via `hir`.
-hir = { path = "../hir" }
+hir = { path = "../hir", version = "0.0.0" }


@@ -1,6 +1,7 @@
 [package]
 name = "mbe"
 version = "0.0.0"
+description = "TBD"
 license = "MIT OR Apache-2.0"
 authors = ["rust-analyzer developers"]
 edition = "2018"

@@ -13,9 +14,9 @@ rustc-hash = "1.1.0"
 smallvec = "1.2.0"
 log = "0.4.8"

-syntax = { path = "../syntax" }
-parser = { path = "../parser" }
-tt = { path = "../tt" }
+syntax = { path = "../syntax", version = "0.0.0" }
+parser = { path = "../parser", version = "0.0.0" }
+tt = { path = "../tt", version = "0.0.0" }

 [dev-dependencies]
 test_utils = { path = "../test_utils" }


@@ -1,6 +1,7 @@
 [package]
 name = "parser"
 version = "0.0.0"
+description = "TBD"
 license = "MIT OR Apache-2.0"
 authors = ["rust-analyzer developers"]
 edition = "2018"


@@ -1,6 +1,7 @@
 [package]
 name = "paths"
 version = "0.0.0"
+description = "TBD"
 license = "MIT OR Apache-2.0"
 authors = ["rust-analyzer developers"]
 edition = "2018"


@@ -1,6 +1,7 @@
 [package]
 name = "proc_macro_api"
 version = "0.0.0"
+description = "TBD"
 license = "MIT OR Apache-2.0"
 authors = ["rust-analyzer developers"]
 edition = "2018"

@@ -15,4 +16,4 @@ log = "0.4.8"
 crossbeam-channel = "0.4.0"
 jod-thread = "0.1.1"

-tt = { path = "../tt" }
+tt = { path = "../tt", version = "0.0.0" }


@@ -1,6 +1,7 @@
 [package]
 name = "proc_macro_srv"
 version = "0.0.0"
+description = "TBD"
 license = "MIT OR Apache-2.0"
 authors = ["rust-analyzer developers"]
 edition = "2018"

@@ -13,10 +14,10 @@ goblin = "0.2.1"
 libloading = "0.6.0"
 memmap = "0.7"

-tt = { path = "../tt" }
-mbe = { path = "../mbe" }
-proc_macro_api = { path = "../proc_macro_api" }
-test_utils = { path = "../test_utils" }
+tt = { path = "../tt", version = "0.0.0" }
+mbe = { path = "../mbe", version = "0.0.0" }
+proc_macro_api = { path = "../proc_macro_api", version = "0.0.0" }
+test_utils = { path = "../test_utils", version = "0.0.0" }

 [dev-dependencies]
 cargo_metadata = "0.11.1"


@@ -4,6 +4,7 @@ version = "0.0.0"
 license = "MIT OR Apache-2.0"
 authors = ["rust-analyzer developers"]
 edition = "2018"
+publish = false

 [lib]
 doctest = false


@@ -1,6 +1,7 @@
 [package]
 name = "profile"
 version = "0.0.0"
+description = "TBD"
 license = "MIT OR Apache-2.0"
 authors = ["rust-analyzer developers"]
 edition = "2018"

@@ -14,7 +15,7 @@ cfg-if = "0.1.10"
 libc = "0.2.73"
 backtrace = { version = "0.3.44", optional = true }

-arena = { path = "../arena" }
+arena = { path = "../arena", version = "0.0.0" }

 [target.'cfg(target_os = "linux")'.dependencies]
 perf-event = "0.4"


@@ -1,6 +1,7 @@
 [package]
 name = "project_model"
 version = "0.0.0"
+description = "TBD"
 license = "MIT OR Apache-2.0"
 authors = ["rust-analyzer developers"]
 edition = "2018"

@@ -16,10 +17,10 @@ serde = { version = "1.0.106", features = ["derive"] }
 serde_json = "1.0.48"
 anyhow = "1.0.26"

-arena = { path = "../arena" }
-cfg = { path = "../cfg" }
-base_db = { path = "../base_db" }
-toolchain = { path = "../toolchain" }
-proc_macro_api = { path = "../proc_macro_api" }
-paths = { path = "../paths" }
-stdx = { path = "../stdx" }
+arena = { path = "../arena", version = "0.0.0" }
+cfg = { path = "../cfg", version = "0.0.0" }
+base_db = { path = "../base_db", version = "0.0.0" }
+toolchain = { path = "../toolchain", version = "0.0.0" }
+proc_macro_api = { path = "../proc_macro_api", version = "0.0.0" }
+paths = { path = "../paths", version = "0.0.0" }
+stdx = { path = "../stdx", version = "0.0.0" }


@@ -1,6 +1,7 @@
 [package]
 name = "rust-analyzer"
 version = "0.0.0"
+description = "TBD"
 license = "MIT OR Apache-2.0"
 authors = ["rust-analyzer developers"]
 autobins = false

@@ -32,26 +33,26 @@ rayon = "1.3.1"
 mimalloc = { version = "0.1.19", default-features = false, optional = true }
 lsp-server = "0.3.3"

-stdx = { path = "../stdx" }
-flycheck = { path = "../flycheck" }
-ide = { path = "../ide" }
-profile = { path = "../profile" }
-project_model = { path = "../project_model" }
-syntax = { path = "../syntax" }
-text_edit = { path = "../text_edit" }
-vfs = { path = "../vfs" }
-vfs-notify = { path = "../vfs-notify" }
-cfg = { path = "../cfg" }
-toolchain = { path = "../toolchain" }
+stdx = { path = "../stdx", version = "0.0.0" }
+flycheck = { path = "../flycheck", version = "0.0.0" }
+ide = { path = "../ide", version = "0.0.0" }
+profile = { path = "../profile", version = "0.0.0" }
+project_model = { path = "../project_model", version = "0.0.0" }
+syntax = { path = "../syntax", version = "0.0.0" }
+text_edit = { path = "../text_edit", version = "0.0.0" }
+vfs = { path = "../vfs", version = "0.0.0" }
+vfs-notify = { path = "../vfs-notify", version = "0.0.0" }
+cfg = { path = "../cfg", version = "0.0.0" }
+toolchain = { path = "../toolchain", version = "0.0.0" }

 # This should only be used in CLI
-base_db = { path = "../base_db" }
-ide_db = { path = "../ide_db" }
-ssr = { path = "../ssr" }
-hir = { path = "../hir" }
-hir_def = { path = "../hir_def" }
-hir_ty = { path = "../hir_ty" }
-proc_macro_srv = { path = "../proc_macro_srv" }
+base_db = { path = "../base_db", version = "0.0.0" }
+ide_db = { path = "../ide_db", version = "0.0.0" }
+ssr = { path = "../ssr", version = "0.0.0" }
+hir = { path = "../hir", version = "0.0.0" }
+hir_def = { path = "../hir_def", version = "0.0.0" }
+hir_ty = { path = "../hir_ty", version = "0.0.0" }
+proc_macro_srv = { path = "../proc_macro_srv", version = "0.0.0" }

 [target.'cfg(windows)'.dependencies]
 winapi = "0.3.8"


@@ -14,12 +14,12 @@ doctest = false
 rustc-hash = "1.1.0"
 itertools = "0.9.0"

-text_edit = { path = "../text_edit" }
-syntax = { path = "../syntax" }
-base_db = { path = "../base_db" }
-ide_db = { path = "../ide_db" }
-hir = { path = "../hir" }
-test_utils = { path = "../test_utils" }
+text_edit = { path = "../text_edit", version = "0.0.0" }
+syntax = { path = "../syntax", version = "0.0.0" }
+base_db = { path = "../base_db", version = "0.0.0" }
+ide_db = { path = "../ide_db", version = "0.0.0" }
+hir = { path = "../hir", version = "0.0.0" }
+test_utils = { path = "../test_utils", version = "0.0.0" }

 [dev-dependencies]
 expect-test = "0.1"


@@ -1,6 +1,7 @@
 [package]
 name = "stdx"
 version = "0.0.0"
+description = "TBD"
 license = "MIT OR Apache-2.0"
 authors = ["rust-analyzer developers"]
 edition = "2018"


@@ -23,9 +23,9 @@ once_cell = "1.3.1"
 smol_str = { version = "0.1.15", features = ["serde"] }
 serde = { version = "1.0.106", features = ["derive"] }

-stdx = { path = "../stdx" }
-text_edit = { path = "../text_edit" }
-parser = { path = "../parser" }
+stdx = { path = "../stdx", version = "0.0.0" }
+text_edit = { path = "../text_edit", version = "0.0.0" }
+parser = { path = "../parser", version = "0.0.0" }

 [dev-dependencies]
 walkdir = "2.3.1"


@@ -32,7 +32,7 @@ pub fn ancestors_at_offset(
 /// imprecise: if the cursor is strictly between two nodes of the desired type,
 /// as in
 ///
-/// ```no-run
+/// ```no_run
 /// struct Foo {}|struct Bar;
 /// ```
 ///


@@ -1380,6 +1380,7 @@ pub enum ExternItem {
     Fn(Fn),
     MacroCall(MacroCall),
     Static(Static),
+    TypeAlias(TypeAlias),
 }
 impl ast::AttrsOwner for ExternItem {}
 impl ast::NameOwner for ExternItem {}

@@ -3339,10 +3340,13 @@ impl From<MacroCall> for ExternItem {
 impl From<Static> for ExternItem {
     fn from(node: Static) -> ExternItem { ExternItem::Static(node) }
 }
+impl From<TypeAlias> for ExternItem {
+    fn from(node: TypeAlias) -> ExternItem { ExternItem::TypeAlias(node) }
+}
 impl AstNode for ExternItem {
     fn can_cast(kind: SyntaxKind) -> bool {
         match kind {
-            FN | MACRO_CALL | STATIC => true,
+            FN | MACRO_CALL | STATIC | TYPE_ALIAS => true,
             _ => false,
         }
     }

@@ -3351,6 +3355,7 @@ impl AstNode for ExternItem {
             FN => ExternItem::Fn(Fn { syntax }),
             MACRO_CALL => ExternItem::MacroCall(MacroCall { syntax }),
             STATIC => ExternItem::Static(Static { syntax }),
+            TYPE_ALIAS => ExternItem::TypeAlias(TypeAlias { syntax }),
             _ => return None,
         };
         Some(res)

@@ -3360,6 +3365,7 @@ impl AstNode for ExternItem {
             ExternItem::Fn(it) => &it.syntax,
             ExternItem::MacroCall(it) => &it.syntax,
             ExternItem::Static(it) => &it.syntax,
+            ExternItem::TypeAlias(it) => &it.syntax,
         }
     }
 }


@@ -7,7 +7,7 @@
 use itertools::Itertools;
 use stdx::format_to;

-use crate::{ast, AstNode, SourceFile, SyntaxKind, SyntaxNode, SyntaxToken};
+use crate::{ast, AstNode, SourceFile, SyntaxKind, SyntaxNode, SyntaxText, SyntaxToken};

 pub fn name(text: &str) -> ast::Name {
     ast_from_text(&format!("mod {};", text))

@@ -137,6 +137,12 @@ pub fn expr_prefix(op: SyntaxKind, expr: ast::Expr) -> ast::Expr {
 pub fn expr_call(f: ast::Expr, arg_list: ast::ArgList) -> ast::Expr {
     expr_from_text(&format!("{}{}", f, arg_list))
 }
+pub fn expr_method_call<F>(text: &str, caller: F) -> Option<ast::Expr>
+where
+    F: FnOnce() -> Option<SyntaxText>,
+{
+    try_expr_from_text(&format!("{}.{}()", caller()?, text))
+}
 fn expr_from_text(text: &str) -> ast::Expr {
     ast_from_text(&format!("const C: () = {};", text))
 }


@@ -1,6 +1,7 @@
 [package]
 name = "test_utils"
 version = "0.0.0"
+description = "TBD"
 license = "MIT OR Apache-2.0"
 authors = ["rust-analyzer developers"]
 edition = "2018"

@@ -15,4 +16,4 @@ text-size = "1.0.0"
 serde_json = "1.0.48"
 rustc-hash = "1.1.0"

-stdx = { path = "../stdx" }
+stdx = { path = "../stdx", version = "0.0.0" }


@@ -1,6 +1,7 @@
 [package]
 name = "text_edit"
 version = "0.0.0"
+description = "TBD"
 license = "MIT OR Apache-2.0"
 authors = ["rust-analyzer developers"]
 edition = "2018"


@@ -1,6 +1,7 @@
 [package]
 name = "toolchain"
 version = "0.0.0"
+description = "TBD"
 license = "MIT OR Apache-2.0"
 authors = ["rust-analyzer developers"]
 edition = "2018"


@@ -1,6 +1,7 @@
 [package]
 name = "tt"
 version = "0.0.0"
+description = "TBD"
 license = "MIT OR Apache-2.0"
 authors = ["rust-analyzer developers"]
 edition = "2018"

@@ -13,4 +14,4 @@ doctest = false
 # to reduce number of compilations
 smol_str = { version = "0.1.15", features = ["serde"] }

-stdx = { path = "../stdx" }
+stdx = { path = "../stdx", version = "0.0.0" }


@@ -1,6 +1,7 @@
 [package]
 name = "vfs-notify"
 version = "0.0.0"
+description = "TBD"
 license = "MIT OR Apache-2.0"
 authors = ["rust-analyzer developers"]
 edition = "2018"

@@ -16,5 +17,5 @@ walkdir = "2.3.1"
 crossbeam-channel = "0.4.0"
 notify = "5.0.0-pre.3"

-vfs = { path = "../vfs" }
-paths = { path = "../paths" }
+vfs = { path = "../vfs", version = "0.0.0" }
+paths = { path = "../paths", version = "0.0.0" }


@@ -1,6 +1,7 @@
 [package]
 name = "vfs"
 version = "0.0.0"
+description = "TBD"
 license = "MIT OR Apache-2.0"
 authors = ["rust-analyzer developers"]
 edition = "2018"

@@ -12,4 +13,4 @@ doctest = false
 rustc-hash = "1.0"
 fst = "0.4"

-paths = { path = "../paths" }
+paths = { path = "../paths", version = "0.0.0" }


@@ -181,6 +181,30 @@ fn frobnicate(walrus: Option<Walrus>) {
 }
 ```

+# Early Returns
+
+Do use early returns
+
+```rust
+// Good
+fn foo() -> Option<Bar> {
+    if !condition() {
+        return None;
+    }
+
+    Some(...)
+}
+
+// Not as good
+fn foo() -> Option<Bar> {
+    if condition() {
+        Some(...)
+    } else {
+        None
+    }
+}
+```
+
 # Getters & Setters

 If a field can have any value without breaking invariants, make the field public.

@@ -189,7 +213,7 @@ Never provide setters.

 Getters should return borrowed data:

-```
+```rust
 struct Person {
     // Invariant: never empty
     first_name: String,

@@ -231,6 +255,41 @@ if words.len() != 2 {
 }
 ```

+# Avoid Monomorphization
+
+Rust uses monomorphization to compile generic code, meaning that for each instantiation of a generic functions with concrete types, the function is compiled afresh, *per crate*.
+This allows for exceptionally good performance, but leads to increased compile times.
+
+Runtime performance obeys 80%/20% rule -- only a small fraction of code is hot.
+Compile time **does not** obey this rule -- all code has to be compiled.
+For this reason, avoid making a lot of code type parametric, *especially* on the boundaries between crates.
+
+```rust
+// Good
+fn frbonicate(f: impl FnMut()) {
+    frobnicate_impl(&mut f)
+}
+fn frobnicate_impl(f: &mut dyn FnMut()) {
+    // lots of code
+}
+
+// Not as good
+fn frbonicate(f: impl FnMut()) {
+    // lots of code
+}
+```
+
+Avoid `AsRef` polymorphism, it pays back only for widely used libraries:
+
+```rust
+// Good
+fn frbonicate(f: &Path) {
+}
+
+// Not as good
+fn frbonicate(f: impl AsRef<Path>) {
+}
+```
+
 # Documentation

 For `.md` and `.adoc` files, prefer a sentence-per-line format, don't wrap lines.


@@ -134,6 +134,14 @@ async function downloadFile(
     await pipeline(srcStream, destFileStream);

+    // Don't apply the workaround in fixed versions of nodejs, since the process
+    // freezes on them, the process waits for no-longer emitted `close` event.
+    // The fix was applied in commit 7eed9d6bcc in v13.11.0
+    // See the nodejs changelog:
+    // https://github.com/nodejs/node/blob/master/doc/changelogs/CHANGELOG_V13.md
+    const [, major, minor] = /v(\d+)\.(\d+)\.(\d+)/.exec(process.version)!;
+    if (+major > 13 || (+major === 13 && +minor >= 11)) return;
+
     await new Promise<void>(resolve => {
         destFileStream.on("close", resolve);
         destFileStream.destroy();


@@ -15,7 +15,7 @@ flate2 = "1.0"
 pico-args = "0.3.1"
 proc-macro2 = "1.0.8"
 quote = "1.0.2"
-ungrammar = "1.1.1"
+ungrammar = "1.1.3"
 walkdir = "2.3.1"
 write-json = "0.1.0"
 # Avoid adding more dependencies to this crate


@@ -1,587 +0,0 @@ (whole file deleted)
//*************************//
// Names, Paths and Macros //
//*************************//
Name =
'ident'
NameRef =
'ident' | 'int_number'
Path =
(qualifier:Path '::')? segment:PathSegment
PathSegment =
'crate' | 'self' | 'super'
| '::' NameRef
| NameRef GenericArgList?
| NameRef ParamList RetType?
| '<' PathType ('as' PathType)? '>'
GenericArgList =
'::'? '<' (GenericArg (',' GenericArg)* ','?)? '>'
GenericArg =
TypeArg
| AssocTypeArg
| LifetimeArg
| ConstArg
TypeArg =
Type
AssocTypeArg =
NameRef (':' TypeBoundList | '=' Type)
LifetimeArg =
'lifetime'
ConstArg =
Expr
MacroCall =
Attr* Path '!' Name? TokenTree ';'?
TokenTree =
'(' ')'
| '{' '}'
| '[' ']'
MacroItems =
Item*
MacroStmts =
statements:Stmt*
Expr?
//*************************//
// Items //
//*************************//
SourceFile =
'shebang'?
Attr*
Item*
Item =
Const
| Enum
| ExternBlock
| ExternCrate
| Fn
| Impl
| MacroCall
| Module
| Static
| Struct
| Trait
| TypeAlias
| Union
| Use
Module =
Attr* Visibility? 'mod' Name
(ItemList | ';')
ItemList =
'{' Attr* Item* '}'
ExternCrate =
Attr* Visibility? 'extern' 'crate' (NameRef | 'self') Rename? ';'
Rename =
'as' (Name | '_')
Use =
Attr* Visibility? 'use' UseTree ';'
UseTree =
(Path? '::')? ('*' | UseTreeList )
| Path Rename?
UseTreeList =
'{' (UseTree (',' UseTree)* ','?)? '}'
Fn =
Attr* Visibility?
'default'? ('async' | 'const')? 'unsafe'? Abi?
'fn' Name GenericParamList? ParamList RetType?
WhereClause?
(body:BlockExpr | ';')
Abi =
'extern' 'string'?
ParamList =
'('(
SelfParam
| (SelfParam ',')? (Param (',' Param)* ','?)?
)')'
SelfParam =
Attr* (
('&' 'lifetime'?)? 'mut'? 'self'
| 'mut'? 'self' ':' Type
)
Param =
Attr* (
Pat (':' Type)
| Type
| '...'
)
RetType =
'->' Type
TypeAlias =
Attr* Visibility? 'default'? 'type' Name GenericParamList? (':' TypeBoundList?)? WhereClause?
'=' Type ';'
Struct =
Attr* Visibility? 'struct' Name GenericParamList? (
WhereClause? (RecordFieldList | ';')
| TupleFieldList WhereClause? ';'
)
RecordFieldList =
'{' fields:(RecordField (',' RecordField)* ','?)? '}'
RecordField =
Attr* Visibility? Name ':' Type
TupleFieldList =
'(' fields:(TupleField (',' TupleField)* ','?)? ')'
TupleField =
Attr* Visibility? Type
FieldList =
RecordFieldList
| TupleFieldList
Enum =
Attr* Visibility? 'enum' Name GenericParamList? WhereClause?
VariantList
VariantList =
'{' (Variant (',' Variant)* ','?)? '}'
Variant =
Attr* Visibility? Name FieldList ('=' Expr)?
Union =
Attr* Visibility? 'union' Name GenericParamList? WhereClause?
RecordFieldList
AdtDef =
Enum
| Struct
| Union
Const =
Attr* Visibility? 'default'? 'const' (Name | '_') ':' Type
'=' body:Expr ';'
Static =
Attr* Visibility? 'static'? 'mut'? Name ':' Type
'=' body:Expr ';'
Trait =
Attr* Visibility? 'unsafe'? 'auto'? 'trait' Name GenericParamList
(':' TypeBoundList?)? WhereClause
AssocItemList
AssocItemList =
'{' Attr* AssocItem* '}'
AssocItem =
Const
| Fn
| MacroCall
| TypeAlias
Impl =
Attr* Visibility?
'default'? 'unsafe'? 'impl' 'const'? GenericParamList?
('!'? target_trait:Type 'for')? target_type:Type
WhereClause?
AssocItemList
ExternBlock =
Attr* Abi ExternItemList
ExternItemList =
'{' Attr* ExternItem* '}'
ExternItem =
Fn | Static | MacroCall
GenericParamList =
'<' (GenericParam (',' GenericParam)* ','?)? '>'
GenericParam =
ConstParam
| LifetimeParam
| TypeParam
TypeParam =
Attr* Name (':' TypeBoundList?)?
('=' default_type:Type)?
ConstParam =
Attr* 'const' Name ':' Type
('=' default_val:Expr)?
LifetimeParam =
Attr* 'lifetime' (':' TypeBoundList?)?
WhereClause =
'where' predicates:(WherePred (',' WherePred)* ','?)
WherePred =
('for' GenericParamList)? ('lifetime' | Type) ':' TypeBoundList
Visibility =
'pub' ('('
'super'
| 'self'
| 'crate'
| 'in' Path
')')?
Attr =
'#' '!'? '[' Path ('=' Literal | TokenTree)? ']'
//****************************//
// Statements and Expressions //
//****************************//
Stmt =
ExprStmt
| Item
| LetStmt
LetStmt =
Attr* 'let' Pat (':' Type)?
'=' initializer:Expr ';'
ExprStmt =
Attr* Expr ';'?
Expr =
ArrayExpr
| AwaitExpr
| BinExpr
| BlockExpr
| BoxExpr
| BreakExpr
| CallExpr
| CastExpr
| ClosureExpr
| ContinueExpr
| EffectExpr
| FieldExpr
| ForExpr
| IfExpr
| IndexExpr
| Literal
| LoopExpr
| MacroCall
| MatchExpr
| MethodCallExpr
| ParenExpr
| PathExpr
| PrefixExpr
| RangeExpr
| RecordExpr
| RefExpr
| ReturnExpr
| TryExpr
| TupleExpr
| WhileExpr
Literal =
Attr* value:(
'int_number' | 'float_number'
| 'string' | 'raw_string'
| 'byte_string' | 'raw_byte_string'
| 'true' | 'false'
| 'char' | 'byte'
)
PathExpr =
Attr* Path
BlockExpr =
'{'
Attr*
statements:Stmt*
Expr?
'}'
RefExpr =
Attr* '&' ('raw' |'mut' | 'const') Expr
TryExpr =
Attr* Expr '?'
EffectExpr =
Attr* Label? ('try' | 'unsafe' | 'async') BlockExpr
PrefixExpr =
Attr* op:('-' | '!' | '*') Expr
BinExpr =
Attr*
lhs:Expr
op:(
'||' | '&&'
| '==' | '!=' | '<=' | '>=' | '<' | '>'
| '+' | '*' | '-' | '/' | '%' | '<<' | '>>' | '^' | '|' | '&'
| '=' | '+=' | '/=' | '*=' | '%=' | '>>=' | '<<=' | '-=' | '|=' | '&=' | '^='
)
rhs:Expr
CastExpr =
Attr* Expr 'as' Type
ParenExpr =
Attr* '(' Attr* Expr ')'
ArrayExpr =
Attr* '[' Attr* (
(Expr (',' Expr)* ','?)?
| Expr ';' Expr
) ']'
IndexExpr =
Attr* base:Expr '[' index:Expr ']'
TupleExpr =
Attr* '(' Attr* fields:(Expr (',' Expr)* ','?)? ')'
RecordExpr =
Path RecordExprFieldList
RecordExprFieldList =
'{'
Attr*
fields:(RecordExprField (',' RecordExprField)* ','?)
('..' spread:Expr)?
'}'
RecordExprField =
Attr* NameRef (':' Expr)?
CallExpr =
Attr* Expr ArgList
ArgList =
'(' args:(Expr (',' Expr)* ','?)? ')'
MethodCallExpr =
Attr* Expr '.' NameRef GenericArgList? ArgList
FieldExpr =
Attr* Expr '.' NameRef
ClosureExpr =
Attr* 'static'? 'async'? 'move'? ParamList RetType?
body:Expr
IfExpr =
Attr* 'if' Condition then_branch:BlockExpr
('else' else_branch:(IfExpr | BlockExpr))?
Condition =
'let' Pat '=' Expr
| Expr
LoopExpr =
Attr* Label? 'loop'
loop_body:BlockExpr
ForExpr =
Attr* Label? 'for' Pat 'in' iterable:Expr
loop_body:BlockExpr
WhileExpr =
Attr* Label? 'while' Condition
loop_body:BlockExpr
Label =
'lifetime'
BreakExpr =
Attr* 'break' 'lifetime'? Expr?
ContinueExpr =
Attr* 'continue' 'lifetime'?
RangeExpr =
Attr* start:Expr? op:('..' | '..=') end:Expr?
MatchExpr =
Attr* 'match' Expr MatchArmList
MatchArmList =
'{'
Attr*
arms:MatchArm*
'}'
MatchArm =
Attr* Pat guard:MatchGuard? '=>' Expr ','?
MatchGuard =
'if' Expr
ReturnExpr =
Attr* 'return' Expr?
AwaitExpr =
Attr* Expr '.' 'await'
BoxExpr =
Attr* 'box' Expr
//*************************//
// Types //
//*************************//
Type =
ArrayType
| DynTraitType
| FnPointerType
| ForType
| ImplTraitType
| InferType
| NeverType
| ParenType
| PathType
| PointerType
| ReferenceType
| SliceType
| TupleType
ParenType =
'(' Type ')'
NeverType =
'!'
PathType =
Path
TupleType =
'(' fields:(Type (',' Type)* ','?)? ')'
PointerType =
'*' ('const' | 'mut') Type
ReferenceType =
'&' 'lifetime'? 'mut'? Type
ArrayType =
'[' Type ';' Expr ']'
SliceType =
'[' Type ']'
InferType =
'_'
FnPointerType =
'const'? 'async'? 'unsafe'? Abi? 'fn' ParamList RetType?
ForType =
'for' GenericParamList Type
ImplTraitType =
'impl' TypeBoundList
DynTraitType =
'dyn' TypeBoundList
TypeBoundList =
bounds:(TypeBound ('+' TypeBound)* '+'?)
TypeBound =
'lifetime'
| '?'? Type
//************************//
// Patterns //
//************************//
Pat =
IdentPat
| BoxPat
| RestPat
| LiteralPat
| MacroPat
| OrPat
| ParenPat
| PathPat
| WildcardPat
| RangePat
| RecordPat
| RefPat
| SlicePat
| TuplePat
| TupleStructPat
LiteralPat =
Literal
IdentPat =
Attr* 'ref'? 'mut'? Name ('@' Pat)?
WildcardPat =
'_'
RangePat =
start:Pat op:('..' | '..=') end:Pat
RefPat =
'&' 'mut'? Pat
RecordPat =
Path RecordPatFieldList
RecordPatFieldList =
'{'
fields:(RecordPatField (',' RecordPatField)* ','?)
'..'?
'}'
RecordPatField =
Attr* (NameRef ':')? Pat
TupleStructPat =
Path '(' fields:(Pat (',' Pat)* ','?)? ')'
TuplePat =
'(' fields:(Pat (',' Pat)* ','?)? ')'
ParenPat =
'(' Pat ')'
SlicePat =
'[' (Pat (',' Pat)* ','?)? ']'
PathPat =
Path
OrPat =
(Pat ('|' Pat)* '|'?)
BoxPat =
'box' Pat
RestPat =
'..'
MacroPat =
MacroCall