Add semicolons for consistency

`clippy::semicolon_if_nothing_returned`

Repository: https://github.com/rust-lang/rust-analyzer (mirror)
Commit: 55c0b86cde (parent: 60c5449120)
46 changed files with 151 additions and 151 deletions
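The change is mechanical: `clippy::semicolon_if_nothing_returned` is a pedantic Clippy lint that fires when the last statement of a block is an expression of type `()` written without a trailing semicolon. The fix applied throughout this diff is simply to add the semicolon, so that unit-valued calls used for their side effects read as plain statements rather than tail expressions. A minimal sketch of the before/after shape (the `log_message` helper here is a made-up stand-in, not a function from this diff):

// Sketch only: `log_message` is a hypothetical helper.
fn log_message(msg: &str) {
    eprintln!("{}", msg);
}

// Before: the unit-returning call is left as a tail expression.
// It compiles either way (the function returns `()`), but the lint flags it.
fn notify_before() {
    log_message("done")
}

// After: the trailing semicolon turns it into an ordinary statement,
// which is the change applied across the 46 files below.
fn notify_after() {
    log_message("done");
}

fn main() {
    notify_before();
    notify_after();
}

The lint is not enabled by default; it is typically surfaced with something like `cargo clippy -- -W clippy::semicolon_if_nothing_returned` or a crate-level `#![warn(clippy::semicolon_if_nothing_returned)]`.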
@@ -179,7 +179,7 @@ impl FlycheckActor {
 tracing::error!(
 "Flycheck failed to run the following command: {:?}",
 self.check_command()
-)
+);
 }
 self.progress(Progress::DidFinish(res));
 }
@@ -253,7 +253,7 @@ impl FlycheckActor {
 }

 fn send(&self, check_task: Message) {
-(self.sender)(check_task)
+(self.sender)(check_task);
 }
 }

@@ -334,15 +334,15 @@ impl CargoActor {
 // Skip certain kinds of messages to only spend time on what's useful
 JsonMessage::Cargo(message) => match message {
 cargo_metadata::Message::CompilerArtifact(artifact) if !artifact.fresh => {
-self.sender.send(CargoMessage::CompilerArtifact(artifact)).unwrap()
+self.sender.send(CargoMessage::CompilerArtifact(artifact)).unwrap();
 }
 cargo_metadata::Message::CompilerMessage(msg) => {
-self.sender.send(CargoMessage::Diagnostic(msg.message)).unwrap()
+self.sender.send(CargoMessage::Diagnostic(msg.message)).unwrap();
 }
 _ => (),
 },
 JsonMessage::Rustc(message) => {
-self.sender.send(CargoMessage::Diagnostic(message)).unwrap()
+self.sender.send(CargoMessage::Diagnostic(message)).unwrap();
 }
 }
 }
@@ -67,11 +67,11 @@ pub(crate) mod entry_points {
 }

 pub(crate) fn stmt(p: &mut Parser) {
-expressions::stmt(p, expressions::StmtWithSemi::No, true)
+expressions::stmt(p, expressions::StmtWithSemi::No, true);
 }

 pub(crate) fn stmt_optional_semi(p: &mut Parser) {
-expressions::stmt(p, expressions::StmtWithSemi::Optional, false)
+expressions::stmt(p, expressions::StmtWithSemi::Optional, false);
 }

 pub(crate) fn visibility(p: &mut Parser) {
@@ -84,7 +84,7 @@ pub(crate) mod entry_points {
 }

 pub(crate) fn item(p: &mut Parser) {
-items::item_or_macro(p, true)
+items::item_or_macro(p, true);
 }

 pub(crate) fn macro_items(p: &mut Parser) {
@@ -109,7 +109,7 @@ pub(crate) mod entry_points {
 }

 pub(crate) fn attr(p: &mut Parser) {
-attributes::outer_attrs(p)
+attributes::outer_attrs(p);
 }
 }

@@ -246,7 +246,7 @@ fn name_r(p: &mut Parser, recovery: TokenSet) {
 }

 fn name(p: &mut Parser) {
-name_r(p, TokenSet::EMPTY)
+name_r(p, TokenSet::EMPTY);
 }

 fn name_ref(p: &mut Parser) {
@@ -2,13 +2,13 @@ use super::*;

 pub(super) fn inner_attrs(p: &mut Parser) {
 while p.at(T![#]) && p.nth(1) == T![!] {
-attr(p, true)
+attr(p, true);
 }
 }

 pub(super) fn outer_attrs(p: &mut Parser) {
 while p.at(T![#]) {
-attr(p, false)
+attr(p, false);
 }
 }

@@ -139,7 +139,7 @@ pub(super) fn expr_block_contents(p: &mut Parser) {
 continue;
 }

-stmt(p, StmtWithSemi::Yes, false)
+stmt(p, StmtWithSemi::Yes, false);
 }
 }

@@ -468,12 +468,12 @@ fn field_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker {
 let m = lhs.precede(p);
 p.bump(T![.]);
 if p.at(IDENT) || p.at(INT_NUMBER) {
-name_ref_or_index(p)
+name_ref_or_index(p);
 } else if p.at(FLOAT_NUMBER) {
 // FIXME: How to recover and instead parse INT + T![.]?
 p.bump_any();
 } else {
-p.error("expected field name or number")
+p.error("expected field name or number");
 }
 m.complete(p, FIELD_EXPR)
 }
@@ -374,7 +374,7 @@ fn match_expr(p: &mut Parser) -> CompletedMarker {
 if p.at(T!['{']) {
 match_arm_list(p);
 } else {
-p.error("expected `{`")
+p.error("expected `{`");
 }
 m.complete(p, MATCH_EXPR)
 }
@@ -602,7 +602,7 @@ fn try_block_expr(p: &mut Parser, m: Option<Marker>) -> CompletedMarker {
 if p.at(T!['{']) {
 stmt_list(p);
 } else {
-p.error("expected a block")
+p.error("expected a block");
 }
 m.complete(p, BLOCK_EXPR)
 }
@@ -639,7 +639,7 @@ fn meta_var_expr(p: &mut Parser) -> CompletedMarker {
 }
 _ => {
 while !p.at(R_DOLLAR) {
-p.bump_any()
+p.bump_any();
 }
 p.bump(R_DOLLAR);
 m.complete(p, ERROR)
@@ -34,7 +34,7 @@ fn generic_param(p: &mut Parser) {
 T![const] => const_param(p, m),
 _ => {
 m.abandon(p);
-p.err_and_bump("expected type parameter")
+p.err_and_bump("expected type parameter");
 }
 }
 }
@@ -62,7 +62,7 @@ fn type_param(p: &mut Parser, m: Marker) {
 // test type_param_default
 // struct S<T = i32>;
 p.bump(T![=]);
-types::type_(p)
+types::type_(p);
 }
 m.complete(p, TYPE_PARAM);
 }
@@ -20,7 +20,7 @@ use super::*;
 pub(super) fn mod_contents(p: &mut Parser, stop_on_r_curly: bool) {
 attributes::inner_attrs(p);
 while !p.at(EOF) && !(p.at(T!['}']) && stop_on_r_curly) {
-item_or_macro(p, stop_on_r_curly)
+item_or_macro(p, stop_on_r_curly);
 }
 }

@@ -165,7 +165,7 @@ pub(super) fn opt_item(p: &mut Parser, m: Marker) -> Result<(), Marker> {
 p.bump_remap(T![default]);
 p.bump(T![async]);
 if is_unsafe {
-p.bump(T![unsafe])
+p.bump(T![unsafe]);
 }
 has_mods = true;
 }
@@ -404,7 +404,7 @@ fn fn_(p: &mut Parser, m: Marker) {
 // trait T { fn foo(); }
 p.bump(T![;]);
 } else {
-expressions::block_expr(p)
+expressions::block_expr(p);
 }
 m.complete(p, FN);
 }
@@ -58,7 +58,7 @@ pub(super) fn enum_(p: &mut Parser, m: Marker) {
 if p.at(T!['{']) {
 variant_list(p);
 } else {
-p.error("expected `{`")
+p.error("expected `{`");
 }
 m.complete(p, ENUM);
 }
@@ -4,12 +4,12 @@ use super::*;
 // const C: u32 = 92;
 pub(super) fn konst(p: &mut Parser, m: Marker) {
 p.bump(T![const]);
-const_or_static(p, m, true)
+const_or_static(p, m, true);
 }

 pub(super) fn static_(p: &mut Parser, m: Marker) {
 p.bump(T![static]);
-const_or_static(p, m, false)
+const_or_static(p, m, false);
 }

 fn const_or_static(p: &mut Parser, m: Marker, is_const: bool) {
@@ -27,7 +27,7 @@ fn const_or_static(p: &mut Parser, m: Marker, is_const: bool) {
 if p.at(T![:]) {
 types::ascription(p);
 } else {
-p.error("missing type for `const` or `static`")
+p.error("missing type for `const` or `static`");
 }
 if p.eat(T![=]) {
 expressions::expr(p);
@@ -6,21 +6,21 @@ use super::*;
 // fn c(x: i32, ) {}
 // fn d(x: i32, y: ()) {}
 pub(super) fn param_list_fn_def(p: &mut Parser) {
-list_(p, Flavor::FnDef)
+list_(p, Flavor::FnDef);
 }

 // test param_list_opt_patterns
 // fn foo<F: FnMut(&mut Foo<'a>)>(){}
 pub(super) fn param_list_fn_trait(p: &mut Parser) {
-list_(p, Flavor::FnTrait)
+list_(p, Flavor::FnTrait);
 }

 pub(super) fn param_list_fn_ptr(p: &mut Parser) {
-list_(p, Flavor::FnPointer)
+list_(p, Flavor::FnPointer);
 }

 pub(super) fn param_list_closure(p: &mut Parser) {
-list_(p, Flavor::Closure)
+list_(p, Flavor::Closure);
 }

 #[derive(Debug, Clone, Copy)]
@@ -104,13 +104,13 @@ fn param(p: &mut Parser, m: Marker, flavor: Flavor) -> Variadic {
 Flavor::FnDef => {
 patterns::pattern(p);
 if variadic_param(p) {
-res = Variadic(true)
+res = Variadic(true);
 } else if p.at(T![:]) {
-types::ascription(p)
+types::ascription(p);
 } else {
 // test_err missing_fn_param_type
 // fn f(x y: i32, z, t: i32) {}
-p.error("missing type for function parameter")
+p.error("missing type for function parameter");
 }
 }
 // test value_parameters_no_patterns
@@ -128,11 +128,11 @@ fn param(p: &mut Parser, m: Marker, flavor: Flavor) -> Variadic {
 if (p.at(IDENT) || p.at(UNDERSCORE)) && p.nth(1) == T![:] && !p.nth_at(1, T![::]) {
 patterns::pattern_single(p);
 if variadic_param(p) {
-res = Variadic(true)
+res = Variadic(true);
 } else if p.at(T![:]) {
-types::ascription(p)
+types::ascription(p);
 } else {
-p.error("missing type for function parameter")
+p.error("missing type for function parameter");
 }
 } else {
 types::type_(p);
@@ -16,15 +16,15 @@ pub(super) fn is_use_path_start(p: &Parser) -> bool {
 }

 pub(super) fn use_path(p: &mut Parser) {
-path(p, Mode::Use)
+path(p, Mode::Use);
 }

 pub(crate) fn type_path(p: &mut Parser) {
-path(p, Mode::Type)
+path(p, Mode::Type);
 }

 pub(super) fn expr_path(p: &mut Parser) {
-path(p, Mode::Expr)
+path(p, Mode::Expr);
 }

 pub(crate) fn type_path_for_qualifier(p: &mut Parser, qual: CompletedMarker) -> CompletedMarker {
@@ -117,7 +117,7 @@ fn opt_path_type_args(p: &mut Parser, mode: Mode) {
 params::param_list_fn_trait(p);
 opt_ret_type(p);
 } else {
-generic_args::opt_generic_arg_list(p, false)
+generic_args::opt_generic_arg_list(p, false);
 }
 }
 Mode::Expr => generic_args::opt_generic_arg_list(p, true),
@@ -19,7 +19,7 @@ pub(crate) fn pattern(p: &mut Parser) {

 /// Parses a pattern list separated by pipes `|`.
 pub(super) fn pattern_top(p: &mut Parser) {
-pattern_top_r(p, PAT_RECOVERY_SET)
+pattern_top_r(p, PAT_RECOVERY_SET);
 }

 pub(crate) fn pattern_single(p: &mut Parser) {
@@ -57,7 +57,7 @@ fn type_with_bounds_cond(p: &mut Parser, allow_bounds: bool) {
 pub(super) fn ascription(p: &mut Parser) {
 assert!(p.at(T![:]));
 p.bump(T![:]);
-type_(p)
+type_(p);
 }

 fn paren_or_tuple_type(p: &mut Parser) {
@@ -204,7 +204,7 @@ fn fn_ptr_type(p: &mut Parser) {
 if p.at(T!['(']) {
 params::param_list_fn_ptr(p);
 } else {
-p.error("expected parameters")
+p.error("expected parameters");
 }
 // test fn_pointer_type_with_ret
 // type F = fn() -> ();
@@ -274,7 +274,7 @@ fn dyn_trait_type(p: &mut Parser) {
 // type C = self::Foo;
 // type D = super::Foo;
 pub(super) fn path_type(p: &mut Parser) {
-path_type_(p, true)
+path_type_(p, true);
 }

 // test macro_call_type
@@ -177,7 +177,7 @@ impl<'t> Parser<'t> {
 if kind == EOF {
 return;
 }
-self.do_bump(kind, 1)
+self.do_bump(kind, 1);
 }

 /// Advances the parser by one token, remapping its kind.
@@ -200,7 +200,7 @@ impl<'t> Parser<'t> {
 /// does.
 pub(crate) fn error<T: Into<String>>(&mut self, message: T) {
 let msg = ParseError(Box::new(message.into()));
-self.push_event(Event::Error { msg })
+self.push_event(Event::Error { msg });
 }

 /// Consume the next token if it is `kind` or emit an error
@@ -258,7 +258,7 @@ impl<'t> Parser<'t> {
 }

 fn push_event(&mut self, event: Event) {
-self.events.push(event)
+self.events.push(event);
 }
 }

@@ -14,7 +14,7 @@ impl TokenSet {
 let mut i = 0;
 while i < kinds.len() {
 res |= mask(kinds[i]);
-i += 1
+i += 1;
 }
 TokenSet(res)
 }
@@ -320,7 +320,7 @@ impl Reader {
 })
 .collect(),
 };
-res[i] = Some(s)
+res[i] = Some(s);
 }

 res[0].take().unwrap()
@@ -93,7 +93,7 @@ struct ProfilerImpl {
 impl ProfileSpan {
 pub fn detail(mut self, detail: impl FnOnce() -> String) -> ProfileSpan {
 if let Some(profiler) = &mut self.0 {
-profiler.detail = Some(detail())
+profiler.detail = Some(detail());
 }
 self
 }
@@ -114,7 +114,7 @@ impl HeartbeatSpan {
 #[inline]
 pub fn new(enabled: bool) -> Self {
 if enabled {
-with_profile_stack(|it| it.heartbeats(true))
+with_profile_stack(|it| it.heartbeats(true));
 }
 Self { enabled }
 }
@@ -123,7 +123,7 @@ impl HeartbeatSpan {
 impl Drop for HeartbeatSpan {
 fn drop(&mut self) {
 if self.enabled {
-with_profile_stack(|it| it.heartbeats(false))
+with_profile_stack(|it| it.heartbeats(false));
 }
 }
 }
@@ -238,7 +238,7 @@ impl ProfileStack {
 self.heartbeat(frame.heartbeats);
 let avg_span = duration / (frame.heartbeats + 1);
 if avg_span > self.filter.heartbeat_longer_than {
-eprintln!("Too few heartbeats {} ({}/{:?})?", label, frame.heartbeats, duration)
+eprintln!("Too few heartbeats {} ({}/{:?})?", label, frame.heartbeats, duration);
 }
 }

@@ -292,7 +292,7 @@ fn print(
 accounted_for += tree[child].duration;

 if tree[child].duration.as_millis() > longer_than.as_millis() {
-print(tree, child, level + 1, longer_than, out)
+print(tree, child, level + 1, longer_than, out);
 } else {
 let (total_duration, cnt) =
 short_children.entry(tree[child].label).or_insert((Duration::default(), 0));
@@ -92,7 +92,7 @@ pub fn cpu_span() -> CpuSpan {
 {
 eprintln!(
 r#"cpu profiling is disabled, uncomment `default = [ "cpu_profiler" ]` in Cargo.toml to enable."#
-)
+);
 }

 CpuSpan { _private: () }
@@ -70,15 +70,15 @@ impl fmt::Display for StopWatchSpan {
 let mut prefix = "";
 if instructions > 10000 {
 instructions /= 1000;
-prefix = "k"
+prefix = "k";
 }
 if instructions > 10000 {
 instructions /= 1000;
-prefix = "m"
+prefix = "m";
 }
 if instructions > 10000 {
 instructions /= 1000;
-prefix = "g"
+prefix = "g";
 }
 write!(f, ", {}{}instr", instructions, prefix)?;
 }
@@ -4,7 +4,7 @@ use std::{env, path::PathBuf, process::Command};

 fn main() {
 set_rerun();
-println!("cargo:rustc-env=REV={}", rev())
+println!("cargo:rustc-env=REV={}", rev());
 }

 fn set_rerun() {
@@ -33,9 +33,9 @@ pub fn list_files(dir: &Path) -> Vec<PathBuf> {
 path.file_name().unwrap_or_default().to_str().unwrap_or_default().starts_with('.');
 if !is_hidden {
 if file_type.is_dir() {
-work.push(path)
+work.push(path);
 } else if file_type.is_file() {
-res.push(path)
+res.push(path);
 }
 }
 }
@@ -66,7 +66,7 @@ impl CommentBlock {
 panic!(
 "Use plain (non-doc) comments with tags like {}:\n {}",
 tag, first
-)
+);
 }

 block.id = id.trim().to_string();
@@ -106,7 +106,7 @@ impl CommentBlock {
 }
 }
 if !block.contents.is_empty() {
-res.push(block)
+res.push(block);
 }
 res
 }
@@ -139,7 +139,7 @@ fn ensure_rustfmt() {
 panic!(
 "Failed to run rustfmt from toolchain 'stable'. \
 Please run `rustup component add rustfmt --toolchain stable` to install it.",
-)
+);
 }
 }

@@ -185,7 +185,7 @@ pub fn ensure_file_contents(file: &Path, contents: &str) {
 let _ = fs::create_dir_all(parent);
 }
 fs::write(file, contents).unwrap();
-panic!("some file was not up to date and has been updated, simply re-run the tests")
+panic!("some file was not up to date and has been updated, simply re-run the tests");
 }

 fn normalize_newlines(s: &str) -> String {
@@ -45,7 +45,7 @@ fn to_snake_case<F: Fn(&char) -> char>(s: &str, change_case: F) -> String {
 if c.is_ascii_uppercase() && prev {
 // This check is required to not translate `Weird_Case` into `weird__case`.
 if !buf.ends_with('_') {
-buf.push('_')
+buf.push('_');
 }
 }
 prev = true;
@@ -60,7 +60,7 @@ pub fn replace(buf: &mut String, from: char, to: &str) {
 return;
 }
 // FIXME: do this in place.
-*buf = buf.replace(from, to)
+*buf = buf.replace(from, to);
 }

 pub fn trim_indent(mut text: &str) -> String {
@@ -101,7 +101,7 @@ pub fn defer<F: FnOnce()>(f: F) -> impl Drop {
 impl<F: FnOnce()> Drop for D<F> {
 fn drop(&mut self) {
 if let Some(f) = self.0.take() {
-f()
+f();
 }
 }
 }
@@ -25,19 +25,19 @@ impl PanicContext {
 if !ctx.is_empty() {
 eprintln!("Panic context:");
 for frame in ctx.iter() {
-eprintln!("> {}\n", frame)
+eprintln!("> {}\n", frame);
 }
 }
-default_hook(panic_info)
-})
+default_hook(panic_info);
+});
 };
-panic::set_hook(Box::new(hook))
+panic::set_hook(Box::new(hook));
 }
 }

 impl Drop for PanicContext {
 fn drop(&mut self) {
-with_ctx(|ctx| assert!(ctx.pop().is_some()))
+with_ctx(|ctx| assert!(ctx.pop().is_some()));
 }
 }

@@ -45,5 +45,5 @@ fn with_ctx(f: impl FnOnce(&mut Vec<String>)) {
 thread_local! {
 static CTX: RefCell<Vec<String>> = RefCell::new(Vec::new());
 }
-CTX.with(|ctx| f(&mut *ctx.borrow_mut()))
+CTX.with(|ctx| f(&mut *ctx.borrow_mut()));
 }
@@ -42,9 +42,9 @@ pub fn streaming_output(
 };
 for line in String::from_utf8_lossy(new_lines).lines() {
 if is_out {
-on_stdout_line(line)
+on_stdout_line(line);
 } else {
-on_stderr_line(line)
+on_stderr_line(line);
 }
 }
 }
@@ -120,7 +120,7 @@ impl TreeDiff {
 to.iter().for_each(|to| builder.insert(offset, to.to_string()));
 }
 for (from, to) in self.replacements.iter() {
-builder.replace(from.text_range(), to.to_string())
+builder.replace(from.text_range(), to.to_string());
 }
 for text_range in self.deletions.iter().map(SyntaxElement::text_range) {
 builder.delete(text_range);
@@ -233,7 +233,7 @@ pub fn diff(from: &SyntaxNode, to: &SyntaxNode) -> TreeDiff {
 diff.insertions.entry(insert_pos).or_insert_with(Vec::new).extend(drain);
 rhs_children = rhs_children_clone;
 } else {
-go(diff, lhs_ele, rhs_ele)
+go(diff, lhs_ele, rhs_ele);
 }
 }
 }
@@ -126,7 +126,7 @@ impl IndentLevel {
 if let Some(ws) = ast::Whitespace::cast(token) {
 if ws.text().contains('\n') {
 let new_ws = make::tokens::whitespace(&format!("{}{}", ws.syntax(), self));
-ted::replace(ws.syntax(), &new_ws)
+ted::replace(ws.syntax(), &new_ws);
 }
 }
 }
@@ -143,7 +143,7 @@ impl IndentLevel {
 let new_ws = make::tokens::whitespace(
 &ws.syntax().text().replace(&format!("\n{}", self), "\n"),
 );
-ted::replace(ws.syntax(), &new_ws)
+ted::replace(ws.syntax(), &new_ws);
 }
 }
 }
@@ -49,7 +49,7 @@ impl GenericParamsOwnerEdit for ast::Fn {
 } else {
 Position::last_child_of(self.syntax())
 };
-create_where_clause(position)
+create_where_clause(position);
 }
 self.where_clause().unwrap()
 }
@@ -77,7 +77,7 @@ impl GenericParamsOwnerEdit for ast::Impl {
 } else {
 Position::last_child_of(self.syntax())
 };
-create_where_clause(position)
+create_where_clause(position);
 }
 self.where_clause().unwrap()
 }
@@ -107,7 +107,7 @@ impl GenericParamsOwnerEdit for ast::Trait {
 } else {
 Position::last_child_of(self.syntax())
 };
-create_where_clause(position)
+create_where_clause(position);
 }
 self.where_clause().unwrap()
 }
@@ -145,7 +145,7 @@ impl GenericParamsOwnerEdit for ast::Struct {
 } else {
 Position::last_child_of(self.syntax())
 };
-create_where_clause(position)
+create_where_clause(position);
 }
 self.where_clause().unwrap()
 }
@@ -177,7 +177,7 @@ impl GenericParamsOwnerEdit for ast::Enum {
 } else {
 Position::last_child_of(self.syntax())
 };
-create_where_clause(position)
+create_where_clause(position);
 }
 self.where_clause().unwrap()
 }
@@ -234,7 +234,7 @@ impl ast::GenericParamList {
 }
 None => {
 let after_l_angle = Position::after(self.l_angle_token().unwrap());
-ted::insert(after_l_angle, generic_param.syntax())
+ted::insert(after_l_angle, generic_param.syntax());
 }
 }
 }
@@ -247,7 +247,7 @@ impl ast::WhereClause {
 ted::append_child_raw(self.syntax(), make::token(T![,]));
 }
 }
-ted::append_child(self.syntax(), predicate.syntax())
+ted::append_child(self.syntax(), predicate.syntax());
 }
 }

@@ -267,7 +267,7 @@ impl ast::PathSegment {
 pub fn get_or_create_generic_arg_list(&self) -> ast::GenericArgList {
 if self.generic_arg_list().is_none() {
 let arg_list = make::generic_arg_list().clone_for_update();
-ted::append_child(self.syntax(), arg_list.syntax())
+ted::append_child(self.syntax(), arg_list.syntax());
 }
 self.generic_arg_list().unwrap()
 }
@@ -286,7 +286,7 @@ impl ast::UseTree {
 break;
 }
 }
-ted::remove(self.syntax())
+ted::remove(self.syntax());
 }
 }

@@ -301,13 +301,13 @@ impl ast::Use {
 let ws_text = next_ws.syntax().text();
 if let Some(rest) = ws_text.strip_prefix('\n') {
 if rest.is_empty() {
-ted::remove(next_ws.syntax())
+ted::remove(next_ws.syntax());
 } else {
-ted::replace(next_ws.syntax(), make::tokens::whitespace(rest))
+ted::replace(next_ws.syntax(), make::tokens::whitespace(rest));
 }
 }
 }
-ted::remove(self.syntax())
+ted::remove(self.syntax());
 }
 }

@@ -525,7 +525,7 @@ pub trait Indent: AstNode + Clone + Sized {
 fn reindent_to(&self, target_level: IndentLevel) {
 let current_level = IndentLevel::from_node(self.syntax());
 self.dedent(current_level);
-self.indent(target_level)
+self.indent(target_level);
 }
 }

@@ -257,7 +257,7 @@ pub fn block_expr(
 format_to!(buf, " {}\n", stmt);
 }
 if let Some(tail_expr) = tail_expr {
-format_to!(buf, " {}\n", tail_expr)
+format_to!(buf, " {}\n", tail_expr);
 }
 buf += "}";
 ast_from_text(&format!("fn f() {}", buf))
@@ -609,7 +609,7 @@ impl HasFormatSpecifier for ast::String {
 TextRange::new(range.start.try_into().unwrap(), range.end.try_into().unwrap())
 + offset,
 unescaped_char,
-))
+));
 });

 Some(res)
@@ -631,7 +631,7 @@ impl ast::IntNumber {

 let mut text = token.text();
 if let Some(suffix) = self.suffix() {
-text = &text[..text.len() - suffix.len()]
+text = &text[..text.len() - suffix.len()];
 }

 let radix = self.radix();
@@ -28,7 +28,7 @@ pub fn function_declaration(node: &ast::Fn) -> String {
 format_to!(buf, "{} ", abi);
 }
 if let Some(name) = node.name() {
-format_to!(buf, "fn {}", name)
+format_to!(buf, "fn {}", name);
 }
 if let Some(type_params) = node.generic_param_list() {
 format_to!(buf, "{}", type_params);
@@ -88,7 +88,7 @@ impl<'a> TreeSink for TextTreeSink<'a> {
 }

 fn error(&mut self, error: ParseError) {
-self.inner.error(error, self.text_pos)
+self.inner.error(error, self.text_pos);
 }
 }

@@ -108,7 +108,7 @@ impl<'a> TextTreeSink<'a> {
 match mem::replace(&mut self.state, State::Normal) {
 State::PendingFinish => {
 self.eat_trivias();
-self.inner.finish_node()
+self.inner.finish_node();
 }
 State::PendingStart | State::Normal => unreachable!(),
 }
@@ -81,7 +81,7 @@ impl<N: AstNode> PartialEq for AstPtr<N> {

 impl<N: AstNode> Hash for AstPtr<N> {
 fn hash<H: Hasher>(&self, state: &mut H) {
-self.raw.hash(state)
+self.raw.hash(state);
 }
 }

@@ -56,19 +56,19 @@ impl SyntaxTreeBuilder {

 pub fn token(&mut self, kind: SyntaxKind, text: &str) {
 let kind = RustLanguage::kind_to_raw(kind);
-self.inner.token(kind, text)
+self.inner.token(kind, text);
 }

 pub fn start_node(&mut self, kind: SyntaxKind) {
 let kind = RustLanguage::kind_to_raw(kind);
-self.inner.start_node(kind)
+self.inner.start_node(kind);
 }

 pub fn finish_node(&mut self) {
-self.inner.finish_node()
+self.inner.finish_node();
 }

 pub fn error(&mut self, error: parser::ParseError, text_pos: TextSize) {
-self.errors.push(SyntaxError::new_at_offset(*error.0, text_pos))
+self.errors.push(SyntaxError::new_at_offset(*error.0, text_pos));
 }
 }
@@ -77,23 +77,23 @@ impl Position {
 }

 pub fn insert(position: Position, elem: impl Element) {
-insert_all(position, vec![elem.syntax_element()])
+insert_all(position, vec![elem.syntax_element()]);
 }
 pub fn insert_raw(position: Position, elem: impl Element) {
-insert_all_raw(position, vec![elem.syntax_element()])
+insert_all_raw(position, vec![elem.syntax_element()]);
 }
 pub fn insert_all(position: Position, mut elements: Vec<SyntaxElement>) {
 if let Some(first) = elements.first() {
 if let Some(ws) = ws_before(&position, first) {
-elements.insert(0, ws.into())
+elements.insert(0, ws.into());
 }
 }
 if let Some(last) = elements.last() {
 if let Some(ws) = ws_after(&position, last) {
-elements.push(ws.into())
+elements.push(ws.into());
 }
 }
-insert_all_raw(position, elements)
+insert_all_raw(position, elements);
 }
 pub fn insert_all_raw(position: Position, elements: Vec<SyntaxElement>) {
 let (parent, index) = match position.repr {
@@ -104,10 +104,10 @@ pub fn insert_all_raw(position: Position, elements: Vec<SyntaxElement>) {
 }

 pub fn remove(elem: impl Element) {
-elem.syntax_element().detach()
+elem.syntax_element().detach();
 }
 pub fn remove_all(range: RangeInclusive<SyntaxElement>) {
-replace_all(range, Vec::new())
+replace_all(range, Vec::new());
 }
 pub fn remove_all_iter(range: impl IntoIterator<Item = SyntaxElement>) {
 let mut it = range.into_iter();
@@ -115,9 +115,9 @@ pub fn remove_all_iter(range: impl IntoIterator<Item = SyntaxElement>) {
 match it.last() {
 Some(mut last) => {
 if first.index() > last.index() {
-mem::swap(&mut first, &mut last)
+mem::swap(&mut first, &mut last);
 }
-remove_all(first..=last)
+remove_all(first..=last);
 }
 None => remove(first),
 }
@@ -125,26 +125,26 @@ pub fn remove_all_iter(range: impl IntoIterator<Item = SyntaxElement>) {
 }

 pub fn replace(old: impl Element, new: impl Element) {
-replace_with_many(old, vec![new.syntax_element()])
+replace_with_many(old, vec![new.syntax_element()]);
 }
 pub fn replace_with_many(old: impl Element, new: Vec<SyntaxElement>) {
 let old = old.syntax_element();
-replace_all(old.clone()..=old, new)
+replace_all(old.clone()..=old, new);
 }
 pub fn replace_all(range: RangeInclusive<SyntaxElement>, new: Vec<SyntaxElement>) {
 let start = range.start().index();
 let end = range.end().index();
 let parent = range.start().parent().unwrap();
-parent.splice_children(start..end + 1, new)
+parent.splice_children(start..end + 1, new);
 }

 pub fn append_child(node: &(impl Into<SyntaxNode> + Clone), child: impl Element) {
 let position = Position::last_child_of(node);
-insert(position, child)
+insert(position, child);
 }
 pub fn append_child_raw(node: &(impl Into<SyntaxNode> + Clone), child: impl Element) {
 let position = Position::last_child_of(node);
-insert_raw(position, child)
+insert_raw(position, child);
 }

 fn ws_before(position: &Position, new: &SyntaxElement) -> Option<SyntaxToken> {
@@ -137,7 +137,7 @@ fn validate_literal(literal: ast::Literal, acc: &mut Vec<SyntaxError>) {
 if let Err(err) = char {
 push_err(1, (range.start, err));
 }
-})
+});
 }
 }
 }
@@ -148,7 +148,7 @@ fn validate_literal(literal: ast::Literal, acc: &mut Vec<SyntaxError>) {
 if let Err(err) = char {
 push_err(2, (range.start, err));
 }
-})
+});
 }
 }
 }
@@ -19,6 +19,6 @@ pub(crate) fn validate_block_expr(block: ast::BlockExpr, errors: &mut Vec<Syntax
 "A block in this position cannot accept inner attributes",
 attr.syntax().text_range(),
 )
-}))
+}));
 }
 }
@@ -43,7 +43,7 @@ impl AssertLinear {
 }

 pub fn sample(&mut self, x: f64, y: f64) {
-self.rounds.last_mut().unwrap().samples.push((x, y))
+self.rounds.last_mut().unwrap().samples.push((x, y));
 }
 }

@@ -54,7 +54,7 @@ impl Drop for AssertLinear {
 for round in &self.rounds {
 eprintln!("\n{}", round.plot);
 }
-panic!("Doesn't look linear!")
+panic!("Doesn't look linear!");
 }
 }
 }
@@ -142,14 +142,14 @@ impl Fixture {

 if line.starts_with("//-") {
 let meta = Fixture::parse_meta_line(line);
-res.push(meta)
+res.push(meta);
 } else {
 if line.starts_with("// ")
 && line.contains(':')
 && !line.contains("::")
 && line.chars().all(|it| !it.is_uppercase())
 {
-panic!("looks like invalid metadata line: {:?}", line)
+panic!("looks like invalid metadata line: {:?}", line);
 }

 if let Some(entry) = res.last_mut() {
@@ -256,9 +256,9 @@ impl MiniCore {
 let line = line.strip_prefix("//- minicore:").unwrap().trim();
 for entry in line.split(", ") {
 if res.has_flag(entry) {
-panic!("duplicate minicore flag: {:?}", entry)
+panic!("duplicate minicore flag: {:?}", entry);
 }
-res.activated_flags.push(entry.to_string())
+res.activated_flags.push(entry.to_string());
 }

 res
@@ -354,7 +354,7 @@ impl MiniCore {
 }

 if keep {
-buf.push_str(line)
+buf.push_str(line);
 }
 if line_region {
 active_regions.pop().unwrap();
@@ -244,7 +244,7 @@ pub fn extract_annotations(text: &str) -> Vec<(TextRange, String)> {

 range + line_start.1
 };
-res.push((range, content))
+res.push((range, content));
 }
 LineAnnotation::Continuation { mut offset, content } => {
 offset += annotation_offset;
@@ -301,7 +301,7 @@ fn extract_line_annotations(mut line: &str) -> Vec<LineAnnotation> {
 let mut file = false;
 if !continuation && content.starts_with("file") {
 file = true;
-content = &content["file".len()..]
+content = &content["file".len()..];
 }

 let content = content.trim().to_string();
@@ -371,7 +371,7 @@ fn main() {
 pub fn skip_slow_tests() -> bool {
 let should_skip = std::env::var("CI").is_err() && std::env::var("RUN_SLOW_TESTS").is_err();
 if should_skip {
-eprintln!("ignoring slow test")
+eprintln!("ignoring slow test");
 } else {
 let path = project_root().join("./target/.slow_tests_cookie");
 fs::write(&path, ".").unwrap();
@@ -432,7 +432,7 @@ pub fn bench(label: &'static str) -> impl Drop {

 impl Drop for Bencher {
 fn drop(&mut self) {
-eprintln!("{}: {}", self.label, self.sw.elapsed())
+eprintln!("{}: {}", self.label, self.sw.elapsed());
 }
 }

@@ -110,7 +110,7 @@ impl TextEdit {

 // FIXME: figure out a way to mutate the text in-place or reuse the
 // memory in some other way
-*text = buf
+*text = buf;
 }

 pub fn union(&mut self, other: TextEdit) -> Result<(), TextEdit> {
@@ -163,13 +163,13 @@ impl TextEditBuilder {
 self.indels.is_empty()
 }
 pub fn replace(&mut self, range: TextRange, replace_with: String) {
-self.indel(Indel::replace(range, replace_with))
+self.indel(Indel::replace(range, replace_with));
 }
 pub fn delete(&mut self, range: TextRange) {
-self.indel(Indel::delete(range))
+self.indel(Indel::delete(range));
 }
 pub fn insert(&mut self, offset: TextSize, text: String) {
-self.indel(Indel::insert(offset, text))
+self.indel(Indel::insert(offset, text));
 }
 pub fn finish(self) -> TextEdit {
 let mut indels = self.indels;
@@ -169,7 +169,7 @@ impl fmt::Display for Subtree {
 match tt {
 TokenTree::Leaf(Leaf::Punct(p)) => {
 needs_space = p.spacing == Spacing::Alone;
-fmt::Display::fmt(p, f)?
+fmt::Display::fmt(p, f)?;
 }
 tt => fmt::Display::fmt(tt, f)?,
 }
@@ -38,7 +38,7 @@ impl loader::Handle for NotifyHandle {
 NotifyHandle { sender, _thread: thread }
 }
 fn set_config(&mut self, config: loader::Config) {
-self.sender.send(Message::Config(config)).unwrap()
+self.sender.send(Message::Config(config)).unwrap();
 }
 fn invalidate(&mut self, path: AbsPathBuf) {
 self.sender.send(Message::Invalidate(path)).unwrap();
@@ -84,7 +84,7 @@ impl NotifyActor {
 if !config.watch.is_empty() {
 let (watcher_sender, watcher_receiver) = unbounded();
 let watcher = log_notify_error(RecommendedWatcher::new(move |event| {
-watcher_sender.send(event).unwrap()
+watcher_sender.send(event).unwrap();
 }));
 self.watcher = watcher.map(|it| (it, watcher_receiver));
 }
@@ -99,7 +99,7 @@ impl NotifyActor {
 for (i, entry) in config.load.into_iter().enumerate() {
 let watch = config.watch.contains(&i);
 if watch {
-self.watched_entries.push(entry.clone())
+self.watched_entries.push(entry.clone());
 }
 let files = self.load_entry(entry, watch);
 self.send(loader::Message::Loaded { files });
@@ -149,7 +149,7 @@ impl NotifyActor {
 Some((path, contents))
 })
 .collect();
-self.send(loader::Message::Loaded { files })
+self.send(loader::Message::Loaded { files });
 }
 }
 }
@@ -165,7 +165,7 @@ impl NotifyActor {
 .into_iter()
 .map(|file| {
 if watch {
-self.watch(file.clone())
+self.watch(file.clone());
 }
 let contents = read(file.as_path());
 (file, contents)
@@ -218,7 +218,7 @@ impl NotifyActor {
 }
 }
 fn send(&mut self, msg: loader::Message) {
-(self.sender)(msg)
+(self.sender)(msg);
 }
 }

@@ -112,7 +112,7 @@ impl FileSetConfig {
 let mut res = vec![FileSet::default(); self.len()];
 for (file_id, path) in vfs.iter() {
 let root = self.classify(path, &mut scratch_space);
-res[root].insert(file_id, path.clone())
+res[root].insert(file_id, path.clone());
 }
 res
 }
@@ -157,7 +157,7 @@ impl FileSetConfigBuilder {

 /// Add a new set of paths prefixes.
 pub fn add_file_set(&mut self, roots: Vec<VfsPath>) {
-self.roots.push(roots)
+self.roots.push(roots);
 }

 /// Build the `FileSetConfig`.
@@ -357,7 +357,7 @@ impl VirtualPath {
 if !res.pop() {
 return None;
 }
-path = &path["../".len()..]
+path = &path["../".len()..];
 }
 path = path.trim_start_matches("./");
 res.0 = format!("{}/{}", res.0, path);
@@ -63,7 +63,7 @@ impl<T> Eq for Idx<T> {}

 impl<T> Hash for Idx<T> {
 fn hash<H: Hasher>(&self, state: &mut H) {
-self.raw.hash(state)
+self.raw.hash(state);
 }
 }

@@ -71,7 +71,7 @@ impl<T> fmt::Debug for Idx<T> {
 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 let mut type_name = std::any::type_name::<T>();
 if let Some(idx) = type_name.rfind(':') {
-type_name = &type_name[idx + 1..]
+type_name = &type_name[idx + 1..];
 }
 write!(f, "Idx::<{}>({})", type_name, self.raw)
 }
@@ -13,7 +13,7 @@ const REQUIRED_RUST_VERSION: u32 = 55;
 impl flags::Install {
 pub(crate) fn run(self) -> Result<()> {
 if cfg!(target_os = "macos") {
-fix_path_for_mac().context("Fix path for mac")?
+fix_path_for_mac().context("Fix path for mac")?;
 }
 if let Some(server) = self.server() {
 install_server(server).context("install server")?;
@@ -148,7 +148,7 @@ fn install_server(opts: ServerOpt) -> Result<()> {
 eprintln!(
 "\nWARNING: at least rust 1.{}.0 is required to compile rust-analyzer\n",
 REQUIRED_RUST_VERSION,
-)
+);
 }
 let features = match opts.malloc {
 Malloc::System => &[][..],
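Note that calls which end a block as its value, such as `m.complete(p, FIELD_EXPR)` or `TokenSet(res)` in the hunks above, keep no semicolon: there the expression's value is the function's return value, so the lint does not apply and adding a semicolon would change the meaning. A small illustrative sketch (the type and function here are stand-ins, not rust-analyzer's real API):

// Stand-in types for illustration only.
struct CompletedMarker(u32);

fn complete(kind: u32) -> CompletedMarker {
    CompletedMarker(kind)
}

// The trailing call is the function's return value, so it must stay
// semicolon-free; `semicolon_if_nothing_returned` leaves such lines alone.
fn field_expr() -> CompletedMarker {
    complete(42)
}

fn main() {
    let m = field_expr();
    println!("kind = {}", m.0);
}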