Mirror of https://github.com/fish-shell/fish-shell
Merge branch 'andand_oror_exclam'

This merges support for &&, || and !. Fixes #4620.

Commit: 014b91488d
24 changed files with 540 additions and 285 deletions
@@ -43,6 +43,7 @@ This section is for changes merged to the `major` branch that are not also merge
- The names `argparse`, `read`, `set`, `status`, `test` and `[` are now reserved and not allowed as function names. This prevents users unintentionally breaking stuff (#3000).
- Wrapping completions (from `complete -w` or `function -w`) can now inject arguments. For example, `complete gco -w 'git checkout'` now works properly (#1976). The `alias` function has been updated to respect this behavior.
- The `jobs` builtin now has a `-q` and `--quiet` option to silence the output.
- fish now supports `&&`, `||`, and `!` (#4620).

## Other significant changes
- Command substitution output is now limited to 10 MB by default (#3822).
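For readers skimming the changelog entry above, here is an illustrative fish session showing the new operators; the statuses mirror the tests added in this change (tests/andandoror.in):

\fish
echo first && echo second      # 'echo second' runs only because 'echo first' succeeded
echo third || echo fourth      # 'echo fourth' is skipped because 'echo third' succeeded
true && false; echo $status    # prints 1
true || false; echo $status    # prints 0
! false; echo $status          # '!' negates the statement: prints 0
\endfish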
@@ -281,7 +281,7 @@ The fish user community extends fish in unique and useful ways via scripts that
- <a href="https://github.com/oh-my-fish/oh-my-fish">Oh My Fish</a>
- <a href="https://github.com/justinmayer/tacklebox">Tacklebox</a>

This is not an exhaustive list and the fish project has no opinion regarding the merits of the repositories listed above or the scripts found therein. We mention these only because you may find within them a solution to a need you have such as supporting the `&&` and `||` operators or improved integration with other tools that you use.
This is not an exhaustive list and the fish project has no opinion regarding the merits of the repositories listed above or the scripts found therein.

\htmlonly[block]
</div>
@@ -420,17 +420,23 @@ echo chips

\section tut_combiners Combiners (And, Or, Not)

Unlike other shells, `fish` does not have special syntax like && or || to combine commands. Instead it has commands `and`, `or`, and `not`.
fish supports the familiar `&&` and `||` to combine commands, and `!` to negate them:

\fish{cli-dark}
>_ cp file1.txt file1_bak.txt; and echo "Backup successful"; or echo "Backup failed"
>_ ./configure && make && sudo make install
\endfish

fish also supports `and`, `or`, and `not`. The first two are job modifiers and have lower precedence. Example usage:

\fish{cli-dark}
>_ cp file1.txt file1_bak.txt && cp file2.txt file2_bak.txt ; and echo "Backup successful"; or echo "Backup failed"
<outp>Backup failed</outp>
\endfish

As mentioned in <a href="#tut_semicolon">the section on the semicolon</a>, this can also be written in multiple lines, like so:

\fish
cp file1.txt file1_bak.txt
cp file1.txt file1_bak.txt && cp file2.txt file2_bak.txt
and echo "Backup successful"
or echo "Backup failed"
\endfish
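The combiner section above mentions `!` but none of its examples exercise it; a small supplementary sketch of negation, with behaviour matching the "Nots" cases of the tests added by this change:

\fish{cli-dark}
>_ ! true; echo $status
<outp>1</outp>
>_ not false && ! false; echo $status
<outp>0</outp>
\endfish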
@@ -584,10 +584,11 @@ x
# A special case. Tidy up after performing command substitution.
# Redirectors
s/\([^{|] *\)|/\1@redr{|}/g
s/\&@EOL$/@redr{@amp}@EOL/g
s/@amp@EOL$/@redr{@amp}@EOL/g
s/\([<>]\)@amp\([0-9]\)/@redr{\1@amp\2}/g
s/\([^{&] *\)&[^@a-z]/\1@redr{\&}/g
#s/\&@EOL$/@redr{@amp}@EOL/g
#s/@amp@EOL$/@redr{@amp}@EOL/g
#s/\([<>]\)@amp\([0-9]\)/@redr{\1@amp\2}/g
s/@amp&/@optr{@amp@amp}/g
#s/\([^{&] *\)&[^@a-z]/\1@redr{\&}/g
s/\([^{<>^] *\)\([0-9]* *[<>^][<>^]*[^@][a-zA-Z0-9./_-]*\)/\1@redr{\2}/g
s/\\}/}\\/g
#.
@@ -121,7 +121,7 @@ static void prettify_node_recursive(const wcstring &source, const parse_node_tre
    const bool is_root_case_list =
        node_type == symbol_case_item_list && parent_type != symbol_case_item_list;
    const bool is_if_while_header =
        (node_type == symbol_job || node_type == symbol_andor_job_list) &&
        (node_type == symbol_job_conjunction || node_type == symbol_andor_job_list) &&
        (parent_type == symbol_if_clause || parent_type == symbol_while_header);

    if (is_root_job_list || is_root_case_list || is_if_while_header) {
@ -535,10 +535,14 @@ static void test_tokenizer() {
|
|||
const wchar_t *str =
|
||||
L"string <redirection 2>&1 'nested \"quoted\" '(string containing subshells "
|
||||
L"){and,brackets}$as[$well (as variable arrays)] not_a_redirect^ ^ ^^is_a_redirect "
|
||||
L"&&& ||| "
|
||||
L"&& || & |"
|
||||
L"Compress_Newlines\n \n\t\n \nInto_Just_One";
|
||||
const int types[] = {TOK_STRING, TOK_REDIRECT, TOK_STRING, TOK_REDIRECT, TOK_STRING,
|
||||
TOK_STRING, TOK_STRING, TOK_REDIRECT, TOK_REDIRECT, TOK_STRING,
|
||||
TOK_STRING, TOK_END, TOK_STRING};
|
||||
const int types[] = {TOK_STRING, TOK_REDIRECT, TOK_STRING, TOK_REDIRECT, TOK_STRING,
|
||||
TOK_STRING, TOK_STRING, TOK_REDIRECT, TOK_REDIRECT, TOK_STRING,
|
||||
TOK_ANDAND, TOK_BACKGROUND, TOK_OROR, TOK_PIPE, TOK_ANDAND,
|
||||
TOK_OROR, TOK_BACKGROUND, TOK_PIPE, TOK_STRING, TOK_END,
|
||||
TOK_STRING};
|
||||
|
||||
say(L"Test correct tokenization");
|
||||
|
||||
|
@ -834,6 +838,38 @@ static void test_parser() {
|
|||
err(L"backgrounded 'while' conditional not reported as error");
|
||||
}
|
||||
|
||||
if (!parse_util_detect_errors(L"true | || false")) {
|
||||
err(L"bogus boolean statement error not detected on line %d", __LINE__);
|
||||
}
|
||||
|
||||
if (!parse_util_detect_errors(L"|| false")) {
|
||||
err(L"bogus boolean statement error not detected on line %d", __LINE__);
|
||||
}
|
||||
|
||||
if (!parse_util_detect_errors(L"&& false")) {
|
||||
err(L"bogus boolean statement error not detected on line %d", __LINE__);
|
||||
}
|
||||
|
||||
if (!parse_util_detect_errors(L"true ; && false")) {
|
||||
err(L"bogus boolean statement error not detected on line %d", __LINE__);
|
||||
}
|
||||
|
||||
if (!parse_util_detect_errors(L"true ; || false")) {
|
||||
err(L"bogus boolean statement error not detected on line %d", __LINE__);
|
||||
}
|
||||
|
||||
if (!parse_util_detect_errors(L"true || && false")) {
|
||||
err(L"bogus boolean statement error not detected on line %d", __LINE__);
|
||||
}
|
||||
|
||||
if (!parse_util_detect_errors(L"true && || false")) {
|
||||
err(L"bogus boolean statement error not detected on line %d", __LINE__);
|
||||
}
|
||||
|
||||
if (!parse_util_detect_errors(L"true && && false")) {
|
||||
err(L"bogus boolean statement error not detected on line %d", __LINE__);
|
||||
}
|
||||
|
||||
say(L"Testing basic evaluation");
|
||||
|
||||
// Ensure that we don't crash on infinite self recursion and mutual recursion. These must use
|
||||
|
@ -3410,6 +3446,11 @@ static void test_new_parser_correctness() {
|
|||
{L"begin; end", true},
|
||||
{L"begin if true; end; end;", true},
|
||||
{L"begin if true ; echo hi ; end; end", true},
|
||||
{L"true && false || false", true},
|
||||
{L"true || false; and true", true},
|
||||
{L"true || ||", false},
|
||||
{L"|| true", false},
|
||||
{L"true || \n\n false", true},
|
||||
};
|
||||
|
||||
for (size_t i = 0; i < sizeof parser_tests / sizeof *parser_tests; i++) {
|
||||
|
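One accepted case above, `{L"true || \n\n false", true}`, highlights a property of the new grammar: newlines are permitted after `&&` and `||` (via `optional_newlines`), so a conjunction can continue on the next line without a backslash. An illustrative sketch:

\fish
true &&
    echo "continued after &&"
false ||
    echo "continued after ||"
\endfish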
@ -3624,9 +3665,6 @@ static void test_new_parser_errors() {
|
|||
|
||||
{L"case", parse_error_unbalancing_case},
|
||||
{L"if true ; case ; end", parse_error_unbalancing_case},
|
||||
|
||||
{L"foo || bar", parse_error_double_pipe},
|
||||
{L"foo && bar", parse_error_double_background},
|
||||
};
|
||||
|
||||
for (size_t i = 0; i < sizeof tests / sizeof *tests; i++) {
|
||||
|
@ -3742,9 +3780,7 @@ static void test_error_messages() {
|
|||
{L"echo \"foo\"$\"bar\"", ERROR_NO_VAR_NAME},
|
||||
{L"echo foo $ bar", ERROR_NO_VAR_NAME},
|
||||
{L"echo foo$(foo)bar", ERROR_BAD_VAR_SUBCOMMAND1},
|
||||
{L"echo \"foo$(foo)bar\"", ERROR_BAD_VAR_SUBCOMMAND1},
|
||||
{L"echo foo || echo bar", ERROR_BAD_OR},
|
||||
{L"echo foo && echo bar", ERROR_BAD_AND}};
|
||||
{L"echo \"foo$(foo)bar\"", ERROR_BAD_VAR_SUBCOMMAND1}};
|
||||
|
||||
parse_error_list_t errors;
|
||||
for (size_t i = 0; i < sizeof error_tests / sizeof *error_tests; i++) {
|
||||
|
@ -3915,10 +3951,28 @@ static void test_highlighting() {
|
|||
{L"2>", highlight_spec_redirection},
|
||||
{NULL, -1}};
|
||||
|
||||
const highlight_component_t components15[] = {{L"if", highlight_spec_command},
|
||||
{L"true", highlight_spec_command},
|
||||
{L"&&", highlight_spec_operator},
|
||||
{L"false", highlight_spec_command},
|
||||
{L";", highlight_spec_statement_terminator},
|
||||
{L"or", highlight_spec_operator},
|
||||
{L"false", highlight_spec_command},
|
||||
{L"||", highlight_spec_operator},
|
||||
{L"true", highlight_spec_command},
|
||||
{L";", highlight_spec_statement_terminator},
|
||||
{L"and", highlight_spec_operator},
|
||||
{L"not", highlight_spec_operator},
|
||||
{L"!", highlight_spec_operator},
|
||||
{L"true", highlight_spec_command},
|
||||
{L";", highlight_spec_statement_terminator},
|
||||
{L"end", highlight_spec_command},
|
||||
{NULL, -1}};
|
||||
|
||||
const highlight_component_t *tests[] = {components1, components2, components3, components4,
|
||||
components5, components6, components7, components8,
|
||||
components9, components10, components11, components12,
|
||||
components13, components14};
|
||||
components13, components14, components15};
|
||||
for (size_t which = 0; which < sizeof tests / sizeof *tests; which++) {
|
||||
const highlight_component_t *components = tests[which];
|
||||
// Count how many we have.
|
||||
|
@ -3954,8 +4008,10 @@ static void test_highlighting() {
|
|||
|
||||
if (expected_colors.at(i) != colors.at(i)) {
|
||||
const wcstring spaces(i, L' ');
|
||||
err(L"Wrong color at index %lu in text (expected %#x, actual %#x):\n%ls\n%ls^", i,
|
||||
expected_colors.at(i), colors.at(i), text.c_str(), spaces.c_str());
|
||||
err(L"Wrong color in test %lu at index %lu in text (expected %#x, actual "
|
||||
L"%#x):\n%ls\n%ls^",
|
||||
which + 1, i, expected_colors.at(i), colors.at(i), text.c_str(),
|
||||
spaces.c_str());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1033,7 +1033,6 @@ const highlighter_t::color_array_t &highlighter_t::highlight() {
|
|||
case symbol_if_clause:
|
||||
case symbol_else_clause:
|
||||
case symbol_case_item:
|
||||
case symbol_boolean_statement:
|
||||
case symbol_decorated_statement:
|
||||
case symbol_if_statement: {
|
||||
this->color_children(node, parse_token_type_string, highlight_spec_command);
|
||||
|
@ -1059,6 +1058,20 @@ const highlighter_t::color_array_t &highlighter_t::highlight() {
|
|||
this->color_argument(fhead.child<1>());
|
||||
break;
|
||||
}
|
||||
|
||||
case parse_token_type_andand:
|
||||
case parse_token_type_oror:
|
||||
this->color_node(node, highlight_spec_operator);
|
||||
break;
|
||||
|
||||
case symbol_not_statement:
|
||||
this->color_children(node, parse_token_type_string, highlight_spec_operator);
|
||||
break;
|
||||
|
||||
case symbol_job_decorator:
|
||||
this->color_node(node, highlight_spec_operator);
|
||||
break;
|
||||
|
||||
case parse_token_type_pipe:
|
||||
case parse_token_type_background:
|
||||
case parse_token_type_end:
|
||||
|
|
|
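Together with the highlighting test added earlier (components15), the effect of these cases is that both the symbolic operators (`&&`, `||`, `!`) and the keyword forms (`and`, `or`, `not`) are coloured as operators, while `if`, `true`, `false`, and `end` keep their command colour. The test input, written out as a runnable line (illustrative):

\fish
if true && false; or false || true; and not ! true; end
\endfish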
@@ -1773,6 +1773,10 @@ static bool should_import_bash_history_line(const std::string &line) {
    if (line.find("((") != std::string::npos) return false;
    if (line.find("))") != std::string::npos) return false;

    // Temporarily skip lines with && and ||
    if (line.find("&&") != std::string::npos) return false;
    if (line.find("||") != std::string::npos) return false;

    // Skip lines that end with a backslash. We do not handle multiline commands from bash history.
    if (line.back() == '\\') return false;
@ -17,6 +17,9 @@ enum parse_token_type_t {
|
|||
token_type_invalid = 1,
|
||||
// Non-terminal tokens
|
||||
symbol_job_list,
|
||||
symbol_job_conjunction,
|
||||
symbol_job_conjunction_continuation,
|
||||
symbol_job_decorator,
|
||||
symbol_job,
|
||||
symbol_job_continuation,
|
||||
symbol_statement,
|
||||
|
@ -33,7 +36,7 @@ enum parse_token_type_t {
|
|||
symbol_switch_statement,
|
||||
symbol_case_item_list,
|
||||
symbol_case_item,
|
||||
symbol_boolean_statement,
|
||||
symbol_not_statement,
|
||||
symbol_decorated_statement,
|
||||
symbol_plain_statement,
|
||||
symbol_arguments_or_redirections_list,
|
||||
|
@ -52,6 +55,8 @@ enum parse_token_type_t {
|
|||
parse_token_type_pipe,
|
||||
parse_token_type_redirection,
|
||||
parse_token_type_background,
|
||||
parse_token_type_andand,
|
||||
parse_token_type_oror,
|
||||
parse_token_type_end,
|
||||
// Special terminal type that means no more tokens forthcoming.
|
||||
parse_token_type_terminate,
|
||||
|
@ -77,36 +82,12 @@ const enum_map<parse_token_type_t> token_enum_map[] = {
|
|||
{parse_token_type_pipe, L"parse_token_type_pipe"},
|
||||
{parse_token_type_redirection, L"parse_token_type_redirection"},
|
||||
{parse_token_type_string, L"parse_token_type_string"},
|
||||
{parse_token_type_andand, L"parse_token_type_andand"},
|
||||
{parse_token_type_oror, L"parse_token_type_oror"},
|
||||
{parse_token_type_terminate, L"parse_token_type_terminate"},
|
||||
{symbol_andor_job_list, L"symbol_andor_job_list"},
|
||||
{symbol_argument, L"symbol_argument"},
|
||||
{symbol_argument_list, L"symbol_argument_list"},
|
||||
{symbol_arguments_or_redirections_list, L"symbol_arguments_or_redirections_list"},
|
||||
{symbol_begin_header, L"symbol_begin_header"},
|
||||
{symbol_block_header, L"symbol_block_header"},
|
||||
{symbol_block_statement, L"symbol_block_statement"},
|
||||
{symbol_boolean_statement, L"symbol_boolean_statement"},
|
||||
{symbol_case_item, L"symbol_case_item"},
|
||||
{symbol_case_item_list, L"symbol_case_item_list"},
|
||||
{symbol_decorated_statement, L"symbol_decorated_statement"},
|
||||
{symbol_else_clause, L"symbol_else_clause"},
|
||||
{symbol_else_continuation, L"symbol_else_continuation"},
|
||||
{symbol_end_command, L"symbol_end_command"},
|
||||
{symbol_for_header, L"symbol_for_header"},
|
||||
{symbol_freestanding_argument_list, L"symbol_freestanding_argument_list"},
|
||||
{symbol_function_header, L"symbol_function_header"},
|
||||
{symbol_if_clause, L"symbol_if_clause"},
|
||||
{symbol_if_statement, L"symbol_if_statement"},
|
||||
{symbol_job, L"symbol_job"},
|
||||
{symbol_job_continuation, L"symbol_job_continuation"},
|
||||
{symbol_job_list, L"symbol_job_list"},
|
||||
{symbol_optional_newlines, L"symbol_optional_newlines"},
|
||||
{symbol_optional_background, L"symbol_optional_background"},
|
||||
{symbol_plain_statement, L"symbol_plain_statement"},
|
||||
{symbol_redirection, L"symbol_redirection"},
|
||||
{symbol_statement, L"symbol_statement"},
|
||||
{symbol_switch_statement, L"symbol_switch_statement"},
|
||||
{symbol_while_header, L"symbol_while_header"},
|
||||
// Define all symbols
|
||||
#define ELEM(sym) {symbol_##sym, L"symbol_" #sym},
|
||||
#include "parse_grammar_elements.inc"
|
||||
{token_type_invalid, L"token_type_invalid"},
|
||||
{token_type_invalid, NULL}};
|
||||
#define token_enum_map_len (sizeof token_enum_map / sizeof *token_enum_map)
|
||||
|
@ -130,21 +111,30 @@ enum parse_keyword_t {
|
|||
parse_keyword_if,
|
||||
parse_keyword_in,
|
||||
parse_keyword_not,
|
||||
parse_keyword_exclam,
|
||||
parse_keyword_or,
|
||||
parse_keyword_switch,
|
||||
parse_keyword_while,
|
||||
} __packed;
|
||||
|
||||
const enum_map<parse_keyword_t> keyword_enum_map[] = {
|
||||
{parse_keyword_and, L"and"}, {parse_keyword_begin, L"begin"},
|
||||
{parse_keyword_builtin, L"builtin"}, {parse_keyword_case, L"case"},
|
||||
{parse_keyword_command, L"command"}, {parse_keyword_else, L"else"},
|
||||
{parse_keyword_end, L"end"}, {parse_keyword_exec, L"exec"},
|
||||
{parse_keyword_for, L"for"}, {parse_keyword_function, L"function"},
|
||||
{parse_keyword_if, L"if"}, {parse_keyword_in, L"in"},
|
||||
{parse_keyword_not, L"not"}, {parse_keyword_or, L"or"},
|
||||
{parse_keyword_switch, L"switch"}, {parse_keyword_while, L"while"},
|
||||
{parse_keyword_none, NULL}};
|
||||
const enum_map<parse_keyword_t> keyword_enum_map[] = {{parse_keyword_exclam, L"!"},
|
||||
{parse_keyword_and, L"and"},
|
||||
{parse_keyword_begin, L"begin"},
|
||||
{parse_keyword_builtin, L"builtin"},
|
||||
{parse_keyword_case, L"case"},
|
||||
{parse_keyword_command, L"command"},
|
||||
{parse_keyword_else, L"else"},
|
||||
{parse_keyword_end, L"end"},
|
||||
{parse_keyword_exec, L"exec"},
|
||||
{parse_keyword_for, L"for"},
|
||||
{parse_keyword_function, L"function"},
|
||||
{parse_keyword_if, L"if"},
|
||||
{parse_keyword_in, L"in"},
|
||||
{parse_keyword_not, L"not"},
|
||||
{parse_keyword_or, L"or"},
|
||||
{parse_keyword_switch, L"switch"},
|
||||
{parse_keyword_while, L"while"},
|
||||
{parse_keyword_none, NULL}};
|
||||
#define keyword_enum_map_len (sizeof keyword_enum_map / sizeof *keyword_enum_map)
|
||||
|
||||
// Node tag values.
|
||||
|
@@ -158,7 +148,7 @@ enum parse_statement_decoration_t {
};

// Boolean statement types, stored in node tag.
enum parse_bool_statement_type_t { parse_bool_and, parse_bool_or, parse_bool_not };
enum parse_bool_statement_type_t { parse_bool_none, parse_bool_and, parse_bool_or };

// Whether a statement is backgrounded.
enum parse_optional_background_t { parse_no_background, parse_background };

@@ -183,10 +173,7 @@ enum parse_error_code_t {

    parse_error_unbalancing_end,   // end outside of block
    parse_error_unbalancing_else,  // else outside of if
    parse_error_unbalancing_case,  // case outside of switch

    parse_error_double_pipe,       // foo || bar, has special error message
    parse_error_double_background  // foo && bar, has special error message
    parse_error_unbalancing_case   // case outside of switch
};

enum { PARSER_TEST_ERROR = 1, PARSER_TEST_INCOMPLETE = 2 };

@@ -289,12 +276,6 @@ void parse_error_offset_source_start(parse_error_list_t *errors, size_t amt);
/// Error issued on $.
#define ERROR_NO_VAR_NAME _(L"Expected a variable name after this $.")

/// Error on ||.
#define ERROR_BAD_OR _(L"Unsupported use of '||'. In fish, please use 'COMMAND; or COMMAND'.")

/// Error on &&.
#define ERROR_BAD_AND _(L"Unsupported use of '&&'. In fish, please use 'COMMAND; and COMMAND'.")

/// Error on foo=bar.
#define ERROR_BAD_EQUALS_IN_COMMAND5 \
    _(L"Unsupported use of '='. To run '%ls' with a modified environment, please use 'env " \
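Dropping ERROR_BAD_OR and ERROR_BAD_AND reflects the user-visible half of this change: `||` and `&&` are no longer rejected with a "please use and/or" message, they simply execute. A rough before/after sketch (the old error text is quoted from the removed defines above):

\fish
# Before this change, fish reported:
#   Unsupported use of '||'. In fish, please use 'COMMAND; or COMMAND'.
# Now the operators run as expected:
true && echo "ran because the left side succeeded"
false || echo "ran because the left side failed"
\endfish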
@ -107,7 +107,7 @@ tnode_t<g::plain_statement> parse_execution_context_t::infinite_recursive_statem
|
|||
const wcstring &forbidden_function_name = parser->forbidden_function.back();
|
||||
|
||||
// Get the first job in the job list.
|
||||
auto first_job = job_list.next_in_list<g::job>();
|
||||
tnode_t<g::job> first_job = job_list.try_get_child<g::job_conjunction, 1>().child<0>();
|
||||
if (!first_job) {
|
||||
return {};
|
||||
}
|
||||
|
@ -215,7 +215,7 @@ bool parse_execution_context_t::job_is_simple_block(tnode_t<g::job> job_node) co
|
|||
return is_empty(statement.require_get_child<g::switch_statement, 0>().child<5>());
|
||||
case symbol_if_statement:
|
||||
return is_empty(statement.require_get_child<g::if_statement, 0>().child<3>());
|
||||
case symbol_boolean_statement:
|
||||
case symbol_not_statement:
|
||||
case symbol_decorated_statement:
|
||||
// not block statements
|
||||
return false;
|
||||
|
@ -244,12 +244,12 @@ parse_execution_result_t parse_execution_context_t::run_if_statement(
|
|||
}
|
||||
|
||||
// An if condition has a job and a "tail" of andor jobs, e.g. "foo ; and bar; or baz".
|
||||
tnode_t<g::job> condition_head = if_clause.child<1>();
|
||||
tnode_t<g::job_conjunction> condition_head = if_clause.child<1>();
|
||||
tnode_t<g::andor_job_list> condition_boolean_tail = if_clause.child<3>();
|
||||
|
||||
// Check the condition and the tail. We treat parse_execution_errored here as failure, in
|
||||
// accordance with historic behavior.
|
||||
parse_execution_result_t cond_ret = run_1_job(condition_head, ib);
|
||||
parse_execution_result_t cond_ret = run_job_conjunction(condition_head, ib);
|
||||
if (cond_ret == parse_execution_success) {
|
||||
cond_ret = run_job_list(condition_boolean_tail, ib);
|
||||
}
|
||||
|
@ -527,13 +527,13 @@ parse_execution_result_t parse_execution_context_t::run_while_statement(
|
|||
parse_execution_result_t ret = parse_execution_success;
|
||||
|
||||
// The conditions of the while loop.
|
||||
tnode_t<g::job> condition_head = header.child<1>();
|
||||
tnode_t<g::job_conjunction> condition_head = header.child<1>();
|
||||
tnode_t<g::andor_job_list> condition_boolean_tail = header.child<3>();
|
||||
|
||||
// Run while the condition is true.
|
||||
for (;;) {
|
||||
// Check the condition.
|
||||
parse_execution_result_t cond_ret = this->run_1_job(condition_head, wb);
|
||||
parse_execution_result_t cond_ret = this->run_job_conjunction(condition_head, wb);
|
||||
if (cond_ret == parse_execution_success) {
|
||||
cond_ret = run_job_list(condition_boolean_tail, wb);
|
||||
}
|
||||
|
@ -921,33 +921,11 @@ bool parse_execution_context_t::determine_io_chain(tnode_t<g::arguments_or_redir
|
|||
return !errored;
|
||||
}
|
||||
|
||||
parse_execution_result_t parse_execution_context_t::populate_boolean_process(
|
||||
job_t *job, process_t *proc, tnode_t<g::boolean_statement> bool_statement) {
|
||||
// Handle a boolean statement.
|
||||
bool skip_job = false;
|
||||
switch (bool_statement_type(bool_statement)) {
|
||||
case parse_bool_and: {
|
||||
// AND. Skip if the last job failed.
|
||||
skip_job = (proc_get_last_status() != 0);
|
||||
break;
|
||||
}
|
||||
case parse_bool_or: {
|
||||
// OR. Skip if the last job succeeded.
|
||||
skip_job = (proc_get_last_status() == 0);
|
||||
break;
|
||||
}
|
||||
case parse_bool_not: {
|
||||
// NOT. Negate it.
|
||||
job->set_flag(JOB_NEGATE, !job->get_flag(JOB_NEGATE));
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (skip_job) {
|
||||
return parse_execution_skipped;
|
||||
}
|
||||
parse_execution_result_t parse_execution_context_t::populate_not_process(
|
||||
job_t *job, process_t *proc, tnode_t<g::not_statement> not_statement) {
|
||||
job->set_flag(JOB_NEGATE, !job->get_flag(JOB_NEGATE));
|
||||
return this->populate_job_process(job, proc,
|
||||
bool_statement.require_get_child<g::statement, 1>());
|
||||
not_statement.require_get_child<g::statement, 1>());
|
||||
}
|
||||
|
||||
template <typename Type>
|
||||
|
@ -985,8 +963,8 @@ parse_execution_result_t parse_execution_context_t::populate_job_process(
|
|||
parse_execution_result_t result = parse_execution_success;
|
||||
|
||||
switch (specific_statement.type) {
|
||||
case symbol_boolean_statement: {
|
||||
result = this->populate_boolean_process(job, proc, {&tree(), &specific_statement});
|
||||
case symbol_not_statement: {
|
||||
result = this->populate_not_process(job, proc, {&tree(), &specific_statement});
|
||||
break;
|
||||
}
|
||||
case symbol_block_statement:
|
||||
|
@ -1220,19 +1198,64 @@ parse_execution_result_t parse_execution_context_t::run_1_job(tnode_t<g::job> jo
|
|||
return parse_execution_success;
|
||||
}
|
||||
|
||||
parse_execution_result_t parse_execution_context_t::run_job_conjunction(
|
||||
tnode_t<grammar::job_conjunction> job_expr, const block_t *associated_block) {
|
||||
parse_execution_result_t result = parse_execution_success;
|
||||
tnode_t<g::job_conjunction> cursor = job_expr;
|
||||
// continuation is the parent of the cursor
|
||||
tnode_t<g::job_conjunction_continuation> continuation;
|
||||
while (cursor) {
|
||||
if (should_cancel_execution(associated_block)) break;
|
||||
bool skip = false;
|
||||
if (continuation) {
|
||||
// Check the conjunction type.
|
||||
parse_bool_statement_type_t conj = bool_statement_type(continuation);
|
||||
assert((conj == parse_bool_and || conj == parse_bool_or) && "Unexpected conjunction");
|
||||
skip = should_skip(conj);
|
||||
}
|
||||
if (! skip) {
|
||||
result = run_1_job(cursor.child<0>(), associated_block);
|
||||
}
|
||||
continuation = cursor.child<1>();
|
||||
cursor = continuation.try_get_child<g::job_conjunction, 2>();
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
bool parse_execution_context_t::should_skip(parse_bool_statement_type_t type) const {
|
||||
switch (type) {
|
||||
case parse_bool_and:
|
||||
// AND. Skip if the last job failed.
|
||||
return proc_get_last_status() != 0;
|
||||
case parse_bool_or:
|
||||
// OR. Skip if the last job succeeded.
|
||||
return proc_get_last_status() == 0;
|
||||
default:
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
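`should_skip` is what gives `&&` and `||` their short-circuit behaviour: with `parse_bool_and` the following job is skipped when the last status is non-zero, with `parse_bool_or` when it is zero. In shell terms (illustrative):

\fish
false && echo "skipped"      # last status was non-zero, so the '&&' side is skipped
echo $status                 # still 1: the skipped job leaves $status from 'false'
true || echo "also skipped"  # last status was zero, so the '||' side is skipped
\endfish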
||||
template <typename Type>
|
||||
parse_execution_result_t parse_execution_context_t::run_job_list(tnode_t<Type> job_list,
|
||||
const block_t *associated_block) {
|
||||
// We handle both job_list and andor_job_list uniformly.
|
||||
static_assert(Type::token == symbol_job_list || Type::token == symbol_andor_job_list,
|
||||
"Not a job list");
|
||||
|
||||
parse_execution_result_t result = parse_execution_success;
|
||||
while (tnode_t<g::job> job = job_list.template next_in_list<g::job>()) {
|
||||
while (auto job_conj = job_list.template next_in_list<g::job_conjunction>()) {
|
||||
if (should_cancel_execution(associated_block)) break;
|
||||
result = this->run_1_job(job, associated_block);
|
||||
|
||||
// Maybe skip the job if it has a leading and/or.
|
||||
// Skipping is treated as success.
|
||||
if (should_skip(get_decorator(job_conj))) {
|
||||
result = parse_execution_success;
|
||||
} else {
|
||||
result = this->run_job_conjunction(job_conj, associated_block);
|
||||
}
|
||||
}
|
||||
|
||||
// Returns the last job executed.
|
||||
// Returns the result of the last job executed or skipped.
|
||||
return result;
|
||||
}
|
||||
|
||||
|
|
|
@ -72,6 +72,9 @@ class parse_execution_context_t {
|
|||
tnode_t<grammar::job_list> job_list, wcstring *out_func_name) const;
|
||||
bool is_function_context() const;
|
||||
|
||||
/// Return whether we should skip a job with the given bool statement type.
|
||||
bool should_skip(parse_bool_statement_type_t type) const;
|
||||
|
||||
/// Indicates whether a job is a simple block (one block, no redirections).
|
||||
bool job_is_simple_block(tnode_t<grammar::job> job) const;
|
||||
|
||||
|
@ -81,8 +84,8 @@ class parse_execution_context_t {
|
|||
// These create process_t structures from statements.
|
||||
parse_execution_result_t populate_job_process(job_t *job, process_t *proc,
|
||||
tnode_t<grammar::statement> statement);
|
||||
parse_execution_result_t populate_boolean_process(
|
||||
job_t *job, process_t *proc, tnode_t<grammar::boolean_statement> bool_statement);
|
||||
parse_execution_result_t populate_not_process(job_t *job, process_t *proc,
|
||||
tnode_t<grammar::not_statement> not_statement);
|
||||
parse_execution_result_t populate_plain_process(job_t *job, process_t *proc,
|
||||
tnode_t<grammar::plain_statement> statement);
|
||||
|
||||
|
@ -114,6 +117,7 @@ class parse_execution_context_t {
|
|||
io_chain_t *out_chain);
|
||||
|
||||
parse_execution_result_t run_1_job(tnode_t<grammar::job> job, const block_t *associated_block);
|
||||
parse_execution_result_t run_job_conjunction(tnode_t<grammar::job_conjunction> job_conj, const block_t *associated_block);
|
||||
template <typename Type>
|
||||
parse_execution_result_t run_job_list(tnode_t<Type> job_list_node,
|
||||
const block_t *associated_block);
|
||||
|
|
|
@@ -35,6 +35,8 @@ using tok_string = primitive<parse_token_type_string>;
using tok_pipe = primitive<parse_token_type_pipe>;
using tok_background = primitive<parse_token_type_background>;
using tok_redirection = primitive<parse_token_type_redirection>;
using tok_andand = primitive<parse_token_type_andand>;
using tok_oror = primitive<parse_token_type_oror>;

// Define keyword types.
template <parse_keyword_t Keyword>
@@ -195,14 +197,34 @@ struct alternative {};
    static const production_element_t *resolve(const parse_token_t &, const parse_token_t &, \
                                               parse_node_tag_t *);

// A job_list is a list of jobs, separated by semicolons or newlines
// A job_list is a list of job_conjunctions, separated by semicolons or newlines
DEF_ALT(job_list) {
    using normal = seq<job, job_list>;
    using normal = seq<job_decorator, job_conjunction, job_list>;
    using empty_line = seq<tok_end, job_list>;
    using empty = grammar::empty;
    ALT_BODY(job_list, normal, empty_line, empty);
};

// Job decorators are 'and' and 'or'. These apply to the whole job.
DEF_ALT(job_decorator) {
    using ands = single<keyword<parse_keyword_and>>;
    using ors = single<keyword<parse_keyword_or>>;
    using empty = grammar::empty;
    ALT_BODY(job_decorator, ands, ors, empty);
};

// A job_conjunction is a job followed by a continuation.
DEF(job_conjunction) produces_sequence<job, job_conjunction_continuation> {
    BODY(job_conjunction);
};

DEF_ALT(job_conjunction_continuation) {
    using andands = seq<tok_andand, optional_newlines, job_conjunction>;
    using orors = seq<tok_oror, optional_newlines, job_conjunction>;
    using empty = grammar::empty;
    ALT_BODY(job_conjunction_continuation, andands, orors, empty);
};
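To make the new productions concrete, here is a rough sketch of inputs this grammar accepts, drawn from the parser tests added in this change: a `job_conjunction` chains jobs with `&&`/`||` continuations, and a leading `job_decorator` (`and`/`or`) applies to the whole conjunction.

\fish
true && false || true     # one job_conjunction: a job plus '&&' and '||' continuations
true || false; and true   # 'and' decorates the second conjunction as a whole
\endfish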

// A job is a non-empty list of statements, separated by pipes. (Non-empty is useful for cases
// like if statements, where we require a command). To represent "non-empty", we require a
// statement, followed by a possibly empty job_continuation, and then optionally a background
@@ -217,12 +239,12 @@ DEF_ALT(job_continuation) {

// A statement is a normal command, or an if / while / and etc
DEF_ALT(statement) {
    using boolean = single<boolean_statement>;
    using nots = single<not_statement>;
    using block = single<block_statement>;
    using ifs = single<if_statement>;
    using switchs = single<switch_statement>;
    using decorated = single<decorated_statement>;
    ALT_BODY(statement, boolean, block, ifs, switchs, decorated);
    ALT_BODY(statement, nots, block, ifs, switchs, decorated);
};

// A block is a conditional, loop, or begin/end
@@ -231,7 +253,7 @@ produces_sequence<if_clause, else_clause, end_command, arguments_or_redirections
    BODY(if_statement)};

DEF(if_clause)
produces_sequence<keyword<parse_keyword_if>, job, tok_end, andor_job_list, job_list>{
produces_sequence<keyword<parse_keyword_if>, job_conjunction, tok_end, andor_job_list, job_list>{
    BODY(if_clause)};

DEF_ALT(else_clause) {

@@ -280,7 +302,8 @@ produces_sequence<keyword<parse_keyword_for>, tok_string, keyword<parse_keyword_
};

DEF(while_header)
produces_sequence<keyword<parse_keyword_while>, job, tok_end, andor_job_list>{BODY(while_header)};
produces_sequence<keyword<parse_keyword_while>, job_conjunction, tok_end, andor_job_list>{
    BODY(while_header)};

DEF(begin_header) produces_single<keyword<parse_keyword_begin>>{BODY(begin_header)};
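Because `if_clause` and `while_header` now take a `job_conjunction` rather than a bare `job`, the new operators can be used directly in conditions. A quick illustration, adapted from the tests added in this change:

\fish
if [ 0 = 1 ] || [ 5 -ge 3 ]
    echo "if test 4 ok"
end

set alpha 0
set beta 0
while [ $alpha -lt 2 ] && [ $beta -lt 3 ]
    set alpha (math $alpha + 1)
    set beta (math $beta + 1)
end
\endfish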
@@ -289,19 +312,17 @@ DEF(function_header)
produces_sequence<keyword<parse_keyword_function>, argument, argument_list, tok_end>{
    BODY(function_header)};

// A boolean statement is AND or OR or NOT
DEF_ALT(boolean_statement) {
    using ands = seq<keyword<parse_keyword_and>, statement>;
    using ors = seq<keyword<parse_keyword_or>, statement>;
DEF_ALT(not_statement) {
    using nots = seq<keyword<parse_keyword_not>, statement>;
    ALT_BODY(boolean_statement, ands, ors, nots);
    using exclams = seq<keyword<parse_keyword_exclam>, statement>;
    ALT_BODY(not_statement, nots, exclams);
};

// An andor_job_list is zero or more job lists, where each starts with an `and` or `or` boolean
// statement.
DEF_ALT(andor_job_list) {
    using empty = grammar::empty;
    using andor_job = seq<job, andor_job_list>;
    using andor_job = seq<job_decorator, job_conjunction, andor_job_list>;
    using empty_line = seq<tok_end, andor_job_list>;
    ALT_BODY(andor_job_list, empty, andor_job, empty_line);
};
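The `not_statement` alternative makes `not` and `!` interchangeable, and `andor_job_list` is what lets a condition's tail continue with `and`/`or` lines. A short sketch mirroring the tests added in this change:

\fish
not true && ! false; echo $status   # '!' and 'not' behave the same; prints 1
not ! ! not true; echo $status      # negations compose; prints 0

if true && false
    or true                         # andor_job_list: 'or' continues the condition
    echo "if test 3 ok"
end
\endfish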
@@ -1,6 +1,9 @@
// Define ELEM before including this file.
ELEM(job_list)
ELEM(job)
ELEM(job_decorator)
ELEM(job_conjunction)
ELEM(job_conjunction_continuation)
ELEM(job_continuation)
ELEM(statement)
ELEM(if_statement)

@@ -16,7 +19,7 @@ ELEM(for_header)
ELEM(while_header)
ELEM(begin_header)
ELEM(function_header)
ELEM(boolean_statement)
ELEM(not_statement)
ELEM(andor_job_list)
ELEM(decorated_statement)
ELEM(plain_statement)
@ -61,6 +61,41 @@ RESOLVE(job_list) {
|
|||
}
|
||||
}
|
||||
|
||||
// A job decorator is AND or OR
|
||||
RESOLVE(job_decorator) {
|
||||
UNUSED(token2);
|
||||
|
||||
switch (token1.keyword) {
|
||||
case parse_keyword_and: {
|
||||
*out_tag = parse_bool_and;
|
||||
return production_for<ands>();
|
||||
}
|
||||
case parse_keyword_or: {
|
||||
*out_tag = parse_bool_or;
|
||||
return production_for<ors>();
|
||||
}
|
||||
default: {
|
||||
*out_tag = parse_bool_none;
|
||||
return production_for<empty>();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
RESOLVE(job_conjunction_continuation) {
|
||||
UNUSED(token2);
|
||||
UNUSED(out_tag);
|
||||
switch (token1.type) {
|
||||
case parse_token_type_andand:
|
||||
*out_tag = parse_bool_and;
|
||||
return production_for<andands>();
|
||||
case parse_token_type_oror:
|
||||
*out_tag = parse_bool_or;
|
||||
return production_for<orors>();
|
||||
default:
|
||||
return production_for<empty>();
|
||||
}
|
||||
}
|
||||
|
||||
RESOLVE(job_continuation) {
|
||||
UNUSED(token2);
|
||||
UNUSED(out_tag);
|
||||
|
@ -108,10 +143,9 @@ RESOLVE(statement) {
|
|||
switch (token1.type) {
|
||||
case parse_token_type_string: {
|
||||
switch (token1.keyword) {
|
||||
case parse_keyword_and:
|
||||
case parse_keyword_or:
|
||||
case parse_keyword_not: {
|
||||
return production_for<boolean>();
|
||||
case parse_keyword_not:
|
||||
case parse_keyword_exclam: {
|
||||
return production_for<nots>();
|
||||
}
|
||||
case parse_keyword_for:
|
||||
case parse_keyword_while:
|
||||
|
@ -182,6 +216,19 @@ RESOLVE(case_item_list) {
|
|||
return production_for<empty>();
|
||||
}
|
||||
|
||||
RESOLVE(not_statement) {
|
||||
UNUSED(token2);
|
||||
UNUSED(out_tag);
|
||||
switch (token1.keyword) {
|
||||
case parse_keyword_not:
|
||||
return production_for<nots>();
|
||||
case parse_keyword_exclam:
|
||||
return production_for<exclams>();
|
||||
default:
|
||||
return NO_PRODUCTION;
|
||||
}
|
||||
}
|
||||
|
||||
RESOLVE(andor_job_list) {
|
||||
UNUSED(out_tag);
|
||||
|
||||
|
@ -245,27 +292,6 @@ RESOLVE(block_header) {
|
|||
}
|
||||
}
|
||||
|
||||
// A boolean statement is AND or OR or NOT.
|
||||
RESOLVE(boolean_statement) {
|
||||
UNUSED(token2);
|
||||
|
||||
switch (token1.keyword) {
|
||||
case parse_keyword_and: {
|
||||
*out_tag = parse_bool_and;
|
||||
return production_for<ands>();
|
||||
}
|
||||
case parse_keyword_or: {
|
||||
*out_tag = parse_bool_or;
|
||||
return production_for<ors>();
|
||||
}
|
||||
case parse_keyword_not: {
|
||||
*out_tag = parse_bool_not;
|
||||
return production_for<nots>();
|
||||
}
|
||||
default: { return NO_PRODUCTION; }
|
||||
}
|
||||
}
|
||||
|
||||
RESOLVE(decorated_statement) {
|
||||
|
||||
// If this is e.g. 'command --help' then the command is 'command' and not a decoration. If the
|
||||
|
@ -356,6 +382,8 @@ const production_element_t *parse_productions::production_for_token(parse_token_
|
|||
case parse_token_type_pipe:
|
||||
case parse_token_type_redirection:
|
||||
case parse_token_type_background:
|
||||
case parse_token_type_andand:
|
||||
case parse_token_type_oror:
|
||||
case parse_token_type_end:
|
||||
case parse_token_type_terminate: {
|
||||
debug(0, "Terminal token type %ls passed to %s", token_type_description(node_type),
|
||||
|
|
|
@ -138,30 +138,29 @@ static wcstring token_type_user_presentable_description(
|
|||
|
||||
switch (type) {
|
||||
// Hackish. We only support the following types.
|
||||
case symbol_statement: {
|
||||
case symbol_statement:
|
||||
return L"a command";
|
||||
}
|
||||
case symbol_argument: {
|
||||
case symbol_argument:
|
||||
return L"an argument";
|
||||
}
|
||||
case parse_token_type_string: {
|
||||
case symbol_job:
|
||||
case symbol_job_list:
|
||||
return L"a job";
|
||||
case parse_token_type_string:
|
||||
return L"a string";
|
||||
}
|
||||
case parse_token_type_pipe: {
|
||||
case parse_token_type_pipe:
|
||||
return L"a pipe";
|
||||
}
|
||||
case parse_token_type_redirection: {
|
||||
case parse_token_type_redirection:
|
||||
return L"a redirection";
|
||||
}
|
||||
case parse_token_type_background: {
|
||||
case parse_token_type_background:
|
||||
return L"a '&'";
|
||||
}
|
||||
case parse_token_type_end: {
|
||||
case parse_token_type_andand:
|
||||
return L"'&&'";
|
||||
case parse_token_type_oror:
|
||||
return L"'||'";
|
||||
case parse_token_type_end:
|
||||
return L"end of the statement";
|
||||
}
|
||||
case parse_token_type_terminate: {
|
||||
case parse_token_type_terminate:
|
||||
return L"end of the input";
|
||||
}
|
||||
default: { return format_string(L"a %ls", token_type_description(type)); }
|
||||
}
|
||||
}
|
||||
|
@ -213,43 +212,32 @@ wcstring parse_token_t::user_presentable_description() const {
|
|||
/// Convert from tokenizer_t's token type to a parse_token_t type.
|
||||
static inline parse_token_type_t parse_token_type_from_tokenizer_token(
|
||||
enum token_type tokenizer_token_type) {
|
||||
parse_token_type_t result = token_type_invalid;
|
||||
switch (tokenizer_token_type) {
|
||||
case TOK_STRING: {
|
||||
result = parse_token_type_string;
|
||||
break;
|
||||
}
|
||||
case TOK_PIPE: {
|
||||
result = parse_token_type_pipe;
|
||||
break;
|
||||
}
|
||||
case TOK_END: {
|
||||
result = parse_token_type_end;
|
||||
break;
|
||||
}
|
||||
case TOK_BACKGROUND: {
|
||||
result = parse_token_type_background;
|
||||
break;
|
||||
}
|
||||
case TOK_REDIRECT: {
|
||||
result = parse_token_type_redirection;
|
||||
break;
|
||||
}
|
||||
case TOK_ERROR: {
|
||||
result = parse_special_type_tokenizer_error;
|
||||
break;
|
||||
}
|
||||
case TOK_COMMENT: {
|
||||
result = parse_special_type_comment;
|
||||
break;
|
||||
}
|
||||
default: {
|
||||
debug(0, "Bad token type %d passed to %s", (int)tokenizer_token_type, __FUNCTION__);
|
||||
DIE("bad token type");
|
||||
break;
|
||||
}
|
||||
case TOK_NONE:
|
||||
DIE("TOK_NONE passed to parse_token_type_from_tokenizer_token");
|
||||
return token_type_invalid;
|
||||
case TOK_STRING:
|
||||
return parse_token_type_string;
|
||||
case TOK_PIPE:
|
||||
return parse_token_type_pipe;
|
||||
case TOK_ANDAND:
|
||||
return parse_token_type_andand;
|
||||
case TOK_OROR:
|
||||
return parse_token_type_oror;
|
||||
case TOK_END:
|
||||
return parse_token_type_end;
|
||||
case TOK_BACKGROUND:
|
||||
return parse_token_type_background;
|
||||
case TOK_REDIRECT:
|
||||
return parse_token_type_redirection;
|
||||
case TOK_ERROR:
|
||||
return parse_special_type_tokenizer_error;
|
||||
case TOK_COMMENT:
|
||||
return parse_special_type_comment;
|
||||
}
|
||||
return result;
|
||||
debug(0, "Bad token type %d passed to %s", (int)tokenizer_token_type, __FUNCTION__);
|
||||
DIE("bad token type");
|
||||
return token_type_invalid;
|
||||
}
|
||||
|
||||
/// Helper function for parse_dump_tree().
|
||||
|
@ -674,37 +662,8 @@ void parse_ll_t::parse_error_failed_production(struct parse_stack_element_t &sta
|
|||
parse_token_t token) {
|
||||
fatal_errored = true;
|
||||
if (this->should_generate_error_messages) {
|
||||
bool done = false;
|
||||
|
||||
// Check for ||.
|
||||
if (token.type == parse_token_type_pipe && token.source_start > 0) {
|
||||
// Here we wanted a statement and instead got a pipe. See if this is a double pipe: foo
|
||||
// || bar. If so, we have a special error message.
|
||||
const parse_node_t *prev_pipe = nodes.find_node_matching_source_location(
|
||||
parse_token_type_pipe, token.source_start - 1, NULL);
|
||||
if (prev_pipe != NULL) {
|
||||
// The pipe of the previous job abuts our current token. So we have ||.
|
||||
this->parse_error(token, parse_error_double_pipe, ERROR_BAD_OR);
|
||||
done = true;
|
||||
}
|
||||
}
|
||||
|
||||
// Check for &&.
|
||||
if (!done && token.type == parse_token_type_background && token.source_start > 0) {
|
||||
// Check to see if there was a previous token_background.
|
||||
const parse_node_t *prev_background = nodes.find_node_matching_source_location(
|
||||
parse_token_type_background, token.source_start - 1, NULL);
|
||||
if (prev_background != NULL) {
|
||||
// We have &&.
|
||||
this->parse_error(token, parse_error_double_background, ERROR_BAD_AND);
|
||||
done = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (!done) {
|
||||
const wcstring expected = stack_elem.user_presentable_description();
|
||||
this->parse_error_unexpected_token(expected.c_str(), token);
|
||||
}
|
||||
const wcstring expected = stack_elem.user_presentable_description();
|
||||
this->parse_error_unexpected_token(expected.c_str(), token);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -771,6 +730,8 @@ static bool type_is_terminal_type(parse_token_type_t type) {
|
|||
case parse_token_type_redirection:
|
||||
case parse_token_type_background:
|
||||
case parse_token_type_end:
|
||||
case parse_token_type_andand:
|
||||
case parse_token_type_oror:
|
||||
case parse_token_type_terminate: {
|
||||
return true;
|
||||
}
|
||||
|
@ -999,7 +960,7 @@ static inline parse_keyword_t keyword_with_name(const wchar_t *name) {
|
|||
|
||||
static bool is_keyword_char(wchar_t c) {
|
||||
return (c >= L'a' && c <= L'z') || (c >= L'A' && c <= L'Z') || (c >= L'0' && c <= L'9') ||
|
||||
c == L'\'' || c == L'"' || c == L'\\' || c == '\n';
|
||||
c == L'\'' || c == L'"' || c == L'\\' || c == '\n' || c == L'!';
|
||||
}
|
||||
|
||||
/// Given a token, returns the keyword it matches, or parse_keyword_none.
|
||||
|
|
|
@ -95,7 +95,7 @@ class parse_node_t {
|
|||
// This is used to store e.g. the statement decoration.
|
||||
parse_node_tag_t tag : 4;
|
||||
// Description
|
||||
wcstring describe(void) const;
|
||||
wcstring describe() const;
|
||||
|
||||
// Constructor
|
||||
explicit parse_node_t(parse_token_type_t ty)
|
||||
|
|
|
@ -1047,6 +1047,7 @@ parser_test_error_bits_t parse_util_detect_errors_in_argument(tnode_t<grammar::a
|
|||
/// Given that the job given by node should be backgrounded, return true if we detect any errors.
|
||||
static bool detect_errors_in_backgrounded_job(tnode_t<grammar::job> job,
|
||||
parse_error_list_t *parse_errors) {
|
||||
namespace g = grammar;
|
||||
auto source_range = job.source_range();
|
||||
if (!source_range) return false;
|
||||
|
||||
|
@ -1056,29 +1057,30 @@ static bool detect_errors_in_backgrounded_job(tnode_t<grammar::job> job,
|
|||
// foo & ; or bar
|
||||
// if foo & ; end
|
||||
// while foo & ; end
|
||||
if (job.try_get_parent<grammar::if_clause>()) {
|
||||
auto job_conj = job.try_get_parent<g::job_conjunction>();
|
||||
if (job_conj.try_get_parent<g::if_clause>()) {
|
||||
errored = append_syntax_error(parse_errors, source_range->start,
|
||||
BACKGROUND_IN_CONDITIONAL_ERROR_MSG);
|
||||
} else if (job.try_get_parent<grammar::while_header>()) {
|
||||
} else if (job_conj.try_get_parent<g::while_header>()) {
|
||||
errored = append_syntax_error(parse_errors, source_range->start,
|
||||
BACKGROUND_IN_CONDITIONAL_ERROR_MSG);
|
||||
} else if (auto job_list = job.try_get_parent<grammar::job_list>()) {
|
||||
} else if (auto jlist = job_conj.try_get_parent<g::job_list>()) {
|
||||
// This isn't very complete, e.g. we don't catch 'foo & ; not and bar'.
|
||||
// Build the job list and then advance it by one.
|
||||
auto first_job = job_list.next_in_list<grammar::job>();
|
||||
assert(first_job == job && "Expected first job to be the node we found");
|
||||
(void)first_job;
|
||||
// Try getting the next job as a boolean statement.
|
||||
auto next_job = job_list.next_in_list<grammar::job>();
|
||||
tnode_t<grammar::statement> next_stmt = next_job.child<0>();
|
||||
if (auto bool_stmt = next_stmt.try_get_child<grammar::boolean_statement, 0>()) {
|
||||
// Fetch the job list and then advance it by one.
|
||||
auto first_jconj = jlist.next_in_list<g::job_conjunction>();
|
||||
assert(first_jconj == job.try_get_parent<g::job_conjunction>() &&
|
||||
"Expected first job to be the node we found");
|
||||
(void)first_jconj;
|
||||
|
||||
// Try getting the next job's decorator.
|
||||
if (auto next_job_dec = jlist.next_in_list<g::job_decorator>()) {
|
||||
// The next job is indeed a boolean statement.
|
||||
parse_bool_statement_type_t bool_type = bool_statement_type(bool_stmt);
|
||||
if (bool_type == parse_bool_and) { // this is not allowed
|
||||
errored = append_syntax_error(parse_errors, bool_stmt.source_range()->start,
|
||||
parse_bool_statement_type_t bool_type = bool_statement_type(next_job_dec);
|
||||
if (bool_type == parse_bool_and) {
|
||||
errored = append_syntax_error(parse_errors, next_job_dec.source_range()->start,
|
||||
BOOL_AFTER_BACKGROUND_ERROR_MSG, L"and");
|
||||
} else if (bool_type == parse_bool_or) { // this is not allowed
|
||||
errored = append_syntax_error(parse_errors, bool_stmt.source_range()->start,
|
||||
} else if (bool_type == parse_bool_or) {
|
||||
errored = append_syntax_error(parse_errors, next_job_dec.source_range()->start,
|
||||
BOOL_AFTER_BACKGROUND_ERROR_MSG, L"or");
|
||||
}
|
||||
}
|
||||
|
@ -1096,7 +1098,8 @@ static bool detect_errors_in_plain_statement(const wcstring &buff_src,
|
|||
|
||||
// In a few places below, we want to know if we are in a pipeline.
|
||||
tnode_t<statement> st = pst.try_get_parent<decorated_statement>().try_get_parent<statement>();
|
||||
const bool is_in_pipeline = statement_is_in_pipeline(st, true /* count first */);
|
||||
pipeline_position_t pipe_pos = get_pipeline_position(st);
|
||||
bool is_in_pipeline = (pipe_pos != pipeline_position_t::none);
|
||||
|
||||
// We need to know the decoration.
|
||||
const enum parse_statement_decoration_t decoration = get_decoration(pst);
|
||||
|
@ -1106,6 +1109,19 @@ static bool detect_errors_in_plain_statement(const wcstring &buff_src,
|
|||
errored = append_syntax_error(parse_errors, source_start, EXEC_ERR_MSG, L"exec");
|
||||
}
|
||||
|
||||
// This is a somewhat stale check that 'and' and 'or' are not in pipelines, except at the
|
||||
// beginning. We can't disallow them as commands entirely because we need to support 'and
|
||||
// --help', etc.
|
||||
if (pipe_pos == pipeline_position_t::subsequent) {
|
||||
// check if our command is 'and' or 'or'. This is very clumsy; we don't catch e.g. quoted
|
||||
// commands.
|
||||
wcstring command = pst.child<0>().get_source(buff_src);
|
||||
if (command == L"and" || command == L"or") {
|
||||
errored =
|
||||
append_syntax_error(parse_errors, source_start, EXEC_ERR_MSG, command.c_str());
|
||||
}
|
||||
}
|
||||
|
||||
if (maybe_t<wcstring> mcommand = command_for_plain_statement(pst, buff_src)) {
|
||||
wcstring command = std::move(*mcommand);
|
||||
// Check that we can expand the command.
|
||||
|
@ -1250,16 +1266,9 @@ parser_test_error_bits_t parse_util_detect_errors(const wcstring &buff_src,
|
|||
has_unclosed_block = true;
|
||||
} else if (node.type == symbol_statement && !node.has_source()) {
|
||||
// Check for a statement without source in a pipeline, i.e. unterminated pipeline.
|
||||
has_unclosed_pipe |= statement_is_in_pipeline({&node_tree, &node}, false);
|
||||
} else if (node.type == symbol_boolean_statement) {
|
||||
// 'or' and 'and' can be in a pipeline, as long as they're first.
|
||||
tnode_t<g::boolean_statement> gbs{&node_tree, &node};
|
||||
parse_bool_statement_type_t type = bool_statement_type(gbs);
|
||||
if ((type == parse_bool_and || type == parse_bool_or) &&
|
||||
statement_is_in_pipeline(gbs.try_get_parent<g::statement>(),
|
||||
false /* don't count first */)) {
|
||||
errored = append_syntax_error(&parse_errors, node.source_start, EXEC_ERR_MSG,
|
||||
(type == parse_bool_and) ? L"and" : L"or");
|
||||
auto pipe_pos = get_pipeline_position({&node_tree, &node});
|
||||
if (pipe_pos != pipeline_position_t::none) {
|
||||
has_unclosed_pipe = true;
|
||||
}
|
||||
} else if (node.type == symbol_argument) {
|
||||
tnode_t<g::argument> arg{&node_tree, &node};
|
||||
|
|
|
@ -46,10 +46,14 @@ enum parse_statement_decoration_t get_decoration(tnode_t<grammar::plain_statemen
|
|||
return decoration;
|
||||
}
|
||||
|
||||
enum parse_bool_statement_type_t bool_statement_type(tnode_t<grammar::boolean_statement> stmt) {
|
||||
enum parse_bool_statement_type_t bool_statement_type(tnode_t<grammar::job_decorator> stmt) {
|
||||
return static_cast<parse_bool_statement_type_t>(stmt.tag());
|
||||
}
|
||||
|
||||
enum parse_bool_statement_type_t bool_statement_type(tnode_t<grammar::job_conjunction_continuation> cont) {
|
||||
return static_cast<parse_bool_statement_type_t>(cont.tag());
|
||||
}
|
||||
|
||||
maybe_t<redirection_type_t> redirection_type(tnode_t<grammar::redirection> redirection,
|
||||
const wcstring &src, int *out_fd,
|
||||
wcstring *out_target) {
|
||||
|
@ -107,24 +111,35 @@ bool job_node_is_background(tnode_t<grammar::job> job) {
|
|||
return bg.tag() == parse_background;
|
||||
}
|
||||
|
||||
bool statement_is_in_pipeline(tnode_t<grammar::statement> st, bool include_first) {
|
||||
parse_bool_statement_type_t get_decorator(tnode_t<grammar::job_conjunction> conj) {
|
||||
using namespace grammar;
|
||||
tnode_t<job_decorator> dec;
|
||||
// We have two possible parents: job_list and andor_job_list.
|
||||
if (auto p = conj.try_get_parent<job_list>()) {
|
||||
dec = p.require_get_child<job_decorator, 0>();
|
||||
} else if (auto p = conj.try_get_parent<andor_job_list>()) {
|
||||
dec = p.require_get_child<job_decorator, 0>();
|
||||
}
|
||||
// note this returns 0 (none) if dec is empty.
|
||||
return bool_statement_type(dec);
|
||||
}
|
||||
|
||||
pipeline_position_t get_pipeline_position(tnode_t<grammar::statement> st) {
|
||||
using namespace grammar;
|
||||
if (!st) {
|
||||
return false;
|
||||
return pipeline_position_t::none;
|
||||
}
|
||||
|
||||
// If we're part of a job continuation, we're definitely in a pipeline.
|
||||
if (st.try_get_parent<job_continuation>()) {
|
||||
return true;
|
||||
return pipeline_position_t::subsequent;
|
||||
}
|
||||
|
||||
// If include_first is set, check if we're the beginning of a job, and if so, whether that job
|
||||
// Check if we're the beginning of a job, and if so, whether that job
|
||||
// has a non-empty continuation.
|
||||
if (include_first) {
|
||||
tnode_t<job_continuation> jc = st.try_get_parent<job>().child<1>();
|
||||
if (jc.try_get_child<statement, 2>()) {
|
||||
return true;
|
||||
}
|
||||
tnode_t<job_continuation> jc = st.try_get_parent<job>().child<1>();
|
||||
if (jc.try_get_child<statement, 2>()) {
|
||||
return pipeline_position_t::first;
|
||||
}
|
||||
return false;
|
||||
return pipeline_position_t::none;
|
||||
}
|
||||
|
|
src/tnode.h (26 changed lines)
@ -91,6 +91,8 @@ class tnode_t {
|
|||
// Helper to return whether the given tree is the same as ours.
|
||||
bool matches_node_tree(const parse_node_tree_t &t) const { return &t == tree; }
|
||||
|
||||
const parse_node_tree_t *get_tree() const { return tree; }
|
||||
|
||||
bool has_source() const { return nodeptr && nodeptr->has_source(); }
|
||||
|
||||
// return the tag, or 0 if missing.
|
||||
|
@ -147,7 +149,7 @@ class tnode_t {
|
|||
const parse_node_t *child = nullptr;
|
||||
if (nodeptr) child = tree->get_child(*nodeptr, Index);
|
||||
if (child && child->type == ChildType::token) return {tree, child};
|
||||
return {};
|
||||
return {tree, nullptr};
|
||||
}
|
||||
|
||||
/// assert that this is not empty and that the child at index Index has the given type, then
|
||||
|
@ -206,7 +208,7 @@ class tnode_t {
|
|||
// We require that we can contain ourselves, and ItemType as well.
|
||||
static_assert(child_type_possible<Type, Type>(), "Is not a list");
|
||||
static_assert(child_type_possible<Type, ItemType>(), "Is not a list of that type");
|
||||
if (!nodeptr) return {};
|
||||
if (!nodeptr) return {tree, nullptr};
|
||||
const parse_node_t *next =
|
||||
tree->next_node_in_node_list(*nodeptr, ItemType::token, &nodeptr);
|
||||
return {tree, next};
|
||||
|
@ -232,7 +234,9 @@ maybe_t<wcstring> command_for_plain_statement(tnode_t<grammar::plain_statement>
|
|||
parse_statement_decoration_t get_decoration(tnode_t<grammar::plain_statement> stmt);
|
||||
|
||||
/// Return the type for a boolean statement.
|
||||
enum parse_bool_statement_type_t bool_statement_type(tnode_t<grammar::boolean_statement> stmt);
|
||||
enum parse_bool_statement_type_t bool_statement_type(tnode_t<grammar::job_decorator> stmt);
|
||||
|
||||
enum parse_bool_statement_type_t bool_statement_type(tnode_t<grammar::job_conjunction_continuation> stmt);
|
||||
|
||||
/// Given a redirection, get the redirection type (or none) and target (file path, or fd).
|
||||
maybe_t<redirection_type_t> redirection_type(tnode_t<grammar::redirection> redirection,
|
||||
|
@ -249,9 +253,19 @@ arguments_node_list_t get_argument_nodes(tnode_t<grammar::arguments_or_redirecti
|
|||
/// Return whether the given job is background because it has a & symbol.
|
||||
bool job_node_is_background(tnode_t<grammar::job>);
|
||||
|
||||
/// Return whether the statement is part of a pipeline. If include_first is set, the first command
|
||||
/// in a pipeline is considered part of it; otherwise only the second or additional commands are.
|
||||
bool statement_is_in_pipeline(tnode_t<grammar::statement> st, bool include_first);
|
||||
/// If the conjunction is has a decorator (and/or), return it; otherwise return none. This only
|
||||
/// considers the leading conjunction, e.g. in `and true || false` only the 'true' conjunction will
|
||||
/// return 'and'.
|
||||
parse_bool_statement_type_t get_decorator(tnode_t<grammar::job_conjunction>);
|
||||
|
||||
/// Return whether the statement is part of a pipeline.
|
||||
/// This doesn't detect e.g. pipelines involving our parent's block statements.
|
||||
enum class pipeline_position_t {
|
||||
none, // not part of a pipeline
|
||||
first, // first command in a pipeline
|
||||
subsequent // second or further command in a pipeline
|
||||
};
|
||||
pipeline_position_t get_pipeline_position(tnode_t<grammar::statement> st);
|
||||
|
||||
/// Check whether an argument_list is a root list.
|
||||
inline bool argument_list_is_root(tnode_t<grammar::argument_list> list) {
|
||||
|
|
|
@@ -528,16 +528,28 @@ maybe_t<tok_t> tokenizer_t::tok_next() {
            break;
        }
        case L'&': {
            result.type = TOK_BACKGROUND;
            result.length = 1;
            this->buff++;
            if (this->buff[1] == L'&') {
                result.type = TOK_ANDAND;
                result.length = 2;
                this->buff += 2;
            } else {
                result.type = TOK_BACKGROUND;
                result.length = 1;
                this->buff++;
            }
            break;
        }
        case L'|': {
            result.type = TOK_PIPE;
            result.redirected_fd = 1;
            result.length = 1;
            this->buff++;
            if (this->buff[1] == L'|') {
                result.type = TOK_OROR;
                result.length = 2;
                this->buff += 2;
            } else {
                result.type = TOK_PIPE;
                result.redirected_fd = 1;
                result.length = 1;
                this->buff++;
            }
            break;
        }
        case L'>':
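The tokenizer now peeks one character ahead, so a doubled `&` or `|` becomes a single TOK_ANDAND/TOK_OROR token instead of a background or pipe token. Behaviourally (illustrative):

\fish
sleep 1 &                   # single '&': background the job
true && echo "ran"          # doubled '&': logical AND
echo hi | cat               # single '|': pipe
false || echo "fallback"    # doubled '|': logical OR
\endfish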
@@ -14,6 +14,8 @@ enum token_type {
    TOK_ERROR,       /// Error reading token
    TOK_STRING,      /// String token
    TOK_PIPE,        /// Pipe token
    TOK_ANDAND,      /// && token
    TOK_OROR,        /// || token
    TOK_END,         /// End token (semicolon or newline, not literal end)
    TOK_REDIRECT,    /// redirection token
    TOK_BACKGROUND,  /// send job to bg token
tests/andandoror.err (new file)
@@ -0,0 +1,15 @@

####################
# Basic && and || support

####################
# && and || in if statements

####################
# && and || in while statements

####################
# Nots

####################
# Complex scenarios
tests/andandoror.in (new file)
@@ -0,0 +1,46 @@
logmsg "Basic && and || support"

echo first && echo second
echo third || echo fourth
true && false ; echo "true && false: $status"
true || false ; echo "true || false: $status"
true && false || true ; echo "true && false || true: $status"

logmsg "&& and || in if statements"

if true || false ; echo "if test 1 ok" ; end
if true && false ; else; echo "if test 2 ok" ; end
if true && false ; or true ; echo "if test 3 ok" ; end
if [ 0 = 1 ] || [ 5 -ge 3 ] ; echo "if test 4 ok"; end

logmsg "&& and || in while statements"

set alpha 0
set beta 0
set gamma 0
set delta 0
while [ $alpha -lt 2 ] && [ $beta -lt 3 ]
    or [ $gamma -lt 4 ] || [ $delta -lt 5 ]
    echo $alpha $beta $gamma
    set alpha ( math $alpha + 1 )
    set beta ( math $beta + 1 )
    set gamma ( math $gamma + 1 )
    set delta ( math $delta + 1 )
end

logmsg "Nots"

true && ! false ; echo $status
not true && ! false ; echo $status
not not not true ; echo $status
not ! ! not true ; echo $status
not ! echo not ! ; echo $status

logmsg "Complex scenarios"

begin; echo 1 ; false ; end || begin ; echo 2 && echo 3 ; end

if false && true
    or not false
    echo 4
end
tests/andandoror.out (new file)
@@ -0,0 +1,40 @@

####################
# Basic && and || support
first
second
third
true && false: 1
true || false: 0
true && false || true: 0

####################
# && and || in if statements
if test 1 ok
if test 2 ok
if test 3 ok
if test 4 ok

####################
# && and || in while statements
0 0 0
1 1 1
2 2 2
3 3 3
4 4 4

####################
# Nots
0
1
1
0
not !
0

####################
# Complex scenarios
1
2
3
4