Mirror of https://github.com/fish-shell/fish-shell, synced 2024-12-25 20:33:08 +00:00.

Commit 53814983ff: Update style and formatting to conform to fish style guide.
Parent: e2fe873049

28 changed files with 703 additions and 683 deletions
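For readers skimming the hunks below: the commit message points at the fish style guide, and the visible pattern in these hunks is that compact one-line struct and enum definitions are split so that braces and members each get their own line. A minimal illustrative sketch of that before/after shape (the type name and fields here are hypothetical, not taken from the commit):

    // Before: everything packed onto one line.
    struct sample_case_t { const wchar_t *input; const wchar_t *expected; };

    // After: the layout used throughout this commit, with braces on their
    // own lines and one member per line.
    struct sample_case_formatted_t
    {
        const wchar_t *input;
        const wchar_t *expected;
    };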
builtin.cpp (40 lines changed)

@@ -1038,14 +1038,14 @@ static int builtin_emit(parser_t &parser, wchar_t **argv)
static int builtin_generic(parser_t &parser, wchar_t **argv)
{
int argc=builtin_count_args(argv);

/* Hackish - if we have no arguments other than the command, we are a "naked invocation" and we just print help */
if (argc == 1)
{
builtin_print_help(parser, argv[0], stdout_buffer);
return STATUS_BUILTIN_ERROR;
}

woptind=0;

static const struct woption

@@ -1754,16 +1754,16 @@ static int builtin_pwd(parser_t &parser, wchar_t **argv)
int define_function(parser_t &parser, const wcstring_list_t &c_args, const wcstring &contents, wcstring *out_err)
{
assert(out_err != NULL);

/* wgetopt expects 'function' as the first argument. Make a new wcstring_list with that property. */
wcstring_list_t args;
args.push_back(L"function");
args.insert(args.end(), c_args.begin(), c_args.end());

/* Hackish const_cast matches the one in builtin_run */
const null_terminated_array_t<wchar_t> argv_array(args);
wchar_t **argv = const_cast<wchar_t **>(argv_array.get());

int argc = builtin_count_args(argv);
int res=STATUS_BUILTIN_OK;
wchar_t *desc=0;

@@ -1806,9 +1806,9 @@ int define_function(parser_t &parser, const wcstring_list_t &c_args, const wcstr
case 0:
if (long_options[opt_index].flag != 0)
break;

append_format(*out_err,
BUILTIN_ERR_UNKNOWN,
argv[0],

@@ -1876,7 +1876,7 @@ int define_function(parser_t &parser, const wcstring_list_t &c_args, const wcstr
if (is_subshell)
{
size_t block_idx = 0;

/* Find the outermost substitution block */
for (block_idx = 0; ; block_idx++)
{

@@ -1884,7 +1884,7 @@ int define_function(parser_t &parser, const wcstring_list_t &c_args, const wcstr
if (b == NULL || b->type() == SUBST)
break;
}

/* Go one step beyond that, to get to the caller */
const block_t *caller_block = parser.block_at_index(block_idx + 1);
if (caller_block != NULL && caller_block->job != NULL)

@@ -2046,9 +2046,9 @@ int define_function(parser_t &parser, const wcstring_list_t &c_args, const wcstr
event_t &e = d.events.at(i);
e.function_name = d.name;
}

d.definition = contents.c_str();

// TODO: fix def_offset inside function_add
function_add(d, parser);
}

@@ -2068,7 +2068,7 @@ static int builtin_function(parser_t &parser, wchar_t **argv)
builtin_print_help(parser, argv[0], stdout_buffer);
return STATUS_BUILTIN_OK;
}

int argc = builtin_count_args(argv);
int res=STATUS_BUILTIN_OK;
wchar_t *desc=0;

@@ -2181,7 +2181,7 @@ static int builtin_function(parser_t &parser, wchar_t **argv)
if (is_subshell)
{
size_t block_idx = 0;

/* Find the outermost substitution block */
for (block_idx = 0; ; block_idx++)
{

@@ -2189,7 +2189,7 @@ static int builtin_function(parser_t &parser, wchar_t **argv)
if (b == NULL || b->type() == SUBST)
break;
}

/* Go one step beyond that, to get to the caller */
const block_t *caller_block = parser.block_at_index(block_idx + 1);
if (caller_block != NULL && caller_block->job != NULL)

@@ -3965,7 +3965,7 @@ static int builtin_break_continue(parser_t &parser, wchar_t **argv)
{
parser.block_at_index(block_idx)->skip = true;
}

/* Skip the loop itself */
block_t *loop_block = parser.block_at_index(loop_idx);
loop_block->skip = true;

@@ -4043,7 +4043,7 @@ static int builtin_return(parser_t &parser, wchar_t **argv)
builtin_print_help(parser, argv[0], stderr_buffer);
return STATUS_BUILTIN_ERROR;
}

/* Skip everything up to (and then including) the function block */
for (size_t i=0; i < function_block_idx; i++)
{

@@ -4063,8 +4063,8 @@ static int builtin_switch(parser_t &parser, wchar_t **argv)
{
int res=STATUS_BUILTIN_OK;
int argc = builtin_count_args(argv);

/* Hackish - if we have no arguments other than the command, we are a "naked invocation" and we just print help */
if (argc == 1)
{
builtin_print_help(parser, argv[0], stdout_buffer);

@@ -4307,7 +4307,7 @@ int builtin_parse(parser_t &parser, wchar_t **argv)
stdout_buffer.append(errors.at(i).describe(src));
stdout_buffer.push_back(L'\n');
}

stdout_buffer.append(L"(Reparsed with continue after error)\n");
parse_tree.clear();
errors.clear();

(file name not shown)

@@ -712,7 +712,7 @@ static int builtin_set(parser_t &parser, wchar_t **argv)
retcode = 1;
break;
}

size_t idx_count = indexes.size();
size_t val_count = argc-woptind-1;

(file name not shown)

@@ -1130,7 +1130,7 @@ static size_t read_unquoted_escape(const wchar_t *input, wcstring *result, bool
switch (c)
{
/* A null character after a backslash is an error */
case L'\0':
{
/* Adjust in_pos to only include the backslash */

@@ -1177,11 +1177,11 @@ static size_t read_unquoted_escape(const wchar_t *input, wcstring *result, bool
{
chars=8;
max_val = WCHAR_MAX;

// Don't exceed the largest Unicode code point - see #1107
if (0x10FFFF < max_val)
max_val = (wchar_t)0x10FFFF;

break;
}
complete.cpp (58 lines changed)

@@ -472,7 +472,7 @@ void completion_autoload_t::command_removed(const wcstring &cmd)
void append_completion(std::vector<completion_t> &completions, const wcstring &comp, const wcstring &desc, complete_flags_t flags, string_fuzzy_match_t match)
{
/* If we just constructed the completion and used push_back, we would get two string copies. Try to avoid that by making a stubby completion in the vector first, and then copying our string in. Note that completion_t's constructor will munge 'flags' so it's important that we pass those to the constructor.

Nasty hack for #1241 - since the constructor needs the completion string to resolve AUTO_SPACE, and we aren't providing it with the completion, we have to do the resolution ourselves. We should get this resolving out of the constructor.
*/
const wcstring empty;

@@ -1811,56 +1811,56 @@ void complete(const wcstring &cmd_with_subcmds, std::vector<completion_t> &comps
parse_util_cmdsubst_extent(cmd_with_subcmds.c_str(), cmd_with_subcmds.size(), &cmdsubst_begin, &cmdsubst_end);
assert(cmdsubst_begin != NULL && cmdsubst_end != NULL && cmdsubst_end >= cmdsubst_begin);
const wcstring cmd = wcstring(cmdsubst_begin, cmdsubst_end - cmdsubst_begin);

/* Make our completer */
completer_t completer(cmd, flags);

wcstring current_command;
const size_t pos = cmd.size();
bool done=false;
bool use_command = 1;
bool use_function = 1;
bool use_builtin = 1;

// debug( 1, L"Complete '%ls'", cmd );

const wchar_t *cmd_cstr = cmd.c_str();
const wchar_t *tok_begin = NULL, *prev_begin = NULL, *prev_end = NULL;
parse_util_token_extent(cmd_cstr, cmd.size(), &tok_begin, NULL, &prev_begin, &prev_end);

/**
If we are completing a variable name or a tilde expansion user
name, we do that and return. No need for any other completions.
*/

const wcstring current_token = tok_begin;

if (!done)
{
done = completer.try_complete_variable(current_token) || completer.try_complete_user(current_token);
}

if (!done)
{
//const size_t prev_token_len = (prev_begin ? prev_end - prev_begin : 0);
//const wcstring prev_token(prev_begin, prev_token_len);

parse_node_tree_t tree;
parse_tree_from_string(cmd, parse_flag_continue_after_error | parse_flag_accept_incomplete_tokens, &tree, NULL);

/* Find the plain statement that contains the position */
const parse_node_t *plain_statement = tree.find_node_matching_source_location(symbol_plain_statement, pos, NULL);
if (plain_statement != NULL)
{
assert(plain_statement->has_source() && plain_statement->type == symbol_plain_statement);

/* Get the command node */
const parse_node_t *cmd_node = tree.get_child(*plain_statement, 0, parse_token_type_string);

/* Get the actual command string */
if (cmd_node != NULL)
current_command = cmd_node->get_source(cmd);

/* Check the decoration */
switch (tree.decoration_for_plain_statement(*plain_statement))
{

@@ -1869,20 +1869,20 @@ void complete(const wcstring &cmd_with_subcmds, std::vector<completion_t> &comps
use_function = true;
use_builtin = true;
break;

case parse_statement_decoration_command:
use_command = true;
use_function = false;
use_builtin = false;
break;

case parse_statement_decoration_builtin:
use_command = false;
use_function = false;
use_builtin = true;
break;
}

if (cmd_node && cmd_node->location_in_or_at_end_of_source_range(pos))
{
/* Complete command filename */

@@ -1892,7 +1892,7 @@ void complete(const wcstring &cmd_with_subcmds, std::vector<completion_t> &comps
{
/* Get all the arguments */
const parse_node_tree_t::parse_node_list_t all_arguments = tree.find_nodes(*plain_statement, symbol_argument);

/* See whether we are in an argument. We may also be in a redirection, or nothing at all. */
size_t matching_arg_index = -1;
for (size_t i=0; i < all_arguments.size(); i++)

@@ -1904,17 +1904,17 @@ void complete(const wcstring &cmd_with_subcmds, std::vector<completion_t> &comps
break;
}
}

bool had_ddash = false;
wcstring current_argument, previous_argument;
if (matching_arg_index != (size_t)(-1))
{
/* Get the current argument and the previous argument, if we have one */
current_argument = all_arguments.at(matching_arg_index)->get_source(cmd);

if (matching_arg_index > 0)
previous_argument = all_arguments.at(matching_arg_index - 1)->get_source(cmd);

/* Check to see if we have a preceding double-dash */
for (size_t i=0; i < matching_arg_index; i++)
{

@@ -1925,34 +1925,34 @@ void complete(const wcstring &cmd_with_subcmds, std::vector<completion_t> &comps
}
}
}

bool do_file = false;

wcstring current_command_unescape, previous_argument_unescape, current_argument_unescape;
if (unescape_string(current_command, &current_command_unescape, UNESCAPE_DEFAULT) &&
unescape_string(previous_argument, &previous_argument_unescape, UNESCAPE_DEFAULT) &&
unescape_string(current_argument, &current_argument_unescape, UNESCAPE_INCOMPLETE))
{
do_file = completer.complete_param(current_command_unescape,
previous_argument_unescape,
current_argument_unescape,
!had_ddash);
}

/* If we have found no command specific completions at all, fall back to using file completions. */
if (completer.empty())
do_file = true;

/* And if we're autosuggesting, and the token is empty, don't do file suggestions */
if ((flags & COMPLETION_REQUEST_AUTOSUGGESTION) && current_argument_unescape.empty())
do_file = false;

/* This function wants the unescaped string */
completer.complete_param_expand(current_token, do_file);
}
}
}

comps = completer.get_completions();
}
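The append_completion comment above describes avoiding an extra string copy by pushing a stub element into the vector first and then writing the string into it. A generic sketch of that pattern with simplified types (this entry_t and append_entry are illustrative, not fish's actual completion_t API):

    #include <string>
    #include <vector>

    struct entry_t
    {
        std::string text; // stands in for the completion string
    };

    // Push an empty entry first, then assign the string into it in place, so
    // the string data is copied once instead of building a temporary entry_t
    // and copying that whole temporary into the vector (a pre-C++11 pattern).
    static void append_entry(std::vector<entry_t> &entries, const std::string &text)
    {
        entries.push_back(entry_t());
        entries.back().text = text;
    }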
(file name not shown)

@@ -132,12 +132,12 @@ static int try_get_socket_once(void)
if (connect(s, (struct sockaddr *)&local, sizeof local) == -1)
{
close(s);

/* If it fails on first try, it's probably no serious error, but fishd hasn't been launched yet.
This happens (at least) on the first concurrent session. */
if (get_socket_count > 1)
wperror(L"connect");

return -1;
}

@@ -438,8 +438,8 @@ void env_universal_set(const wcstring &name, const wcstring &value, bool exportv
else
{
message_t *msg = create_message(exportv?SET_EXPORT:SET,
name.c_str(),
value.c_str());

if (!msg)
{

(file name not shown)

@@ -146,13 +146,13 @@ static int event_is_blocked(const event_t &e)
{
const block_t *block;
parser_t &parser = parser_t::principal_parser();

size_t idx = 0;
while ((block = parser.block_at_index(idx++)))
{
if (event_block_list_blocks_type(block->event_blocks, e.type))
return true;
}
return event_block_list_blocks_type(parser.global_event_blocks, e.type);
}
exec.cpp (16 lines changed)

@@ -582,7 +582,7 @@ static void exec_no_exec(parser_t &parser, const job_t *job)
/* With the new parser, commands aren't responsible for pushing / popping blocks, so there's nothing to do */
return;
}

/* Hack hack hack. If this is an 'end' job, then trigger a pop. If this is a job that would create a block, trigger a push. See #624 */
const process_t *p = job->first_process;
if (p && p->type == INTERNAL_BUILTIN)

@@ -988,7 +988,7 @@ void exec_job(parser_t &parser, job_t *j)
}
break;
}

case INTERNAL_BUILTIN:
{
int builtin_stdin=0;

@@ -1133,20 +1133,20 @@ void exec_job(parser_t &parser, job_t *j)
}
break;
}

case EXTERNAL:
/* External commands are handled in the next switch statement below */
break;

case INTERNAL_EXEC:
/* We should have handled exec up above */
assert(0 && "INTERNAL_EXEC process found in pipeline, where it should never be. Aborting.");
break;

case INTERNAL_BUFFER:
/* Internal buffers are handled in the next switch statement below */
break;
}

if (exec_error)

@@ -1507,7 +1507,7 @@ void exec_job(parser_t &parser, job_t *j)
break;
}

case INTERNAL_EXEC:
{
/* We should have handled exec up above */

@@ -1581,7 +1581,7 @@ static int exec_subshell_internal(const wcstring &cmd, wcstring_list_t *lst, boo
int prev_subshell = is_subshell;
const int prev_status = proc_get_last_status();
char sep=0;

//fprintf(stderr, "subcmd %ls\n", cmd.c_str());

const env_var_t ifs = env_get_string(L"IFS");
(file name not shown)

@@ -793,7 +793,7 @@ static int expand_pid(const wcstring &instr_with_sep,
append_completion(out, instr_with_sep);
return 1;
}

/* expand_string calls us with internal separators in instr...sigh */
wcstring instr = instr_with_sep;
remove_internal_separator(instr, false);

@@ -1624,7 +1624,7 @@ wcstring replace_home_directory_with_tilde(const wcstring &str)
{
home_directory.push_back(L'/');
}

// Now check if the home_directory prefixes the string
if (string_prefixes_string(home_directory, result))
{
fish_tests.cpp (192 lines changed)

@@ -149,7 +149,11 @@ static void err(const wchar_t *blah, ...)
/* Test sane escapes */
static void test_unescape_sane()
{
const struct test_t {const wchar_t * input; const wchar_t * expected;} tests[] =
const struct test_t
{
const wchar_t * input;
const wchar_t * expected;
} tests[] =
{
{L"abcd", L"abcd"},
{L"'abcd'", L"abcd"},

@@ -439,7 +443,7 @@ static void test_tok()
}
}
}

/* Test redirection_type_for_string */
if (redirection_type_for_string(L"<") != TOK_REDIRECT_IN) err(L"redirection_type_for_string failed on line %ld", (long)__LINE__);
if (redirection_type_for_string(L"^") != TOK_REDIRECT_OUT) err(L"redirection_type_for_string failed on line %ld", (long)__LINE__);

@@ -623,45 +627,45 @@ static void test_parser()
{
err(L"'break' command outside of loop block context undetected");
}

if (parse_util_detect_errors(L"break --help"))
{
err(L"'break --help' incorrectly marked as error");
}

if (! parse_util_detect_errors(L"while false ; function foo ; break ; end ; end "))
{
err(L"'break' command inside function allowed to break from loop outside it");
}

if (!parse_util_detect_errors(L"exec ls|less") || !parse_util_detect_errors(L"echo|return"))
{
err(L"Invalid pipe command undetected");
}

if (parse_util_detect_errors(L"for i in foo ; switch $i ; case blah ; break; end; end "))
{
err(L"'break' command inside switch falsely reported as error");
}

if (parse_util_detect_errors(L"or cat | cat") || parse_util_detect_errors(L"and cat | cat"))
{
err(L"boolean command at beginning of pipeline falsely reported as error");
}

if (! parse_util_detect_errors(L"cat | and cat"))
{
err(L"'and' command in pipeline not reported as error");
}

if (! parse_util_detect_errors(L"cat | exec") || ! parse_util_detect_errors(L"exec | cat"))
{
err(L"'exec' command in pipeline not reported as error");
}

say(L"Testing basic evaluation");
#if 0

@@ -675,7 +679,7 @@ static void test_parser()
{
err(L"Invalid block mode when evaluating undetected");
}

/* Ensure that we don't crash on infinite self recursion and mutual recursion. These must use the principal parser because we cannot yet execute jobs on other parsers (!) */
say(L"Testing recursion detection");
parser_t::principal_parser().eval(L"function recursive ; recursive ; end ; recursive; ", io_chain_t(), TOP);

@@ -717,34 +721,34 @@ static void test_1_cancellation(const wchar_t *src)
static void test_cancellation()
{
say(L"Testing Ctrl-C cancellation. If this hangs, that's a bug!");

/* Enable fish's signal handling here. We need to make this interactive for fish to install its signal handlers */
proc_push_interactive(1);
signal_set_handlers();

/* This tests that we can correctly ctrl-C out of certain loop constructs, and that nothing gets printed if we do */

/* Here the command substitution is an infinite loop. echo never even gets its argument, so when we cancel we expect no output */
test_1_cancellation(L"echo (while true ; echo blah ; end)");

fprintf(stderr, ".");

/* Nasty infinite loop that doesn't actually execute anything */
test_1_cancellation(L"echo (while true ; end) (while true ; end) (while true ; end)");
fprintf(stderr, ".");

test_1_cancellation(L"while true ; end");
fprintf(stderr, ".");

test_1_cancellation(L"for i in (while true ; end) ; end");
fprintf(stderr, ".");

fprintf(stderr, "\n");

/* Restore signal handling */
proc_pop_interactive();
signal_reset_handlers();

/* Ensure that we don't think we should cancel */
reader_reset_interrupted();
}

@@ -752,27 +756,28 @@ static void test_cancellation()
static void test_indents()
{
say(L"Testing indents");

// Here are the components of our source and the indents we expect those to be
struct indent_component_t {
struct indent_component_t
{
const wchar_t *txt;
int indent;
};

const indent_component_t components1[] =
{
{L"if foo", 0},
{L"end", 0},
{NULL, -1}
};

const indent_component_t components2[] =
{
{L"if foo", 0},
{L"", 1}, //trailing newline!
{NULL, -1}
};

const indent_component_t components3[] =
{
{L"if foo", 0},

@@ -780,7 +785,7 @@ static void test_indents()
{L"end", 0}, //trailing newline!
{NULL, -1}
};

const indent_component_t components4[] =
{
{L"if foo", 0},

@@ -790,7 +795,7 @@ static void test_indents()
{L"", 0},
{NULL, -1}
};

const indent_component_t components5[] =
{
{L"if foo", 0},

@@ -798,7 +803,7 @@ static void test_indents()
{L"", 2},
{NULL, -1}
};

const indent_component_t components6[] =
{
{L"begin", 0},

@@ -806,7 +811,7 @@ static void test_indents()
{L"", 1},
{NULL, -1}
};

const indent_component_t components7[] =
{
{L"begin; end", 0},

@@ -814,7 +819,7 @@ static void test_indents()
{L"", 0},
{NULL, -1}
};

const indent_component_t components8[] =
{
{L"if foo", 0},

@@ -831,7 +836,7 @@ static void test_indents()
{L"", 1},
{NULL, -1}
};

const indent_component_t components10[] =
{
{L"switch foo", 0},

@@ -841,7 +846,7 @@ static void test_indents()
{L"", 2},
{NULL, -1}
};

const indent_component_t components11[] =
{
{L"switch foo", 0},

@@ -850,7 +855,7 @@ static void test_indents()
};

const indent_component_t *tests[] = {components1, components2, components3, components4, components5, components6, components7, components8, components9, components10, components11};
for (size_t which = 0; which < sizeof tests / sizeof *tests; which++)
{

@@ -861,7 +866,7 @@ static void test_indents()
{
component_count++;
}

// Generate the expected indents
wcstring text;
std::vector<int> expected_indents;

@@ -876,10 +881,10 @@ static void test_indents()
expected_indents.resize(text.size(), components[i].indent);
}
assert(expected_indents.size() == text.size());

// Compute the indents
std::vector<int> indents = parse_util_compute_indents(text);

if (expected_indents.size() != indents.size())
{
err(L"Indent vector has wrong size! Expected %lu, actual %lu", expected_indents.size(), indents.size());

@@ -990,7 +995,7 @@ static int expand_test(const wchar_t *in, int flags, ...)
size_t i=0;
int res=1;
wchar_t *arg;

if (expand_string(in, output, flags))
{

@@ -1468,22 +1473,22 @@ static void test_complete(void)
assert(completions.size() == 2);
assert(completions.at(0).completion == L"$Foo1");
assert(completions.at(1).completion == L"$Bar1");

completions.clear();
complete(L"echo (/bin/mkdi", completions, COMPLETION_REQUEST_DEFAULT);
assert(completions.size() == 1);
assert(completions.at(0).completion == L"r");

completions.clear();
complete(L"echo (ls /bin/mkdi", completions, COMPLETION_REQUEST_DEFAULT);
assert(completions.size() == 1);
assert(completions.at(0).completion == L"r");

completions.clear();
complete(L"echo (command ls /bin/mkdi", completions, COMPLETION_REQUEST_DEFAULT);
assert(completions.size() == 1);
assert(completions.at(0).completion == L"r");

/* Add a function and test completing it in various ways */
struct function_data_t func_data;
func_data.name = L"scuttlebutt";

@@ -1505,7 +1510,7 @@ static void test_complete(void)
completions.clear();
complete(L"echo (builtin scuttlebut", completions, COMPLETION_REQUEST_DEFAULT);
assert(completions.size() == 0);

complete_set_variable_names(NULL);
}

@@ -2267,13 +2272,13 @@ static void test_new_parser_correctness(void)
static inline bool string_for_permutation(const wcstring *fuzzes, size_t fuzz_count, size_t len, size_t permutation, wcstring *out_str)
{
out_str->clear();

size_t remaining_permutation = permutation;
for (size_t i=0; i < len; i++)
{
size_t idx = remaining_permutation % fuzz_count;
remaining_permutation /= fuzz_count;

out_str->append(fuzzes[idx]);
out_str->push_back(L' ');
}

@@ -2307,11 +2312,11 @@ static void test_new_parser_fuzzing(void)
L"&",
L";",
};

/* Generate a list of strings of all keyword / token combinations. */
wcstring src;
src.reserve(128);

parse_node_tree_t node_tree;
parse_error_list_t errors;

@@ -2331,7 +2336,7 @@ static void test_new_parser_fuzzing(void)
}
if (log_it)
fprintf(stderr, "done (%lu)\n", permutation);

}
double end = timef();
if (log_it)

@@ -2344,7 +2349,7 @@ static bool test_1_parse_ll2(const wcstring &src, wcstring *out_cmd, wcstring *o
out_cmd->clear();
out_joined_args->clear();
*out_deco = parse_statement_decoration_none;

bool result = false;
parse_node_tree_t tree;
if (parse_tree_from_string(src, parse_flag_none, &tree, NULL))

@@ -2357,13 +2362,13 @@ static bool test_1_parse_ll2(const wcstring &src, wcstring *out_cmd, wcstring *o
return false;
}
const parse_node_t &stmt = *stmt_nodes.at(0);

/* Return its decoration */
*out_deco = tree.decoration_for_plain_statement(stmt);

/* Return its command */
tree.command_for_plain_statement(stmt, src, out_cmd);

/* Return arguments separated by spaces */
const parse_node_tree_t::parse_node_list_t arg_nodes = tree.find_nodes(stmt, symbol_argument);
for (size_t i=0; i < arg_nodes.size(); i++)

@@ -2380,7 +2385,7 @@ static bool test_1_parse_ll2(const wcstring &src, wcstring *out_cmd, wcstring *o
static void test_new_parser_ll2(void)
{
say(L"Testing parser two-token lookahead");

const struct
{
wcstring src;

@@ -2402,7 +2407,7 @@ static void test_new_parser_ll2(void)
{L"function", L"function", L"", parse_statement_decoration_none},
{L"function --help", L"function", L"--help", parse_statement_decoration_none}
};

for (size_t i=0; i < sizeof tests / sizeof *tests; i++)
{
wcstring cmd, args;

@@ -2417,7 +2422,7 @@ static void test_new_parser_ll2(void)
if (deco != tests[i].deco)
err(L"When parsing '%ls', expected decoration %d but got %d on line %ld", tests[i].src.c_str(), (int)tests[i].deco, (int)deco, (long)__LINE__);
}

/* Verify that 'function -h' and 'function --help' are plain statements but 'function --foo' is not (#1240) */
const struct
{

@@ -2438,7 +2443,7 @@ static void test_new_parser_ll2(void)
{
err(L"Failed to parse '%ls'", tests2[i].src.c_str());
}

const parse_node_tree_t::parse_node_list_t node_list = tree.find_nodes(tree.at(0), tests2[i].type);
if (node_list.size() == 0)
{

@@ -2455,7 +2460,7 @@ static void test_new_parser_ad_hoc()
{
/* Very ad-hoc tests for issues encountered */
say(L"Testing new parser ad hoc tests");

/* Ensure that 'case' terminates a job list */
const wcstring src = L"switch foo ; case bar; case baz; end";
parse_node_tree_t parse_tree;

@@ -2464,7 +2469,7 @@ static void test_new_parser_ad_hoc()
{
err(L"Parsing failed");
}

/* Expect three case_item_lists: one for each case, and a terminal one. The bug was that we'd try to run a command 'case' */
const parse_node_t &root = parse_tree.at(0);
const parse_node_tree_t::parse_node_list_t node_list = parse_tree.find_nodes(root, symbol_case_item_list);

@@ -2487,25 +2492,25 @@ static void test_new_parser_errors(void)
{L"echo 'abc", parse_error_tokenizer_unterminated_quote},
{L"'", parse_error_tokenizer_unterminated_quote},
{L"echo (abc", parse_error_tokenizer_unterminated_subshell},

{L"end", parse_error_unbalancing_end},
{L"echo hi ; end", parse_error_unbalancing_end},

{L"else", parse_error_unbalancing_else},
{L"if true ; end ; else", parse_error_unbalancing_else},

{L"case", parse_error_unbalancing_case},
{L"if true ; case ; end", parse_error_unbalancing_case},

{L"foo || bar", parse_error_double_pipe},
{L"foo && bar", parse_error_double_background},
};

for (size_t i = 0; i < sizeof tests / sizeof *tests; i++)
{
const wcstring src = tests[i].src;
parse_error_code_t expected_code = tests[i].code;

parse_error_list_t errors;
parse_node_tree_t parse_tree;
bool success = parse_tree_from_string(src, parse_flag_none, &parse_tree, &errors);

@@ -2513,7 +2518,7 @@ static void test_new_parser_errors(void)
{
err(L"Source '%ls' was expected to fail to parse, but succeeded", src.c_str());
}

if (errors.size() != 1)
{
err(L"Source '%ls' was expected to produce 1 error, but instead produced %lu errors", src.c_str(), errors.size());

@@ -2526,9 +2531,9 @@ static void test_new_parser_errors(void)
err(L"\t\t%ls", errors.at(i).describe(src).c_str());
}
}
}
}

static void test_highlighting(void)

@@ -2537,13 +2542,14 @@ static void test_highlighting(void)
if (system("mkdir -p /tmp/fish_highlight_test/")) err(L"mkdir failed");
if (system("touch /tmp/fish_highlight_test/foo")) err(L"touch failed");
if (system("touch /tmp/fish_highlight_test/bar")) err(L"touch failed");

// Here are the components of our source and the colors we expect those to be
struct highlight_component_t {
struct highlight_component_t
{
const wchar_t *txt;
int color;
};

const highlight_component_t components1[] =
{
{L"echo", highlight_spec_command},

@@ -2551,7 +2557,7 @@ static void test_highlighting(void)
{L"&", highlight_spec_statement_terminator},
{NULL, -1}
};

const highlight_component_t components2[] =
{
{L"command", highlight_spec_command},

@@ -2561,7 +2567,7 @@ static void test_highlighting(void)
{L"&", highlight_spec_statement_terminator},
{NULL, -1}
};

const highlight_component_t components3[] =
{
{L"if command ls", highlight_spec_command},

@@ -2574,7 +2580,7 @@ static void test_highlighting(void)
{L"end", highlight_spec_command},
{NULL, -1}
};

/* Verify that cd shows errors for non-directories */
const highlight_component_t components4[] =
{

@@ -2582,14 +2588,14 @@ static void test_highlighting(void)
{L"/tmp/fish_highlight_test", highlight_spec_param | highlight_modifier_valid_path},
{NULL, -1}
};

const highlight_component_t components5[] =
{
{L"cd", highlight_spec_command},
{L"/tmp/fish_highlight_test/foo", highlight_spec_error},
{NULL, -1}
};

const highlight_component_t components6[] =
{
{L"cd", highlight_spec_command},

@@ -2598,7 +2604,7 @@ static void test_highlighting(void)
{L"definitely_not_a_directory", highlight_spec_error},
{NULL, -1}
};

// Command substitutions
const highlight_component_t components7[] =
{

@@ -2610,57 +2616,57 @@ static void test_highlighting(void)
{L")", highlight_spec_operator},
{NULL, -1}
};

// Redirections substitutions
const highlight_component_t components8[] =
{
{L"echo", highlight_spec_command},
{L"param1", highlight_spec_param},

/* Input redirection */
{L"<", highlight_spec_redirection},
{L"/bin/echo", highlight_spec_redirection},

/* Output redirection to a valid fd */
{L"1>&2", highlight_spec_redirection},

/* Output redirection to an invalid fd */
{L"2>&", highlight_spec_redirection},
{L"LOL", highlight_spec_error},

/* Just a param, not a redirection */
{L"/tmp/blah", highlight_spec_param},

/* Input redirection from directory */
{L"<", highlight_spec_redirection},
{L"/tmp/", highlight_spec_error},

/* Output redirection to an invalid path */
{L"3>", highlight_spec_redirection},
{L"/not/a/valid/path/nope", highlight_spec_error},

/* Output redirection to directory */
{L"3>", highlight_spec_redirection},
{L"/tmp/nope/", highlight_spec_error},

/* Redirections to overflow fd */
{L"99999999999999999999>&2", highlight_spec_error},
{L"2>&", highlight_spec_redirection},
{L"99999999999999999999", highlight_spec_error},

/* Output redirection containing a command substitution */
{L"4>", highlight_spec_redirection},
{L"(", highlight_spec_operator},
{L"echo", highlight_spec_command},
{L"/tmp/somewhere", highlight_spec_param},
{L")", highlight_spec_operator},

/* Just another param */
{L"param2", highlight_spec_param},
{NULL, -1}
};

const highlight_component_t components9[] =
{
{L"end", highlight_spec_error},

@@ -2677,7 +2683,7 @@ static void test_highlighting(void)
{NULL, -1}
};

const highlight_component_t *tests[] = {components1, components2, components3, components4, components5, components6, components7, components8, components9, components10};
for (size_t which = 0; which < sizeof tests / sizeof *tests; which++)
{

@@ -2688,7 +2694,7 @@ static void test_highlighting(void)
{
component_count++;
}

// Generate the text
wcstring text;
std::vector<int> expected_colors;

@@ -2703,10 +2709,10 @@ static void test_highlighting(void)
expected_colors.resize(text.size(), components[i].color);
}
assert(expected_colors.size() == text.size());

std::vector<highlight_spec_t> colors(text.size());
highlight_shell(text, colors, 20, NULL, env_vars_snapshot_t());

if (expected_colors.size() != colors.size())
{
err(L"Color vector has wrong size! Expected %lu, actual %lu", expected_colors.size(), colors.size());

@@ -2717,7 +2723,7 @@ static void test_highlighting(void)
// Hackish space handling. We don't care about the colors in spaces.
if (text.at(i) == L' ')
continue;

if (expected_colors.at(i) != colors.at(i))
{
const wcstring spaces(i, L' ');

@@ -2725,7 +2731,7 @@ static void test_highlighting(void)
}
}
}

system("rm -Rf /tmp/fish_highlight_test");
}

@@ -2750,7 +2756,7 @@ int main(int argc, char **argv)
builtin_init();
reader_init();
env_init();

/* Set default signal handlers, so we can ctrl-C out of this */
signal_reset_handlers();
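The test_indents and test_highlighting hunks above build NULL-terminated arrays of {text, expected value} pairs and walk each table until the sentinel entry. A condensed, standalone sketch of that table-driven pattern (the names component_t and sample are illustrative, not the test suite's):

    #include <cstddef>
    #include <cstdio>

    struct component_t
    {
        const wchar_t *txt; // piece of source text
        int expected;       // expected indent (or color) for that piece
    };

    // Each table ends with a {NULL, -1} sentinel, mirroring the arrays above.
    static const component_t sample[] =
    {
        {L"if foo", 0},
        {L"end", 0},
        {NULL, -1}
    };

    int main()
    {
        // Walk the table until the sentinel, as the tests do.
        for (size_t i = 0; sample[i].txt != NULL; i++)
        {
            std::printf("component %zu expects %d\n", i, sample[i].expected);
        }
        return 0;
    }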
(file name not shown)

@@ -194,7 +194,7 @@ void function_add(const function_data_t &data, const parser_t &parser)
{
def_offset = parser.line_number_of_character_at_offset(parser.current_block()->tok_pos);
}

const function_map_t::value_type new_pair(data.name, function_info_t(data, filename, def_offset, is_autoload));
loaded_functions.insert(new_pair);
highlight.cpp (120 lines changed)

@@ -334,7 +334,7 @@ bool plain_statement_get_expanded_command(const wcstring &src, const parse_node_
{
assert(plain_statement.type == symbol_plain_statement);
bool result = false;

/* Get the command */
wcstring cmd;
if (tree.command_for_plain_statement(plain_statement, src, &cmd))

@@ -702,11 +702,11 @@ static bool has_expand_reserved(const wcstring &str)
static bool autosuggest_parse_command(const wcstring &buff, wcstring *out_expanded_command, parse_node_t *out_last_arg)
{
bool result = false;

/* Parse the buffer */
parse_node_tree_t parse_tree;
parse_tree_from_string(buff, parse_flag_continue_after_error | parse_flag_accept_incomplete_tokens, &parse_tree, NULL);

/* Find the last statement */
const parse_node_t *last_statement = parse_tree.find_last_node_of_type(symbol_plain_statement, NULL);
if (last_statement != NULL)

@@ -715,7 +715,7 @@ static bool autosuggest_parse_command(const wcstring &buff, wcstring *out_expand
{
/* We got it */
result = true;

/* Find the last argument. If we don't get one, return an invalid node. */
const parse_node_t *last_arg = parse_tree.find_last_node_of_type(symbol_argument, last_statement);
if (last_arg != NULL)

@@ -732,7 +732,7 @@ bool autosuggest_suggest_special(const wcstring &str, const wcstring &working_di
{
if (str.empty())
return false;

ASSERT_IS_BACKGROUND_THREAD();

/* Parse the string */

@@ -1665,43 +1665,43 @@ class highlighter_t
{
/* The string we're highlighting. Note this is a reference memmber variable (to avoid copying)! We must not outlive this! */
const wcstring &buff;

/* Cursor position */
const size_t cursor_pos;

/* Environment variables. Again, a reference member variable! */
const env_vars_snapshot_t &vars;

/* Working directory */
const wcstring working_directory;

/* The resulting colors */
typedef std::vector<highlight_spec_t> color_array_t;
color_array_t color_array;

/* The parse tree of the buff */
parse_node_tree_t parse_tree;

/* Color an argument */
void color_argument(const parse_node_t &node);

/* Color a redirection */
void color_redirection(const parse_node_t &node);

/* Color the arguments of the given node */
void color_arguments(const parse_node_t &list_node);

/* Color the redirections of the given node */
void color_redirections(const parse_node_t &list_node);

/* Color all the children of the command with the given type */
void color_children(const parse_node_t &parent, parse_token_type_t type, int color);

/* Colors the source range of a node with a given color */
void color_node(const parse_node_t &node, int color);

public:

/* Constructor */
highlighter_t(const wcstring &str, size_t pos, const env_vars_snapshot_t &ev, const wcstring &wd) : buff(str), cursor_pos(pos), vars(ev), working_directory(wd), color_array(str.size())
{

@@ -1709,7 +1709,7 @@ class highlighter_t
this->parse_tree.clear();
parse_tree_from_string(buff, parse_flag_continue_after_error | parse_flag_include_comments, &this->parse_tree, NULL);
}

/* Perform highlighting, returning an array of colors */
const color_array_t &highlight();
};

@@ -1733,16 +1733,16 @@ void highlighter_t::color_argument(const parse_node_t &node)
{
if (! node.has_source())
return;

const wcstring arg_str = node.get_source(this->buff);

/* Get an iterator to the colors associated with the argument */
const size_t arg_start = node.source_start;
const color_array_t::iterator arg_colors = color_array.begin() + arg_start;

/* Color this argument without concern for command substitutions */
color_argument_internal(arg_str, arg_colors);

/* Now do command substitutions */
size_t cmdsub_cursor = 0, cmdsub_start = 0, cmdsub_end = 0;
wcstring cmdsub_contents;

@@ -1751,16 +1751,16 @@ void highlighter_t::color_argument(const parse_node_t &node)
/* The cmdsub_start is the open paren. cmdsub_end is either the close paren or the end of the string. cmdsub_contents extends from one past cmdsub_start to cmdsub_end */
assert(cmdsub_end > cmdsub_start);
assert(cmdsub_end - cmdsub_start - 1 == cmdsub_contents.size());

/* Found a command substitution. Compute the position of the start and end of the cmdsub contents, within our overall src. */
const size_t arg_subcmd_start = arg_start + cmdsub_start, arg_subcmd_end = arg_start + cmdsub_end;

/* Highlight the parens. The open paren must exist; the closed paren may not if it was incomplete. */
assert(cmdsub_start < arg_str.size());
this->color_array.at(arg_subcmd_start) = highlight_spec_operator;
if (arg_subcmd_end < this->buff.size())
this->color_array.at(arg_subcmd_end) = highlight_spec_operator;

/* Compute the cursor's position within the cmdsub. We must be past the open paren (hence >) but can be at the end of the string or closed paren (hence <=) */
size_t cursor_subpos = CURSOR_POSITION_INVALID;
if (cursor_pos != CURSOR_POSITION_INVALID && cursor_pos > arg_subcmd_start && cursor_pos <= arg_subcmd_end)

@@ -1768,11 +1768,11 @@ void highlighter_t::color_argument(const parse_node_t &node)
/* The -1 because the cmdsub_contents does not include the open paren */
cursor_subpos = cursor_pos - arg_subcmd_start - 1;
}

/* Highlight it recursively. */
highlighter_t cmdsub_highlighter(cmdsub_contents, cursor_subpos, this->vars, this->working_directory);
const color_array_t &subcolors = cmdsub_highlighter.highlight();

/* Copy out the subcolors back into our array */
assert(subcolors.size() == cmdsub_contents.size());
std::copy(subcolors.begin(), subcolors.end(), this->color_array.begin() + arg_subcmd_start + 1);

@@ -1794,7 +1794,7 @@ static bool node_is_potential_path(const wcstring &src, const parse_node_t &node
/* Big hack: is_potential_path expects a tilde, but unescape_string gives us HOME_DIRECTORY. Put it back. */
if (! token.empty() && token.at(0) == HOME_DIRECTORY)
token.at(0) = L'~';

const wcstring_list_t working_directory_list(1, working_directory);
result = is_potential_path(token, working_directory_list, PATH_EXPAND_TILDE);
}

@@ -1815,7 +1815,7 @@ void highlighter_t::color_arguments(const parse_node_t &list_node)
cmd_is_cd = (cmd_str == L"cd");
}
}

/* Find all the arguments of this list */
const parse_node_tree_t::parse_node_list_t nodes = this->parse_tree.find_nodes(list_node, symbol_argument);

@@ -1824,7 +1824,7 @@ void highlighter_t::color_arguments(const parse_node_t &list_node)
const parse_node_t *child = nodes.at(i);
assert(child != NULL && child->type == symbol_argument);
this->color_argument(*child);

if (cmd_is_cd)
{
/* Mark this as an error if it's not 'help' and not a valid cd path */

@@ -1846,18 +1846,18 @@ void highlighter_t::color_redirection(const parse_node_t &redirection_node)
assert(redirection_node.type == symbol_redirection);
if (! redirection_node.has_source())
return;

const parse_node_t *redirection_primitive = this->parse_tree.get_child(redirection_node, 0, parse_token_type_redirection); //like 2>
const parse_node_t *redirection_target = this->parse_tree.get_child(redirection_node, 1, parse_token_type_string); //like &1 or file path

if (redirection_primitive != NULL)
{
wcstring target;
const enum token_type redirect_type = this->parse_tree.type_for_redirection(redirection_node, this->buff, NULL, &target);

/* We may get a TOK_NONE redirection type, e.g. if the redirection is invalid */
this->color_node(*redirection_primitive, redirect_type == TOK_NONE ? highlight_spec_error : highlight_spec_redirection);

/* Check if the argument contains a command substitution. If so, highlight it as a param even though it's a command redirection, and don't try to do any other validation. */
if (parse_util_locate_cmdsubst(target.c_str(), NULL, NULL, true) != 0)
{

@@ -1886,12 +1886,12 @@ void highlighter_t::color_redirection(const parse_node_t &redirection_node)
const wchar_t *target_cstr = target.c_str();
wchar_t *end = NULL;
int fd = fish_wcstoi(target_cstr, &end, 10);

/* The iswdigit check ensures there's no leading whitespace, the *end check ensures the entire string was consumed, and the numeric checks ensure the fd is at least zero and there was no overflow */
target_is_valid = (iswdigit(target_cstr[0]) && *end == L'\0' && fd >= 0 && fd < INT_MAX);
}
break;

case TOK_REDIRECT_IN:
{
/* Input redirections must have a readable non-directory */

@@ -1899,7 +1899,7 @@ void highlighter_t::color_redirection(const parse_node_t &redirection_node)
target_is_valid = ! waccess(target_path, R_OK) && ! wstat(target_path, &buf) && ! S_ISDIR(buf.st_mode);
}
break;

case TOK_REDIRECT_OUT:
case TOK_REDIRECT_APPEND:
case TOK_REDIRECT_NOCLOB:

@@ -1907,13 +1907,13 @@ void highlighter_t::color_redirection(const parse_node_t &redirection_node)
/* Test whether the file exists, and whether it's writable (possibly after creating it). access() returns failure if the file does not exist. */
bool file_exists = false, file_is_writable = false;
int err = 0;

struct stat buf = {};
if (wstat(target_path, &buf) < 0)
{
err = errno;
}

if (string_suffixes_string(L"/", target))
{
/* Redirections to things that are directories is definitely not allowed */

@@ -1930,11 +1930,11 @@ void highlighter_t::color_redirection(const parse_node_t &redirection_node)
{
/* File does not exist. Check if its parent directory is writable. */
wcstring parent = wdirname(target_path);

/* Ensure that the parent ends with the path separator. This will ensure that we get an error if the parent directory is not really a directory. */
if (! string_suffixes_string(L"/", parent))
parent.push_back(L'/');

/* Now the file is considered writable if the parent directory is writable */
file_exists = false;
file_is_writable = (0 == waccess(parent, W_OK));

@@ -1945,19 +1945,19 @@ void highlighter_t::color_redirection(const parse_node_t &redirection_node)
file_exists = false;
file_is_writable = false;
}

/* NOCLOB means that we must not overwrite files that exist */
target_is_valid = file_is_writable && ! (file_exists && redirect_type == TOK_REDIRECT_NOCLOB);
target_is_valid = file_is_writable && !(file_exists && redirect_type == TOK_REDIRECT_NOCLOB);
}
break;

default:
/* We should not get here, since the node was marked as a redirection, but treat it as an error for paranoia */
target_is_valid = false;
break;
}
}

if (redirection_target != NULL)
{
this->color_node(*redirection_target, target_is_valid ? highlight_spec_redirection : highlight_spec_error);

@@ -2010,30 +2010,30 @@ static bool command_is_valid(const wcstring &cmd, enum parse_statement_decoratio
command_ok = false;
implicit_cd_ok = false;
}

/* Check them */
bool is_valid = false;

/* Builtins */
if (! is_valid && builtin_ok)
is_valid = builtin_exists(cmd);

/* Functions */
if (! is_valid && function_ok)
is_valid = function_exists_no_autoload(cmd, vars);

/* Abbreviations */
if (! is_valid && abbreviation_ok)
is_valid = expand_abbreviation(cmd, NULL);

/* Regular commands */
if (! is_valid && command_ok)
is_valid = path_get_path(cmd, NULL, vars);

/* Implicit cd */
if (! is_valid && implicit_cd_ok)
is_valid = path_can_be_implicit_cd(cmd, NULL, working_directory.c_str(), vars);

/* Return what we got */
return is_valid;
}

@@ -2041,16 +2041,16 @@ static bool command_is_valid(const wcstring &cmd, enum parse_statement_decoratio
const highlighter_t::color_array_t & highlighter_t::highlight()
{
ASSERT_IS_BACKGROUND_THREAD();

const size_t length = buff.size();
assert(this->buff.size() == this->color_array.size());

if (length == 0)
return color_array;

/* Start out at zero */
std::fill(this->color_array.begin(), this->color_array.end(), 0);

/* Parse the buffer */
parse_node_tree_t parse_tree;
parse_tree_from_string(buff, parse_flag_continue_after_error | parse_flag_include_comments, &parse_tree, NULL);

@@ -2104,7 +2104,7 @@ const highlighter_t::color_array_t & highlighter_t::highlight()
{
bool is_valid_cmd = false;
wcstring cmd(buff, cmd_node->source_start, cmd_node->source_length);

/* Try expanding it. If we cannot, it's an error. */
bool expanded = expand_one(cmd, EXPAND_SKIP_CMDSUBST | EXPAND_SKIP_VARIABLES | EXPAND_SKIP_JOBS);
if (expanded && ! has_expand_reserved(cmd))

@@ -2142,7 +2142,7 @@ const highlighter_t::color_array_t & highlighter_t::highlight()
break;
}
}

if (this->cursor_pos <= this->buff.size())
{
/* If the cursor is over an argument, and that argument is a valid path, underline it */

@@ -2153,7 +2153,7 @@ const highlighter_t::color_array_t & highlighter_t::highlight()
/* Must be an argument with source */
if (node.type != symbol_argument || ! node.has_source())
continue;

/* See if this node contains the cursor. We check <= source_length so that, when backspacing (and the cursor is just beyond the last token), we may still underline it */
if (this->cursor_pos >= node.source_start && this->cursor_pos - node.source_start <= node.source_length)
{

@@ -2173,7 +2173,7 @@ const highlighter_t::color_array_t & highlighter_t::highlight()
}
}
}

return color_array;
}

@@ -2181,7 +2181,7 @@ void highlight_shell_new_parser(const wcstring &buff, std::vector<highlight_spec
{
/* Do something sucky and get the current working directory on this background thread. This should really be passed in. */
const wcstring working_directory = env_get_pwd_slash();

/* Highlight it! */
highlighter_t highlighter(buff, pos, vars, working_directory);
color = highlighter.highlight();
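The color_redirection hunk above validates a numeric redirection target (as in 2>&1) with three checks: a leading digit so there is no whitespace, a fully consumed string, and a non-negative value that did not overflow. A standalone sketch of the same check using the standard wcstol instead of fish's fish_wcstoi (the function name target_is_valid_fd is hypothetical):

    #include <climits>
    #include <cwchar>
    #include <cwctype>

    // Returns true if 'target' looks like a valid file descriptor number,
    // mirroring the three checks in the hunk above: leading digit, whole
    // string consumed, and a value in [0, INT_MAX).
    static bool target_is_valid_fd(const wchar_t *target)
    {
        wchar_t *end = NULL;
        long fd = std::wcstol(target, &end, 10);
        return std::iswdigit(target[0]) && *end == L'\0' && fd >= 0 && fd < INT_MAX;
    }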
(file name not shown)

@@ -28,15 +28,15 @@ enum
highlight_spec_quote, //quoted string
highlight_spec_redirection, //redirection
highlight_spec_autosuggestion, //autosuggestion

HIGHLIGHT_SPEC_PRIMARY_MASK = 0xFF,

/* The following values are modifiers */
highlight_modifier_valid_path = 0x100,

/* Very special value */
highlight_spec_invalid = 0xFFFF
};
typedef uint32_t highlight_spec_t;
(file name not shown)

@@ -61,7 +61,7 @@ public:
{
return contents;
}

bool empty() const
{
return contents.empty();
(file name not shown)

@@ -47,7 +47,7 @@ enum parse_token_type_t
symbol_redirection,
symbol_optional_background,
symbol_end_command,

// Terminal types

@@ -56,7 +56,7 @@ enum parse_token_type_t
parse_token_type_redirection,
parse_token_type_background,
parse_token_type_end,

// Special terminal type that means no more tokens forthcoming
parse_token_type_terminate,

@@ -90,7 +90,7 @@ enum parse_keyword_t
parse_keyword_not,
parse_keyword_command,
parse_keyword_builtin,

LAST_KEYWORD = parse_keyword_builtin
};

@@ -106,31 +106,32 @@ enum parse_statement_decoration_t
enum parse_error_code_t
{
parse_error_none,

/* Matching values from enum parser_error */
parse_error_syntax,
parse_error_eval,
parse_error_cmdsubst,

parse_error_generic, // unclassified error types

//tokenizer errors
parse_error_tokenizer_unterminated_quote,
parse_error_tokenizer_unterminated_subshell,
parse_error_tokenizer_unterminated_escape,
parse_error_tokenizer_other,

parse_error_unbalancing_end, //end outside of block
parse_error_unbalancing_else, //else outside of if
parse_error_unbalancing_case, //case outside of switch

parse_error_double_pipe, // foo || bar, has special error message
parse_error_double_background // foo && bar, has special error message
};

enum {
PARSER_TEST_ERROR = 1,
PARSER_TEST_INCOMPLETE = 2
enum
{
PARSER_TEST_ERROR = 1,
PARSER_TEST_INCOMPLETE = 2
};
typedef unsigned int parser_test_error_bits_t;
File diff suppressed because it is too large
Load diff
|
@@ -22,33 +22,33 @@ enum parse_execution_result_t

/* The job did not execute due to some error (e.g. failed to wildcard expand). An error will have been printed and proc_last_status will have been set. */
parse_execution_errored,

/* The job was cancelled (e.g. Ctrl-C) */
parse_execution_cancelled,

/* The job was skipped (e.g. due to a not-taken 'and' command). This is a special return allowed only from the populate functions, not the run functions. */
parse_execution_skipped
};

class parse_execution_context_t
{
private:
private:
const parse_node_tree_t tree;
const wcstring src;
io_chain_t block_io;
parser_t * const parser;
//parse_error_list_t errors;

int eval_level;
std::vector<profile_item_t*> profile_items;

/* No copying allowed */
parse_execution_context_t(const parse_execution_context_t&);
parse_execution_context_t& operator=(const parse_execution_context_t&);

/* Should I cancel? */
bool should_cancel_execution(const block_t *block) const;

/* Ways that we can stop executing a block. These are in a sort of ascending order of importance, e.g. `exit` should trump `break` */
enum execution_cancellation_reason_t
{
@@ -58,32 +58,32 @@ class parse_execution_context_t
execution_cancellation_exit
};
execution_cancellation_reason_t cancellation_reason(const block_t *block) const;

/* Report an error. Always returns true. */
parse_execution_result_t report_error(const parse_node_t &node, const wchar_t *fmt, ...);
/* Wildcard error helper */
parse_execution_result_t report_unmatched_wildcard_error(const parse_node_t &unmatched_wildcard);

/* Command not found support */
void handle_command_not_found(const wcstring &cmd, const parse_node_t &statement_node, int err_code);

/* Utilities */
wcstring get_source(const parse_node_t &node) const;
const parse_node_t *get_child(const parse_node_t &parent, node_offset_t which, parse_token_type_t expected_type = token_type_invalid) const;
node_offset_t get_offset(const parse_node_t &node) const;
const parse_node_t *infinite_recursive_statement_in_job_list(const parse_node_t &job_list, wcstring *out_func_name) const;

/* Indicates whether a job is a simple block (one block, no redirections) */
bool job_is_simple_block(const parse_node_t &node) const;

enum process_type_t process_type_for_command(const parse_node_t &plain_statement, const wcstring &cmd) const;

/* These create process_t structures from statements */
parse_execution_result_t populate_job_process(job_t *job, process_t *proc, const parse_node_t &statement_node);
parse_execution_result_t populate_boolean_process(job_t *job, process_t *proc, const parse_node_t &bool_statement);
parse_execution_result_t populate_plain_process(job_t *job, process_t *proc, const parse_node_t &statement);
parse_execution_result_t populate_block_process(job_t *job, process_t *proc, const parse_node_t &statement_node);

/* These encapsulate the actual logic of various (block) statements. */
parse_execution_result_t run_block_statement(const parse_node_t &statement);
parse_execution_result_t run_for_statement(const parse_node_t &header, const parse_node_t &contents);

@@ -92,22 +92,22 @@ class parse_execution_context_t
parse_execution_result_t run_while_statement(const parse_node_t &header, const parse_node_t &contents);
parse_execution_result_t run_function_statement(const parse_node_t &header, const parse_node_t &contents);
parse_execution_result_t run_begin_statement(const parse_node_t &header, const parse_node_t &contents);

wcstring_list_t determine_arguments(const parse_node_t &parent, const parse_node_t **out_unmatched_wildcard_node);

/* Determines the IO chain. Returns true on success, false on error */
bool determine_io_chain(const parse_node_t &statement, io_chain_t *out_chain);

parse_execution_result_t run_1_job(const parse_node_t &job_node, const block_t *associated_block);
parse_execution_result_t run_job_list(const parse_node_t &job_list_node, const block_t *associated_block);
parse_execution_result_t populate_job_from_job_node(job_t *j, const parse_node_t &job_node, const block_t *associated_block);

public:
public:
parse_execution_context_t(const parse_node_tree_t &t, const wcstring &s, parser_t *p);

/* Start executing at the given node offset. Returns 0 if there was no error, 1 if there was an error */
parse_execution_result_t eval_node_at_offset(node_offset_t offset, const block_t *associated_block, const io_chain_t &io);

};
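The result enum above separates "errored", "cancelled", and "skipped" outcomes. A standalone sketch of how a caller might fold those into a single continue/stop decision (the enum members are copied from above; parse_execution_success and the dispatch policy itself are assumptions for illustration, not the project's actual logic):

#include <cstdio>

enum parse_execution_result_t
{
    parse_execution_success,   // assumed "ran fine" case, not shown in the excerpt
    parse_execution_errored,
    parse_execution_cancelled,
    parse_execution_skipped
};

// Hypothetical policy: skipped jobs don't stop a job list, errors and cancellation do.
static bool should_keep_running(parse_execution_result_t res)
{
    return res == parse_execution_success || res == parse_execution_skipped;
}

int main()
{
    printf("errored -> keep running? %d\n", (int)should_keep_running(parse_execution_errored));
    printf("skipped -> keep running? %d\n", (int)should_keep_running(parse_execution_skipped));
    return 0;
}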
@@ -122,7 +122,7 @@ RESOLVE(statement)
If we are 'function', then we are a non-block if we are invoked with -h or --help
If we are anything else, we require an argument, so do the same thing if the subsequent token is a statement terminator.
*/

if (token1.type == parse_token_type_string)
{
// If we are a function, then look for help arguments

@@ -135,14 +135,14 @@ RESOLVE(statement)
{
return 4;
}

// Likewise if the next token doesn't look like an argument at all. This corresponds to e.g. a "naked if".
bool naked_invocation_invokes_help = (token1.keyword != parse_keyword_begin && token1.keyword != parse_keyword_end);
if (naked_invocation_invokes_help && (token2.type == parse_token_type_end || token2.type == parse_token_type_terminate))
{
return 4;
}

}

switch (token1.type)

@@ -369,7 +369,7 @@ RESOLVE(decorated_statement)
{
return 0;
}

switch (token1.keyword)
{
default:
158
parse_tree.cpp
@@ -39,13 +39,13 @@ wcstring parse_error_t::describe(const wcstring &src, bool skip_caret) const
{
line_end = src.size();
}

assert(line_end >= line_start);
assert(source_start >= line_start);

// Don't include the caret and line if we're interactive and this is the first line, because then it's obvious
bool skip_caret = (get_is_interactive() && source_start == 0);

if (! skip_caret)
{
// Append the line of text.

@@ -54,8 +54,8 @@ wcstring parse_error_t::describe(const wcstring &src, bool skip_caret) const
result.push_back(L'\n');
}
result.append(src, line_start, line_end - line_start);

// Append the caret line. The input source may include tabs; for that reason we construct a "caret line" that has tabs in corresponding positions
wcstring caret_space_line;
caret_space_line.reserve(source_start - line_start);

@@ -247,28 +247,28 @@ static wcstring token_type_user_presentable_description(parse_token_type_t type,
{
return format_string(L"keyword '%ls'", keyword_description(keyword).c_str());
}

switch (type)
{
/* Hackish. We only support the following types. */
/* Hackish. We only support the following types. */
case symbol_statement:
return L"a command";

case parse_token_type_string:
return L"a string";

case parse_token_type_pipe:
return L"a pipe";

case parse_token_type_redirection:
return L"a redirection";

case parse_token_type_background:
return L"a '&'";

case parse_token_type_end:
return L"end of the statement";

default:
return format_string(L"a %ls", token_type_description(type).c_str());
}
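The comment about the caret line explains why the error display copies tab characters into the underline row: a tab advances the terminal differently from a space, so the caret only lines up if the spacer row uses the same tabs. A small standalone sketch of that idea (plain std::string instead of wcstring; the names are illustrative):

#include <cstdio>
#include <string>

// Build the spacer that precedes the caret: tabs stay tabs, anything else becomes a space.
static std::string caret_spacer(const std::string &line, std::size_t error_offset)
{
    std::string spacer;
    for (std::size_t i = 0; i < error_offset && i < line.size(); i++)
    {
        spacer.push_back(line[i] == '\t' ? '\t' : ' ');
    }
    return spacer;
}

int main()
{
    const std::string line = "\tif ture";  // error starts at offset 4 ("ture")
    printf("%s\n%s^\n", line.c_str(), caret_spacer(line, 4).c_str());
    return 0;
}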
@@ -351,14 +351,14 @@ static inline parse_token_type_t parse_token_type_from_tokenizer_token(enum toke
static void dump_tree_recursive(const parse_node_tree_t &nodes, const wcstring &src, node_offset_t node_idx, size_t indent, wcstring *result, size_t *line, node_offset_t *inout_first_node_not_dumped)
{
assert(node_idx < nodes.size());

// Update first_node_not_dumped
// This takes a bit of explanation. While it's true that a parse tree may be a "forest", its individual trees are "compact," meaning they are not interleaved. Thus we keep track of the largest node index as we descend a tree. One past the largest is the start of the next tree.
if (*inout_first_node_not_dumped <= node_idx)
{
*inout_first_node_not_dumped = node_idx + 1;
}

const parse_node_t &node = nodes.at(node_idx);

const size_t spacesPerIndent = 2;

@@ -376,14 +376,14 @@ static void dump_tree_recursive(const parse_node_tree_t &nodes, const wcstring &
{
append_format(*result, L" <%lu children>", node.child_count);
}

if (node.has_source() && node.type == parse_token_type_string)
{
result->append(L": \"");
result->append(src, node.source_start, node.source_length);
result->append(L"\"");
}

if (node.type != parse_token_type_string)
{
if (node.has_source())

@@ -392,10 +392,10 @@ static void dump_tree_recursive(const parse_node_tree_t &nodes, const wcstring &
}
else
{
append_format(*result, L" [no src]", (long)node.source_start, (long)node.source_length);
append_format(*result, L" [no src]", (long)node.source_start, (long)node.source_length);
}
}

result->push_back(L'\n');
++*line;
for (size_t child_idx = node.child_start; child_idx < node.child_start + node.child_count; child_idx++)

@@ -409,7 +409,7 @@ wcstring parse_dump_tree(const parse_node_tree_t &nodes, const wcstring &src)
{
if (nodes.empty())
return L"(empty!)";

node_offset_t first_node_not_dumped = 0;
size_t line = 0;
wcstring result;
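The "forest is compact" remark above is the whole trick behind first_node_not_dumped: each tree occupies a contiguous index range, so the largest index reached while dumping one tree tells you where the next tree starts. A standalone sketch of that bookkeeping over a flat child-range representation (the node layout here is a simplified stand-in, not the real parse_node_t):

#include <cstdio>
#include <vector>

struct node_t
{
    std::size_t child_start;  // index of first child in the flat array
    std::size_t child_count;
};

// Visit a tree rooted at idx, remembering one past the largest index we touched.
static void visit(const std::vector<node_t> &nodes, std::size_t idx, std::size_t *first_not_visited)
{
    if (*first_not_visited <= idx)
        *first_not_visited = idx + 1;
    const node_t &n = nodes.at(idx);
    for (std::size_t c = n.child_start; c < n.child_start + n.child_count; c++)
        visit(nodes, c, first_not_visited);
}

int main()
{
    // Two compact trees packed into one array: indices {0,1,2} and {3,4}.
    std::vector<node_t> nodes = {{1, 2}, {0, 0}, {0, 0}, {4, 1}, {0, 0}};
    std::size_t next_root = 0;
    while (next_root < nodes.size())
    {
        std::size_t first_not_visited = next_root;
        visit(nodes, next_root, &first_not_visited);
        printf("tree rooted at %zu ends before %zu\n", next_root, first_not_visited);
        next_root = first_not_visited;  // one past the largest index is the next tree's root
    }
    return 0;
}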
@@ -448,7 +448,7 @@ struct parse_stack_element_t
}
return result;
}

/* Returns a name that we can show to the user, e.g. "a command" */
wcstring user_presentable_description(void) const
{

@@ -461,19 +461,19 @@ class parse_ll_t
{
/* Traditional symbol stack of the LL parser */
std::vector<parse_stack_element_t> symbol_stack;

/* Parser output. This is a parse tree, but stored in an array. */
parse_node_tree_t nodes;

/* Whether we ran into a fatal error, including parse errors or tokenizer errors */
bool fatal_errored;

/* Whether we should collect error messages or not */
bool should_generate_error_messages;

/* List of errors we have encountered */
parse_error_list_t errors;

/* The symbol stack can contain terminal types or symbols. Symbols go on to do productions, but terminal types are just matched against input tokens. */
bool top_node_handle_terminal_types(parse_token_t token);

@@ -521,7 +521,7 @@ class parse_ll_t
}
if (! count) fprintf(stderr, "\t<empty>\n");
}

// Get the parent index. But we can't get the parent parse node yet, since it may be made invalid by adding children
const size_t parent_node_idx = symbol_stack.back().node_idx;

@@ -569,8 +569,8 @@ class parse_ll_t
}
}

public:
public:

/* Constructor */
parse_ll_t() : fatal_errored(false), should_generate_error_messages(true)
{

@@ -581,31 +581,31 @@ class parse_ll_t

/* Input */
void accept_tokens(parse_token_t token1, parse_token_t token2);

/* Report tokenizer errors */
void report_tokenizer_error(parse_token_t token, int tok_err, const wchar_t *tok_error);

/* Indicate if we hit a fatal error */
bool has_fatal_error(void) const
{
return this->fatal_errored;
}

/* Indicate whether we want to generate error messages */
void set_should_generate_error_messages(bool flag)
{
this->should_generate_error_messages = flag;
}

/* Clear the parse symbol stack (but not the node tree). Add a new job_list_t goal node. This is called from the constructor */
void reset_symbols(void);

/* Clear the parse symbol stack and the node tree. Add a new job_list_t goal node. This is called from the constructor. */
void reset_symbols_and_nodes(void);

/* Once parsing is complete, determine the ranges of intermediate nodes */
void determine_node_ranges();

/* Acquire output after parsing. This transfers directly from within self */
void acquire_output(parse_node_tree_t *output, parse_error_list_t *errors);
};

@@ -684,7 +684,7 @@ void parse_ll_t::acquire_output(parse_node_tree_t *output, parse_error_list_t *e
std::swap(*output, this->nodes);
}
this->nodes.clear();

if (errors != NULL)
{
std::swap(*errors, this->errors);
@@ -727,7 +727,7 @@ void parse_ll_t::parse_error_unbalancing_token(parse_token_t token)
case parse_keyword_end:
this->parse_error(token, parse_error_unbalancing_end, L"'end' outside of a block");
break;

case parse_keyword_else:
this->parse_error(token, parse_error_unbalancing_else, L"'else' builtin not inside of if block");
break;

@@ -735,7 +735,7 @@ void parse_ll_t::parse_error_unbalancing_token(parse_token_t token)
case parse_keyword_case:
this->parse_error(token, parse_error_unbalancing_case, L"'case' builtin not inside of switch block");
break;

default:
fprintf(stderr, "Unexpected token %ls passed to %s\n", token.describe().c_str(), __FUNCTION__);
PARSER_DIE();

@@ -751,7 +751,7 @@ void parse_ll_t::parse_error_failed_production(struct parse_stack_element_t &sta
if (this->should_generate_error_messages)
{
bool done = false;

/* Check for || */
if (token.type == parse_token_type_pipe && token.source_start > 0)
{

@@ -764,7 +764,7 @@ void parse_ll_t::parse_error_failed_production(struct parse_stack_element_t &sta
done = true;
}
}

/* Check for && */
if (! done && token.type == parse_token_type_background && token.source_start > 0)
{

@@ -777,7 +777,7 @@ void parse_ll_t::parse_error_failed_production(struct parse_stack_element_t &sta
done = true;
}
}

if (! done)
{
const wcstring expected = stack_elem.user_presentable_description();

@@ -901,17 +901,17 @@ bool parse_ll_t::top_node_handle_terminal_types(parse_token_t token)
{
// Keyword failure. We should unify this with the 'matched' computation above.
assert(stack_top.keyword != parse_keyword_none && stack_top.keyword != token.keyword);

// Check to see which keyword we got which was considered wrong
switch (token.keyword)
{
// Some keywords are only valid in certain contexts. If this cascaded all the way down through the outermost job_list, it was not in a valid context.
// Some keywords are only valid in certain contexts. If this cascaded all the way down through the outermost job_list, it was not in a valid context.
case parse_keyword_case:
case parse_keyword_end:
case parse_keyword_else:
this->parse_error_unbalancing_token(token);
break;

case parse_keyword_none:
{
// This is a random other string (not a keyword)

@@ -920,7 +920,7 @@ bool parse_ll_t::top_node_handle_terminal_types(parse_token_t token)
break;
}

default:
{
// Got a real keyword we can report

@@ -963,7 +963,7 @@ void parse_ll_t::accept_tokens(parse_token_t token1, parse_token_t token2)
err_node.source_length = token1.source_length;
nodes.push_back(err_node);
consumed = true;

/* tokenizer errors are fatal */
if (token1.type == parse_special_type_tokenizer_error)
this->fatal_errored = true;

@@ -999,22 +999,22 @@ void parse_ll_t::accept_tokens(parse_token_t token1, parse_token_t token2)
else
{
bool is_terminate = (token1.type == parse_token_type_terminate);

// When a job_list encounters something like 'else', it returns an empty production to return control to the outer block. But if it's unbalanced, then we'll end up with an empty stack! So make sure that doesn't happen. This is the primary mechanism by which we detect e.g. unbalanced end. However, if we get a true terminate token, then we allow (expect) this to empty the stack
if (symbol_stack.size() == 1 && production_is_empty(production) && ! is_terminate)
{
this->parse_error_unbalancing_token(token1);
break;
}

// Manipulate the symbol stack.
// Note that stack_elem is invalidated by popping the stack.
symbol_stack_pop_push_production(production);

// Expect to not have an empty stack, unless this was the terminate type
// Note we may not have an empty stack with the terminate type (i.e. incomplete input)
assert(is_terminate || ! symbol_stack.empty());

if (symbol_stack.empty())
{
break;
@@ -1082,7 +1082,7 @@ static inline parse_token_t next_parse_token(tokenizer_t *tok)
{
return kTerminalToken;
}

token_type tok_type = static_cast<token_type>(tok_last_type(tok));
int tok_start = tok_get_pos(tok);
size_t tok_extent = tok_get_extent(tok);

@@ -1090,7 +1090,7 @@ static inline parse_token_t next_parse_token(tokenizer_t *tok)
const wchar_t *tok_txt = tok_last(tok);

parse_token_t result;

/* Set the type, keyword, and whether there's a dash prefix. Note that this is quite sketchy, because it ignores quotes. This is the historical behavior. For example, `builtin --names` lists builtins, but `builtin "--names"` attempts to run --names as a command. Amazingly as of this writing (10/12/13) nobody seems to have noticed this. Squint at it really hard and it even starts to look like a feature. */
result.type = parse_token_type_from_tokenizer_token(tok_type);
result.keyword = keyword_for_token(tok_type, tok_txt);

@@ -1098,7 +1098,7 @@ static inline parse_token_t next_parse_token(tokenizer_t *tok)
result.is_help_argument = result.has_dash_prefix && is_help_argument(tok_txt);
result.source_start = (size_t)tok_start;
result.source_length = tok_extent;

tok_next(tok);
return result;
}

@@ -1112,15 +1112,15 @@ bool parse_tree_from_string(const wcstring &str, parse_tree_flags_t parse_flags,
tok_flags_t tok_options = 0;
if (parse_flags & parse_flag_include_comments)
tok_options |= TOK_SHOW_COMMENTS;

if (parse_flags & parse_flag_accept_incomplete_tokens)
tok_options |= TOK_ACCEPT_UNFINISHED;

if (errors == NULL)
tok_options |= TOK_SQUASH_ERRORS;

tokenizer_t tok = tokenizer_t(str.c_str(), tok_options);

/* We are an LL(2) parser. We pass two tokens at a time. New tokens come in at index 1. Seed our queue with an initial token at index 1. */
parse_token_t queue[2] = {kInvalidToken, kInvalidToken};
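The two-element queue above is what makes this an LL(2) parser: on each step the newest token enters at index 1, the previous one shifts to index 0, and the pair is handed over together. A standalone sketch of that shifting window over a toy token stream (the int "tokens" and sentinel values are placeholders, not the real parse_token_t):

#include <cstdio>
#include <vector>

int main()
{
    // Toy token stream; 0 stands in for the invalid seed, -1 for the terminate token.
    const std::vector<int> stream = {10, 20, 30, -1};
    const int kInvalidToken = 0;

    int queue[2] = {kInvalidToken, kInvalidToken};
    std::size_t next = 0;
    for (std::size_t token_count = 0; ; token_count++)
    {
        // Shift the window: new tokens come in at index 1.
        queue[0] = queue[1];
        queue[1] = (next < stream.size()) ? stream[next++] : -1;

        // Skip the very first iteration, which only seeds index 1.
        if (token_count > 0)
            printf("accept_tokens(%d, %d)\n", queue[0], queue[1]);

        if (queue[0] == -1)  // terminate token reached index 0: nothing left to parse
            break;
    }
    return 0;
}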
@@ -1130,25 +1130,25 @@ bool parse_tree_from_string(const wcstring &str, parse_tree_flags_t parse_flags,
/* Push a new token onto the queue */
queue[0] = queue[1];
queue[1] = next_parse_token(&tok);

/* If we are leaving things unterminated, then don't pass parse_token_type_terminate */
if (queue[0].type == parse_token_type_terminate && (parse_flags & parse_flag_leave_unterminated))
{
break;
}

/* Pass these two tokens, unless we're still loading the queue. We know that queue[0] is valid; queue[1] may be invalid. */
if (token_count > 0)
{
parser.accept_tokens(queue[0], queue[1]);
}

/* Handle tokenizer errors. This is a hack because really the parser should report this for itself; but it has no way of getting the tokenizer message */
if (queue[1].type == parse_special_type_tokenizer_error)
{
parser.report_tokenizer_error(queue[1], tok_get_error(&tok), tok_last(&tok));
}

/* Handle errors */
if (parser.has_fatal_error())
{

@@ -1172,7 +1172,7 @@ bool parse_tree_from_string(const wcstring &str, parse_tree_flags_t parse_flags,

// Teach each node where its source range is
parser.determine_node_ranges();

// Acquire the output from the parser
parser.acquire_output(output, errors);

@@ -1181,7 +1181,7 @@ bool parse_tree_from_string(const wcstring &str, parse_tree_flags_t parse_flags,
//fprintf(stderr, "Tree (%ld nodes):\n%ls", this->parser->nodes.size(), result.c_str());
fprintf(stderr, "%lu nodes, node size %lu, %lu bytes\n", output->size(), sizeof(parse_node_t), output->size() * sizeof(parse_node_t));
#endif

// Indicate if we had a fatal error
return ! parser.has_fatal_error();
}

@@ -1189,7 +1189,7 @@ bool parse_tree_from_string(const wcstring &str, parse_tree_flags_t parse_flags,
const parse_node_t *parse_node_tree_t::get_child(const parse_node_t &parent, node_offset_t which, parse_token_type_t expected_type) const
{
const parse_node_t *result = NULL;

/* We may get nodes with no children if we had an incomplete parse. Don't consider that an error */
if (parent.child_count > 0)
{

@@ -1198,7 +1198,7 @@ const parse_node_t *parse_node_tree_t::get_child(const parse_node_t &parent, nod
if (child_offset < this->size())
{
result = &this->at(child_offset);

/* If we are given an expected type, then the node must be null or that type */
assert(expected_type == token_type_invalid || expected_type == result->type);
}
@@ -1321,7 +1321,7 @@ const parse_node_t *parse_node_tree_t::find_node_matching_source_location(parse_
for (size_t idx=0; idx < len; idx++)
{
const parse_node_t &node = this->at(idx);

/* Types must match */
if (node.type != type)
continue;

@@ -1329,11 +1329,11 @@ const parse_node_t *parse_node_tree_t::find_node_matching_source_location(parse_
/* Must contain source location */
if (! node.location_in_or_at_end_of_source_range(source_loc))
continue;

/* If a parent is given, it must be an ancestor */
if (parent != NULL && node_has_ancestor(*this, node, *parent))
continue;

/* Found it */
result = &node;
break;

@@ -1390,7 +1390,7 @@ bool parse_node_tree_t::statement_is_in_pipeline(const parse_node_t &node, bool
// This accepts a few statement types
bool result = false;
const parse_node_t *ancestor = &node;

// If we're given a plain statement, try to get its decorated statement parent
if (ancestor && ancestor->type == symbol_plain_statement)
ancestor = this->get_parent(*ancestor, symbol_decorated_statement);

@@ -1398,7 +1398,7 @@ bool parse_node_tree_t::statement_is_in_pipeline(const parse_node_t &node, bool
ancestor = this->get_parent(*ancestor, symbol_statement);
if (ancestor)
ancestor = this->get_parent(*ancestor);

if (ancestor)
{
if (ancestor->type == symbol_job_continuation)

@@ -1413,7 +1413,7 @@ bool parse_node_tree_t::statement_is_in_pipeline(const parse_node_t &node, bool
result = (continuation != NULL && continuation->child_count > 0);
}
}

return result;
}

@@ -1423,7 +1423,7 @@ enum token_type parse_node_tree_t::type_for_redirection(const parse_node_t &redi
enum token_type result = TOK_NONE;
const parse_node_t *redirection_primitive = this->get_child(redirection_node, 0, parse_token_type_redirection); //like 2>
const parse_node_t *redirection_target = this->get_child(redirection_node, 1, parse_token_type_string); //like &1 or file path

if (redirection_primitive != NULL && redirection_primitive->has_source())
{
result = redirection_type_for_string(redirection_primitive->get_source(src), out_fd);

@@ -1453,10 +1453,10 @@ parse_node_tree_t::parse_node_list_t parse_node_tree_t::specific_statements_for_
{
assert(job.type == symbol_job);
parse_node_list_t result;

/* Initial statement (non-specific) */
result.push_back(get_child(job, 0, symbol_statement));

/* Our cursor variable. Walk over the list of continuations. */
const parse_node_t *continuation = get_child(job, 1, symbol_job_continuation);
while (continuation != NULL && continuation->child_count > 0)

@@ -1464,7 +1464,7 @@ parse_node_tree_t::parse_node_list_t parse_node_tree_t::specific_statements_for_
result.push_back(get_child(*continuation, 1, symbol_statement));
continuation = get_child(*continuation, 2, symbol_job_continuation);
}

/* Result now contains a list of statements. But we want a list of specific statements e.g. symbol_switch_statement. So replace them in-place in the vector. */
for (size_t i=0; i < result.size(); i++)
{

@@ -1472,25 +1472,25 @@ parse_node_tree_t::parse_node_list_t parse_node_tree_t::specific_statements_for_
assert(statement->type == symbol_statement);
result.at(i) = this->get_child(*statement, 0);
}

return result;
}

const parse_node_t *parse_node_tree_t::next_node_in_node_list(const parse_node_t &node_list, parse_token_type_t entry_type, const parse_node_t **out_list_tail) const
{
parse_token_type_t list_type = node_list.type;

/* Paranoia - it doesn't make sense for a list type to contain itself */
assert(list_type != entry_type);

const parse_node_t *list_cursor = &node_list;
const parse_node_t *list_entry = NULL;

/* Loop while we don't have an item but do have a list. Note that not every node in the list may contain an item that we care about - e.g. job_list contains blank lines as a production */
while (list_entry == NULL && list_cursor != NULL)
{
const parse_node_t *next_cursor = NULL;

/* Walk through the children */
for (size_t i=0; i < list_cursor->child_count; i++)
{

@@ -1509,7 +1509,7 @@ const parse_node_t *parse_node_tree_t::next_node_in_node_list(const parse_node_t
/* Go to the next entry, even if it's NULL */
list_cursor = next_cursor;
}

/* Return what we got */
assert(list_cursor == NULL || list_cursor->type == list_type);
assert(list_entry == NULL || list_entry->type == entry_type);
52
parse_tree.h
@@ -25,7 +25,7 @@ struct parse_error_t
{
/** Text of the error */
wcstring text;

/** Code for the error */
enum parse_error_code_t code;

@@ -62,16 +62,16 @@ enum

/* Attempt to build a "parse tree" no matter what. This may result in a 'forest' of disconnected trees. This is intended to be used by syntax highlighting. */
parse_flag_continue_after_error = 1 << 0,

/* Include comment tokens */
parse_flag_include_comments = 1 << 1,

/* Indicate that the tokenizer should accept incomplete tokens */
parse_flag_accept_incomplete_tokens = 1 << 2,

/* Indicate that the parser should not generate the terminate token, allowing an 'unfinished' tree where some nodes may have no productions. */
parse_flag_leave_unterminated = 1 << 3

};
typedef unsigned int parse_tree_flags_t;

@@ -93,7 +93,7 @@ public:

/* Length of our range in the source code */
size_t source_length;

/* Parent */
node_offset_t parent;

@@ -123,7 +123,7 @@ public:
{
return source_start != (size_t)(-1);
}

/* Gets source for the node, or the empty string if it has no source */
wcstring get_source(const wcstring &str) const
{

@@ -132,7 +132,7 @@ public:
else
return wcstring(str, this->source_start, this->source_length);
}

/* Returns whether the given location is within the source range or at its end */
bool location_in_or_at_end_of_source_range(size_t loc) const
{

@@ -149,50 +149,50 @@ public:
/* Get the node corresponding to a child of the given node, or NULL if there is no such child. If expected_type is provided, assert that the node has that type.
*/
const parse_node_t *get_child(const parse_node_t &parent, node_offset_t which, parse_token_type_t expected_type = token_type_invalid) const;

/* Find the first direct child of the given node of the given type. asserts on failure
*/
const parse_node_t &find_child(const parse_node_t &parent, parse_token_type_t type) const;

/* Get the node corresponding to the parent of the given node, or NULL if there is no such child. If expected_type is provided, only returns the parent if it is of that type. Note the asymmetry: get_child asserts since the children are known, but get_parent does not, since the parent may not be known. */
const parse_node_t *get_parent(const parse_node_t &node, parse_token_type_t expected_type = token_type_invalid) const;

/* Returns the first ancestor of the given type, or NULL. */
const parse_node_t *get_first_ancestor_of_type(const parse_node_t &node, parse_token_type_t desired_type) const;

/* Find all the nodes of a given type underneath a given node, up to max_count of them */
typedef std::vector<const parse_node_t *> parse_node_list_t;
parse_node_list_t find_nodes(const parse_node_t &parent, parse_token_type_t type, size_t max_count = (size_t)(-1)) const;

/* Finds the last node of a given type underneath a given node, or NULL if it could not be found. If parent is NULL, this finds the last node in the tree of that type. */
const parse_node_t *find_last_node_of_type(parse_token_type_t type, const parse_node_t *parent = NULL) const;

/* Finds a node containing the given source location. If 'parent' is not NULL, it must be an ancestor. */
const parse_node_t *find_node_matching_source_location(parse_token_type_t type, size_t source_loc, const parse_node_t *parent) const;

/* Indicate if the given argument_list or arguments_or_redirections_list is a root list, or has a parent */
bool argument_list_is_root(const parse_node_t &node) const;

/* Utilities */

/* Given a plain statement, get the decoration (from the parent node), or none if there is no decoration */
enum parse_statement_decoration_t decoration_for_plain_statement(const parse_node_t &node) const;

/* Given a plain statement, get the command by reference (from the child node). Returns true if successful. Clears the command on failure. */
bool command_for_plain_statement(const parse_node_t &node, const wcstring &src, wcstring *out_cmd) const;

/* Given a plain statement, return true if the statement is part of a pipeline. If include_first is set, the first command in a pipeline is considered part of it; otherwise only the second or additional commands are */
bool statement_is_in_pipeline(const parse_node_t &node, bool include_first) const;

/* Given a redirection, get the redirection type (or TOK_NONE) and target (file path, or fd) */
enum token_type type_for_redirection(const parse_node_t &node, const wcstring &src, int *out_fd, wcstring *out_target) const;

/* If the given node is a block statement, returns the header node (for_header, while_header, begin_header, or function_header). Otherwise returns NULL */
const parse_node_t *header_node_for_block_statement(const parse_node_t &node) const;

/* Given a node list (e.g. of type symbol_job_list) and a node type (e.g. symbol_job), return the next element of the given type in that list, and the tail (by reference). Returns NULL if we've exhausted the list. */
const parse_node_t *next_node_in_node_list(const parse_node_t &node_list, parse_token_type_t item_type, const parse_node_t **list_tail) const;

/* Given a job, return all of its statements. These are 'specific statements' (e.g. symbol_decorated_statement, not symbol_statement) */
parse_node_list_t specific_statements_for_job(const parse_node_t &job) const;
};

@@ -231,7 +231,7 @@ bool parse_tree_from_string(const wcstring &str, parse_tree_flags_t flags, parse
case_item_list = <empty> |
case_item case_item_list |
<TOK_END> case_item_list

case_item = CASE argument_list STATEMENT_TERMINATOR job_list

block_statement = block_header <TOK_END> job_list end_command arguments_or_redirections_list

@@ -239,7 +239,7 @@ bool parse_tree_from_string(const wcstring &str, parse_tree_flags_t flags, parse
for_header = FOR var_name IN argument_list
while_header = WHILE job
begin_header = BEGIN

# Functions take arguments, and require at least one (the name). No redirections allowed.
function_header = FUNCTION argument argument_list

@@ -258,13 +258,13 @@ bool parse_tree_from_string(const wcstring &str, parse_tree_flags_t flags, parse
argument_or_redirection arguments_or_redirections_list
argument_or_redirection = argument | redirection
argument = <TOK_STRING>

redirection = <TOK_REDIRECTION> <TOK_STRING>

terminator = <TOK_END> | <TOK_BACKGROUND>

optional_background = <empty> | <TOK_BACKGROUND>

end_command = END

*/
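As a quick orientation for the grammar fragment above, here is roughly how one concrete command line decomposes under those productions. The block_header -> begin_header step is implied by the header rules rather than spelled out in this excerpt, and the file name is just an example:

    begin ; echo hi ; end > trace.txt

    block_statement = block_header <TOK_END> job_list end_command arguments_or_redirections_list
      block_header -> begin_header = BEGIN                    ("begin")
      <TOK_END>                                               (the first ";")
      job_list                                                ("echo hi ;")
      end_command = END                                       ("end")
      arguments_or_redirections_list
        -> argument_or_redirection = redirection
           = <TOK_REDIRECTION> <TOK_STRING>                   ("> trace.txt")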
@@ -241,11 +241,11 @@ int parse_util_locate_cmdsubst_range(const wcstring &str, size_t *inout_cursor_o
out_contents->clear();
*out_start = 0;
*out_end = str.size();

/* Nothing to do if the offset is at or past the end of the string. */
if (*inout_cursor_offset >= str.size())
return 0;

/* Defer to the wonky version */
const wchar_t * const buff = str.c_str();
const wchar_t * const valid_range_start = buff + *inout_cursor_offset, *valid_range_end = buff + str.size();

@@ -256,15 +256,15 @@ int parse_util_locate_cmdsubst_range(const wcstring &str, size_t *inout_cursor_o
/* The command substitutions must not be NULL and must be in the valid pointer range, and the end must be bigger than the beginning */
assert(cmdsub_begin != NULL && cmdsub_begin >= valid_range_start && cmdsub_begin <= valid_range_end);
assert(cmdsub_end != NULL && cmdsub_end > cmdsub_begin && cmdsub_end >= valid_range_start && cmdsub_end <= valid_range_end);

/* Assign the substring to the out_contents */
const wchar_t *interior_begin = cmdsub_begin + 1;
out_contents->assign(interior_begin, cmdsub_end - interior_begin);

/* Return the start and end */
*out_start = cmdsub_begin - buff;
*out_end = cmdsub_end - buff;

/* Update the inout_cursor_offset. Note this may cause it to exceed str.size(), though overflow is not likely */
*inout_cursor_offset = 1 + *out_end;
}
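The function above advances *inout_cursor_offset past each substitution it reports, so a caller can drive it in a simple loop until nothing is left. A standalone sketch of that cursor-driven calling pattern over a toy scanner (plain std::string, no nesting or quoting handling, unlike the real fish routine):

#include <cstdio>
#include <string>

// Toy scanner: finds the next "(...)" pair at or after *cursor, returns false when none is left.
// On success it reports the interior text and moves *cursor one past the closing paren.
static bool locate_next_parens(const std::string &str, std::size_t *cursor, std::string *out_contents)
{
    std::size_t open = str.find('(', *cursor);
    if (open == std::string::npos)
        return false;
    std::size_t close = str.find(')', open + 1);
    if (close == std::string::npos)
        return false;
    *out_contents = str.substr(open + 1, close - open - 1);
    *cursor = close + 1;  // same idea as "*inout_cursor_offset = 1 + *out_end" above
    return true;
}

int main()
{
    const std::string cmdline = "echo (pwd) (date)";
    std::size_t cursor = 0;
    std::string contents;
    while (locate_next_parens(cmdline, &cursor, &contents))
        printf("substitution contents: %s\n", contents.c_str());
    return 0;
}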
@@ -803,9 +803,9 @@ wcstring parse_util_escape_string_with_quote(const wcstring &cmd, wchar_t quote)
/* We are given a parse tree, the index of a node within the tree, its indent, and a vector of indents the same size as the original source string. Set the indent corresponding to the node's source range, if appropriate.

trailing_indent is the indent for nodes with unrealized source, i.e. if I type 'if false <ret>' then we have an if node with an empty job list (without source) but we want the last line to be indented anyways.

switch statements also indent.

max_visited_node_idx is the largest index we visited.
*/
static void compute_indents_recursive(const parse_node_tree_t &tree, node_offset_t node_idx, int node_indent, parse_token_type_t parent_type, std::vector<int> *indents, int *trailing_indent, node_offset_t *max_visited_node_idx)

@@ -813,16 +813,16 @@ static void compute_indents_recursive(const parse_node_tree_t &tree, node_offset
/* Guard against incomplete trees */
if (node_idx > tree.size())
return;

/* Update max_visited_node_idx */
if (node_idx > *max_visited_node_idx)
*max_visited_node_idx = node_idx;

/* We could implement this by utilizing the fish grammar. But there's an easy trick instead: almost everything that wraps a job list should be indented by 1. So just find all of the job lists. One exception is switch; the other exception is job_list itself: a job_list is a job and a job_list, and we want that child list to be indented the same as the parent. So just find all job_lists whose parent is not a job_list, and increment their indent by 1. */

const parse_node_t &node = tree.at(node_idx);
const parse_token_type_t node_type = node.type;

/* Increment the indent if we are either a root job_list, or root case_item_list */
const bool is_root_job_list = (node_type == symbol_job_list && parent_type != symbol_job_list);
const bool is_root_case_item_list = (node_type == symbol_case_item_list && parent_type != symbol_case_item_list);

@@ -830,22 +830,22 @@ static void compute_indents_recursive(const parse_node_tree_t &tree, node_offset
{
node_indent += 1;
}

/* If we have source, store the trailing indent unconditionally. If we do not have source, store the trailing indent only if ours is bigger; this prevents the trailing "run" of terminal job lists from affecting the trailing indent. For example, code like this:

if foo

will be parsed as this:

job_list
job
if_statement
job [if]
job_list [empty]
job_list [empty]

There are two "terminal" job lists, and we want the innermost one.

Note we are relying on the fact that nodes are in the same order as the source, i.e. an in-order traversal of the node tree also traverses the source from beginning to end.
*/
if (node.has_source() || node_indent > *trailing_indent)

@@ -853,7 +853,7 @@ static void compute_indents_recursive(const parse_node_tree_t &tree, node_offset
*trailing_indent = node_indent;
}

/* Store the indent into the indent array */
if (node.has_source())
{

@@ -861,7 +861,7 @@ static void compute_indents_recursive(const parse_node_tree_t &tree, node_offset
indents->at(node.source_start) = node_indent;
}

/* Recurse to all our children */
for (node_offset_t idx = 0; idx < node.child_count; idx++)
{

@@ -875,31 +875,31 @@ std::vector<int> parse_util_compute_indents(const wcstring &src)
/* Make a vector the same size as the input string, which contains the indents. Initialize them to -1. */
const size_t src_size = src.size();
std::vector<int> indents(src_size, -1);

/* Parse the string. We pass continue_after_error to produce a forest; the trailing indent of the last node we visited becomes the input indent of the next. I.e. in the case of 'switch foo ; cas', we get an invalid parse tree (since 'cas' is not valid) but we indent it as if it were a case item list */
parse_node_tree_t tree;
parse_tree_from_string(src, parse_flag_continue_after_error | parse_flag_accept_incomplete_tokens, &tree, NULL /* errors */);

/* Start indenting at the first node. If we have a parse error, we'll have to start indenting from the top again */
node_offset_t start_node_idx = 0;
int last_trailing_indent = 0;

while (start_node_idx < tree.size())
{
/* The indent that we'll get for the last line */
int trailing_indent = 0;

/* Biggest offset we visited */
node_offset_t max_visited_node_idx = 0;

/* Invoke the recursive version. As a hack, pass job_list for the 'parent' token type, which will prevent the really-root job list from indenting */
compute_indents_recursive(tree, start_node_idx, last_trailing_indent, symbol_job_list, &indents, &trailing_indent, &max_visited_node_idx);

/* We may have more to indent. The trailing indent becomes our current indent. Start at the node after the last we visited. */
last_trailing_indent = trailing_indent;
start_node_idx = max_visited_node_idx + 1;
}

int last_indent = 0;
for (size_t i=0; i<src_size; i++)
{

@@ -931,7 +931,7 @@ std::vector<int> parse_util_compute_indents(const wcstring &src)
break;
indents.at(suffix_idx) = last_trailing_indent;
}

return indents;
}
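parse_util_compute_indents only writes an indent at the first character of each node's source range; the loop over src_size then smears the last seen value across the characters in between, and the trailing loop extends last_trailing_indent over unfinished input. A standalone sketch of that forward fill (plain ints, made-up sample values, not the exact fish loop):

#include <cstdio>
#include <vector>

int main()
{
    // -1 means "no node starts at this character"; these sample values are invented.
    std::vector<int> indents = {0, -1, -1, 1, -1, -1, 0, -1};

    int last_indent = 0;
    for (std::size_t i = 0; i < indents.size(); i++)
    {
        if (indents[i] == -1)
            indents[i] = last_indent;   // inherit the indent of the previous character
        else
            last_indent = indents[i];
    }

    for (std::size_t i = 0; i < indents.size(); i++)
        printf("%d ", indents[i]);
    printf("\n");  // prints: 0 0 0 1 1 1 0 0
    return 0;
}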
@@ -942,12 +942,12 @@ static bool append_syntax_error(parse_error_list_t *errors, const parse_node_t &
error.source_start = node.source_start;
error.source_length = node.source_length;
error.code = parse_error_syntax;

va_list va;
va_start(va, fmt);
error.text = vformat_string(fmt, va);
va_end(va);

errors->push_back(error);
return true;
}

@@ -984,14 +984,14 @@ parser_test_error_bits_t parse_util_detect_errors(const wcstring &buff_src, pars
{
parse_node_tree_t node_tree;
parse_error_list_t parse_errors;

// Whether we encountered a parse error
bool errored = false;

// Whether we encountered an unclosed block
// We detect this via an 'end_command' block without source
bool has_unclosed_block = false;

// Whether there's an unclosed quote, and therefore unfinished
bool has_unclosed_quote = false;

@@ -1017,12 +1017,12 @@ parser_test_error_bits_t parse_util_detect_errors(const wcstring &buff_src, pars
{
errored = true;
}

// Expand all commands
// Verify 'or' and 'and' not used inside pipelines
// Verify pipes via parser_is_pipe_forbidden
// Verify return only within a function

if (! errored)
{
const size_t node_tree_size = node_tree.size();

@@ -1054,7 +1054,7 @@ parser_test_error_bits_t parse_util_detect_errors(const wcstring &buff_src, pars
{
errored = append_syntax_error(&parse_errors, node, ILLEGAL_CMD_ERR_MSG, command.c_str());
}

// Check that pipes are sound
if (! errored && parser_is_pipe_forbidden(command))
{

@@ -1064,7 +1064,7 @@ parser_test_error_bits_t parse_util_detect_errors(const wcstring &buff_src, pars
errored = append_syntax_error(&parse_errors, node, EXEC_ERR_MSG, command.c_str());
}
}

// Check that we don't return from outside a function
// But we allow it if it's 'return --help'
if (! errored && command == L"return")

@@ -1087,7 +1087,7 @@ parser_test_error_bits_t parse_util_detect_errors(const wcstring &buff_src, pars
errored = append_syntax_error(&parse_errors, node, INVALID_RETURN_ERR_MSG);
}
}

// Check that we don't break or continue from outside a loop
if (! errored && (command == L"break" || command == L"continue"))
{

@@ -1108,13 +1108,13 @@ parser_test_error_bits_t parse_util_detect_errors(const wcstring &buff_src, pars
found_loop = true;
end_search = true;
break;

case symbol_function_header:
// this is a function header, so we cannot break or continue. We stop our search here.
found_loop = false;
end_search = true;
break;

default:
// most likely begin / end style block, which makes no difference
break;

@@ -1122,7 +1122,7 @@ parser_test_error_bits_t parse_util_detect_errors(const wcstring &buff_src, pars
}
ancestor = node_tree.get_parent(*ancestor);
}

if (! found_loop && ! first_argument_is_help(node_tree, node, buff_src))
{
errored = append_syntax_error(&parse_errors, node, (command == L"break" ? INVALID_BREAK_ERR_MSG : INVALID_CONTINUE_ERR_MSG));

@@ -1134,13 +1134,13 @@ parser_test_error_bits_t parse_util_detect_errors(const wcstring &buff_src, pars
}

parser_test_error_bits_t res = 0;

if (errored)
res |= PARSER_TEST_ERROR;

if (has_unclosed_block || has_unclosed_quote)
res |= PARSER_TEST_INCOMPLETE;

if (out_errors)
{
out_errors->swap(parse_errors);
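The PARSER_TEST_ERROR / PARSER_TEST_INCOMPLETE bits let callers distinguish "this input is wrong" from "this input just isn't finished yet", which is what the reader later uses to decide whether Enter should execute or insert a newline. A sketch of the calling pattern as it might look inside the fish tree (mirrors reader_shell_test further down; the include names and the reporting strings are assumptions, not copied from the project):

#include "parse_util.h"   // assumed home of parse_util_detect_errors and the PARSER_TEST_* bits
#include <cstdio>

// Returns 0 when src is a complete, well-formed script.
static int classify_input(const wcstring &src)   // wcstring comes from fish's common headers
{
    parse_error_list_t errors;
    parser_test_error_bits_t res = parse_util_detect_errors(src, &errors);

    if (res & PARSER_TEST_ERROR)
        fwprintf(stderr, L"syntax error (%lu parse errors recorded)\n", (unsigned long)errors.size());
    else if (res & PARSER_TEST_INCOMPLETE)
        fwprintf(stderr, L"input is incomplete (e.g. unclosed block or quote)\n");
    return res == 0 ? 0 : 1;
}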
10
parse_util.h
@@ -41,11 +41,11 @@ int parse_util_locate_cmdsubst(const wchar_t *in,
*/

int parse_util_locate_cmdsubst_range(const wcstring &str,
size_t *inout_cursor_offset,
wcstring *out_contents,
size_t *out_start,
size_t *out_end,
bool accept_incomplete);
size_t *inout_cursor_offset,
wcstring *out_contents,
size_t *out_start,
size_t *out_end,
bool accept_incomplete);

/**
Find the beginning and end of the command substitution under the
48
parser.cpp
@@ -372,7 +372,7 @@ void parser_t::push_block(block_t *new_current)

new_current->job = 0;
new_current->loop_status=LOOP_NORMAL;

this->block_stack.push_back(new_current);

if ((new_current->type() != FUNCTION_DEF) &&

@@ -553,7 +553,7 @@ void parser_t::error(int ec, size_t p, const wchar_t *str, ...)
CHECK(str,);

error_code = ec;

// note : p may be -1
err_pos = static_cast<int>(p);

@@ -802,7 +802,7 @@ void parser_t::stack_trace(size_t block_idx, wcstring &buff) const
*/
if (block_idx >= this->block_count())
return;

const block_t *b = this->block_at_index(block_idx);

if (b->type()==EVENT)

@@ -968,7 +968,7 @@ const wchar_t *parser_t::current_filename() const
return function_get_definition_file(fb->name);
}
}

/* We query a global array for the current file name, but only do that if we are the principal parser */
if (this == &principal_parser())
{

@@ -1193,7 +1193,7 @@ void parser_t::job_promote(job_t *job)
{
signal_block();

job_list_t::iterator loc = std::find(my_job_list.begin(), my_job_list.end(), job);
job_list_t::iterator loc = std::find(my_job_list.begin(), my_job_list.end(), job);
assert(loc != my_job_list.end());

/* Move the job to the beginning */

@@ -2557,32 +2557,32 @@ void parser_t::eval_job(tokenizer_t *tok)
int parser_t::eval_new_parser(const wcstring &cmd, const io_chain_t &io, enum block_type_t block_type)
{
CHECK_BLOCK(1);

if (block_type != TOP && block_type != SUBST)
{
debug(1, INVALID_SCOPE_ERR_MSG, parser_t::get_block_desc(block_type));
bugreport();
return 1;
}

/* Parse the source into a tree, if we can */
parse_node_tree_t tree;
if (! parse_tree_from_string(cmd, parse_flag_none, &tree, NULL))
{
return 1;
}

/* Append to the execution context stack */
parse_execution_context_t *ctx = new parse_execution_context_t(tree, cmd, this);
execution_contexts.push_back(ctx);

/* Execute the first node */
int result = 1;
if (! tree.empty())
{
result = this->eval_block_node(0, io, block_type);
}

/* Clean up the execution context stack */
assert(! execution_contexts.empty() && execution_contexts.back() == ctx);
execution_contexts.pop_back();

@@ -2596,9 +2596,9 @@ int parser_t::eval_block_node(node_offset_t node_idx, const io_chain_t &io, enum
// Paranoia. It's a little frightening that we're given only a node_idx and we interpret this in the topmost execution context's tree. What happens if these were to be interleaved? Fortunately that cannot happen.
parse_execution_context_t *ctx = execution_contexts.back();
assert(ctx != NULL);

CHECK_BLOCK(1);

/* Handle cancellation requests. If our block stack is currently empty, then we already did successfully cancel (or there was nothing to cancel); clear the flag. If our block stack is not empty, we are still in the process of cancelling; refuse to evaluate anything */
if (this->cancellation_requested)
{

@@ -2611,7 +2611,7 @@ int parser_t::eval_block_node(node_offset_t node_idx, const io_chain_t &io, enum
this->cancellation_requested = false;
}
}

/* Only certain blocks are allowed */
if ((block_type != TOP) &&
(block_type != SUBST))

@@ -2622,16 +2622,16 @@ int parser_t::eval_block_node(node_offset_t node_idx, const io_chain_t &io, enum
bugreport();
return 1;
}

/* Not sure why we reap jobs here */
job_reap(0);

/* Start it up */
const block_t * const start_current_block = current_block();
block_t *scope_block = new scope_block_t(block_type);
this->push_block(scope_block);
int result = ctx->eval_node_at_offset(node_idx, scope_block, io);

/* Clean up the block stack */
this->pop_block();
while (start_current_block != current_block())

@@ -2646,10 +2646,10 @@ int parser_t::eval_block_node(node_offset_t node_idx, const io_chain_t &io, enum
}
this->pop_block();
}

/* Reap again */
job_reap(0);

return result;

}

@@ -2659,7 +2659,7 @@ int parser_t::eval(const wcstring &cmd_str, const io_chain_t &io, enum block_typ

if (parser_use_ast())
return this->eval_new_parser(cmd_str, io, block_type);

const wchar_t * const cmd = cmd_str.c_str();
size_t forbid_count;
int code;

@@ -2993,11 +2993,11 @@ void parser_t::get_backtrace(const wcstring &src, const parse_error_list_t &erro
if (! errors.empty())
{
const parse_error_t &err = errors.at(0);

// Determine which line we're on
assert(err.source_start <= src.size());
size_t which_line = 1 + std::count(src.begin(), src.begin() + err.source_start, L'\n');

const wchar_t *filename = this->current_filename();
if (filename)
{

@@ -3007,13 +3007,13 @@ void parser_t::get_backtrace(const wcstring &src, const parse_error_list_t &erro
{
output->append(L"fish: ");
}

// Don't include the caret if we're interactive, this is the first line of text, and our source is at its beginning, because then it's obvious
bool skip_caret = (get_is_interactive() && which_line == 1 && err.source_start == 0);

output->append(err.describe(src, skip_caret));
output->push_back(L'\n');

this->stack_trace(0, *output);
}
}
24
parser.h
|
@@ -95,7 +95,7 @@ public:
bool skip; /**< Whether execution of the commands in this block should be skipped */
bool had_command; /**< Set to non-zero once a command has been executed in this block */
int tok_pos; /**< The start index of the block */

node_offset_t node_offset; /* Offset of the node */

/** Status for the current loop block. Can be any of the values from the loop_status enum. */
@@ -286,10 +286,10 @@ private:

/** Position of last error */
int err_pos;

/** Indication that we should skip all blocks */
bool cancellation_requested;

/** Stack of execution contexts. We own these pointers and must delete them */
std::vector<parse_execution_context_t *> execution_contexts;

@@ -313,7 +313,7 @@ private:

/** The jobs associated with this parser */
job_list_t my_job_list;

/** The list of blocks, allocated with new. It's our responsibility to delete these */
std::vector<block_t *> block_stack;

@@ -327,7 +327,7 @@ private:
/* No copying allowed */
parser_t(const parser_t&);
parser_t& operator=(const parser_t&);

void parse_job_argument_list(process_t *p, job_t *j, tokenizer_t *tok, std::vector<completion_t>&, bool);
int parse_job(process_t *p, job_t *j, tokenizer_t *tok);

@@ -339,7 +339,7 @@ private:

/** Create a job */
job_t *job_create(const io_chain_t &io);

/** Adds a job to the beginning of the job list. */
void job_add(job_t *job);

@@ -382,10 +382,10 @@ public:
*/
int eval(const wcstring &cmd_str, const io_chain_t &io, enum block_type_t block_type);
int eval_new_parser(const wcstring &cmd, const io_chain_t &io, enum block_type_t block_type);

/** Evaluates a block node at the given node offset in the topmost execution context */
int eval_block_node(node_offset_t node_idx, const io_chain_t &io, enum block_type_t block_type);

/**
Evaluate line as a list of parameters, i.e. tokenize it and perform parameter expansion and cmdsubst execution on the tokens.
The output is inserted into output.

@@ -430,15 +430,15 @@ public:

/** Set the current position in the latest string of the tokenizer. */
void set_pos(int p);

/** Returns the block at the given index. 0 corresponds to the innermost block. Returns NULL when idx is at or equal to the number of blocks. */
const block_t *block_at_index(size_t idx) const;
block_t *block_at_index(size_t idx);

/** Returns the current (innermost) block */
const block_t *current_block() const;
block_t *current_block();

/** Count of blocks */
size_t block_count() const
{

@@ -459,7 +459,7 @@ public:

/** Remove the outermost block namespace */
void pop_block();

/** Remove the outermost block, asserting it's the given one */
void pop_block(const block_t *b);
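block_at_index() is documented above as indexing from the innermost block (0) outward and returning NULL once the index reaches the block count, so when any block is active the outermost one sits at index block_count() - 1. A minimal sketch relying only on the declarations shown in this header:

    // Sketch: fetch the outermost block of a parser, or NULL if no block is active.
    // Uses only the documented contract: index 0 is innermost, NULL at/after block_count().
    static const block_t *outermost_block(const parser_t &parser)
    {
        const size_t count = parser.block_count();
        return count > 0 ? parser.block_at_index(count - 1) : NULL;
    }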
2 proc.cpp
@@ -640,7 +640,7 @@ int job_reap(bool interactive)
static int locked = 0;

locked++;

/* Preserve the exit status */
const int saved_status = proc_get_last_status();

6 proc.h
@@ -73,10 +73,10 @@ enum process_type_t
A block of commands
*/
INTERNAL_BLOCK,

/** A block of commands, represented as a node */
INTERNAL_BLOCK_NODE,

/**
The exec builtin
*/

@@ -156,7 +156,7 @@ public:
INTERNAL_EXEC, or INTERNAL_BUFFER
*/
enum process_type_t type;

/* For internal block processes only, the node offset of the block */
node_offset_t internal_block_node;

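The comment above ties internal_block_node to internal block processes only, presumably those whose type is INTERNAL_BLOCK_NODE; a hypothetical helper (not a member that exists in proc.h) that makes the invariant explicit:

    // Hypothetical sketch: only consult internal_block_node for block-node processes.
    // That INTERNAL_BLOCK_NODE is the relevant type is inferred from the field's name
    // and comment, not from code shown in this diff.
    static bool try_get_block_node(const process_t &proc, node_offset_t *out_node)
    {
        if (proc.type != INTERNAL_BLOCK_NODE)
            return false;
        *out_node = proc.internal_block_node;
        return true;
    }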
22 reader.cpp
@@ -658,31 +658,31 @@ bool reader_expand_abbreviation_in_command(const wcstring &cmdline, size_t curso

const wcstring subcmd = wcstring(cmdsub_begin, cmdsub_end - cmdsub_begin);
const size_t subcmd_cursor_pos = cursor_pos - subcmd_offset;

/* Parse this subcmd */
parse_node_tree_t parse_tree;
parse_tree_from_string(subcmd, parse_flag_continue_after_error | parse_flag_accept_incomplete_tokens, &parse_tree, NULL);

/* Look for plain statements where the cursor is at the end of the command */
const parse_node_t *matching_cmd_node = NULL;
const size_t len = parse_tree.size();
for (size_t i=0; i < len; i++)
{
const parse_node_t &node = parse_tree.at(i);

/* Only interested in plain statements with source */
if (node.type != symbol_plain_statement || ! node.has_source())
continue;

/* Skip decorated statements */
if (parse_tree.decoration_for_plain_statement(node) != parse_statement_decoration_none)
continue;

/* Get the command node. Skip it if we can't or it has no source */
const parse_node_t *cmd_node = parse_tree.get_child(node, 0, parse_token_type_string);
if (cmd_node == NULL || ! cmd_node->has_source())
continue;

/* Now see if its source range contains our cursor, including at the end */
if (subcmd_cursor_pos >= cmd_node->source_start && subcmd_cursor_pos <= cmd_node->source_start + cmd_node->source_length)
{

@@ -691,7 +691,7 @@ bool reader_expand_abbreviation_in_command(const wcstring &cmdline, size_t curso
break;
}
}

/* Now if we found a command node, expand it */
bool result = false;
if (matching_cmd_node != NULL)
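Note that the containment test above is end-inclusive (<= rather than <), so a cursor sitting immediately after the command word still selects it for abbreviation expansion. Isolated as a sketch, using only the start/length fields visible in the hunk:

    #include <cstddef>

    // Sketch: does `pos` fall within [start, start + length], end inclusive?
    // The inclusive upper bound is what lets a cursor placed right after the
    // command word still count as being "on" that word.
    static bool range_contains_cursor(size_t start, size_t length, size_t pos)
    {
        return pos >= start && pos <= start + length;
    }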
@@ -2468,10 +2468,10 @@ int reader_shell_test(const wchar_t *b)
{
assert(b != NULL);
wcstring bstr = b;

/* Append a newline, to act as a statement terminator */
bstr.push_back(L'\n');

parse_error_list_t errors;
int res = parse_util_detect_errors(bstr, &errors);

@@ -2479,7 +2479,7 @@ int reader_shell_test(const wchar_t *b)
{
wcstring error_desc;
parser_t::principal_parser().get_backtrace(bstr, errors, &error_desc);

// ensure we end with a newline. Also add an initial newline, because it's likely the user just hit enter and so there's junk on the current line
if (! string_suffixes_string(L"\n", error_desc))
{
@@ -3163,7 +3163,7 @@ const wchar_t *reader_readline(void)
/* Figure out the extent of the token within the command substitution. Note we pass cmdsub_begin here, not buff */
const wchar_t *token_begin, *token_end;
parse_util_token_extent(cmdsub_begin, data->buff_pos - (cmdsub_begin-buff), &token_begin, &token_end, 0, 0);

/* Hack: the token may extend past the end of the command substitution, e.g. in (echo foo) the last token is 'foo)'. Don't let that happen. */
if (token_end > cmdsub_end) token_end = cmdsub_end;

screen.cpp
@@ -1215,7 +1215,8 @@ static screen_layout_t compute_layout(screen_t *s,
// If the command wraps, and the prompt is not short, place the command on its own line.
// A short prompt is 33% or less of the terminal's width.
const size_t prompt_percent_width = (100 * left_prompt_width) / screen_width;
-if (left_prompt_width + first_command_line_width + 1 > screen_width && prompt_percent_width > 33) {
+if (left_prompt_width + first_command_line_width + 1 > screen_width && prompt_percent_width > 33)
+{
result.prompts_get_own_line = true;
}

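As a worked example of the rule above: on an 80-column terminal, a 30-column left prompt gives prompt_percent_width = (100 * 30) / 80 = 37, so a command line that would wrap is moved onto its own line, while a 20-column prompt gives 25 and the command stays next to the prompt.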
tokenizer.cpp
@@ -441,7 +441,7 @@ static size_t read_redirection_or_fd_pipe(const wchar_t *buff, enum token_type *
enum token_type redirection_mode = TOK_NONE;

size_t idx = 0;

/* Determine the fd. This may be specified as a prefix like '2>...' or it may be implicit like '>' or '^'. Try parsing out a number; if we did not get any digits then infer it from the first character. Watch out for overflow. */
long long big_fd = 0;
for (; iswdigit(buff[idx]); idx++)
@@ -450,21 +450,29 @@ static size_t read_redirection_or_fd_pipe(const wchar_t *buff, enum token_type *
if (big_fd <= INT_MAX)
big_fd = big_fd * 10 + (buff[idx] - L'0');
}

fd = (big_fd > INT_MAX ? -1 : static_cast<int>(big_fd));

if (idx == 0)
{
/* We did not find a leading digit, so there's no explicit fd. Infer it from the type */
switch (buff[idx])
{
-case L'>': fd = STDOUT_FILENO; break;
-case L'<': fd = STDIN_FILENO; break;
-case L'^': fd = STDERR_FILENO; break;
-default: errored = true; break;
+case L'>':
+    fd = STDOUT_FILENO;
+    break;
+case L'<':
+    fd = STDIN_FILENO;
+    break;
+case L'^':
+    fd = STDERR_FILENO;
+    break;
+default:
+    errored = true;
+    break;
}
}

/* Either way we should have ended on the redirection character itself like '>' */
wchar_t redirect_char = buff[idx++]; //note increment of idx
if (redirect_char == L'>' || redirect_char == L'^')
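The digit loop above stops accumulating once the value would pass INT_MAX and later maps any overflow to -1 rather than failing; pulled out as a self-contained sketch (the function name and out-parameter are illustrative, not part of the tokenizer):

    #include <climits>
    #include <cstddef>
    #include <cwctype>

    // Sketch: parse an optional leading run of digits into an fd number.
    // Accumulation is capped at INT_MAX (further digits are scanned but ignored),
    // and overflow is reported as -1, mirroring the guard in the hunk above.
    static int parse_fd_prefix(const wchar_t *buff, size_t *out_consumed)
    {
        long long big_fd = 0;
        size_t idx = 0;
        for (; iswdigit(buff[idx]); idx++)
        {
            if (big_fd <= INT_MAX)
                big_fd = big_fd * 10 + (buff[idx] - L'0');
        }
        *out_consumed = idx;
        return (big_fd > INT_MAX) ? -1 : static_cast<int>(big_fd);
    }

Called on L"2>out.txt" this yields 2 with one character consumed; a digit run long enough to overflow yields -1, which the comment near the end of this file says is not treated as a tokenizer error.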
@@ -486,7 +494,7 @@ static size_t read_redirection_or_fd_pipe(const wchar_t *buff, enum token_type *
/* Something else */
errored = true;
}

/* Optional characters like & or ?, or the pipe char | */
wchar_t opt_char = buff[idx];
if (opt_char == L'&')
@@ -505,20 +513,20 @@ static size_t read_redirection_or_fd_pipe(const wchar_t *buff, enum token_type *
redirection_mode = TOK_PIPE;
idx++;
}

/* Don't return valid-looking stuff on error */
if (errored)
{
idx = 0;
redirection_mode = TOK_NONE;
}

/* Return stuff */
if (out_redirection_mode != NULL)
*out_redirection_mode = redirection_mode;
if (out_fd != NULL)
*out_fd = fd;

return idx;
}

@@ -542,7 +550,7 @@ int fd_redirected_by_pipe(const wcstring &str)
{
return STDOUT_FILENO;
}

enum token_type mode = TOK_NONE;
int fd = 0;
read_redirection_or_fd_pipe(str.c_str(), &mode, &fd);
@@ -556,11 +564,15 @@ int oflags_for_redirection_type(enum token_type type)
{
switch (type)
{
-case TOK_REDIRECT_APPEND: return O_CREAT | O_APPEND | O_WRONLY;
-case TOK_REDIRECT_OUT: return O_CREAT | O_WRONLY | O_TRUNC;
-case TOK_REDIRECT_NOCLOB: return O_CREAT | O_EXCL | O_WRONLY;
-case TOK_REDIRECT_IN: return O_RDONLY;
+case TOK_REDIRECT_APPEND:
+    return O_CREAT | O_APPEND | O_WRONLY;
+case TOK_REDIRECT_OUT:
+    return O_CREAT | O_WRONLY | O_TRUNC;
+case TOK_REDIRECT_NOCLOB:
+    return O_CREAT | O_EXCL | O_WRONLY;
+case TOK_REDIRECT_IN:
+    return O_RDONLY;

default:
return -1;
}
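Every write-oriented case above includes O_CREAT, so whoever opens the redirection target has to pass a creation mode as the third open() argument; a rough usage sketch (the path parameter and the 0666 mask are placeholders, not fish's actual redirection code):

    #include <fcntl.h>

    // Sketch: open a redirection target using the flags computed above.
    // Returns -1 when the token type does not map to a file redirection.
    static int open_redirection_target(const char *path, enum token_type type)
    {
        const int oflags = oflags_for_redirection_type(type);
        if (oflags == -1)
            return -1;
        return open(path, oflags, 0666); // placeholder permissions
    }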
@@ -703,7 +715,7 @@ void tok_next(tokenizer_t *tok)
int fd = -1;
if (iswdigit(*tok->buff))
consumed = read_redirection_or_fd_pipe(tok->buff, &mode, &fd);

if (consumed > 0)
{
/* It looks like a redirection or a pipe. But we don't support piping fd 0. Note that fd 0 may be -1, indicating overflow; but we don't treat that as a tokenizer error. */