commit 562946d055
parent 924b8cbe24

Cleanup thrashing around in parser_t. New parser execution lives in parse_execution.cpp

4 changed files with 81 additions and 730 deletions
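Much of the diff below follows one pattern: helpers that used to build a value directly (and report errors as a side effect) now accumulate failures in a local errored flag, return a success bool, and hand their result back through an out-parameter, as the new bool determine_io_chain(statement, io_chain_t *out_chain) does. The following is only a minimal standalone sketch of that shape; the types and names (toy_io_chain_t, build_io_chain) are hypothetical stand-ins, not fish code.

#include <algorithm>
#include <string>
#include <vector>

// Hypothetical stand-in for fish's io_chain_t; only the error-propagation
// shape is meant to mirror the new determine_io_chain in this commit.
struct toy_io_chain_t
{
    std::vector<std::string> redirections;
};

// Returns true on success, false on error, and fills *out_chain only on success.
static bool build_io_chain(const std::vector<std::string> &targets, toy_io_chain_t *out_chain)
{
    toy_io_chain_t result;
    bool errored = false;

    for (size_t i = 0; i < targets.size(); i++)
    {
        if (targets.at(i).empty())
        {
            // In the real code this is 'errored = append_error(...)', which always returns true
            errored = true;
        }
        else
        {
            result.redirections.push_back(targets.at(i));
        }
    }

    // Hand the result to the caller only if nothing went wrong
    if (out_chain && ! errored)
    {
        std::swap(*out_chain, result);
    }
    return ! errored;
}

Callers can then collapse the failure into a single check, much as run_1_job does with its process_errored flag.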
parse_execution.cpp

@@ -10,6 +10,7 @@
 #include "parser.h"
 #include "expand.h"
 #include "wutil.h"
+#include "exec.h"
 #include "path.h"

@@ -24,7 +25,7 @@ wcstring parse_execution_context_t::get_source(const parse_node_t &node) const
     return node.get_source(this->src);
 }

-const parse_node_t *parse_execution_context_t::get_child(const parse_node_t &parent, node_offset_t which, parse_token_type_t expected_type)
+const parse_node_t *parse_execution_context_t::get_child(const parse_node_t &parent, node_offset_t which, parse_token_type_t expected_type) const
 {
     return this->tree.get_child(parent, which, expected_type);
 }
@@ -91,6 +92,8 @@ bool parse_execution_context_t::append_error(const parse_node_t &node, const wch

 process_t *parse_execution_context_t::create_plain_process(job_t *job, const parse_node_t &statement)
 {
+    bool errored = false;
+
     /* Get the decoration */
     assert(statement.type == symbol_plain_statement);

@@ -99,29 +102,37 @@ process_t *parse_execution_context_t::create_plain_process(job_t *job, const par
     bool got_cmd = tree.command_for_plain_statement(statement, src, &cmd);
     assert(got_cmd);

-    /* Expand it as a command */
+    /* Expand it as a command. Return NULL on failure. */
     bool expanded = expand_one(cmd, EXPAND_SKIP_CMDSUBST | EXPAND_SKIP_VARIABLES);
     if (! expanded)
     {
-        append_error(statement, ILLEGAL_CMD_ERR_MSG, cmd.c_str());
-        return 0;
+        errored = append_error(statement, ILLEGAL_CMD_ERR_MSG, cmd.c_str());
     }

+    if (errored)
+        return NULL;
+
     /* The list of arguments. The command is the first argument. TODO: count hack */
     const parse_node_t *unmatched_wildcard = NULL;
     wcstring_list_t argument_list = this->determine_arguments(statement, &unmatched_wildcard);
     argument_list.insert(argument_list.begin(), cmd);

-    /* We were not able to expand any wildcards. Here is the first one that failed */
+    /* If we were not able to expand any wildcards, here is the first one that failed */
     if (unmatched_wildcard != NULL)
     {
         job_set_flag(job, JOB_WILDCARD_ERROR, 1);
         proc_set_last_status(STATUS_UNMATCHED_WILDCARD);
-        append_error(*unmatched_wildcard, WILDCARD_ERR_MSG, unmatched_wildcard->get_source(src).c_str());
+        errored = append_error(*unmatched_wildcard, WILDCARD_ERR_MSG, unmatched_wildcard->get_source(src).c_str());
     }

+    if (errored)
+        return NULL;
+
     /* The set of IO redirections that we construct for the process */
-    const io_chain_t process_io_chain = this->determine_io_chain(statement);
+    io_chain_t process_io_chain;
+    errored = ! this->determine_io_chain(statement, &process_io_chain);
+    if (errored)
+        return NULL;
+
     /* Determine the process type, which depends on the statement decoration (command, builtin, etc) */
     enum parse_statement_decoration_t decoration = tree.decoration_for_plain_statement(statement);
@@ -165,16 +176,21 @@ process_t *parse_execution_context_t::create_plain_process(job_t *job, const par
         if (! has_command)
         {
             /* TODO: support fish_command_not_found, implicit cd, etc. here */
+            errored = true;
         }

         return NULL;
     }

-    /* Return the process */
-    process_t *result = new process_t();
-    result->type = process_type;
-    result->set_argv(argument_list);
-    result->set_io_chain(process_io_chain);
-    result->actual_cmd = actual_cmd;
+    /* Return the process, or NULL on error */
+    process_t *result = NULL;
+    if (! errored)
+    {
+        result = new process_t();
+        result->type = process_type;
+        result->set_argv(argument_list);
+        result->set_io_chain(process_io_chain);
+        result->actual_cmd = actual_cmd;
+    }
     return result;
 }

@@ -252,9 +268,10 @@ wcstring_list_t parse_execution_context_t::determine_arguments(const parse_node_
     return argument_list;
 }

-io_chain_t parse_execution_context_t::determine_io_chain(const parse_node_t &statement)
+bool parse_execution_context_t::determine_io_chain(const parse_node_t &statement, io_chain_t *out_chain)
 {
     io_chain_t result;
+    bool errored = false;

     /* Get all redirection nodes underneath the statement */
     const parse_node_tree_t::parse_node_list_t redirect_nodes = tree.find_nodes(statement, symbol_redirection);
@@ -271,7 +288,7 @@ io_chain_t parse_execution_context_t::determine_io_chain(const parse_node_t &sta
         if (! target_expanded || target.empty())
         {
             /* Should improve this error message */
-            this->append_error(redirect_node,
+            errored = append_error(redirect_node,
                                _(L"Invalid redirection target: %ls"),
                                target.c_str());
         }
@@ -295,7 +312,7 @@ io_chain_t parse_execution_context_t::determine_io_chain(const parse_node_t &sta
                     int old_fd = fish_wcstoi(target.c_str(), &end, 10);
                     if (old_fd < 0 || errno || *end)
                     {
-                        this->append_error(redirect_node,
+                        errored = append_error(redirect_node,
                                            _(L"Requested redirection to something that is not a file descriptor %ls"),
                                            target.c_str());
                     }
@@ -333,7 +350,12 @@ io_chain_t parse_execution_context_t::determine_io_chain(const parse_node_t &sta
             result.push_back(new_io);
         }
     }
-    return result;
+
+    if (out_chain && ! errored)
+    {
+        std::swap(*out_chain, result);
+    }
+    return ! errored;
 }

 process_t *parse_execution_context_t::create_boolean_process(job_t *job, const parse_node_t &bool_statement)
@@ -416,6 +438,7 @@ process_t *parse_execution_context_t::create_job_process(job_t *job, const parse

         case symbol_decorated_statement:
         {
+            /* Get the plain statement. It will pull out the decoration itself */
             const parse_node_t &plain_statement = tree.find_child(specific_statement, symbol_plain_statement);
             result = this->create_plain_process(job, plain_statement);
             break;
@@ -431,7 +454,7 @@ process_t *parse_execution_context_t::create_job_process(job_t *job, const parse
 }


-void parse_execution_context_t::populate_job_from_job_node(job_t *j, const parse_node_t &job_node)
+bool parse_execution_context_t::populate_job_from_job_node(job_t *j, const parse_node_t &job_node)
 {
     assert(job_node.type == symbol_job);

@@ -470,6 +493,9 @@ void parse_execution_context_t::populate_job_from_job_node(job_t *j, const parse
         last_process = last_process->next;
         job_cont = get_child(*job_cont, 2, symbol_job_continuation);
     }
+
+    /* Return success */
+    return ! process_errored;
 }

 int parse_execution_context_t::run_1_job(const parse_node_t &job_node)
@@ -515,8 +541,10 @@ int parse_execution_context_t::run_1_job(const parse_node_t &job_node)

     parser->current_block()->job = j;

-    this->populate_job_from_job_node(j, job_node);
+    /* Populate the job. This may fail for reasons like command_not_found */
+    bool process_errored = ! this->populate_job_from_job_node(j, job_node);
+
     /* Store time it took to 'parse' the command */
     if (do_profile)
     {
         parse_time = get_time();
@@ -524,34 +552,49 @@ int parse_execution_context_t::run_1_job(const parse_node_t &job_node)
         profile_item->skipped=parser->current_block()->skip;
     }

-    /* Check to see if this contained any external commands */
-    bool job_contained_external_command = false;
-    for (const process_t *proc = j->first_process; proc != NULL; proc = proc->next)
-    {
-        if (proc->type == EXTERNAL)
-        {
-            job_contained_external_command = true;
-            break;
-        }
-    }
-
-    /* Only external commands require a new fishd barrier */
-    if (!job_contained_external_command)
-        set_proc_had_barrier(false);
+    if (! process_errored)
+    {
+        /* Check to see if this contained any external commands */
+        bool job_contained_external_command = false;
+        for (const process_t *proc = j->first_process; proc != NULL; proc = proc->next)
+        {
+            if (proc->type == EXTERNAL)
+            {
+                job_contained_external_command = true;
+                break;
+            }
+        }
+
+        /* Actually execute the job */
+        exec_job(*this->parser, j);
+
+        /* Only external commands require a new fishd barrier */
+        if (!job_contained_external_command)
+        {
+            set_proc_had_barrier(false);
+        }
+    }

     /* Need support for skipped_exec here */

     if (do_profile)
     {
         exec_time = get_time();
         profile_item->level=eval_level;
         profile_item->parse = (int)(parse_time-start_time);
         profile_item->exec=(int)(exec_time-parse_time);
+        profile_item->skipped = process_errored;
     }

-    job_reap(0);
-
-    return proc_get_last_status();
+    /* Set the last status to 1 if the job could not be executed */
+    if (process_errored)
+        proc_set_last_status(1);
+    const int ret = proc_get_last_status();
+
+    /* Clean up jobs. Do this after we've determined the return value, since this may trigger event handlers */
+    job_reap(0);
+    /* All done */
+    return ret;
 }

 void parse_execution_context_t::run_job_list(const parse_node_t &job_list_node)

parse_execution.h

@@ -35,9 +35,9 @@ class parse_execution_context_t
     /* Report an error. Always returns true. */
     bool append_error(const parse_node_t &node, const wchar_t *fmt, ...);

     /* Utilities */
     wcstring get_source(const parse_node_t &node) const;
-    const parse_node_t *get_child(const parse_node_t &parent, node_offset_t which, parse_token_type_t expected_type = token_type_invalid);
+    const parse_node_t *get_child(const parse_node_t &parent, node_offset_t which, parse_token_type_t expected_type = token_type_invalid) const;
     node_offset_t get_offset(const parse_node_t &node) const;

     process_t *create_job_process(job_t *job, const parse_node_t &statement_node);
@@ -48,11 +48,13 @@ class parse_execution_context_t
     void run_while_process(const parse_node_t &header, const parse_node_t &statement);

     wcstring_list_t determine_arguments(const parse_node_t &parent, const parse_node_t **out_unmatched_wildcard_node);
-    io_chain_t determine_io_chain(const parse_node_t &statement);
+
+    /* Determines the IO chain. Returns true on success, false on error */
+    bool determine_io_chain(const parse_node_t &statement, io_chain_t *out_chain);

     int run_1_job(const parse_node_t &job_node);
     void run_job_list(const parse_node_t &job_list_node);
-    void populate_job_from_job_node(job_t *j, const parse_node_t &job_node);
+    bool populate_job_from_job_node(job_t *j, const parse_node_t &job_node);

     void eval_next_stack_elem();

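The header above documents that append_error always returns true, which is what makes the repeated errored = append_error(...) assignments in parse_execution.cpp work as a one-line "report and remember the failure" idiom. Below is only a hedged sketch of that convention with a hypothetical collector type; the real parse_execution_context_t records parse errors rather than plain strings.

#include <cstdarg>
#include <cstdio>
#include <string>
#include <vector>

// Hypothetical error collector illustrating the "always returns true" contract.
struct toy_error_list_t
{
    std::vector<std::string> errors;

    // Always returns true, so a caller can write: errored = append_error(...);
    bool append_error(const char *fmt, ...)
    {
        char buff[256];
        va_list va;
        va_start(va, fmt);
        vsnprintf(buff, sizeof buff, fmt, va);
        va_end(va);
        errors.push_back(buff);
        return true;
    }
};

With that contract, errored = list.append_error("bad redirection %s", target) both records the message and flags the failure in one statement.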

673 parser.cpp

@@ -1643,390 +1643,6 @@ void parser_t::parse_job_argument_list(process_t *p,
     p->set_io_chain(process_io_chain);
 }

-/*
-static void print_block_stack( block_t *b )
-{
-    if( !b )
-        return;
-    print_block_stack( b->outer );
-
-    debug( 0, L"Block type %ls, skip: %d", parser_get_block_desc( b->type ), b->skip );
-}
-*/
-
-process_t *parser_t::create_boolean_process(job_t *job, const parse_node_t &bool_statement, const parser_context_t &ctx)
-{
-    // Handle a boolean statement
-    bool skip_job = false;
-    assert(bool_statement.type == symbol_boolean_statement);
-    switch (bool_statement.production_idx)
-    {
-            // These magic numbers correspond to productions for boolean_statement
-        case 0:
-            // AND. Skip if the last job failed.
-            skip_job = (proc_get_last_status() != 0);
-            break;
-
-        case 1:
-            // OR. Skip if the last job succeeded.
-            skip_job = (proc_get_last_status() == 0);
-            break;
-
-        case 2:
-            // NOT. Negate it.
-            job_set_flag(job, JOB_NEGATE, !job_get_flag(job, JOB_NEGATE));
-            break;
-
-        default:
-        {
-            fprintf(stderr, "Unexpected production in boolean statement\n");
-            PARSER_DIE();
-            break;
-        }
-    }
-
-    process_t *result = NULL;
-    if (! skip_job)
-    {
-        const parse_node_t &subject = *ctx.tree.get_child(bool_statement, 1, symbol_statement);
-        result = this->create_job_process(job, subject, ctx);
-    }
-    return result;
-}
-
-process_t *parser_t::create_for_process(job_t *job, const parse_node_t &header, const parse_node_t &statement, const parser_context_t &ctx)
-{
-    return NULL;
-}
-
-process_t *parser_t::create_while_process(job_t *job, const parse_node_t &header, const parse_node_t &statement, const parser_context_t &ctx)
-{
-    return NULL;
-}
-
-process_t *parser_t::create_begin_process(job_t *job, const parse_node_t &header, const parse_node_t &statement, const parser_context_t &ctx)
-{
-    return NULL;
-}
-
-process_t *parser_t::create_plain_process(job_t *job, const parse_node_t &statement, const parser_context_t &ctx)
-{
-    /* Get the decoration */
-    assert(statement.type == symbol_plain_statement);
-
-    /* Get the command. We expect to always get it here. */
-    wcstring cmd;
-    bool got_cmd = ctx.tree.command_for_plain_statement(statement, ctx.src, &cmd);
-    assert(got_cmd);
-
-    /* Expand it as a command */
-    bool expanded = expand_one(cmd, EXPAND_SKIP_CMDSUBST | EXPAND_SKIP_VARIABLES);
-    if (! expanded)
-    {
-        error(SYNTAX_ERROR,
-              statement.source_start,
-              ILLEGAL_CMD_ERR_MSG,
-              cmd.c_str());
-        return 0;
-    }
-
-    /* The list of arguments. The command is the first argument. TODO: count hack */
-    const parse_node_t *unmatched_wildcard = NULL;
-    wcstring_list_t argument_list = this->determine_arguments(statement, &unmatched_wildcard, ctx);
-    argument_list.insert(argument_list.begin(), cmd);
-
-    /* We were not able to expand any wildcards. Here is the first one that failed */
-    if (unmatched_wildcard != NULL)
-    {
-        job_set_flag(job, JOB_WILDCARD_ERROR, 1);
-        proc_set_last_status(STATUS_UNMATCHED_WILDCARD);
-        error(EVAL_ERROR, unmatched_wildcard->source_start, WILDCARD_ERR_MSG, unmatched_wildcard->get_source(ctx.src).c_str());
-    }
-
-    /* The set of IO redirections that we construct for the process */
-    const io_chain_t process_io_chain = this->determine_io_chain(statement, ctx);
-
-    /* Determine the process type, which depends on the statement decoration (command, builtin, etc) */
-    enum parse_statement_decoration_t decoration = ctx.tree.decoration_for_plain_statement(statement);
-    enum process_type_t process_type = EXTERNAL;
-
-    /* exec hack */
-    if (decoration != parse_statement_decoration_command && cmd == L"exec")
-    {
-        /* Either 'builtin exec' or just plain 'exec', and definitely not 'command exec'. Note we don't allow overriding exec with a function. */
-        process_type = INTERNAL_EXEC;
-    }
-    else if (decoration == parse_statement_decoration_command)
-    {
-        /* Always a command */
-        process_type = EXTERNAL;
-    }
-    else if (decoration == parse_statement_decoration_builtin)
-    {
-        /* What happens if this builtin is not valid? */
-        process_type = INTERNAL_BUILTIN;
-    }
-    else if (function_exists(cmd))
-    {
-        process_type = INTERNAL_FUNCTION;
-    }
-    else if (builtin_exists(cmd))
-    {
-        process_type = INTERNAL_BUILTIN;
-    }
-    else
-    {
-        process_type = EXTERNAL;
-    }
-
-    wcstring actual_cmd;
-    if (process_type == EXTERNAL)
-    {
-        /* Determine the actual command. Need to support implicit cd here */
-        bool has_command = path_get_path(cmd, &actual_cmd);
-
-        if (! has_command)
-        {
-            /* TODO: support fish_command_not_found, implicit cd, etc. here */
-        }
-
-    }
-
-    /* Return the process */
-    process_t *result = new process_t();
-    result->type = process_type;
-    result->set_argv(argument_list);
-    result->set_io_chain(process_io_chain);
-    result->actual_cmd = actual_cmd;
-    return result;
-}
-
-/* Determine the list of arguments, expanding stuff. If we have a wildcard and none could be expanded, return the unexpandable wildcard node by reference. */
-wcstring_list_t parser_t::determine_arguments(const parse_node_t &statement, const parse_node_t **out_unmatched_wildcard_node, const parser_context_t &ctx)
-{
-    wcstring_list_t argument_list;
-
-    /* Whether we failed to match any wildcards, and succeeded in matching any wildcards */
-    bool unmatched_wildcard = false, matched_wildcard = false;
-
-    /* First node that failed to expand as a wildcard (if any) */
-    const parse_node_t *unmatched_wildcard_node = NULL;
-
-    /* Get all argument nodes underneath the statement */
-    const parse_node_tree_t::parse_node_list_t argument_nodes = ctx.tree.find_nodes(statement, symbol_argument);
-    argument_list.reserve(argument_nodes.size());
-    for (size_t i=0; i < argument_nodes.size(); i++)
-    {
-        const parse_node_t &arg_node = *argument_nodes.at(i);
-
-        /* Expect all arguments to have source */
-        assert(arg_node.has_source());
-        const wcstring arg_str = arg_node.get_source(ctx.src);
-
-        /* Expand this string */
-        std::vector<completion_t> arg_expanded;
-        int expand_ret = expand_string(arg_str, arg_expanded, 0);
-        switch (expand_ret)
-        {
-            case EXPAND_ERROR:
-            {
-                error(SYNTAX_ERROR,
-                      arg_node.source_start,
-                      _(L"Could not expand string '%ls'"),
-                      arg_str.c_str());
-                break;
-            }
-
-            case EXPAND_WILDCARD_NO_MATCH:
-            {
-                /* Store the node that failed to expand */
-                unmatched_wildcard = true;
-                if (! unmatched_wildcard_node)
-                {
-                    unmatched_wildcard_node = &arg_node;
-                }
-                break;
-            }
-
-            case EXPAND_WILDCARD_MATCH:
-            {
-                matched_wildcard = true;
-                break;
-            }
-
-            case EXPAND_OK:
-            {
-                break;
-            }
-        }
-
-        /* Now copy over any expanded arguments */
-        for (size_t i=0; i < arg_expanded.size(); i++)
-        {
-            argument_list.push_back(arg_expanded.at(i).completion);
-        }
-    }
-
-    /* Return if we had a wildcard problem */
-    if (unmatched_wildcard && ! matched_wildcard)
-    {
-        *out_unmatched_wildcard_node = unmatched_wildcard_node;
-    }
-
-    return argument_list;
-}
-
-io_chain_t parser_t::determine_io_chain(const parse_node_t &statement,const parser_context_t &ctx)
-{
-    io_chain_t result;
-
-    /* Get all redirection nodes underneath the statement */
-    const parse_node_tree_t::parse_node_list_t redirect_nodes = ctx.tree.find_nodes(statement, symbol_redirection);
-    for (size_t i=0; i < redirect_nodes.size(); i++)
-    {
-        const parse_node_t &redirect_node = *redirect_nodes.at(i);
-
-        int source_fd = -1; /* source fd */
-        wcstring target; /* file path or target fd */
-        enum token_type redirect_type = ctx.tree.type_for_redirection(redirect_node, ctx.src, &source_fd, &target);
-
-        /* PCA: I can't justify this EXPAND_SKIP_VARIABLES flag. It was like this when I got here. */
-        bool target_expanded = expand_one(target, no_exec ? EXPAND_SKIP_VARIABLES : 0);
-        if (! target_expanded || target.empty())
-        {
-            /* Should improve this error message */
-            error(SYNTAX_ERROR,
-                  redirect_node.source_start,
-                  _(L"Invalid redirection target: %ls"),
-                  target.c_str());
-        }
-
-
-        /* Generate the actual IO redirection */
-        shared_ptr<io_data_t> new_io;
-        assert(redirect_type != TOK_NONE);
-        switch (redirect_type)
-        {
-            case TOK_REDIRECT_FD:
-            {
-                if (target == L"-")
-                {
-                    new_io.reset(new io_close_t(source_fd));
-                }
-                else
-                {
-                    wchar_t *end = NULL;
-                    errno = 0;
-                    int old_fd = fish_wcstoi(target.c_str(), &end, 10);
-                    if (old_fd < 0 || errno || *end)
-                    {
-                        error(SYNTAX_ERROR,
-                              redirect_node.source_start,
-                              _(L"Requested redirection to something that is not a file descriptor %ls"),
-                              target.c_str());
-                    }
-                    else
-                    {
-                        new_io.reset(new io_fd_t(source_fd, old_fd));
-                    }
-                }
-                break;
-            }
-
-            case TOK_REDIRECT_OUT:
-            case TOK_REDIRECT_APPEND:
-            case TOK_REDIRECT_IN:
-            case TOK_REDIRECT_NOCLOB:
-            {
-                int oflags = oflags_for_redirection_type(redirect_type);
-                io_file_t *new_io_file = new io_file_t(source_fd, target, oflags);
-                new_io.reset(new_io_file);
-                break;
-            }
-
-            default:
-            {
-                // Should be unreachable
-                fprintf(stderr, "Unexpected redirection type %ld. aborting.\n", (long)redirect_type);
-                PARSER_DIE();
-                break;
-            }
-        }
-
-        /* Append the new_io if we got one */
-        if (new_io.get() != NULL)
-        {
-            result.push_back(new_io);
-        }
-    }
-    return result;
-}
-
-/* Returns a process_t allocated with new. It's the caller's responsibility to delete it (!) */
-process_t *parser_t::create_job_process(job_t *job, const parse_node_t &statement_node, const parser_context_t &ctx)
-{
-    assert(statement_node.type == symbol_statement);
-    assert(statement_node.child_count == 1);
-
-    // Get the "specific statement" which is boolean / block / if / switch / decorated
-    const parse_node_t &specific_statement = *ctx.tree.get_child(statement_node, 0);
-
-    process_t *result = NULL;
-
-    switch (specific_statement.type)
-    {
-        case symbol_boolean_statement:
-        {
-            result = this->create_boolean_process(job, specific_statement, ctx);
-            break;
-        }
-
-        case symbol_block_statement:
-        {
-            const parse_node_t &header = *ctx.tree.get_child(specific_statement, 0, symbol_block_header);
-            const parse_node_t &specific_header = *ctx.tree.get_child(header, 0);
-            switch (specific_header.type)
-            {
-                case symbol_for_header:
-                    result = this->create_for_process(job, specific_header, specific_statement, ctx);
-                    break;
-
-                case symbol_while_header:
-                    result = this->create_while_process(job, specific_header, specific_statement, ctx);
-                    break;
-
-                case symbol_function_header:
-                    // No process is associated with creating a function
-                    // TODO: create the darn function!
-                    result = NULL;
-                    break;
-
-                case symbol_begin_header:
-                    result = this->create_begin_process(job, specific_header, specific_statement, ctx);
-                    break;
-
-                default:
-                    fprintf(stderr, "Unexpected header type\n");
-                    PARSER_DIE();
-                    break;
-            }
-            break;
-        }
-
-        case symbol_decorated_statement:
-        {
-            const parse_node_t &plain_statement = ctx.tree.find_child(specific_statement, symbol_plain_statement);
-            result = this->create_plain_process(job, plain_statement, ctx);
-            break;
-        }
-
-        default:
-            fprintf(stderr, "'%ls' not handled by new parser yet\n", specific_statement.describe().c_str());
-    }
-
-    return result;
-}
-
 /**
    Fully parse a single job. Does not call exec on it, but any command substitutions in the job will be executed.

@@ -2727,88 +2343,6 @@ static bool job_should_skip_elseif(const job_t *job, const block_t *current_bloc
     }
 }

-/**
-   Evaluates a job from a node tree.
-*/
-
-void parser_t::eval_job(const parse_node_t &job_node, const parser_context_t &ctx)
-{
-    assert(job_node.type == symbol_job);
-    this->job_start_pos = (int)job_node.source_start;
-
-    // Get terminal modes
-    struct termios tmodes = {};
-    if (get_is_interactive())
-    {
-        if (tcgetattr(STDIN_FILENO, &tmodes))
-        {
-            // need real error handling here
-            wperror(L"tcgetattr");
-            return;
-        }
-    }
-
-    /* Track whether we had an error */
-    bool process_errored = false;
-
-    /* Profiling support */
-    long long t1 = 0, t2 = 0, t3 = 0;
-    const bool do_profile = profile;
-    profile_item_t *profile_item = NULL;
-    if (do_profile)
-    {
-        profile_item = new profile_item_t();
-        profile_item->skipped = 1;
-        profile_items.push_back(profile_item);
-        t1 = get_time();
-    }
-
-    job_t *j = this->job_create();
-    job_set_flag(j, JOB_FOREGROUND, 1);
-    job_set_flag(j, JOB_TERMINAL, job_get_flag(j, JOB_CONTROL));
-    job_set_flag(j, JOB_TERMINAL, job_get_flag(j, JOB_CONTROL) \
-                 && (!is_subshell && !is_event));
-    job_set_flag(j, JOB_SKIP_NOTIFICATION, is_subshell \
-                 || is_block \
-                 || is_event \
-                 || (!get_is_interactive()));
-
-    current_block()->job = j;
-
-    /* Tell the job what its command is */
-    j->set_command(job_node.get_source(ctx.src));
-
-    /* We are going to construct process_t structures for every statement in the job. Get the first statement. */
-    const parse_node_t *statement_node = ctx.tree.get_child(job_node, 0, symbol_statement);
-    assert(statement_node != NULL);
-
-    /* Create the process (may fail!) */
-    j->first_process = this->create_job_process(j, *statement_node, ctx);
-    if (j->first_process == NULL)
-        process_errored = true;
-
-    /* Construct process_ts for job continuations (pipelines), by walking the list until we hit the terminal (empty) job continuationf */
-    const parse_node_t *job_cont = ctx.tree.get_child(job_node, 1, symbol_job_continuation);
-    process_t *last_process = j->first_process;
-    while (! process_errored && job_cont != NULL && job_cont->child_count > 0)
-    {
-        assert(job_cont->type == symbol_job_continuation);
-
-        /* Get the statement node and make a process from it */
-        const parse_node_t *statement_node = ctx.tree.get_child(*job_cont, 1, symbol_statement);
-        assert(statement_node != NULL);
-
-        /* Store the new process (and maybe with an error) */
-        last_process->next = this->create_job_process(j, *statement_node, ctx);
-        if (last_process->next == NULL)
-            process_errored = true;
-
-        /* Link the process and get the next continuation */
-        last_process = last_process->next;
-        job_cont = ctx.tree.get_child(*job_cont, 2, symbol_job_continuation);
-    }
-}
-
 /**
    Evaluates a job from the specified tokenizer. First calls
    parse_job to parse the job and then calls exec to execute it.
@@ -3050,213 +2584,6 @@ void parser_t::eval_job(tokenizer_t *tok)
     job_reap(0);
 }

-static void push_all_children(std::vector<node_offset_t> *execution_stack, const parse_node_t &node)
-{
-    // push nodes in reverse order, so the first node ends up on top
-    unsigned child_idx = node.child_count;
-    while (child_idx--)
-    {
-        execution_stack->push_back(node.child_offset(child_idx));
-    }
-}
-
-void parser_t::execute_next(std::vector<node_offset_t> *execution_stack, const parser_context_t &ctx)
-{
-    assert(execution_stack != NULL);
-    assert(! execution_stack->empty());
-
-    /* Get the offset of the next node and remove it from the stack */
-    node_offset_t next_offset = execution_stack->back();
-    execution_stack->pop_back();
-
-    /* Get the node */
-    assert(next_offset < ctx.tree.size());
-    const parse_node_t &node = ctx.tree.at(next_offset);
-
-    /* Do something with it */
-    switch (node.type)
-    {
-        case symbol_job_list:
-            // These correspond to the three productions of job_list
-            switch (node.production_idx)
-            {
-                case 0: // empty
-                    break;
-
-                case 1: //job, job_list
-                    push_all_children(execution_stack, node);
-                    break;
-
-                case 2: //blank line, job_list
-                    execution_stack->push_back(node.child_offset(1));
-                    break;
-
-                default: //if we get here, it means more productions have been added to job_list, which is bad
-                    PARSER_DIE();
-                    break;
-            }
-            break;
-
-        case symbol_job: //statement, job_continuation
-            push_all_children(execution_stack, node);
-            break;
-
-        case symbol_job_continuation:
-            switch (node.production_idx)
-            {
-                case 0: //empty
-                    break;
-
-                case 1: //pipe, statement, job_continuation
-                    execution_stack->push_back(node.child_offset(2));
-                    execution_stack->push_back(node.child_offset(1));
-                    break;
-
-                default:
-                    PARSER_DIE();
-                    break;
-            }
-            break;
-
-    }
-}
-
-/* Executes the job list at the given node offset */
-void parser_t::execute_job_list(node_offset_t idx, const parser_context_t &ctx)
-{
-    assert(idx < ctx.tree.size());
-
-    const parse_node_t *job_list = &ctx.tree.at(idx);
-    assert(job_list->type == symbol_job_list);
-    while (job_list != NULL)
-    {
-        // These correspond to the three productions of job_list
-        // Try pulling out a job
-        const parse_node_t *job = NULL;
-        switch (job_list->production_idx)
-        {
-            case 0: // empty
-                job_list = NULL;
-                break;
-
-            case 1: //job, job_list
-                job = ctx.tree.get_child(*job_list, 0, symbol_job);
-                job_list = ctx.tree.get_child(*job_list, 1, symbol_job_list);
-                break;
-
-            case 2: //blank line, job_list
-                job = NULL;
-                job_list = ctx.tree.get_child(*job_list, 1, symbol_job_list);
-                break;
-
-            default: //if we get here, it means more productions have been added to job_list, which is bad
-                PARSER_DIE();
-        }
-
-        if (job != NULL)
-        {
-            this->eval_job(*job, ctx);
-        }
-    }
-}
-
-int parser_t::eval2(const wcstring &cmd_str, const io_chain_t &io, enum block_type_t block_type)
-{
-    parser_context_t mut_ctx;
-    mut_ctx.src = cmd_str;
-
-    /* Parse the tree */
-    if (! parse_t::parse(cmd_str, parse_flag_none, &mut_ctx.tree, NULL))
-    {
-        return 1;
-    }
-
-    /* Make a const version for safety's sake */
-    const parser_context_t &ctx = mut_ctx;
-
-    CHECK_BLOCK(1);
-
-    /* Record the current chain so we can put it back later */
-    scoped_push<io_chain_t> block_io_push(&block_io, io);
-    scoped_push<wcstring_list_t> forbidden_function_push(&forbidden_function);
-    const size_t forbid_count = forbidden_function.size();
-    const block_t * const start_current_block = this->current_block();
-
-    /* Do some stuff I haven't figured out yet */
-    job_reap(0);
-
-    /* Only certain blocks are allowed */
-    if ((block_type != TOP) &&
-            (block_type != SUBST))
-    {
-        debug(1,
-              INVALID_SCOPE_ERR_MSG,
-              parser_t::get_block_desc(block_type));
-        bugreport();
-        return 1;
-    }
-
-    eval_level++;
-
-    this->push_block(new scope_block_t(block_type));
-
-    error_code = 0;
-
-    event_fire(NULL);
-
-    /* Execute the top level job list */
-    execute_job_list(0, ctx);
-
-    parser_t::pop_block();
-
-    while (start_current_block != this->current_block())
-    {
-        if (this->current_block() == NULL)
-        {
-            debug(0,
-                  _(L"End of block mismatch. Program terminating."));
-            bugreport();
-            FATAL_EXIT();
-            break;
-        }
-
-        if ((!error_code) && (!exit_status()) && (!proc_get_last_status()))
-        {
-
-            //debug( 2, L"Status %d\n", proc_get_last_status() );
-
-            debug(1,
-                  L"%ls", parser_t::get_block_desc(current_block()->type()));
-            debug(1,
-                  BLOCK_END_ERR_MSG);
-            fwprintf(stderr, L"%ls", parser_t::current_line());
-
-            const wcstring h = builtin_help_get(*this, L"end");
-            if (h.size())
-                fwprintf(stderr, L"%ls", h.c_str());
-            break;
-
-        }
-        parser_t::pop_block();
-    }
-
-    this->print_errors_stderr();
-
-    while (forbidden_function.size() > forbid_count)
-        parser_t::allow_function();
-
-    /*
-       Restore previous eval state
-    */
-    eval_level--;
-
-    int code=error_code;
-    error_code=0;
-
-    job_reap(0);
-
-    return code;
-}
-
 int parser_t::eval(const wcstring &cmd_str, const io_chain_t &io, enum block_type_t block_type)
 {

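The removed execute_next/push_all_children code above walked the parse tree with an explicit stack, pushing a node's children in reverse order so that the first child ends up on top of the stack and is visited first. Below is only a small generic sketch of that traversal order; the toy_node_t/visit_depth_first names and flat-tree layout are hypothetical, not the fish data structures.

#include <cstdio>
#include <vector>

// Hypothetical flat tree: each node stores the indices of its children,
// loosely mirroring node_offset_t / child_offset() in the removed code.
struct toy_node_t
{
    int value;
    std::vector<size_t> children;
};

static void push_all_children(std::vector<size_t> *stack, const toy_node_t &node)
{
    // Push children in reverse order, so the first child ends up on top
    size_t child_idx = node.children.size();
    while (child_idx--)
    {
        stack->push_back(node.children.at(child_idx));
    }
}

static void visit_depth_first(const std::vector<toy_node_t> &tree, size_t root)
{
    std::vector<size_t> stack;
    stack.push_back(root);
    while (! stack.empty())
    {
        size_t idx = stack.back();
        stack.pop_back();
        printf("%d ", tree.at(idx).value);        // visit the node
        push_all_children(&stack, tree.at(idx));  // first child now sits on top
    }
}

Using an explicit stack this way gives the same preorder as recursion without risking deep call stacks on large parse trees.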

21 parser.h

@@ -271,12 +271,6 @@ struct profile_item_t

 struct tokenizer_t;

-struct parser_context_t
-{
-    parse_node_tree_t tree;
-    wcstring src;
-};
-
 class parser_t
 {
     friend class parse_execution_context_t;
@@ -327,22 +321,11 @@ private:
     parser_t(const parser_t&);
     parser_t& operator=(const parser_t&);

-    process_t *create_job_process(job_t *job, const parse_node_t &statement_node, const parser_context_t &ctx);
-    process_t *create_boolean_process(job_t *job, const parse_node_t &bool_statement, const parser_context_t &ctx);
-    process_t *create_for_process(job_t *job, const parse_node_t &header, const parse_node_t &statement, const parser_context_t &ctx);
-    process_t *create_while_process(job_t *job, const parse_node_t &header, const parse_node_t &statement, const parser_context_t &ctx);
-    process_t *create_begin_process(job_t *job, const parse_node_t &header, const parse_node_t &statement, const parser_context_t &ctx);
-    process_t *create_plain_process(job_t *job, const parse_node_t &statement, const parser_context_t &ctx);
-
-    wcstring_list_t determine_arguments(const parse_node_t &statement, const parse_node_t **out_unmatched_wildcard_node, const parser_context_t &ctx);
-    io_chain_t determine_io_chain(const parse_node_t &statement,const parser_context_t &ctx);
-

     void parse_job_argument_list(process_t *p, job_t *j, tokenizer_t *tok, std::vector<completion_t>&, bool);
     int parse_job(process_t *p, job_t *j, tokenizer_t *tok);
     void skipped_exec(job_t * j);
     void eval_job(tokenizer_t *tok);
-    void eval_job(const parse_node_t &job_node, const parser_context_t &ctx);
     int parser_test_argument(const wchar_t *arg, wcstring *out, const wchar_t *prefix, int offset);
     void print_errors(wcstring &target, const wchar_t *prefix);
     void print_errors_stderr();
@@ -388,11 +371,7 @@ public:
     \return 0 on success, 1 otherwise
     */
     int eval(const wcstring &cmdStr, const io_chain_t &io, enum block_type_t block_type);
-    int eval2(const wcstring &cmd_str, const io_chain_t &io, enum block_type_t block_type);
-
-    void execute_job_list(node_offset_t idx, const parser_context_t &ctx);
-    void execute_next(std::vector<node_offset_t> *execution_stack, const parser_context_t &ctx);

     /**
        Evaluate line as a list of parameters, i.e. tokenize it and perform parameter expansion and cmdsubst execution on the tokens.
        The output is inserted into output.