Fix some unused variable warnings
This commit is contained in:
parent 6e56637cf0
commit abcc9647da

6 changed files with 11 additions and 6 deletions

@@ -779,6 +779,8 @@ static void handle_escape_delay_change(const wcstring &op, const wcstring &var_n
 }

 static void handle_change_emoji_width(const wcstring &op, const wcstring &var_name) {
+    (void)op;
+    (void)var_name;
     int new_width = 0;
     if (auto width_str = env_get(L"fish_emoji_width")) {
         new_width = fish_wcstol(width_str->as_string().c_str());

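The first hunks silence unused-parameter warnings by casting the ignored parameters to void. Below is a minimal sketch of the idiom with hypothetical names (this is not fish's code): handlers that must keep one fixed signature, for example so they can sit behind a common function-pointer type, read each ignored parameter exactly once, which is enough to satisfy -Wunused-parameter without changing behaviour.

    #include <cstdio>
    #include <string>

    // Hypothetical handler type: every variable-change handler shares this signature.
    using handler_t = void (*)(const std::wstring &op, const std::wstring &var_name);

    static void handle_resize(const std::wstring &op, const std::wstring &var_name) {
        (void)op;        // this handler does not care which operation fired
        (void)var_name;  // ...nor which variable changed; the casts only silence the warning
        std::puts("recomputing layout");
    }

    int main() {
        handler_t h = handle_resize;  // the uniform signature is why the parameters exist at all
        h(L"SET", L"COLUMNS");
        return 0;
    }
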
@@ -898,6 +898,7 @@ static expand_error_t expand_stage_braces(const wcstring &input, std::vector<com
 static expand_error_t expand_stage_home(const wcstring &input,
                                         std::vector<completion_t> *out,
                                         expand_flags_t flags, parse_error_list_t *errors) {
+    (void)errors;
     wcstring next = input;

     if (!(EXPAND_SKIP_HOME_DIRECTORIES & flags)) {

@@ -270,6 +270,7 @@ int g_fish_emoji_width = 0;
 int g_guessed_fish_emoji_width = 1;

 int fish_get_emoji_width(wchar_t c) {
+    (void)c;
     // Respect an explicit value. If we don't have one, use the guessed value. Do not try to fall
     // back to wcwidth(), it's hopeless.
     if (g_fish_emoji_width > 0) return g_fish_emoji_width;

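The hunk above only shows the first half of the lookup; going by the comment in the diff, the function falls back to the guessed width when no explicit width is configured. A minimal sketch of that precedence, assuming the fallback return that is cut off here (the helper name is illustrative, not fish's):

    // Globals taken from the diff above.
    int g_fish_emoji_width = 0;          // 0 means "no explicit width configured"
    int g_guessed_fish_emoji_width = 1;  // heuristic default

    int emoji_width() {
        // An explicit, user-set width always wins; otherwise use the guess.
        if (g_fish_emoji_width > 0) return g_fish_emoji_width;
        return g_guessed_fish_emoji_width;
    }
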
@@ -371,8 +371,7 @@ static bool input_mapping_is_match(const input_mapping_t &m) {
     debug(4, L"trying to match mapping %ls", escape_string(m.seq.c_str(), ESCAPE_ALL).c_str());

     bool timed_first_char = iswcntrl(str[0]);
-    // i must be signed because we reverse direction below
-    for (ssize_t i = 0; i < str.size(); ++i) {
+    for (size_t i = 0; i < str.size(); ++i) {
         // Treat all strings beginning with control codes (0x00-0x1F) as timed characters, meaning they are assumed to be
         // their literal representation if not followed up with another character within the defined timeout. Obviously
         // we never time out on the first character in the sequence.

@@ -383,7 +382,7 @@ static bool input_mapping_is_match(const input_mapping_t &m) {
         // We didn't match the bind sequence/input mapping, (it timed out or they entered something else)
         // Undo consumption of the read characters since we didn't match the bind sequence and abort.
         input_common_next_ch(read);
-        while (--i >= 0) {
+        while (i--) {
             input_common_next_ch(str[i]);
         }
         return false;

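These two input.cpp hunks go together: once the loop index becomes an unsigned size_t, the old unwind loop "while (--i >= 0)" can never terminate, because an unsigned value is never negative. The replacement "while (i--)" tests the old value and then decrements, visiting i-1 down to 0 and stopping cleanly after index 0. A standalone sketch of the idiom (illustrative only, not fish's code):

    #include <cstdio>
    #include <string>

    int main() {
        std::string str = "abc";
        size_t i = str.size();  // pretend every character was consumed before a mismatch
        while (i--) {           // i takes the values 2, 1, 0, then the loop stops
            std::printf("push back str[%zu] = %c\n", i, str[i]);
        }
        // With "while (--i >= 0)" and an unsigned i this loop would never end.
        return 0;
    }
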
@@ -337,6 +337,7 @@ RESOLVE(arguments_or_redirections_list) {

 RESOLVE(optional_newlines) {
+    UNUSED(token2);
     UNUSED(out_tag);
     if (token1.is_newline) return production_for<newlines>();
     return production_for<empty>();
 }

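RESOLVE() productions all receive token1, token2 and out_tag, so a production that only needs token1 marks the rest as deliberately unused. The sketch below shows one common way such an UNUSED macro is defined and used; fish's actual definition may differ, and the function here is an illustrative stand-in, not the real production:

    #include <cstdio>

    #define UNUSED(expr) (void)(expr)  // evaluate-and-discard: a common definition (assumed)

    // Stand-in for a production that only looks at its first token.
    static int resolve_optional_newlines(bool token1_is_newline, int token2, int *out_tag) {
        UNUSED(token2);   // part of the fixed RESOLVE interface, unused here
        UNUSED(out_tag);
        return token1_is_newline ? 1 : 0;
    }

    int main() {
        std::printf("%d\n", resolve_optional_newlines(true, 0, nullptr));
        return 0;
    }
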
@@ -461,7 +461,7 @@ class parse_ll_t {
     void accept_tokens(parse_token_t token1, parse_token_t token2);

     /// Report tokenizer errors.
-    void report_tokenizer_error(const tokenizer_t &tokenizer, const tok_t &tok);
+    void report_tokenizer_error(const tok_t &tok);

     /// Indicate if we hit a fatal error.
     bool has_fatal_error() const { return this->fatal_errored; }

@@ -599,6 +599,7 @@ void parse_ll_t::parse_error(parse_token_t token, parse_error_code_t code, const
 void parse_ll_t::parse_error_at_location(size_t source_start, size_t source_length,
                                          size_t error_location, parse_error_code_t code,
                                          const wchar_t *fmt, ...) {
+    (void)error_location;
     this->fatal_errored = true;
     if (this->should_generate_error_messages) {
         // this->dump_stack();

@@ -667,7 +668,7 @@ void parse_ll_t::parse_error_failed_production(struct parse_stack_element_t &sta
     }
 }

-void parse_ll_t::report_tokenizer_error(const tokenizer_t &tokenizer, const tok_t &tok) {
+void parse_ll_t::report_tokenizer_error(const tok_t &tok) {
     parse_error_code_t parse_error_code = tok.error->parser_error;
     this->parse_error_at_location(tok.offset, tok.length, tok.offset + tok.error_offset,
                                   parse_error_code, L"%ls",

@@ -1066,7 +1067,7 @@ bool parse_tree_from_string(const wcstring &str, parse_tree_flags_t parse_flags,
         // Handle tokenizer errors. This is a hack because really the parser should report this for
         // itself; but it has no way of getting the tokenizer message.
         if (queue[1].type == parse_special_type_tokenizer_error) {
-            parser.report_tokenizer_error(tok, tokenizer_token);
+            parser.report_tokenizer_error(tokenizer_token);
         }

         if (!parser.has_fatal_error()) {

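The last hunks are one change seen from three sides: report_tokenizer_error never used its tokenizer_t argument, so the parameter is dropped from the declaration, the definition, and the call site in the same commit. A minimal sketch of that pattern with hypothetical types (the tok_t fields below are assumptions, not fish's layout):

    #include <cstdio>

    struct tok_t {   // stand-in token; the real tok_t carries offsets and an error code
        int offset;
        int length;
    };

    struct parser_t {
        // Before: void report_tokenizer_error(const tokenizer_t &tokenizer, const tok_t &tok);
        void report_tokenizer_error(const tok_t &tok) {  // after: unused parameter removed
            std::printf("tokenizer error at %d (+%d)\n", tok.offset, tok.length);
        }
    };

    int main() {
        parser_t parser;
        parser.report_tokenizer_error(tok_t{7, 3});  // call sites updated in the same change
        return 0;
    }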