Define the var when the read limit is exceeded

This problem was found when testing the change for issue #4163 to make
dereferencing an undefined variable an error.

Fixes #4180
This commit is contained in:
Kurtis Rader 2017-07-03 16:14:28 -07:00
parent d51f82d1b5
commit d383e3b92c
2 changed files with 13 additions and 6 deletions

View file

@ -396,6 +396,9 @@ int builtin_read(parser_t &parser, io_streams_t &streams, wchar_t **argv) {
}
if (optind == argc || exit_res != STATUS_CMD_OK) {
// Define the var without any data. We do this because when this happens we want the user to
// be able to use the var but have it expand to nothing.
env_set(argv[optind], NULL, opts.place);
return exit_res;
}

View file

@ -141,9 +141,12 @@ yes $line | dd bs=1024 count=(math "1 + $FISH_READ_BYTE_LIMIT / 1024") ^/dev/nul
if test $status -ne 122
echo reading too much data did not terminate with failure status
end
if test (string length "$x") -ne 0
echo reading too much data resulted in a var with unexpected data
end
# The read var should be defined but not have any elements when the read
# aborts due to too much data.
set -q x
or echo reading too much data did not define the var
set -q x[1]
and echo reading too much data resulted in a var with unexpected data
# Ensure the `read` command terminates if asked to read too much data even if
# given an explicit limit. The var should be empty. We throw away any data we
@ -152,9 +155,10 @@ yes $line | read --null --nchars=(math "$FISH_READ_BYTE_LIMIT + 1") x
if test $status -ne 122
echo reading too much data did not terminate with failure status
end
if test (string length "$x") -ne 0
echo reading too much data resulted in a var with unexpected data
end
set -q x
or echo reading too much data with --nchars did not define the var
set -q x[1]
and echo reading too much data with --nchars resulted in a var with unexpected data
# Now do the opposite of the previous test and confirm we can read reasonable
# amounts of data.