Rearrange wopen and stat to avoid a race and hopefully improve performance a little bit.

ridiculousfish 2012-03-02 11:12:08 -08:00
parent 17a75a5aa5
commit 2da8df6202
2 changed files with 95 additions and 95 deletions
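
The race in question is the classic check-then-use (TOCTOU) window: the old code ran wstat() on the path and only opened it afterwards, so the file could be swapped out between the check and the open, and the "is a regular file" test could pass for a different file than the one actually sourced. Opening first and then querying the descriptor with fstat() closes that window, and it also resolves the path only once instead of twice, which is presumably where the small performance win comes from. Below is a minimal standalone sketch of the same ordering using plain POSIX open()/fstat() rather than fish's wopen_cloexec()/wstat() wrappers; the helper name open_regular_file is illustrative and not taken from the fish source.

#include <fcntl.h>
#include <stdio.h>
#include <sys/stat.h>
#include <unistd.h>

/* Open a path and verify it is a regular file without a stat/open race:
 * the check is made against the descriptor we already hold, so whatever
 * happens to the path afterwards cannot change which file we inspect. */
static int open_regular_file(const char *path)
{
    int fd = open(path, O_RDONLY | O_CLOEXEC);
    if (fd == -1)
    {
        perror(path);
        return -1;
    }

    struct stat buf;
    if (fstat(fd, &buf) == -1)      /* queries the open descriptor, not the path */
    {
        perror(path);
        close(fd);
        return -1;
    }

    if (!S_ISREG(buf.st_mode))      /* e.g. a directory or a FIFO */
    {
        fprintf(stderr, "%s: not a regular file\n", path);
        close(fd);
        return -1;
    }

    return fd;                      /* caller reads from fd and closes it */
}

int main(int argc, char **argv)
{
    if (argc < 2)
        return 1;
    int fd = open_regular_file(argv[1]);
    if (fd >= 0)
        close(fd);
    return fd >= 0 ? 0 : 1;
}

One consequence of taking ownership of the descriptor up front is that every later error path must release it, which is why the new fstat() and S_ISREG() branches in the diff below each gain a close(fd).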

@@ -2746,27 +2746,28 @@ static int builtin_source( parser_t &parser, wchar_t ** argv )
     else
     {
-        if( wstat(argv[1], &buf) == -1 )
+        if( ( fd = wopen_cloexec( argv[1], O_RDONLY ) ) == -1 )
         {
             append_format(stderr_buffer, _(L"%ls: Error encountered while sourcing file '%ls':\n"), argv[0], argv[1] );
             builtin_wperror( L"." );
             return STATUS_BUILTIN_ERROR;
         }
+        if( fstat(fd, &buf) == -1 )
+        {
+            close(fd);
+            append_format(stderr_buffer, _(L"%ls: Error encountered while sourcing file '%ls':\n"), argv[0], argv[1] );
+            builtin_wperror( L"." );
+            return STATUS_BUILTIN_ERROR;
+        }
         if( !S_ISREG(buf.st_mode) )
         {
+            close(fd);
             append_format(stderr_buffer, _( L"%ls: '%ls' is not a file\n" ), argv[0], argv[1] );
             return STATUS_BUILTIN_ERROR;
         }
-        if( ( fd = wopen_cloexec( argv[1], O_RDONLY ) ) == -1 )
-        {
-            append_format(stderr_buffer, _(L"%ls: Error encountered while sourcing file '%ls':\n"), argv[0], argv[1] );
-            builtin_wperror( L"." );
-            return STATUS_BUILTIN_ERROR;
-        }
         fn = wrealpath( argv[1], 0 );
         if( !fn )

@@ -2307,8 +2307,7 @@ void parser_t::eval_job( tokenizer *tok )
     if( job_start_pos < tok_get_pos( tok ) )
     {
         int stop_pos = tok_get_pos( tok );
-        const wchar_t *newline = wcschr( tok_string(tok)+start_pos,
-                                         L'\n' );
+        const wchar_t *newline = wcschr(tok_string(tok)+start_pos, L'\n');
         if( newline )
             stop_pos = mini( stop_pos, newline - tok_string(tok) );