Merge pull request #4810 from mqudsi/tokenizer_gettext

Restore localization to tokenizer error strings

Work around #4810 by retrieving localizations at runtime to avoid issues possibly caused by inserting into the static unordered_map during static initialization.

Closes #810.
Mahmoud Al-Qudsi 2018-03-13 13:50:08 -05:00 committed by GitHub
commit ce4bc001f2
3 changed files with 10 additions and 3 deletions
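
Below is a minimal, self-contained sketch of the pattern the commit message describes. It is not fish's actual code: translate_at_runtime() stands in for fish's gettext-style _() macro, and example_error for tokenizer_error. The point it illustrates is that the global error objects only store the untranslated literal during static initialization; the localization lookup runs later, when Message() is called.

    #include <cwchar>

    // Stand-in for a gettext-style lookup (fish's _() macro). It simply
    // returns its argument so the sketch stays self-contained and compilable.
    static const wchar_t *translate_at_runtime(const wchar_t *msg) { return msg; }

    struct example_error {
        // Only the untranslated literal is stored. Copying a pointer is safe
        // during static initialization; no message catalog is consulted yet.
        explicit example_error(const wchar_t *msg) : _message(msg) {}

        // The translation is looked up lazily, when the error is actually
        // reported, well after static initialization has finished.
        const wchar_t *Message() const { return translate_at_runtime(_message); }

       private:
        const wchar_t *_message;
    };

    // Safe at namespace scope: the constructor never calls translate_at_runtime().
    static example_error expected_pclose(L"Unexpected '}' found, expecting ')'");

    int main() {
        // The localization lookup happens here, at runtime.
        std::wprintf(L"%ls\n", expected_pclose.Message());
        return 0;
    }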


@@ -671,7 +671,7 @@ void parse_ll_t::report_tokenizer_error(const tokenizer_t &tokenizer, const tok_
     parse_error_code_t parse_error_code = tok.error->parser_error;
     this->parse_error_at_location(tok.offset, tok.length, tok.offset + tok.error_offset,
                                   parse_error_code, L"%ls",
-                                  tok.error->Message);
+                                  tok.error->Message());
 }
 
 void parse_ll_t::parse_error_unexpected_token(const wchar_t *expected, parse_token_t token) {


@@ -30,6 +30,10 @@ tokenizer_error *TOK_UNTERMINATED_BRACE = new tokenizer_error((L"Unexpected end
 tokenizer_error *TOK_EXPECTED_PCLOSE_FOUND_BCLOSE = new tokenizer_error((L"Unexpected '}' found, expecting ')'"));
 tokenizer_error *TOK_EXPECTED_BCLOSE_FOUND_PCLOSE = new tokenizer_error((L"Unexpected ')' found, expecting '}'"));
 
+const wchar_t *tokenizer_error::Message() const {
+    return _(_message);
+}
+
 /// Return an error token and mark that we no longer have a next token.
 tok_t tokenizer_t::call_error(tokenizer_error *error_type, const wchar_t *token_start,
                               const wchar_t *error_loc) {


@@ -24,10 +24,13 @@ enum token_type {
 };
 
 struct tokenizer_error {
-    const wchar_t *Message;
+   private:
+    const wchar_t *_message;
+
+   public:
+    const wchar_t *Message() const;
     enum parse_error_code_t parser_error;  // the parser error associated with this tokenizer error
     tokenizer_error(const wchar_t *msg, enum parse_error_code_t perr = parse_error_tokenizer_other)
-        : Message(msg), parser_error(perr) {}
+        : _message(msg), parser_error(perr) {}
     tokenizer_error(const tokenizer_error&) = delete;
 };
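
Taken together, the three hunks defer localization until an error is actually reported: tokenizer_error now stores only the untranslated _message, the new Message() accessor performs the _() lookup at call time, and parse_ll_t::report_tokenizer_error switches from reading the old Message field to calling Message(). Nothing touches the localization machinery while the global tokenizer_error objects are constructed during static initialization, which is the workaround the commit message describes.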