mirror of https://github.com/nlohmann/json synced 2024-11-30 15:40:07 +00:00

avoid recursion in the scanner

Niels 2016-07-19 22:10:27 +02:00
parent b64367e2f7
commit 2478be807c
2 changed files with 705 additions and 678 deletions
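
The change replaces the two recursive tail calls in scan() (return scan(); after whitespace and after a byte-order mark) with a single while (true) loop: skipped input restarts the loop with continue, every real token stores its type in the new last_token_type member and breaks, and the stored type is returned once after the loop ends. The fragment below is a minimal sketch of that pattern, assuming a made-up toy_lexer with a reduced token_type enum and a plain string buffer; it illustrates the control flow only and is not the library's actual lexer.

    #include <cctype>
    #include <cstddef>
    #include <string>
    #include <utility>

    // illustrative stand-in for the lexer's token classes
    enum class token_type { end_of_input, value_number, parse_error };

    class toy_lexer
    {
      public:
        explicit toy_lexer(std::string s) : m_buffer(std::move(s)) {}

        // iterative scanner: skipped input uses "continue" (formerly
        // "return scan();"), real tokens set last_token_type and "break"
        // (formerly "return token_type::...;")
        token_type scan() noexcept
        {
            while (true)
            {
                if (m_pos >= m_buffer.size())
                {
                    last_token_type = token_type::end_of_input;
                    break;
                }

                const char c = m_buffer[m_pos];

                // ignore whitespace without a recursive call
                if (c == ' ' or c == '\t' or c == '\n' or c == '\r')
                {
                    ++m_pos;
                    continue;
                }

                // a digit run stands in for the full number rule
                if (std::isdigit(static_cast<unsigned char>(c)))
                {
                    while (m_pos < m_buffer.size() and
                           std::isdigit(static_cast<unsigned char>(m_buffer[m_pos])))
                    {
                        ++m_pos;
                    }
                    last_token_type = token_type::value_number;
                    break;
                }

                // anything else is an error
                ++m_pos;
                last_token_type = token_type::parse_error;
                break;
            }

            return last_token_type;
        }

      private:
        std::string m_buffer;
        std::size_t m_pos = 0;
        /// the last token type, mirroring the member added in the diff
        token_type last_token_type = token_type::end_of_input;
    };

With continue in place of the tail call, a long run of whitespace or a leading byte-order mark no longer adds one stack frame per skipped chunk, which is exactly what the commit title promises.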

View File

@@ -7392,6 +7392,8 @@ class basic_json
@return the class of the next token read from the buffer
*/
token_type scan() noexcept
{
while (true)
{
// pointer for backtracking information
m_marker = nullptr;
@@ -7561,13 +7563,15 @@ class basic_json
basic_json_parser_2:
++m_cursor;
{
return token_type::end_of_input;
last_token_type = token_type::end_of_input;
break;
}
basic_json_parser_4:
++m_cursor;
basic_json_parser_5:
{
return token_type::parse_error;
last_token_type = token_type::parse_error;
break;
}
basic_json_parser_6:
++m_cursor;
@@ -7581,7 +7585,7 @@ basic_json_parser_6:
goto basic_json_parser_6;
}
{
return scan();
continue;
}
basic_json_parser_9:
yyaccept = 0;
@@ -7594,7 +7598,8 @@ basic_json_parser_9:
basic_json_parser_10:
++m_cursor;
{
return token_type::value_separator;
last_token_type = token_type::value_separator;
break;
}
basic_json_parser_12:
yych = *++m_cursor;
@@ -7634,7 +7639,8 @@ basic_json_parser_13:
}
basic_json_parser_14:
{
return token_type::value_number;
last_token_type = token_type::value_number;
break;
}
basic_json_parser_15:
yyaccept = 1;
@@ -7671,17 +7677,20 @@ basic_json_parser_15:
basic_json_parser_17:
++m_cursor;
{
return token_type::name_separator;
last_token_type = token_type::name_separator;
break;
}
basic_json_parser_19:
++m_cursor;
{
return token_type::begin_array;
last_token_type = token_type::begin_array;
break;
}
basic_json_parser_21:
++m_cursor;
{
return token_type::end_array;
last_token_type = token_type::end_array;
break;
}
basic_json_parser_23:
yyaccept = 0;
@@ -7710,12 +7719,14 @@ basic_json_parser_25:
basic_json_parser_26:
++m_cursor;
{
return token_type::begin_object;
last_token_type = token_type::begin_object;
break;
}
basic_json_parser_28:
++m_cursor;
{
return token_type::end_object;
last_token_type = token_type::end_object;
break;
}
basic_json_parser_30:
yyaccept = 0;
@@ -7759,7 +7770,8 @@ basic_json_parser_33:
basic_json_parser_34:
++m_cursor;
{
return token_type::value_string;
last_token_type = token_type::value_string;
break;
}
basic_json_parser_36:
++m_cursor;
@@ -8022,7 +8034,7 @@ basic_json_parser_51:
basic_json_parser_52:
++m_cursor;
{
return scan();
continue;
}
basic_json_parser_54:
++m_cursor;
@@ -8069,12 +8081,14 @@ basic_json_parser_55:
basic_json_parser_56:
++m_cursor;
{
return token_type::literal_null;
last_token_type = token_type::literal_null;
break;
}
basic_json_parser_58:
++m_cursor;
{
return token_type::literal_true;
last_token_type = token_type::literal_true;
break;
}
basic_json_parser_60:
++m_cursor;
@@ -8114,7 +8128,8 @@ basic_json_parser_60:
basic_json_parser_61:
++m_cursor;
{
return token_type::literal_false;
last_token_type = token_type::literal_false;
break;
}
basic_json_parser_63:
++m_cursor;
@@ -8155,6 +8170,9 @@ basic_json_parser_63:
}
return last_token_type;
}
/// append data from the stream to the internal buffer
void yyfill() noexcept
{
@@ -8500,6 +8518,8 @@ basic_json_parser_63:
const lexer_char_t* m_cursor = nullptr;
/// pointer to the end of the buffer
const lexer_char_t* m_limit = nullptr;
/// the last token type
token_type last_token_type = token_type::end_of_input;
};
/*!

View File

@@ -7392,6 +7392,8 @@ class basic_json
@return the class of the next token read from the buffer
*/
token_type scan() noexcept
{
while (true)
{
// pointer for backtracking information
m_marker = nullptr;
@@ -7413,24 +7415,24 @@ class basic_json
// ignore whitespace
ws = [ \t\n\r]+;
ws { return scan(); }
ws { continue; }
// ignore byte-order-mark
bom = "\xEF\xBB\xBF";
bom { return scan(); }
bom { continue; }
// structural characters
"[" { return token_type::begin_array; }
"]" { return token_type::end_array; }
"{" { return token_type::begin_object; }
"}" { return token_type::end_object; }
"," { return token_type::value_separator; }
":" { return token_type::name_separator; }
"[" { last_token_type = token_type::begin_array; break; }
"]" { last_token_type = token_type::end_array; break; }
"{" { last_token_type = token_type::begin_object; break; }
"}" { last_token_type = token_type::end_object; break; }
"," { last_token_type = token_type::value_separator; break; }
":" { last_token_type = token_type::name_separator; break; }
// literal names
"null" { return token_type::literal_null; }
"true" { return token_type::literal_true; }
"false" { return token_type::literal_false; }
"null" { last_token_type = token_type::literal_null; break; }
"true" { last_token_type = token_type::literal_true; break; }
"false" { last_token_type = token_type::literal_false; break; }
// number
decimal_point = [.];
@@ -7444,7 +7446,7 @@ class basic_json
frac = decimal_point digit+;
int = (zero|digit_1_9 digit*);
number = minus? int frac? exp?;
number { return token_type::value_number; }
number { last_token_type = token_type::value_number; break; }
// string
quotation_mark = ["];
@@ -7455,16 +7457,19 @@ class basic_json
escaped = escape (single_escaped | unicode_escaped);
char = unescaped | escaped;
string = quotation_mark char* quotation_mark;
string { return token_type::value_string; }
string { last_token_type = token_type::value_string; break; }
// end of file
'\000' { return token_type::end_of_input; }
'\000' { last_token_type = token_type::end_of_input; break; }
// anything else is an error
. { return token_type::parse_error; }
. { last_token_type = token_type::parse_error; break; }
*/
}
return last_token_type;
}
/// append data from the stream to the internal buffer
void yyfill() noexcept
{
@@ -7810,6 +7815,8 @@ class basic_json
const lexer_char_t* m_cursor = nullptr;
/// pointer to the end of the buffer
const lexer_char_t* m_limit = nullptr;
/// the last token type
token_type last_token_type = token_type::end_of_input;
};
/*!
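
For completeness, a tiny driver for the toy_lexer sketch above (again hypothetical, not the library's parser interface): the caller keeps its own loop around scan(), and because the scanner no longer recurses, stack depth stays flat regardless of how much whitespace precedes each token.

    #include <iostream>

    int main()
    {
        // two number tokens separated by runs of whitespace; before the
        // commit each run would have cost one nested scan() call
        toy_lexer lex("   123   456");

        int tokens = 0;
        while (lex.scan() != token_type::end_of_input)
        {
            ++tokens;   // one iteration per non-whitespace token
        }
        std::cout << "tokens: " << tokens << '\n';   // prints "tokens: 2"
    }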