Mirror of https://github.com/nlohmann/json.git (synced 2024-11-28 00:59:02 +08:00)

Merge pull request #2332 from nlohmann/issue2330

Fix lexer to properly cope with repeated comments

Commit: 8a54617240
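The fix replaces a single `if` with a `while`, so the lexer keeps consuming comments until the next character no longer starts one. Below is a minimal, self-contained sketch of that control flow, not the library's actual lexer; the `Cursor` type and its member names are hypothetical and exist only to illustrate why the loop is needed.

    #include <cassert>
    #include <cstddef>
    #include <string>

    // Hypothetical helper type for illustration only.
    struct Cursor
    {
        std::string input;
        std::size_t pos;

        char peek() const { return pos < input.size() ? input[pos] : '\0'; }

        void skip_whitespace()
        {
            while (peek() == ' ' || peek() == '\t' || peek() == '\n' || peek() == '\r')
            {
                ++pos;
            }
        }

        // Skip one "//..." line comment; return false if the cursor is not on one.
        bool skip_line_comment()
        {
            if (peek() != '/' || pos + 1 >= input.size() || input[pos + 1] != '/')
            {
                return false;
            }
            while (peek() != '\n' && peek() != '\0')
            {
                ++pos;
            }
            return true;
        }
    };

    int main()
    {
        Cursor c{"//a\n//b\n{", 0};
        c.skip_whitespace();

        // With a plain `if`, only the first comment line would be skipped and the
        // second "//" would surface as an unexpected token. Looping until the
        // cursor no longer sits on '/' mirrors the `if` -> `while` change below.
        while (c.peek() == '/')
        {
            if (!c.skip_line_comment())
            {
                break;
            }
            c.skip_whitespace();
        }

        assert(c.peek() == '{');
        return 0;
    }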
@@ -1511,7 +1511,7 @@ scan_number_done:
         skip_whitespace();
 
         // ignore comments
-        if (ignore_comments && current == '/')
+        while (ignore_comments && current == '/')
         {
             if (!scan_comment())
             {
@@ -7390,7 +7390,7 @@ scan_number_done:
         skip_whitespace();
 
         // ignore comments
-        if (ignore_comments && current == '/')
+        while (ignore_comments && current == '/')
         {
             if (!scan_comment())
             {
@@ -241,5 +241,8 @@ TEST_CASE("lexer class")
         CHECK((scan_string("/* true */", true) == json::lexer::token_type::end_of_input));
         CHECK((scan_string("/*/**/", true) == json::lexer::token_type::end_of_input));
         CHECK((scan_string("/*/* */", true) == json::lexer::token_type::end_of_input));
+
+        CHECK((scan_string("//\n//\n", true) == json::lexer::token_type::end_of_input));
+        CHECK((scan_string("/**//**//**/", true) == json::lexer::token_type::end_of_input));
     }
 }
@@ -478,4 +478,11 @@ TEST_CASE("regression tests 2")
         CHECK(jsonObj["aaaa"] == 11);
         CHECK(jsonObj["bbb"] == 222);
     }
+
+    SECTION("issue #2330 - ignore_comment=true fails on multiple consecutive lines starting with comments")
+    {
+        std::string ss = "//\n//\n{\n}\n";
+        json j = json::parse(ss, nullptr, true, true);
+        CHECK(j.dump() == "{}");
+    }
 }
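For reference, a minimal usage sketch of the public API that the regression test above exercises: the fourth parameter of json::parse enables comment skipping. The single-header include path is an assumption about the build setup.

    #include <cassert>
    #include <string>

    #include <nlohmann/json.hpp>

    using json = nlohmann::json;

    int main()
    {
        // Two consecutive comment-only lines before the object; with the fix,
        // ignore_comments = true skips both instead of stopping after the first.
        const std::string text = "//\n//\n{\n}\n";
        const json j = json::parse(text,
                                   /*cb=*/nullptr,
                                   /*allow_exceptions=*/true,
                                   /*ignore_comments=*/true);
        assert(j.dump() == "{}");
        return 0;
    }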