#pragma once

#include <cassert> // assert
#include <cmath> // isfinite
#include <cstdint> // uint8_t
#include <functional> // function
#include <string> // string
#include <utility> // move
#include <vector> // vector

#include <nlohmann/detail/exceptions.hpp>
#include <nlohmann/detail/input/input_adapters.hpp>
#include <nlohmann/detail/input/json_sax.hpp>
#include <nlohmann/detail/input/lexer.hpp>
#include <nlohmann/detail/macro_scope.hpp>
#include <nlohmann/detail/meta/is_sax.hpp>
#include <nlohmann/detail/value_t.hpp>

namespace nlohmann
{
namespace detail
{
////////////
// parser //
////////////

/*!
@brief syntax analysis

This class implements a recursive descent parser.
*/
template<typename BasicJsonType>
class parser
{
    using number_integer_t = typename BasicJsonType::number_integer_t;
    using number_unsigned_t = typename BasicJsonType::number_unsigned_t;
    using number_float_t = typename BasicJsonType::number_float_t;
    using string_t = typename BasicJsonType::string_t;
    using lexer_t = lexer<BasicJsonType>;
    using token_type = typename lexer_t::token_type;

  public:
    enum class parse_event_t : uint8_t
    {
        /// the parser read `{` and started to process a JSON object
        object_start,
        /// the parser read `}` and finished processing a JSON object
        object_end,
        /// the parser read `[` and started to process a JSON array
        array_start,
        /// the parser read `]` and finished processing a JSON array
        array_end,
        /// the parser read a key of a value in an object
        key,
        /// the parser finished reading a JSON value
        value
    };

    using parser_callback_t =
        std::function<bool(int depth, parse_event_t event, BasicJsonType& parsed)>;
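
    // A minimal sketch of a filter callback (a hypothetical example): the
    // callback can veto values by returning false, which causes them to be
    // discarded. The lambda below would drop every object member whose key
    // is "debug".
    //
    //     parser_callback_t cb = [](int /*depth*/, parse_event_t event,
    //                               BasicJsonType& parsed)
    //     {
    //         return not (event == parse_event_t::key and parsed == "debug");
    //     };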

    /// a parser reading from an input adapter
    explicit parser(detail::input_adapter_t&& adapter,
                    const parser_callback_t cb = nullptr,
                    const bool allow_exceptions_ = true)
        : callback(cb), m_lexer(std::move(adapter)), allow_exceptions(allow_exceptions_)
    {
        // read first token
        get_token();
    }

    /*!
    @brief public parser interface

    @param[in] strict      whether to expect the last token to be EOF
    @param[in,out] result  parsed JSON value

    @throw parse_error.101 in case of an unexpected token
    @throw parse_error.102 if to_unicode fails or a surrogate error occurs
    @throw parse_error.103 if to_unicode fails
    */
    void parse(const bool strict, BasicJsonType& result)
    {
        if (callback)
        {
            json_sax_dom_callback_parser<BasicJsonType> sdp(result, callback, allow_exceptions);
            sax_parse_internal(&sdp);
            result.assert_invariant();

            // in strict mode, input must be completely read
            if (strict and (get_token() != token_type::end_of_input))
            {
                sdp.parse_error(m_lexer.get_position(),
                                m_lexer.get_token_string(),
                                parse_error::create(101, m_lexer.get_position(),
                                        exception_message(token_type::end_of_input, "value")));
            }

            // in case of an error, return discarded value
            if (sdp.is_errored())
            {
                result = value_t::discarded;
                return;
            }

            // set top-level value to null if it was discarded by the callback
            // function
            if (result.is_discarded())
            {
                result = nullptr;
            }
        }
        else
        {
            json_sax_dom_parser<BasicJsonType> sdp(result, allow_exceptions);
            sax_parse_internal(&sdp);
            result.assert_invariant();

            // in strict mode, input must be completely read
            if (strict and (get_token() != token_type::end_of_input))
            {
                sdp.parse_error(m_lexer.get_position(),
                                m_lexer.get_token_string(),
                                parse_error::create(101, m_lexer.get_position(),
                                        exception_message(token_type::end_of_input, "value")));
            }

            // in case of an error, return discarded value
            if (sdp.is_errored())
            {
                result = value_t::discarded;
                return;
            }
        }
    }
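
    // Usage sketch (this mirrors how basic_json::parse drives this class;
    // the input text is an arbitrary example):
    //
    //     json result;
    //     parser<json>(detail::input_adapter("[1, 2, 3]")).parse(true, result);
    //     // with strict == true, trailing input after the value is reported
    //     // through parse_error.101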

    /*!
    @brief public accept interface

    @param[in] strict  whether to expect the last token to be EOF
    @return whether the input is a proper JSON text
    */
    bool accept(const bool strict = true)
    {
        json_sax_acceptor<BasicJsonType> sax_acceptor;
        return sax_parse(&sax_acceptor, strict);
    }
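
    // Usage sketch: accept() validates the input without building a DOM and
    // without throwing; errors merely yield false (basic_json::accept
    // forwards here):
    //
    //     const bool ok = parser<json>(detail::input_adapter("{\"k\": 1}")).accept();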

    template <typename SAX>
    bool sax_parse(SAX* sax, const bool strict = true)
    {
        (void)detail::is_sax_static_asserts<SAX, BasicJsonType> {};
        const bool result = sax_parse_internal(sax);

        // strict mode: next byte must be EOF
        if (result and strict and (get_token() != token_type::end_of_input))
        {
            return sax->parse_error(m_lexer.get_position(),
                                    m_lexer.get_token_string(),
                                    parse_error::create(101, m_lexer.get_position(),
                                            exception_message(token_type::end_of_input, "value")));
        }

        return result;
    }
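
    // sax_parse drives any handler that implements the complete json_sax
    // event interface (enforced at compile time by the is_sax_static_asserts
    // check above); every event returns false to abort parsing. A
    // hypothetical handler that only tallies events could look like this
    // (assuming json's default type aliases):
    //
    //     struct event_counter
    //     {
    //         std::size_t events = 0;
    //         bool null() { ++events; return true; }
    //         bool boolean(bool) { ++events; return true; }
    //         bool number_integer(json::number_integer_t) { ++events; return true; }
    //         bool number_unsigned(json::number_unsigned_t) { ++events; return true; }
    //         bool number_float(json::number_float_t, const std::string&) { ++events; return true; }
    //         bool string(std::string&) { ++events; return true; }
    //         bool start_object(std::size_t) { ++events; return true; }
    //         bool key(std::string&) { ++events; return true; }
    //         bool end_object() { ++events; return true; }
    //         bool start_array(std::size_t) { ++events; return true; }
    //         bool end_array() { ++events; return true; }
    //         bool parse_error(std::size_t, const std::string&,
    //                          const detail::exception&) { return false; }
    //     };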

  private:
    template <typename SAX>
    bool sax_parse_internal(SAX* sax)
    {
        // stack to remember the hierarchy of structured values we are parsing
        // true = array; false = object
        std::vector<bool> states;
        // value to avoid a goto (see comment where set to true)
        bool skip_to_state_evaluation = false;

        while (true)
        {
            if (not skip_to_state_evaluation)
            {
                // invariant: get_token() was called before each iteration
                switch (last_token)
                {
                    case token_type::begin_object:
                    {
                        if (JSON_UNLIKELY(not sax->start_object(std::size_t(-1))))
                        {
                            return false;
                        }

                        // closing } -> we are done
                        if (get_token() == token_type::end_object)
                        {
                            if (JSON_UNLIKELY(not sax->end_object()))
                            {
                                return false;
                            }
                            break;
                        }

                        // parse key
                        if (JSON_UNLIKELY(last_token != token_type::value_string))
                        {
                            return sax->parse_error(m_lexer.get_position(),
                                                    m_lexer.get_token_string(),
                                                    parse_error::create(101, m_lexer.get_position(),
                                                            exception_message(token_type::value_string, "object key")));
                        }
                        if (JSON_UNLIKELY(not sax->key(m_lexer.get_string())))
                        {
                            return false;
                        }

                        // parse separator (:)
                        if (JSON_UNLIKELY(get_token() != token_type::name_separator))
                        {
                            return sax->parse_error(m_lexer.get_position(),
                                                    m_lexer.get_token_string(),
                                                    parse_error::create(101, m_lexer.get_position(),
                                                            exception_message(token_type::name_separator, "object separator")));
                        }

                        // remember we are now inside an object
                        states.push_back(false);

                        // parse values
                        get_token();
                        continue;
                    }

                    case token_type::begin_array:
                    {
                        if (JSON_UNLIKELY(not sax->start_array(std::size_t(-1))))
                        {
                            return false;
                        }

                        // closing ] -> we are done
                        if (get_token() == token_type::end_array)
                        {
                            if (JSON_UNLIKELY(not sax->end_array()))
                            {
                                return false;
                            }
                            break;
                        }

                        // remember we are now inside an array
                        states.push_back(true);

                        // parse values (no need to call get_token)
                        continue;
                    }

                    case token_type::value_float:
                    {
                        const auto res = m_lexer.get_number_float();

                        if (JSON_UNLIKELY(not std::isfinite(res)))
                        {
                            return sax->parse_error(m_lexer.get_position(),
                                                    m_lexer.get_token_string(),
                                                    out_of_range::create(406, "number overflow parsing '" + m_lexer.get_token_string() + "'"));
                        }

                        if (JSON_UNLIKELY(not sax->number_float(res, m_lexer.get_string())))
                        {
                            return false;
                        }

                        break;
                    }

                    case token_type::literal_false:
                    {
                        if (JSON_UNLIKELY(not sax->boolean(false)))
                        {
                            return false;
                        }
                        break;
                    }

                    case token_type::literal_null:
                    {
                        if (JSON_UNLIKELY(not sax->null()))
                        {
                            return false;
                        }
                        break;
                    }

                    case token_type::literal_true:
                    {
                        if (JSON_UNLIKELY(not sax->boolean(true)))
                        {
                            return false;
                        }
                        break;
                    }

                    case token_type::value_integer:
                    {
                        if (JSON_UNLIKELY(not sax->number_integer(m_lexer.get_number_integer())))
                        {
                            return false;
                        }
                        break;
                    }

                    case token_type::value_string:
                    {
                        if (JSON_UNLIKELY(not sax->string(m_lexer.get_string())))
                        {
                            return false;
                        }
                        break;
                    }

                    case token_type::value_unsigned:
                    {
                        if (JSON_UNLIKELY(not sax->number_unsigned(m_lexer.get_number_unsigned())))
                        {
                            return false;
                        }
                        break;
                    }

                    case token_type::parse_error:
                    {
                        // using "uninitialized" to avoid "expected" message
                        return sax->parse_error(m_lexer.get_position(),
                                                m_lexer.get_token_string(),
                                                parse_error::create(101, m_lexer.get_position(),
                                                        exception_message(token_type::uninitialized, "value")));
                    }

                    default: // the last token was unexpected
                    {
                        return sax->parse_error(m_lexer.get_position(),
                                                m_lexer.get_token_string(),
                                                parse_error::create(101, m_lexer.get_position(),
                                                        exception_message(token_type::literal_or_value, "value")));
                    }
                }
            }
            else
            {
                skip_to_state_evaluation = false;
            }

            // we reached this line after we successfully parsed a value
            if (states.empty())
            {
                // empty stack: we reached the end of the hierarchy: done
                return true;
            }

            if (states.back()) // array
            {
                // comma -> next value
                if (get_token() == token_type::value_separator)
                {
                    // parse a new value
                    get_token();
                    continue;
                }

                // closing ]
                if (JSON_LIKELY(last_token == token_type::end_array))
                {
                    if (JSON_UNLIKELY(not sax->end_array()))
                    {
                        return false;
                    }

                    // We are done with this array. Before we can parse a new
                    // value, we need to evaluate the new state first. Setting
                    // skip_to_state_evaluation to true makes the next loop
                    // iteration bypass the token switch and jump straight to
                    // this state evaluation.
                    assert(not states.empty());
                    states.pop_back();
                    skip_to_state_evaluation = true;
                    continue;
                }

                return sax->parse_error(m_lexer.get_position(),
                                        m_lexer.get_token_string(),
                                        parse_error::create(101, m_lexer.get_position(),
                                                exception_message(token_type::end_array, "array")));
            }
            else // object
            {
                // comma -> next value
                if (get_token() == token_type::value_separator)
                {
                    // parse key
                    if (JSON_UNLIKELY(get_token() != token_type::value_string))
                    {
                        return sax->parse_error(m_lexer.get_position(),
                                                m_lexer.get_token_string(),
                                                parse_error::create(101, m_lexer.get_position(),
                                                        exception_message(token_type::value_string, "object key")));
                    }

                    if (JSON_UNLIKELY(not sax->key(m_lexer.get_string())))
                    {
                        return false;
                    }

                    // parse separator (:)
                    if (JSON_UNLIKELY(get_token() != token_type::name_separator))
                    {
                        return sax->parse_error(m_lexer.get_position(),
                                                m_lexer.get_token_string(),
                                                parse_error::create(101, m_lexer.get_position(),
                                                        exception_message(token_type::name_separator, "object separator")));
                    }

                    // parse values
                    get_token();
                    continue;
                }

                // closing }
                if (JSON_LIKELY(last_token == token_type::end_object))
                {
                    if (JSON_UNLIKELY(not sax->end_object()))
                    {
                        return false;
                    }

                    // We are done with this object. Before we can parse a new
                    // value, we need to evaluate the new state first. Setting
                    // skip_to_state_evaluation to true makes the next loop
                    // iteration bypass the token switch and jump straight to
                    // this state evaluation.
                    assert(not states.empty());
                    states.pop_back();
                    skip_to_state_evaluation = true;
                    continue;
                }

                return sax->parse_error(m_lexer.get_position(),
                                        m_lexer.get_token_string(),
                                        parse_error::create(101, m_lexer.get_position(),
                                                exception_message(token_type::end_object, "object")));
            }
        }
    }

    /// get next token from lexer
    token_type get_token()
    {
        return last_token = m_lexer.scan();
    }

    std::string exception_message(const token_type expected, const std::string& context)
    {
        std::string error_msg = "syntax error ";

        if (not context.empty())
        {
            error_msg += "while parsing " + context + " ";
        }

        error_msg += "- ";

        if (last_token == token_type::parse_error)
        {
            error_msg += std::string(m_lexer.get_error_message()) + "; last read: '" +
                         m_lexer.get_token_string() + "'";
        }
        else
        {
            error_msg += "unexpected " + std::string(lexer_t::token_type_name(last_token));
        }

        if (expected != token_type::uninitialized)
        {
            error_msg += "; expected " + std::string(lexer_t::token_type_name(expected));
        }

        return error_msg;
    }

  private:
    /// callback function
    const parser_callback_t callback = nullptr;
    /// the type of the last read token
    token_type last_token = token_type::uninitialized;
    /// the lexer
    lexer_t m_lexer;
    /// whether to throw exceptions in case of errors
    const bool allow_exceptions = true;
};
} // namespace detail
} // namespace nlohmann