#define JSON_NUM_TERMINALS 13
#define JSON_NUM_NONTERMINALS 5
#define JSON_NT_OFFSET JSON_NT_JSON

#define OFS(NT) (NT) - JSON_NT_OFFSET

#define IS_SEM(x) ((x) & 0x40)
#define IS_NT(x)  ((x) & 0x20)

#define TD_ENTRY(PROD) { sizeof(PROD) - 1, (PROD) }
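/*
 * Illustration (not part of the original file): how a byte in a production
 * string might be classified with the macros above.  The concrete numeric
 * layout -- terminal tokens below 0x20, nonterminals carrying bit 0x20 and
 * starting at JSON_NT_JSON, semantic-action markers carrying bit 0x40 -- is
 * an assumption inferred from IS_NT()/IS_SEM(), not something stated here.
 */
static const char *
classify_prod_byte(unsigned char b)
{
    if (IS_SEM(b))
        return "semantic action marker";
    if (IS_NT(b))
        return "nonterminal (row OFS(b) of td_parser_table)";
    return "terminal token";
}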
static JsonParseErrorType json_lex_number(JsonLexContext *lex, const char *s,
                                          bool *num_err, size_t *total_len);
const JsonSemAction nullSemAction =
{
    NULL, NULL, NULL, NULL, NULL,
    NULL, NULL, NULL, NULL, NULL
};
#define JSON_ALPHANUMERIC_CHAR(c)  \
    (((c) >= 'a' && (c) <= 'z') || \
     ((c) >= 'A' && (c) <= 'Z') || \
     ((c) >= '0' && (c) <= '9') || \
     (c) == '_' || \
     IS_HIGHBIT_SET(c))
    return (!numeric_error) && (total_len == dummy_lex.input_length);
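/*
 * Usage sketch (not from the original file) for IsValidJsonNumber(), whose
 * prototype is listed later in this file; the "common/jsonapi.h" header name
 * is an assumption.
 */
#include <string.h>
#include "common/jsonapi.h"

static void
demo_is_valid_json_number(void)
{
    bool ok;

    ok = IsValidJsonNumber("-1.5e+10", strlen("-1.5e+10"));    /* true */
    ok = IsValidJsonNumber("01", 2);    /* false: leading zero not allowed */
    ok = IsValidJsonNumber("1.", 2);    /* false: fraction needs digits */
    (void) ok;
}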
#define JS_STACK_CHUNK_SIZE 64
#define JS_MAX_PROD_LEN 10
#define JSON_TD_MAX_STACK 6400
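/*
 * Sketch only: the chunked-growth pattern suggested by the constants above.
 * This is NOT the real JsonParserStack layout; the struct and both functions
 * are hypothetical, and only palloc0(), repalloc() and the constants come
 * from the surrounding code.
 */
typedef struct
{
    char       *prediction;     /* stack of production bytes */
    size_t      pred_index;     /* next free slot */
    size_t      alloc_len;      /* currently allocated bytes */
} ToyPredictionStack;

static void
toy_init(ToyPredictionStack *st)
{
    st->prediction = palloc0(JS_STACK_CHUNK_SIZE);
    st->alloc_len = JS_STACK_CHUNK_SIZE;
    st->pred_index = 0;
}

static bool
toy_push_prediction(ToyPredictionStack *st, char byte)
{
    if (st->pred_index >= st->alloc_len)
    {
        if (st->alloc_len + JS_STACK_CHUNK_SIZE > JSON_TD_MAX_STACK)
            return false;       /* would exceed the nesting cap */
        st->alloc_len += JS_STACK_CHUNK_SIZE;
        st->prediction = repalloc(st->prediction, st->alloc_len);
    }
    st->prediction[st->pred_index++] = byte;
    return true;
}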
#ifdef FORCE_JSON_PSTACK
    if ((ostart != NULL || oend != NULL) && lex->strval != NULL)

    if ((ostart != NULL || oend != NULL) && lex->strval != NULL)
    result = (*ostart) (sem->semstate, fname, isnull);
    bool        tok_done = false;

    if (ptok->data[0] == '"')

    for (int i = ptok->len - 1; i > 0; i--)

    if (ptok->data[i] == '\\')

    if (c == '"' && escapes % 2 == 0)

    char        c = ptok->data[0];

    if (c == '-' || (c >= '0' && c <= '9'))

    bool        numend = false;

    lex->input += added;

    partial_result = json_lex(&dummy_lex);

    return partial_result;
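/*
 * Sketch (not from the original file) of driving the incremental entry
 * points listed later in this file: feed chunks until the last one, treating
 * JSON_INCOMPLETE as "give me more input".  PG_UTF8 and JSON_SUCCESS are
 * assumed names from the wider PostgreSQL tree; error handling is minimal.
 */
static JsonParseErrorType
parse_in_chunks(const char **chunks, const size_t *lens, int nchunks)
{
    JsonLexContext lex;
    JsonParseErrorType res = JSON_INCOMPLETE;

    makeJsonLexContextIncremental(&lex, PG_UTF8, false);
    for (int i = 0; i < nchunks; i++)
    {
        bool        is_last = (i == nchunks - 1);

        res = pg_parse_json_incremental(&lex, &nullSemAction,
                                        chunks[i], lens[i], is_last);
        if (res != JSON_INCOMPLETE && res != JSON_SUCCESS)
            break;              /* hard parse error */
    }
    freeJsonLexContext(&lex);
    return res;
}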
    while (s < end && (*s == ' ' || *s == '\t' || *s == '\n' || *s == '\r'))

    if (memcmp(s, "true", 4) == 0)

    else if (memcmp(s, "null", 4) == 0)

    else if (p - s == 5 && memcmp(s, "false", 5) == 0)

    int         hi_surrogate = -1;
#define FAIL_OR_INCOMPLETE_AT_CHAR_START(code) \
    do { \
        if (lex->incremental && !lex->inc_state->is_last_chunk) \
        { \
            appendBinaryStringInfo(&lex->inc_state->partial_token, \
                                   lex->token_start, end - lex->token_start); \
            return JSON_INCOMPLETE; \
        } \
        lex->token_terminator = s; \
        return code; \
    } while (0)
#define FAIL_AT_CHAR_END(code) \
    do { \
        const char *term = s + pg_encoding_mblen(lex->input_encoding, s); \
        lex->token_terminator = (term <= end) ? term : end; \
        return code; \
    } while (0)
    else if (*s == '\\')

    for (i = 1; i <= 4; i++)

    else if (*s >= '0' && *s <= '9')
        ch = (ch * 16) + (*s - '0');
    else if (*s >= 'a' && *s <= 'f')
        ch = (ch * 16) + (*s - 'a') + 10;
    else if (*s >= 'A' && *s <= 'F')
        ch = (ch * 16) + (*s - 'A') + 10;
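/*
 * Stand-alone restatement (not the lexer's own code) of the \uXXXX digit
 * accumulation shown above: fold four hex digits into one code unit,
 * reporting failure on a non-hex character.
 */
static bool
parse_hex4(const char *s, unsigned int *out)
{
    unsigned int ch = 0;

    for (int i = 0; i < 4; i++)
    {
        char        c = s[i];

        if (c >= '0' && c <= '9')
            ch = (ch * 16) + (c - '0');
        else if (c >= 'a' && c <= 'f')
            ch = (ch * 16) + (c - 'a') + 10;
        else if (c >= 'A' && c <= 'F')
            ch = (ch * 16) + (c - 'A') + 10;
        else
            return false;
    }
    *out = ch;
    return true;
}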
    if (hi_surrogate != -1)

    if (hi_surrogate == -1)

    if (hi_surrogate != -1)

    else if (ch <= 0x007f)

    else if (lex->strval != NULL)

    if (hi_surrogate != -1)

    else if (strchr("\"\\/bfnrt", *s) == NULL)

    if (hi_surrogate != -1)
    while (p < end - sizeof(Vector8) &&
           !pg_lfind8('\\', (uint8 *) p, sizeof(Vector8)) &&
           !pg_lfind8('"', (uint8 *) p, sizeof(Vector8)) &&
           !pg_lfind8_le(31, (uint8 *) p, sizeof(Vector8)))
        p += sizeof(Vector8);
    for (; p < end; p++)

    if (*p == '\\' || *p == '"')

    else if ((unsigned char) *p <= 31)

    if (hi_surrogate != -1)

#undef FAIL_OR_INCOMPLETE_AT_CHAR_START
#undef FAIL_AT_CHAR_END
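/*
 * Scalar equivalent (illustration only) of the Vector8 fast path above:
 * advance over bytes that cannot affect string lexing, stopping at a
 * backslash, a closing quote, or a control character.
 */
static const char *
skip_plain_string_bytes(const char *p, const char *end)
{
    while (p < end &&
           *p != '\\' &&
           *p != '"' &&
           (unsigned char) *p > 31)
        p++;
    return p;
}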
static JsonParseErrorType
json_lex_number(JsonLexContext *lex, const char *s,
                bool *num_err, size_t *total_len)
    if (len < lex->input_length && *s == '0')

    else if (len < lex->input_length && *s >= '1' && *s <= '9')

    }
    while (len < lex->input_length && *s >= '0' && *s <= '9');

    if (len < lex->input_length && *s == '.')

    }
    while (len < lex->input_length && *s >= '0' && *s <= '9');

    if (len < lex->input_length && (*s == 'e' || *s == 'E'))

    if (len < lex->input_length && (*s == '+' || *s == '-'))

    }
    while (len < lex->input_length && *s >= '0' && *s <= '9');

    if (total_len != NULL)

    if (num_err != NULL)

    else if (num_err != NULL)
#define json_token_error(lex, format) \
    appendStringInfo((lex)->errormsg, _(format), \
                     (int) ((lex)->token_terminator - (lex)->token_start), \
                     (lex)->token_start);
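/*
 * The json_token_error() macro above relies on printf's "%.*s" precision
 * form: print at most N bytes of a token that is not NUL-terminated.  A
 * minimal stand-alone demonstration:
 */
#include <stdio.h>

static void
print_bad_token(const char *start, const char *terminator)
{
    printf("Token was \"%.*s\".\n", (int) (terminator - start), start);
}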
    return _("Recursive descent parser cannot use incremental lexer.");

    return _("Incremental parser requires incremental lexer.");

    return (_("JSON nested too deep, maximum permitted depth is 6400."));

    _("Character with value 0x%02x must be escaped."),

    json_token_error(lex, "Expected array element or \"]\", but found \"%.*s\".");

    return _("The input string ended unexpectedly.");

    return _("\\u0000 cannot be converted to text.");

    return _("\"\\u\" must be followed by four hexadecimal digits.");

    return _("Unicode escape values cannot be used for code point values above 007F when the encoding is not UTF8.");
    return psprintf(_("Unicode escape value could not be translated to the server's encoding %s."),
                    GetDatabaseEncodingName());
    return _("Unicode high surrogate must not follow a high surrogate.");

    return _("Unicode low surrogate must follow a high surrogate.");

#undef json_token_error

    "unexpected json parse error type: %d",
/*
 * Symbol outline: declarations defined in this file, followed by the
 * external symbols it references.
 */

/* Exported API */
JsonLexContext *makeJsonLexContextCstringLen(JsonLexContext *lex, const char *json, size_t len, int encoding, bool need_escapes);
JsonLexContext *makeJsonLexContextIncremental(JsonLexContext *lex, int encoding, bool need_escapes);
void freeJsonLexContext(JsonLexContext *lex);
JsonParseErrorType pg_parse_json(JsonLexContext *lex, const JsonSemAction *sem);
JsonParseErrorType pg_parse_json_incremental(JsonLexContext *lex, const JsonSemAction *sem, const char *json, size_t len, bool is_last);
JsonParseErrorType json_count_array_elements(JsonLexContext *lex, int *elements);
JsonParseErrorType json_lex(JsonLexContext *lex);
char *json_errdetail(JsonParseErrorType error, JsonLexContext *lex);
bool IsValidJsonNumber(const char *str, size_t len);
const JsonSemAction nullSemAction;

/* Semantic-action callback types and JsonSemAction fields */
typedef JsonParseErrorType (*json_struct_action) (void *state);
typedef JsonParseErrorType (*json_ofield_action) (void *state, char *fname, bool isnull);
typedef JsonParseErrorType (*json_aelem_action) (void *state, bool isnull);
typedef JsonParseErrorType (*json_scalar_action) (void *state, char *token, JsonTokenType tokentype);
json_struct_action object_start;
json_struct_action object_end;
json_ofield_action object_field_start;
json_ofield_action object_field_end;
json_struct_action array_start;
json_struct_action array_end;
json_aelem_action array_element_start;
json_aelem_action array_element_end;
json_scalar_action scalar;

/* Lexer-state fields referenced */
JsonIncrementalState *inc_state;
StringInfoData partial_token;
const char *token_terminator;
const char *prev_token_terminator;

/* Internal (static) helpers */
static JsonParseErrorType json_lex_string(JsonLexContext *lex);
static JsonParseErrorType json_lex_number(JsonLexContext *lex, const char *s, bool *num_err, size_t *total_len);
static JsonParseErrorType parse_scalar(JsonLexContext *lex, const JsonSemAction *sem);
static JsonParseErrorType parse_object_field(JsonLexContext *lex, const JsonSemAction *sem);
static JsonParseErrorType parse_object(JsonLexContext *lex, const JsonSemAction *sem);
static JsonParseErrorType parse_array_element(JsonLexContext *lex, const JsonSemAction *sem);
static JsonParseErrorType parse_array(JsonLexContext *lex, const JsonSemAction *sem);
static JsonParseErrorType report_parse_error(JsonParseContext ctx, JsonLexContext *lex);
static JsonTokenType lex_peek(JsonLexContext *lex);
static JsonParseErrorType lex_expect(JsonParseContext ctx, JsonLexContext *lex, JsonTokenType token);
static void inc_lex_level(JsonLexContext *lex);
static void dec_lex_level(JsonLexContext *lex);
static void push_prediction(JsonParserStack *pstack, td_entry entry);
static char pop_prediction(JsonParserStack *pstack);
static char next_prediction(JsonParserStack *pstack);
static bool have_prediction(JsonParserStack *pstack);
static void set_fname(JsonLexContext *lex, char *fname);
static char *get_fname(JsonLexContext *lex);
static void set_fnull(JsonLexContext *lex, bool fnull);
static bool get_fnull(JsonLexContext *lex);

/* Static data: prediction table and production strings */
static td_entry td_parser_table[JSON_NUM_NONTERMINALS][JSON_NUM_TERMINALS];
static char JSON_PROD_GOAL[];
static char JSON_PROD_EPSILON[];
static char JSON_PROD_OBJECT[];
static char JSON_PROD_KEY_PAIRS[];
static char JSON_PROD_MORE_KEY_PAIRS[];
static char JSON_PROD_ARRAY[];
static char JSON_PROD_ARRAY_ELEMENTS[];
static char JSON_PROD_MORE_ARRAY_ELEMENTS[];
static char JSON_PROD_SCALAR_STRING[];
static char JSON_PROD_SCALAR_NUMBER[];
static char JSON_PROD_SCALAR_TRUE[];
static char JSON_PROD_SCALAR_FALSE[];
static char JSON_PROD_SCALAR_NULL[];

/*
 * Macros defined in this file:
 *   JSON_NUM_TERMINALS, JSON_NUM_NONTERMINALS, JS_STACK_CHUNK_SIZE,
 *   JSON_TD_MAX_STACK, JSON_ALPHANUMERIC_CHAR(c), json_token_error(lex, format),
 *   FAIL_OR_INCOMPLETE_AT_CHAR_START(code), FAIL_AT_CHAR_END(code),
 *   JSONLEX_FREE_STRVAL, JSONLEX_FREE_STRUCT
 *
 * Enumerators referenced:
 *   JSON_PARSE_OBJECT_START, JSON_PARSE_OBJECT_LABEL, JSON_PARSE_OBJECT_COMMA,
 *   JSON_NT_MORE_ARRAY_ELEMENTS, JSON_TOKEN_OBJECT_START,
 *   JSON_EXPECTED_ARRAY_FIRST, JSON_EXPECTED_ARRAY_NEXT,
 *   JSON_EXPECTED_OBJECT_FIRST, JSON_EXPECTED_OBJECT_NEXT,
 *   JSON_INVALID_LEXER_TYPE, JSON_UNICODE_CODE_POINT_ZERO,
 *   JSON_UNICODE_ESCAPE_FORMAT, JSON_UNICODE_HIGH_ESCAPE,
 *   JSON_UNICODE_HIGH_SURROGATE, JSON_UNICODE_LOW_SURROGATE,
 *   JSON_UNICODE_UNTRANSLATABLE
 */

/* External functions and macros referenced */
void *palloc0(Size size);
void *repalloc(void *pointer, Size size);
void pfree(void *pointer);
char *pstrdup(const char *in);
char *psprintf(const char *fmt,...);
void check_stack_depth(void);
void initStringInfo(StringInfo str);
StringInfo makeStringInfo(void);
void resetStringInfo(StringInfo str);
void destroyStringInfo(StringInfo str);
void appendStringInfo(StringInfo str, const char *fmt,...);
void appendBinaryStringInfo(StringInfo str, const void *data, int datalen);
void appendStringInfoString(StringInfo str, const char *s);
void appendStringInfoChar(StringInfo str, char ch);
static bool pg_lfind8(uint8 key, uint8 *base, uint32 nelem);
static bool pg_lfind8_le(uint8 key, uint8 *base, uint32 nelem);
static unsigned char *unicode_to_utf8(pg_wchar c, unsigned char *utf8string);
bool pg_unicode_to_server_noerror(pg_wchar c, unsigned char *s);
static pg_wchar surrogate_pair_to_codepoint(pg_wchar first, pg_wchar second);
static bool is_utf16_surrogate_first(pg_wchar c);
static bool is_utf16_surrogate_second(pg_wchar c);
const char *GetDatabaseEncodingName(void);
/* plus the macros Assert(condition), appendStringInfoCharMacro(str, ch),
 * and MAX_UNICODE_EQUIVALENT_STRING */
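/*
 * End-to-end usage sketch built only from the prototypes above.  The header
 * names ("common/jsonapi.h", "mb/pg_wchar.h" for PG_UTF8), the semstate
 * member of JsonSemAction, and JSON_SUCCESS are assumptions from the wider
 * PostgreSQL tree, not confirmed by this outline.
 */
#include <string.h>
#include "common/jsonapi.h"
#include "mb/pg_wchar.h"

static JsonParseErrorType
count_scalars_cb(void *state, char *token, JsonTokenType tokentype)
{
    (*(int *) state)++;         /* count every scalar we see */
    return JSON_SUCCESS;
}

static bool
count_scalars(const char *json, int *count)
{
    JsonLexContext lexbuf;
    JsonLexContext *lex;
    JsonSemAction sem = {0};
    JsonParseErrorType res;

    *count = 0;
    sem.semstate = count;       /* assumed field name, see comment above */
    sem.scalar = count_scalars_cb;

    lex = makeJsonLexContextCstringLen(&lexbuf, json, strlen(json),
                                       PG_UTF8, true /* need_escapes */ );
    res = pg_parse_json(lex, &sem);
    freeJsonLexContext(lex);
    return res == JSON_SUCCESS;
}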