PostgreSQL Source Code (git master) — json.c, as exported from the Doxygen source browser.
(Viewer chrome from the export; the embedded numeric prefixes on the lines below are the
upstream file's own line numbers, and lines that were pure hyperlink markup were dropped
by the export.)
1/*-------------------------------------------------------------------------
2 *
3 * json.c
4 * JSON data type support.
5 *
6 * Portions Copyright (c) 1996-2026, PostgreSQL Global Development Group
7 * Portions Copyright (c) 1994, Regents of the University of California
8 *
9 * IDENTIFICATION
10 * src/backend/utils/adt/json.c
11 *
12 *-------------------------------------------------------------------------
13 */
14#include "postgres.h"
15
16#include "access/htup_details.h"
17#include "catalog/pg_type.h"
18#include "common/hashfn.h"
19#include "funcapi.h"
20#include "libpq/pqformat.h"
21#include "miscadmin.h"
22#include "port/simd.h"
23#include "utils/array.h"
24#include "utils/builtins.h"
25#include "utils/date.h"
26#include "utils/datetime.h"
27#include "utils/fmgroids.h"
28#include "utils/hsearch.h"
29#include "utils/json.h"
30#include "utils/jsonfuncs.h"
31#include "utils/lsyscache.h"
32#include "utils/typcache.h"
33
34
35/*
36 * Support for fast key uniqueness checking.
37 *
38 * We maintain a hash table of used keys in JSON objects for fast detection
39 * of duplicates.
40 */
41/* Common context for key uniqueness check */
42typedef struct HTAB *JsonUniqueCheckState; /* hash table for key names */
43
44/* Hash entry for JsonUniqueCheckState */
45typedef struct JsonUniqueHashEntry
46{
47 const char *key;
51
52/* Stack element for key uniqueness check during JSON parsing */
58
59/* Context struct for key uniqueness check during JSON parsing */
68
69/* Context struct for key uniqueness check during JSON building */
71{
72 JsonUniqueCheckState check; /* unique check */
73 StringInfoData skipped_keys; /* skipped keys with NULL values */
74 MemoryContext mcxt; /* context for saving skipped keys */
76
77
78/* State struct for JSON aggregation */
88
89static void array_dim_to_json(StringInfo result, int dim, int ndims, int *dims,
90 const Datum *vals, const bool *nulls, int *valcount,
92 bool use_line_feeds);
93static void array_to_json_internal(Datum array, StringInfo result,
94 bool use_line_feeds);
95static void datum_to_json_internal(Datum val, bool is_null, StringInfo result,
97 bool key_scalar);
98static void add_json(Datum val, bool is_null, StringInfo result,
99 Oid val_type, bool key_scalar);
100static text *catenate_stringinfo_string(StringInfo buffer, const char *addon);
101
102/*
103 * Input.
104 */
/*
 * json type input function: validate the argument cstring as JSON and
 * return it as text (the json type's internal representation is the text).
 * NOTE(review): the signature line is missing from this export; presumably
 * json_in(PG_FUNCTION_ARGS) -- confirm against the repository source.
 */
105Datum
 107{
 108 char *json = PG_GETARG_CSTRING(0);
 109 text *result = cstring_to_text(json);
 110 JsonLexContext lex;
 111
 112 /* validate it */
 113 makeJsonLexContext(&lex, result, false);
 114 if (!pg_parse_json_or_errsave(&lex, &nullSemAction, fcinfo->context))
 /* NOTE(review): the statement taken on parse failure (upstream line 115)
  * is missing from this view -- presumably a soft-error return. */
 116
 117 /* Internal representation is the same as text */
 118 PG_RETURN_TEXT_P(result);
 119}
120
121/*
122 * Output.
123 */
/*
 * json type output function.
 * NOTE(review): the signature and body statements (upstream lines 125,
 * 128, 130) are missing from this export; the surviving comment shows the
 * body converts the text datum via text_to_cstring and returns it.
 */
124Datum
 126{
 127 /* we needn't detoast because text_to_cstring will handle that */
 129
 131}
132
133/*
134 * Binary send.
135 */
136Datum
146
147/*
148 * Binary receive.
149 */
/*
 * json type binary-receive function: read the remainder of the input
 * message as text and validate it as JSON before returning.
 * NOTE(review): the signature line and the validation/return statements
 * (upstream lines 151, 153, 161, 163, 165) are missing from this export.
 */
150Datum
 152{
 154 char *str;
 155 int nbytes;
 156 JsonLexContext lex;
 157
 /* consume all remaining bytes of the message as (N-byte) text */
 158 str = pq_getmsgtext(buf, buf->len - buf->cursor, &nbytes);
 159
 160 /* Validate it. */
 162 false);
 164
 166}
167
168/*
169 * Turn a Datum into JSON text, appending the string to "result".
170 *
171 * tcategory and outfuncoid are from a previous call to json_categorize_type,
172 * except that if is_null is true then they can be invalid.
173 *
174 * If key_scalar is true, the value is being printed as a key, so insist
175 * it's of an acceptable type, and force it to be quoted.
176 */
/*
 * datum_to_json_internal (signature lines elided by this export):
 * serialize one datum into "result" according to its previously-determined
 * JSON type category (tcategory) and output function (outfuncoid).
 * If key_scalar is true the value is an object key: non-scalar categories
 * are rejected and scalar output is forced to be quoted.
 * NOTE(review): several lines (signature, some case labels, the
 * ereport/output statements inside a few cases) are missing from this
 * view; behavior comments below are limited to what survives.
 */
177static void
 180 bool key_scalar)
 181{
 182 char *outputstr;
 183 text *jsontext;
 184
 186
 187 /* callers are expected to ensure that null keys are not passed in */
 189
 /* SQL NULL renders as the JSON literal null */
 190 if (is_null)
 191 {
 192 appendBinaryStringInfo(result, "null", strlen("null"));
 193 return;
 194 }
 195
 /* reject non-scalar key values (condition lines partly elided) */
 196 if (key_scalar &&
 203 errmsg("key value must be scalar, not array, composite, or json")));
 204
 205 switch (tcategory)
 206 {
 207 case JSONTYPE_ARRAY:
 208 array_to_json_internal(val, result, false);
 209 break;
 /* NOTE(review): the JSONTYPE_COMPOSITE case label (line 210) was
  * dropped by the export; the composite_to_json call below belongs
  * to it. */
 211 composite_to_json(val, result, false);
 212 break;
 213 case JSONTYPE_BOOL:
 /* booleans become bare true/false, quoted only when used as a key */
 214 if (key_scalar)
 215 appendStringInfoChar(result, '"');
 216 if (DatumGetBool(val))
 217 appendBinaryStringInfo(result, "true", strlen("true"));
 218 else
 219 appendBinaryStringInfo(result, "false", strlen("false"));
 220 if (key_scalar)
 221 appendStringInfoChar(result, '"');
 222 break;
 223 case JSONTYPE_NUMERIC:
 225
 226 /*
 227 * Don't quote a non-key if it's a valid JSON number (i.e., not
 228 * "Infinity", "-Infinity", or "NaN"). Since we know this is a
 229 * numeric data type's output, we simplify and open-code the
 230 * validation for better performance.
 231 */
 232 if (!key_scalar &&
 233 ((*outputstr >= '0' && *outputstr <= '9') ||
 234 (*outputstr == '-' &&
 235 (outputstr[1] >= '0' && outputstr[1] <= '9'))))
 237 else
 238 {
 /* not a plain number (or it is a key): emit it quoted */
 239 appendStringInfoChar(result, '"');
 241 appendStringInfoChar(result, '"');
 242 }
 244 break;
 245 case JSONTYPE_DATE:
 246 {
 247 char buf[MAXDATELEN + 1];
 248
 /* datetime categories render via JsonEncodeDateTime, quoted
  * (the encoding calls themselves were elided by the export) */
 250 appendStringInfoChar(result, '"');
 252 appendStringInfoChar(result, '"');
 253 }
 254 break;
 /* NOTE(review): case label elided -- presumably JSONTYPE_TIMESTAMP */
 256 {
 257 char buf[MAXDATELEN + 1];
 258
 260 appendStringInfoChar(result, '"');
 262 appendStringInfoChar(result, '"');
 263 }
 264 break;
 /* NOTE(review): case label elided -- presumably JSONTYPE_TIMESTAMPTZ */
 266 {
 267 char buf[MAXDATELEN + 1];
 268
 270 appendStringInfoChar(result, '"');
 272 appendStringInfoChar(result, '"');
 273 }
 274 break;
 275 case JSONTYPE_JSON:
 276 /* JSON and JSONB output will already be escaped */
 280 break;
 281 case JSONTYPE_CAST:
 282 /* outfuncoid refers to a cast function, not an output function */
 287 break;
 288 default:
 289 /* special-case text types to save useless palloc/memcpy cycles */
 293 else
 294 {
 296 escape_json(result, outputstr);
 298 }
 299 break;
 300 }
 301}
302
303/*
304 * Encode 'value' of datetime type 'typid' into JSON string in ISO format using
305 * optionally preallocated buffer 'buf'. Optional 'tzp' determines time-zone
306 * offset (in seconds) in which we want to show timestamptz.
307 */
/*
 * NOTE(review): the export dropped the Datum-extraction lines at the top of
 * each case (e.g. DatumGetDateADT/DatumGetTimeADT/DatumGetTimestamp) and a
 * few of the infinite-value checks; the comments below describe only what
 * remains visible.
 */
308char *
309JsonEncodeDateTime(char *buf, Datum value, Oid typid, const int *tzp)
310{
 /* caller may pass NULL to have a MAXDATELEN buffer palloc'd here */
 311 if (!buf)
 312 buf = palloc(MAXDATELEN + 1);
 313
 314 switch (typid)
 315 {
 316 case DATEOID:
 317 {
 319 struct pg_tm tm;
 320
 322
 323 /* Same as date_out(), but forcing DateStyle */
 326 else
 327 {
 329 &(tm.tm_year), &(tm.tm_mon), &(tm.tm_mday));
 331 }
 332 }
 333 break;
 334 case TIMEOID:
 335 {
 337 struct pg_tm tt,
 338 *tm = &tt;
 339 fsec_t fsec;
 340
 341 /* Same as time_out(), but forcing DateStyle */
 342 time2tm(time, tm, &fsec);
 343 EncodeTimeOnly(tm, fsec, false, 0, USE_XSD_DATES, buf);
 344 }
 345 break;
 346 case TIMETZOID:
 347 {
 349 struct pg_tm tt,
 350 *tm = &tt;
 351 fsec_t fsec;
 352 int tz;
 353
 354 /* Same as timetz_out(), but forcing DateStyle */
 355 timetz2tm(time, tm, &fsec, &tz);
 356 EncodeTimeOnly(tm, fsec, true, tz, USE_XSD_DATES, buf);
 357 }
 358 break;
 359 case TIMESTAMPOID:
 360 {
 362 struct pg_tm tm;
 363 fsec_t fsec;
 364
 366 /* Same as timestamp_out(), but forcing DateStyle */
 369 else if (timestamp2tm(timestamp, NULL, &tm, &fsec, NULL, NULL) == 0)
 370 EncodeDateTime(&tm, fsec, false, 0, NULL, USE_XSD_DATES, buf);
 371 else
 374 errmsg("timestamp out of range")));
 375 }
 376 break;
 377 case TIMESTAMPTZOID:
 378 {
 380 struct pg_tm tm;
 381 int tz;
 382 fsec_t fsec;
 383 const char *tzn = NULL;
 384
 386
 387 /*
 388 * If a time zone is specified, we apply the time-zone shift,
 389 * convert timestamptz to pg_tm as if it were without a time
 390 * zone, and then use the specified time zone for converting
 391 * the timestamp into a string.
 392 */
 393 if (tzp)
 394 {
 395 tz = *tzp;
 397 }
 398
 399 /* Same as timestamptz_out(), but forcing DateStyle */
 402 else if (timestamp2tm(timestamp, tzp ? NULL : &tz, &tm, &fsec,
 403 tzp ? NULL : &tzn, NULL) == 0)
 404 {
 405 if (tzp)
 406 tm.tm_isdst = 1; /* set time-zone presence flag */
 407
 408 EncodeDateTime(&tm, fsec, true, tz, tzn, USE_XSD_DATES, buf);
 409 }
 410 else
 413 errmsg("timestamp out of range")));
 414 }
 415 break;
 416 default:
 417 elog(ERROR, "unknown jsonb value datetime type oid %u", typid);
 418 return NULL;
 419 }
 420
 421 return buf;
 422}
423
424/*
425 * Process a single dimension of an array.
426 * If it's the innermost dimension, output the values, otherwise call
427 * ourselves recursively to process the next dimension.
428 */
/*
 * Emit one array dimension as a JSON array, recursing for inner dimensions;
 * see the header comment above. *valcount indexes into vals/nulls across
 * the whole flattened array.
 * NOTE(review): the final signature line (Oid outfuncoid, bool
 * use_line_feeds) and two statements inside the loop were dropped by the
 * export.
 */
429static void
430array_dim_to_json(StringInfo result, int dim, int ndims, int *dims, const Datum *vals,
 431 const bool *nulls, int *valcount, JsonTypeCategory tcategory,
 433{
 434 int i;
 435 const char *sep;
 436
 437 Assert(dim < ndims);
 438
 439 sep = use_line_feeds ? ",\n " : ",";
 440
 441 appendStringInfoChar(result, '[');
 442
 443 for (i = 1; i <= dims[dim]; i++)
 444 {
 /* separator before every element but the first (append elided) */
 445 if (i > 1)
 447
 448 if (dim + 1 == ndims)
 449 {
 /* innermost dimension: serialize the next scalar value */
 451 result, tcategory,
 452 outfuncoid, false);
 453 (*valcount)++;
 454 }
 455 else
 456 {
 457 /*
 458 * Do we want line feeds on inner dimensions of arrays? For now
 459 * we'll say no.
 460 */
 461 array_dim_to_json(result, dim + 1, ndims, dims, vals, nulls,
 462 valcount, tcategory, outfuncoid, false);
 463 }
 464 }
 465
 466 appendStringInfoChar(result, ']');
 467}
468
469/*
470 * Turn an array into JSON.
471 */
/*
 * Serialize a whole array datum into "result" as JSON (see header comment
 * above): deconstruct it, categorize the element type once, then walk the
 * dimensions via array_dim_to_json.
 * NOTE(review): the signature line and the tcategory/outfuncoid local
 * declarations were dropped by the export.
 */
472static void
 474{
 475 ArrayType *v = DatumGetArrayTypeP(array);
 476 Oid element_type = ARR_ELEMTYPE(v);
 477 int *dim;
 478 int ndim;
 479 int nitems;
 480 int count = 0;
 481 Datum *elements;
 482 bool *nulls;
 483 int16 typlen;
 484 bool typbyval;
 485 char typalign;
 488
 489 ndim = ARR_NDIM(v);
 490 dim = ARR_DIMS(v);
 491 nitems = ArrayGetNItems(ndim, dim);
 492
 /* empty array short-circuits to "[]" */
 493 if (nitems <= 0)
 494 {
 495 appendStringInfoString(result, "[]");
 496 return;
 497 }
 498
 499 get_typlenbyvalalign(element_type,
 500 &typlen, &typbyval, &typalign);
 501
 /* categorize the element type once, outside the per-element loop */
 502 json_categorize_type(element_type, false,
 504
 505 deconstruct_array(v, element_type, typlen, typbyval,
 506 typalign, &elements, &nulls,
 507 &nitems);
 508
 509 array_dim_to_json(result, 0, ndim, dim, elements, nulls, &count, tcategory,
 511
 512 pfree(elements);
 513 pfree(nulls);
 514}
515
516/*
517 * Turn a composite / record into JSON.
518 * Exported so COPY TO can use it.
519 */
/*
 * composite_to_json (signature line elided): serialize a composite/record
 * datum into "result" as a JSON object, per the header comment above.
 * NOTE(review): the export dropped the tupType/tupTypmod extraction, the
 * lookup_rowtype_tupdesc call, the per-attribute tcategory/outfuncoid
 * declarations, and the null-branch/serialization statements; comments
 * below stick to what survives.
 */
520void
 522{
 524 Oid tupType;
 526 TupleDesc tupdesc;
 528 *tuple;
 529 int i;
 530 bool needsep = false;
 531 const char *sep;
 532 int seplen;
 533
 534 /*
 535 * We can avoid expensive strlen() calls by precalculating the separator
 536 * length.
 537 */
 538 sep = use_line_feeds ? ",\n " : ",";
 539 seplen = use_line_feeds ? strlen(",\n ") : strlen(",");
 540
 541 td = DatumGetHeapTupleHeader(composite);
 542
 543 /* Extract rowtype info and find a tupdesc */
 547
 548 /* Build a temporary HeapTuple control structure */
 550 tmptup.t_data = td;
 551 tuple = &tmptup;
 552
 553 appendStringInfoChar(result, '{');
 554
 555 for (i = 0; i < tupdesc->natts; i++)
 556 {
 557 Datum val;
 558 bool isnull;
 559 char *attname;
 563
 /* dropped columns are omitted from the output object */
 564 if (att->attisdropped)
 565 continue;
 566
 567 if (needsep)
 569 needsep = true;
 570
 /* the attribute name becomes the (escaped) object key */
 571 attname = NameStr(att->attname);
 572 escape_json(result, attname);
 573 appendStringInfoChar(result, ':');
 574
 575 val = heap_getattr(tuple, i + 1, tupdesc, &isnull);
 576
 577 if (isnull)
 578 {
 581 }
 582 else
 583 json_categorize_type(att->atttypid, false, &tcategory,
 584 &outfuncoid);
 585
 587 false);
 588 }
 589
 590 appendStringInfoChar(result, '}');
 591 ReleaseTupleDesc(tupdesc);
 592}
593
594/*
595 * Append JSON text for "val" to "result".
596 *
597 * This is just a thin wrapper around datum_to_json. If the same type will be
598 * printed many times, avoid using this; better to do the json_categorize_type
599 * lookups only once.
600 */
/*
 * add_json: one-shot wrapper that categorizes val_type and then serializes
 * val into "result" (see header comment above). Errors out if the caller
 * could not determine the argument's type.
 * NOTE(review): the first signature line and several statements (the
 * category/outfunc locals, the null branch, the final serialization call)
 * were dropped by the export.
 */
601static void
 603 Oid val_type, bool key_scalar)
 604{
 607
 608 if (val_type == InvalidOid)
 611 errmsg("could not determine input data type")));
 612
 613 if (is_null)
 614 {
 617 }
 618 else
 621
 623 key_scalar);
 624}
625
626/*
627 * SQL function array_to_json(row)
628 */
/*
 * SQL-callable array_to_json(row) (per the header comment above): build
 * the JSON text without pretty-printing.
 * NOTE(review): the signature and PG_RETURN line were dropped by the
 * export.
 */
629Datum
 631{
 632 Datum array = PG_GETARG_DATUM(0);
 633 StringInfoData result;
 634
 635 initStringInfo(&result);
 636
 637 array_to_json_internal(array, &result, false);
 638
 640}
641
642/*
643 * SQL function array_to_json(row, prettybool)
644 */
645Datum
647{
648 Datum array = PG_GETARG_DATUM(0);
650 StringInfoData result;
651
652 initStringInfo(&result);
653
654 array_to_json_internal(array, &result, use_line_feeds);
655
657}
658
659/*
660 * SQL function row_to_json(row)
661 */
/*
 * SQL-callable row_to_json(row) (per the header comment above): serialize
 * a composite datum without pretty-printing.
 * NOTE(review): the signature and PG_RETURN line were dropped by the
 * export.
 */
662Datum
 664{
 665 Datum array = PG_GETARG_DATUM(0);
 666 StringInfoData result;
 667
 668 initStringInfo(&result);
 669
 670 composite_to_json(array, &result, false);
 671
 673}
674
675/*
676 * SQL function row_to_json(row, prettybool)
677 */
678Datum
680{
681 Datum array = PG_GETARG_DATUM(0);
683 StringInfoData result;
684
685 initStringInfo(&result);
686
687 composite_to_json(array, &result, use_line_feeds);
688
690}
691
692/*
693 * Is the given type immutable when coming out of a JSON context?
694 */
/*
 * Report whether conversion of the given type to JSON is immutable, per
 * the header comment above: delegates to json_check_mutability and inverts
 * its has_mutable result.
 * NOTE(review): the signature line was dropped by the export; presumably
 * to_json_is_immutable(Oid typoid) -- confirm upstream.
 */
695bool
 697{
 698 bool has_mutable = false;
 699
 700 json_check_mutability(typoid, false, &has_mutable);
 701 return !has_mutable;
 702}
703
704/*
705 * SQL function to_json(anyvalue)
706 */
/*
 * SQL-callable to_json(anyvalue) (per the header comment above): determine
 * the argument's type from the call context and serialize it.
 * NOTE(review): the signature, the argument fetch, and the serialization/
 * return statements were dropped by the export.
 */
707Datum
 709{
 711 Oid val_type = get_fn_expr_argtype(fcinfo->flinfo, 0);
 714
 715 if (val_type == InvalidOid)
 718 errmsg("could not determine input data type")));
 719
 722
 724}
725
726/*
727 * Turn a Datum into JSON text.
728 *
729 * tcategory and outfuncoid are from a previous call to json_categorize_type.
730 */
/*
 * datum_to_json: exported single-datum serializer (see header comment
 * above); builds the JSON text and returns it as a text datum.
 * NOTE(review): the signature and the datum_to_json_internal call were
 * dropped by the export.
 */
731Datum
 733{
 734 StringInfoData result;
 735
 736 initStringInfo(&result);
 738 false);
 739
 740 return PointerGetDatum(cstring_to_text_with_len(result.data, result.len));
 741}
742
743/*
744 * json_agg transition function
745 *
746 * aggregate input column as a json array value.
747 */
/*
 * Shared transition function for json_agg / json_agg_strict (see header
 * comment above). absent_on_null selects the strict variant's skip-NULLs
 * behavior. On the first call (transvalue NULL) the JsonAggState is built
 * in the aggregate memory context.
 * NOTE(review): the export dropped the state/arg_type declarations, the
 * state-allocation line, the NULL fast-path output, and the final
 * PG_RETURN; comments below stick to what survives.
 */
748static Datum
749json_agg_transfn_worker(FunctionCallInfo fcinfo, bool absent_on_null)
750{
 751 MemoryContext aggcontext,
 752 oldcontext;
 754 Datum val;
 755
 756 if (!AggCheckCallContext(fcinfo, &aggcontext))
 757 {
 758 /* cannot be called directly because of internal-type argument */
 759 elog(ERROR, "json_agg_transfn called in non-aggregate context");
 760 }
 761
 /* first call: create and initialize the aggregation state */
 762 if (PG_ARGISNULL(0))
 763 {
 765
 766 if (arg_type == InvalidOid)
 769 errmsg("could not determine input data type")));
 770
 771 /*
 772 * Make this state object in a context where it will persist for the
 773 * duration of the aggregate call. MemoryContextSwitchTo is only
 774 * needed the first time, as the StringInfo routines make sure they
 775 * use the right context to enlarge the object if necessary.
 776 */
 777 oldcontext = MemoryContextSwitchTo(aggcontext);
 779 state->str = makeStringInfo();
 780 MemoryContextSwitchTo(oldcontext);
 781
 /* open the array and categorize the input type once */
 782 appendStringInfoChar(state->str, '[');
 783 json_categorize_type(arg_type, false, &state->val_category,
 784 &state->val_output_func);
 785 }
 786 else
 787 {
 789 }
 790
 /* strict variant: NULL inputs are skipped entirely */
 791 if (absent_on_null && PG_ARGISNULL(1))
 793
 /* len > 1 means something follows the opening '[': need a separator */
 794 if (state->str->len > 1)
 795 appendStringInfoString(state->str, ", ");
 796
 797 /* fast path for NULLs */
 798 if (PG_ARGISNULL(1))
 799 {
 801 InvalidOid, false);
 803 }
 804
 805 val = PG_GETARG_DATUM(1);
 806
 807 /* add some whitespace if structured type and not first item */
 808 if (!PG_ARGISNULL(0) && state->str->len > 1 &&
 809 (state->val_category == JSONTYPE_ARRAY ||
 810 state->val_category == JSONTYPE_COMPOSITE))
 811 {
 812 appendStringInfoString(state->str, "\n ");
 813 }
 814
 815 datum_to_json_internal(val, false, state->str, state->val_category,
 816 state->val_output_func, false);
 817
 818 /*
 819 * The transition type for json_agg() is declared to be "internal", which
 820 * is a pass-by-value type the same size as a pointer. So we can safely
 821 * pass the JsonAggState pointer through nodeAgg.c's machinations.
 822 */
 824}
825
826
827/*
828 * json_agg aggregate function
829 */
830Datum
832{
833 return json_agg_transfn_worker(fcinfo, false);
834}
835
836/*
837 * json_agg_strict aggregate function
838 */
839Datum
844
845/*
846 * json_agg final function
847 */
/*
 * json_agg final function (see header comment above): NULL for no input
 * rows, else the accumulated string with the closing ']' appended via
 * catenate_stringinfo_string (final functions may not modify the state).
 * NOTE(review): the signature, state declaration/cast, and return
 * statements were dropped by the export.
 */
848Datum
 850{
 852
 853 /* cannot be called directly because of internal-type argument */
 855
 856 state = PG_ARGISNULL(0) ?
 857 NULL :
 859
 860 /* NULL result for no rows in, as is standard with aggregates */
 861 if (state == NULL)
 863
 864 /* Else return state with appropriate array terminator added */
 866}
867
868/* Functions implementing hash table for key uniqueness check */
/*
 * Dynahash hash function for JsonUniqueHashEntry: combines the key bytes
 * into the hash. keysize is required by the HashValueFunc signature but
 * unused here.
 * NOTE(review): the declaration/initialization of "hash" (upstream line
 * 873, presumably seeded from entry->object_id) was dropped by the export.
 */
869static uint32
870json_unique_hash(const void *key, Size keysize)
871{
 872 const JsonUniqueHashEntry *entry = (const JsonUniqueHashEntry *) key;
 874
 875 hash ^= hash_bytes((const unsigned char *) entry->key, entry->key_len);
 876
 877 return hash;
 878}
879
/*
 * Dynahash match function for JsonUniqueHashEntry: entries are equal only
 * when object_id, key length, and key bytes all match; returns a signed
 * comparison result like memcmp. keysize is unused.
 * NOTE(review): the declaration of entry2 (upstream line 884) was dropped
 * by the export.
 */
880static int
881json_unique_hash_match(const void *key1, const void *key2, Size keysize)
882{
 883 const JsonUniqueHashEntry *entry1 = (const JsonUniqueHashEntry *) key1;
 885
 886 if (entry1->object_id != entry2->object_id)
 887 return entry1->object_id > entry2->object_id ? 1 : -1;
 888
 889 if (entry1->key_len != entry2->key_len)
 890 return entry1->key_len > entry2->key_len ? 1 : -1;
 891
 /* same object and length: compare the key bytes themselves */
 892 return strncmp(entry1->key, entry2->key, entry1->key_len);
 893}
894
895/*
896 * Uniqueness detection support.
897 *
898 * In order to detect uniqueness during building or parsing of a JSON
899 * object, we maintain a hash table of key names already seen.
900 */
/*
 * Initialize the key-uniqueness hash table (see header comment above):
 * creates a dynahash keyed and sized by JsonUniqueHashEntry using the
 * custom hash/match functions.
 * NOTE(review): the signature line, ctl.match/ctl.hcxt assignments, and
 * the hash_create flags argument were dropped by the export.
 */
901static void
 903{
 904 HASHCTL ctl;
 905
 906 memset(&ctl, 0, sizeof(ctl));
 907 ctl.keysize = sizeof(JsonUniqueHashEntry);
 908 ctl.entrysize = sizeof(JsonUniqueHashEntry);
 910 ctl.hash = json_unique_hash;
 912
 913 *cxt = hash_create("json object hashtable",
 914 32,
 915 &ctl,
 917}
918
919static void
926
/*
 * Record (key, object_id) in the uniqueness hash table; returns true if
 * the key was not seen before for that object, false on a duplicate.
 * NOTE(review): the declaration of the local "entry" (upstream line 930)
 * was dropped by the export.
 */
927static bool
928json_unique_check_key(JsonUniqueCheckState *cxt, const char *key, int object_id)
929{
 931 bool found;
 932
 933 entry.key = key;
 934 entry.key_len = strlen(key);
 935 entry.object_id = object_id;
 936
 /* HASH_ENTER inserts if absent; "found" tells us it was a duplicate */
 937 (void) hash_search(*cxt, &entry, HASH_ENTER, &found);
 938
 939 return !found;
 940}
941
942/*
943 * On-demand initialization of a throwaway StringInfo. This is used to
944 * read a key name that we don't need to store in the output object, for
945 * duplicate key detection when the value is NULL.
946 */
/*
 * Return the builder's throwaway StringInfo (see header comment above),
 * initializing it lazily on first use and merely resetting it afterwards.
 * NOTE(review): the signature line and the memory-context switch lines
 * around initStringInfo were dropped by the export.
 */
947static StringInfo
 949{
 950 StringInfo out = &cxt->skipped_keys;
 951
 /* first use: allocate the buffer (context handling elided here) */
 952 if (!out->data)
 953 {
 955
 956 initStringInfo(out);
 958 }
 959 else
 960 /* Just reset the string to empty */
 961 out->len = 0;
 962
 963 return out;
 964}
965
966/*
967 * json_object_agg transition function.
968 *
969 * aggregate two input columns as a single json object value.
970 */
/*
 * Shared transition function for the json_object_agg family (see header
 * comment above). absent_on_null skips pairs with NULL values;
 * unique_keys enables duplicate-key detection via the unique-check hash
 * table. On the first call the JsonAggState is built in the aggregate
 * context and both argument types are categorized once.
 * NOTE(review): the export dropped several lines (state/arg_type
 * declarations, state allocation, some ereport/return statements);
 * comments below stick to what survives.
 */
971static Datum
 973 bool absent_on_null, bool unique_keys)
 974{
 975 MemoryContext aggcontext,
 976 oldcontext;
 978 StringInfo out;
 979 Datum arg;
 980 bool skip;
 981 int key_offset;
 982
 983 if (!AggCheckCallContext(fcinfo, &aggcontext))
 984 {
 985 /* cannot be called directly because of internal-type argument */
 986 elog(ERROR, "json_object_agg_transfn called in non-aggregate context");
 987 }
 988
 /* first call: build the persistent aggregation state */
 989 if (PG_ARGISNULL(0))
 990 {
 992
 993 /*
 994 * Make the StringInfo in a context where it will persist for the
 995 * duration of the aggregate call. Switching context is only needed
 996 * for this initial step, as the StringInfo and dynahash routines make
 997 * sure they use the right context to enlarge the object if necessary.
 998 */
 999 oldcontext = MemoryContextSwitchTo(aggcontext);
 1001 state->str = makeStringInfo();
 1002 if (unique_keys)
 1003 json_unique_builder_init(&state->unique_check);
 1004 else
 1005 memset(&state->unique_check, 0, sizeof(state->unique_check));
 1006 MemoryContextSwitchTo(oldcontext);
 1007
 /* categorize the key (arg 1) and value (arg 2) types once */
 1008 arg_type = get_fn_expr_argtype(fcinfo->flinfo, 1);
 1009
 1010 if (arg_type == InvalidOid)
 1011 ereport(ERROR,
 1013 errmsg("could not determine data type for argument %d", 1)));
 1014
 1015 json_categorize_type(arg_type, false, &state->key_category,
 1016 &state->key_output_func);
 1017
 1018 arg_type = get_fn_expr_argtype(fcinfo->flinfo, 2);
 1019
 1020 if (arg_type == InvalidOid)
 1021 ereport(ERROR,
 1023 errmsg("could not determine data type for argument %d", 2)));
 1024
 1025 json_categorize_type(arg_type, false, &state->val_category,
 1026 &state->val_output_func);
 1027
 1028 appendStringInfoString(state->str, "{ ");
 1029 }
 1030 else
 1031 {
 1033 }
 1034
 1035 /*
 1036 * Note: since json_object_agg() is declared as taking type "any", the
 1037 * parser will not do any type conversion on unknown-type literals (that
 1038 * is, undecorated strings or NULLs). Such values will arrive here as
 1039 * type UNKNOWN, which fortunately does not matter to us, since
 1040 * unknownout() works fine.
 1041 */
 1042
 1043 if (PG_ARGISNULL(1))
 1044 ereport(ERROR,
 1046 errmsg("null value not allowed for object key")));
 1047
 1048 /* Skip null values if absent_on_null */
 1049 skip = absent_on_null && PG_ARGISNULL(2);
 1050
 1051 if (skip)
 1052 {
 1053 /*
 1054 * We got a NULL value and we're not storing those; if we're not
 1055 * testing key uniqueness, we're done. If we are, use the throwaway
 1056 * buffer to store the key name so that we can check it.
 1057 */
 1058 if (!unique_keys)
 1060
 1061 out = json_unique_builder_get_throwawaybuf(&state->unique_check);
 1062 }
 1063 else
 1064 {
 1065 out = state->str;
 1066
 1067 /*
 1068 * Append comma delimiter only if we have already output some fields
 1069 * after the initial string "{ ".
 1070 */
 1071 if (out->len > 2)
 1072 appendStringInfoString(out, ", ");
 1073 }
 1074
 1075 arg = PG_GETARG_DATUM(1);
 1076
 /* remember where the serialized key starts, for the uniqueness check */
 1077 key_offset = out->len;
 1078
 1079 datum_to_json_internal(arg, false, out, state->key_category,
 1080 state->key_output_func, true);
 1081
 1082 if (unique_keys)
 1083 {
 1084 /*
 1085 * Copy the key first, instead of pointing into the buffer. It will be
 1086 * added to the hash table, but the buffer may get reallocated as
 1087 * we're appending more data to it. That would invalidate pointers to
 1088 * keys in the current buffer.
 1089 */
 1090 const char *key = MemoryContextStrdup(aggcontext,
 1091 &out->data[key_offset]);
 1092
 1093 if (!json_unique_check_key(&state->unique_check.check, key, 0))
 1094 ereport(ERROR,
 1096 errmsg("duplicate JSON object key value: %s", key));
 1098 if (skip)
 1100 }
 1101
 1102 appendStringInfoString(state->str, " : ");
 1103
 1104 if (PG_ARGISNULL(2))
 1105 arg = (Datum) 0;
 1106 else
 1107 arg = PG_GETARG_DATUM(2);
 1108
 1110 state->val_category,
 1111 state->val_output_func, false);
 1112
 1114}
1115
1116/*
1117 * json_object_agg aggregate function
1118 */
1119Datum
1121{
1122 return json_object_agg_transfn_worker(fcinfo, false, false);
1123}
1124
1125/*
1126 * json_object_agg_strict aggregate function
1127 */
1128Datum
1133
1134/*
1135 * json_object_agg_unique aggregate function
1136 */
1137Datum
1142
1143/*
1144 * json_object_agg_unique_strict aggregate function
1145 */
1146Datum
1151
1152/*
1153 * json_object_agg final function.
1154 */
/*
 * json_object_agg final function (see header comment above): NULL for no
 * input rows, else the accumulated string with the closing brace appended
 * (final functions may not modify the aggregate state).
 * NOTE(review): the signature, state declaration/cast, and return
 * statements were dropped by the export.
 */
1155Datum
 1157{
 1159
 1160 /* cannot be called directly because of internal-type argument */
 1162
 1164
 1165 /* NULL result for no rows in, as is standard with aggregates */
 1166 if (state == NULL)
 1168
 1169 /* Else return state with appropriate object terminator added */
 1171}
1172
1173/*
1174 * Helper function for aggregates: return given StringInfo's contents plus
1175 * specified trailing string, as a text datum. We need this because aggregate
1176 * final functions are not allowed to modify the aggregate state.
1177 */
/*
 * Build a text datum from buffer's contents plus the trailing "addon"
 * string, without touching the buffer (see header comment above).
 * NOTE(review): the signature line (upstream 1179, presumably
 * catenate_stringinfo_string(StringInfo buffer, const char *addon)) was
 * dropped by the export.
 */
1178static text *
 1180{
 1181 /* custom version of cstring_to_text_with_len */
 1182 int buflen = buffer->len;
 1183 int addlen = strlen(addon);
 1184 text *result = (text *) palloc(buflen + addlen + VARHDRSZ);
 1185
 1186 SET_VARSIZE(result, buflen + addlen + VARHDRSZ);
 1187 memcpy(VARDATA(result), buffer->data, buflen);
 1188 memcpy(VARDATA(result) + buflen, addon, addlen);
 1189
 1190 return result;
 1191}
1192
/*
 * json_build_object_worker: shared implementation behind
 * json_build_object() and the JSON_OBJECT() constructor. Consumes nargs
 * alternating key/value datums; absent_on_null skips pairs whose value is
 * NULL, unique_keys raises an error on duplicate keys (skipped keys are
 * still checked, via the throwaway buffer). Returns a text datum.
 * NOTE(review): a handful of errcode lines inside the ereports were
 * dropped by the export.
 */
1193Datum
1194json_build_object_worker(int nargs, const Datum *args, const bool *nulls, const Oid *types,
 1195 bool absent_on_null, bool unique_keys)
1196{
 1197 int i;
 1198 const char *sep = "";
 1199 StringInfo result;
 1200 JsonUniqueBuilderState unique_check;
 1201
 1202 if (nargs % 2 != 0)
 1203 ereport(ERROR,
 1205 errmsg("argument list must have even number of elements"),
 1206 /* translator: %s is a SQL function name */
 1207 errhint("The arguments of %s must consist of alternating keys and values.",
 1208 "json_build_object()")));
 1209
 1210 result = makeStringInfo();
 1211
 1212 appendStringInfoChar(result, '{');
 1213
 1214 if (unique_keys)
 1215 json_unique_builder_init(&unique_check);
 1216
 /* each iteration consumes one key (i) / value (i+1) pair */
 1217 for (i = 0; i < nargs; i += 2)
 1218 {
 1219 StringInfo out;
 1220 bool skip;
 1221 int key_offset;
 1222
 1223 /* Skip null values if absent_on_null */
 1224 skip = absent_on_null && nulls[i + 1];
 1225
 1226 if (skip)
 1227 {
 1228 /* If key uniqueness check is needed we must save skipped keys */
 1229 if (!unique_keys)
 1230 continue;
 1231
 1232 out = json_unique_builder_get_throwawaybuf(&unique_check);
 1233 }
 1234 else
 1235 {
 1236 appendStringInfoString(result, sep);
 1237 sep = ", ";
 1238 out = result;
 1239 }
 1240
 1241 /* process key */
 1242 if (nulls[i])
 1243 ereport(ERROR,
 1245 errmsg("null value not allowed for object key")));
 1246
 1247 /* save key offset before appending it */
 1248 key_offset = out->len;
 1249
 1250 add_json(args[i], false, out, types[i], true);
 1251
 1252 if (unique_keys)
 1253 {
 1254 /*
 1255 * check key uniqueness after key appending
 1256 *
 1257 * Copy the key first, instead of pointing into the buffer. It
 1258 * will be added to the hash table, but the buffer may get
 1259 * reallocated as we're appending more data to it. That would
 1260 * invalidate pointers to keys in the current buffer.
 1261 */
 1262 const char *key = pstrdup(&out->data[key_offset]);
 1263
 1264 if (!json_unique_check_key(&unique_check.check, key, 0))
 1265 ereport(ERROR,
 1267 errmsg("duplicate JSON object key value: %s", key));
 1269 if (skip)
 1270 continue;
 1271 }
 1272
 1273 appendStringInfoString(result, " : ");
 1274
 1275 /* process value */
 1276 add_json(args[i + 1], nulls[i + 1], result, types[i + 1], false);
 1277 }
 1278
 1279 appendStringInfoChar(result, '}');
 1280
 1281 return PointerGetDatum(cstring_to_text_with_len(result->data, result->len));
 1282}
1283
1284/*
1285 * SQL function json_build_object(variadic "any")
1286 */
1287Datum
1289{
1290 Datum *args;
1291 bool *nulls;
1292 Oid *types;
1293
1294 /* build argument values to build the object */
1295 int nargs = extract_variadic_args(fcinfo, 0, true,
1296 &args, &types, &nulls);
1297
1298 if (nargs < 0)
1300
1301 PG_RETURN_DATUM(json_build_object_worker(nargs, args, nulls, types, false, false));
1302}
1303
1304/*
1305 * degenerate case of json_build_object where it gets 0 arguments.
1306 */
1307Datum
1312
1313Datum
1314json_build_array_worker(int nargs, const Datum *args, const bool *nulls, const Oid *types,
1315 bool absent_on_null)
1316{
1317 int i;
1318 const char *sep = "";
1319 StringInfoData result;
1320
1321 initStringInfo(&result);
1322
1323 appendStringInfoChar(&result, '[');
1324
1325 for (i = 0; i < nargs; i++)
1326 {
1327 if (absent_on_null && nulls[i])
1328 continue;
1329
1330 appendStringInfoString(&result, sep);
1331 sep = ", ";
1332 add_json(args[i], nulls[i], &result, types[i], false);
1333 }
1334
1335 appendStringInfoChar(&result, ']');
1336
1337 return PointerGetDatum(cstring_to_text_with_len(result.data, result.len));
1338}
1339
1340/*
1341 * SQL function json_build_array(variadic "any")
1342 */
1343Datum
1345{
1346 Datum *args;
1347 bool *nulls;
1348 Oid *types;
1349
1350 /* build argument values to build the object */
1351 int nargs = extract_variadic_args(fcinfo, 0, true,
1352 &args, &types, &nulls);
1353
1354 if (nargs < 0)
1356
1357 PG_RETURN_DATUM(json_build_array_worker(nargs, args, nulls, types, false));
1358}
1359
1360/*
1361 * degenerate case of json_build_array where it gets 0 arguments.
1362 */
1363Datum
1368
1369/*
1370 * SQL function json_object(text[])
1371 *
1372 * take a one or two dimensional array of text as key/value pairs
1373 * for a json object.
1374 */
/*
 * SQL-callable json_object(text[]) (see header comment above): a 1-D array
 * supplies alternating keys and values; a 2-D array must be Nx2 with one
 * key/value pair per row. Keys may not be NULL; NULL values render as the
 * JSON literal null.
 * NOTE(review): the signature, the in_array/in_datums declarations, the
 * empty-array return, the deconstruct_array call, several errcode lines,
 * and the final pfree/return statements were dropped by the export.
 */
1375Datum
 1377{
 1379 int ndims = ARR_NDIM(in_array);
 1380 StringInfoData result;
 1382 bool *in_nulls;
 1383 int in_count,
 1384 count,
 1385 i;
 1386 text *rval;
 1387
 /* validate the array shape before deconstructing it */
 1388 switch (ndims)
 1389 {
 1390 case 0:
 1392 break;
 1393
 1394 case 1:
 1395 if ((ARR_DIMS(in_array)[0]) % 2)
 1396 ereport(ERROR,
 1398 errmsg("array must have even number of elements")));
 1399 break;
 1400
 1401 case 2:
 1402 if ((ARR_DIMS(in_array)[1]) != 2)
 1403 ereport(ERROR,
 1405 errmsg("array must have two columns")));
 1406 break;
 1407
 1408 default:
 1409 ereport(ERROR,
 1411 errmsg("wrong number of array subscripts")));
 1412 }
 1413
 1415
 1416 count = in_count / 2;
 1417
 1418 initStringInfo(&result);
 1419
 1420 appendStringInfoChar(&result, '{');
 1421
 /* elements pair up as key (2i) and value (2i+1) */
 1422 for (i = 0; i < count; ++i)
 1423 {
 1424 if (in_nulls[i * 2])
 1425 ereport(ERROR,
 1427 errmsg("null value not allowed for object key")));
 1428
 1429 if (i > 0)
 1430 appendStringInfoString(&result, ", ");
 1431 escape_json_text(&result, (text *) DatumGetPointer(in_datums[i * 2]));
 1432 appendStringInfoString(&result, " : ");
 1433 if (in_nulls[i * 2 + 1])
 1434 appendStringInfoString(&result, "null");
 1435 else
 1436 {
 1437 escape_json_text(&result,
 1438 (text *) DatumGetPointer(in_datums[i * 2 + 1]));
 1439 }
 1440 }
 1441
 1442 appendStringInfoChar(&result, '}');
 1443
 1445 pfree(in_nulls);
 1446
 1447 rval = cstring_to_text_with_len(result.data, result.len);
 1448 pfree(result.data);
 1449
 1451}
1452
1453/*
1454 * SQL function json_object(text[], text[])
1455 *
1456 * take separate key and value arrays of text to construct a json object
1457 * pairwise.
1458 */
/*
 * SQL-callable json_object(text[], text[]) (see header comment above):
 * pairs keys[i] with values[i]. Both arrays must be one-dimensional and
 * of equal length; keys may not be NULL.
 * NOTE(review): the signature, the key_array/val_array and key_datums
 * declarations, the empty-array return, the deconstruct calls, several
 * errcode lines, and the final pfree/return statements were dropped by
 * the export.
 */
1459Datum
 1461{
 1464 int nkdims = ARR_NDIM(key_array);
 1465 int nvdims = ARR_NDIM(val_array);
 1466 StringInfoData result;
 1468 *val_datums;
 1469 bool *key_nulls,
 1470 *val_nulls;
 1471 int key_count,
 1472 val_count,
 1473 i;
 1474 text *rval;
 1475
 1476 if (nkdims > 1 || nkdims != nvdims)
 1477 ereport(ERROR,
 1479 errmsg("wrong number of array subscripts")));
 1480
 1481 if (nkdims == 0)
 1483
 1486
 1487 if (key_count != val_count)
 1488 ereport(ERROR,
 1490 errmsg("mismatched array dimensions")));
 1491
 1492 initStringInfo(&result);
 1493
 1494 appendStringInfoChar(&result, '{');
 1495
 1496 for (i = 0; i < key_count; ++i)
 1497 {
 1498 if (key_nulls[i])
 1499 ereport(ERROR,
 1501 errmsg("null value not allowed for object key")));
 1502
 1503 if (i > 0)
 1504 appendStringInfoString(&result, ", ");
 /* NOTE(review): the escape_json_text call emitting the key
  * (upstream line 1505) was dropped by the export. */
 1506 appendStringInfoString(&result, " : ");
 1507 if (val_nulls[i])
 1508 appendStringInfoString(&result, "null");
 1509 else
 1510 escape_json_text(&result,
 1512 }
 1513
 1514 appendStringInfoChar(&result, '}');
 1515
 1520
 1521 rval = cstring_to_text_with_len(result.data, result.len);
 1522 pfree(result.data);
 1524
 1525}
1526
1527/*
1528 * escape_json_char
1529 * Inline helper function for escape_json* functions
1530 */
/*
 * escape_json_char (signature lines elided by this export): append one
 * character to buf with JSON string escaping -- named escapes for the
 * usual control characters and for '"' and '\\', \uXXXX for any other
 * byte below 0x20, the character itself otherwise.
 * NOTE(review): the appendStringInfoString calls for \b, \f, \n, \r, \t
 * and the default-case literal append were dropped by the export.
 */
1533{
 1534 switch (c)
 1535 {
 1536 case '\b':
 1538 break;
 1539 case '\f':
 1541 break;
 1542 case '\n':
 1544 break;
 1545 case '\r':
 1547 break;
 1548 case '\t':
 1550 break;
 1551 case '"':
 1552 appendStringInfoString(buf, "\\\"");
 1553 break;
 1554 case '\\':
 1555 appendStringInfoString(buf, "\\\\");
 1556 break;
 1557 default:
 /* remaining control characters must use the \uXXXX form */
 1558 if ((unsigned char) c < ' ')
 1559 appendStringInfo(buf, "\\u%04x", (int) c);
 1560 else
 1562 break;
 1563 }
 1564}
1565
1566/*
1567 * escape_json
1568 * Produce a JSON string literal, properly escaping the NUL-terminated
1569 * cstring.
1570 */
/*
 * escape_json: emit the NUL-terminated cstring as a quoted, escaped JSON
 * string literal (see header comment above), escaping each character via
 * escape_json_char.
 * NOTE(review): the signature line, the opening/closing quote appends,
 * and the loop body were dropped by the export.
 */
1571void
 1573{
 1575
 1576 for (; *str != '\0'; str++)
 1578
 1580}
1581
1582/*
1583 * Define the number of bytes that escape_json_with_len will look ahead in the
1584 * input string before flushing the input string to the destination buffer.
1585 * Looking ahead too far could result in cachelines being evicted that will
1586 * need to be reloaded in order to perform the appendBinaryStringInfo call.
1587 * Smaller values will result in a larger number of calls to
1588 * appendBinaryStringInfo and introduce additional function call overhead.
1589 * Values larger than the size of L1d cache will likely result in worse
1590 * performance.
1591 */
1592#define ESCAPE_JSON_FLUSH_AFTER 512
1593
1594/*
1595 * escape_json_with_len
1596 * Produce a JSON string literal, properly escaping the possibly not
1597 * NUL-terminated characters in 'str'. 'len' defines the number of bytes
1598 * from 'str' to process.
1599 */
1600void
1602{
1603 int vlen;
1604
1605 Assert(len >= 0);
1606
1607 /*
1608 * Since we know the minimum length we'll need to append, let's just
1609 * enlarge the buffer now rather than incrementally making more space when
1610 * we run out. Add two extra bytes for the enclosing quotes.
1611 */
1613
1614 /*
1615 * Figure out how many bytes to process using SIMD. Round 'len' down to
1616 * the previous multiple of sizeof(Vector8), assuming that's a power-of-2.
1617 */
1618 vlen = len & (int) (~(sizeof(Vector8) - 1));
1619
1621
1622 for (int i = 0, copypos = 0;;)
1623 {
1624 /*
1625 * To speed this up, try searching sizeof(Vector8) bytes at once for
1626 * special characters that we need to escape. When we find one, we
1627 * fall out of the Vector8 loop and copy the portion we've vector
1628 * searched and then we process sizeof(Vector8) bytes one byte at a
1629 * time. Once done, come back and try doing vector searching again.
1630 * We'll also process any remaining bytes at the tail end of the
1631 * string byte-by-byte. This optimization assumes that most chunks of
1632 * sizeof(Vector8) bytes won't contain any special characters.
1633 */
1634 for (; i < vlen; i += sizeof(Vector8))
1635 {
1636 Vector8 chunk;
1637
1638 vector8_load(&chunk, (const uint8 *) &str[i]);
1639
1640 /*
1641 * Break on anything less than ' ' or if we find a '"' or '\\'.
1642 * Those need special handling. That's done in the per-byte loop.
1643 */
1644 if (vector8_has_le(chunk, (unsigned char) 0x1F) ||
1645 vector8_has(chunk, (unsigned char) '"') ||
1646 vector8_has(chunk, (unsigned char) '\\'))
1647 break;
1648
1649#ifdef ESCAPE_JSON_FLUSH_AFTER
1650
1651 /*
1652 * Flush what's been checked so far out to the destination buffer
1653 * every so often to avoid having to re-read cachelines when
1654 * escaping large strings.
1655 */
1657 {
1659 copypos = i;
1660 }
1661#endif
1662 }
1663
1664 /*
1665 * Write to the destination up to the point that we've vector searched
1666 * so far. Do this only when switching into per-byte mode rather than
1667 * once every sizeof(Vector8) bytes.
1668 */
1669 if (copypos < i)
1670 {
1672 copypos = i;
1673 }
1674
1675 /*
1676 * Per-byte loop for Vector8s containing special chars and for
1677 * processing the tail of the string.
1678 */
1679 for (int b = 0; b < sizeof(Vector8); b++)
1680 {
1681 /* check if we've finished */
1682 if (i == len)
1683 goto done;
1684
1685 Assert(i < len);
1686
1688 }
1689
1690 copypos = i;
1691 /* We're not done yet. Try the vector search again. */
1692 }
1693
1694done:
1696}
1697
1698/*
1699 * escape_json_text
1700 * Append 'txt' onto 'buf' and escape using escape_json_with_len.
1701 *
1702 * This is more efficient than calling text_to_cstring and appending the
1703 * result as that could require an additional palloc and memcpy.
1704 */
1705void
1707{
1708 /* must cast away the const, unfortunately */
1711 char *str;
1712
1714
1716
1717 /* pfree any detoasted values */
1718 if (tunpacked != txt)
1720}
1721
1722/* Semantic actions for key uniqueness check */
1723static JsonParseErrorType
1725{
1727 JsonUniqueStackEntry *entry;
1728
1729 if (!state->unique)
1730 return JSON_SUCCESS;
1731
1732 /* push object entry to stack */
1734 entry->object_id = state->id_counter++;
1735 entry->parent = state->stack;
1736 state->stack = entry;
1737
1738 return JSON_SUCCESS;
1739}
1740
1741static JsonParseErrorType
1743{
1745 JsonUniqueStackEntry *entry;
1746
1747 if (!state->unique)
1748 return JSON_SUCCESS;
1749
1750 entry = state->stack;
1751 state->stack = entry->parent; /* pop object from stack */
1752 pfree(entry);
1753 return JSON_SUCCESS;
1754}
1755
1756static JsonParseErrorType
1757json_unique_object_field_start(void *_state, char *field, bool isnull)
1758{
1760 JsonUniqueStackEntry *entry;
1761
1762 if (!state->unique)
1763 return JSON_SUCCESS;
1764
1765 /* find key collision in the current object */
1766 if (json_unique_check_key(&state->check, field, state->stack->object_id))
1767 return JSON_SUCCESS;
1768
1769 state->unique = false;
1770
1771 /* pop all objects entries */
1772 while ((entry = state->stack))
1773 {
1774 state->stack = entry->parent;
1775 pfree(entry);
1776 }
1777 return JSON_SUCCESS;
1778}
1779
1780/* Validate JSON text and additionally check key uniqueness */
1781bool
1783{
1784 JsonLexContext lex;
1787 JsonParseErrorType result;
1788
1790
1792 {
1793 state.lex = &lex;
1794 state.stack = NULL;
1795 state.id_counter = 0;
1796 state.unique = true;
1798
1799 uniqueSemAction.semstate = &state;
1803 }
1804
1806
1807 if (result != JSON_SUCCESS)
1808 {
1809 if (throw_error)
1810 json_errsave_error(result, &lex, NULL);
1811
1812 return false; /* invalid json */
1813 }
1814
1815 if (check_unique_keys && !state.unique)
1816 {
1817 if (throw_error)
1818 ereport(ERROR,
1820 errmsg("duplicate JSON object key value")));
1821
1822 return false; /* not unique keys */
1823 }
1824
1826 freeJsonLexContext(&lex);
1827
1828 return true; /* ok */
1829}
1830
1831/*
1832 * SQL function json_typeof(json) -> text
1833 *
1834 * Returns the type of the outermost JSON value as TEXT. Possible types are
1835 * "object", "array", "string", "number", "boolean", and "null".
1836 *
1837 * Performs a single call to json_lex() to get the first token of the supplied
1838 * value. This initial token uniquely determines the value's type. As our
1839 * input must already have been validated by json_in() or json_recv(), the
1840 * initial token should never be JSON_TOKEN_OBJECT_END, JSON_TOKEN_ARRAY_END,
1841 * JSON_TOKEN_COLON, JSON_TOKEN_COMMA, or JSON_TOKEN_END.
1842 */
1843Datum
1845{
1846 text *json = PG_GETARG_TEXT_PP(0);
1847 JsonLexContext lex;
1848 char *type;
1849 JsonParseErrorType result;
1850
1851 /* Lex exactly one token from the input and check its type. */
1852 makeJsonLexContext(&lex, json, false);
1853 result = json_lex(&lex);
1854 if (result != JSON_SUCCESS)
1855 json_errsave_error(result, &lex, NULL);
1856
1857 switch (lex.token_type)
1858 {
1860 type = "object";
1861 break;
1863 type = "array";
1864 break;
1865 case JSON_TOKEN_STRING:
1866 type = "string";
1867 break;
1868 case JSON_TOKEN_NUMBER:
1869 type = "number";
1870 break;
1871 case JSON_TOKEN_TRUE:
1872 case JSON_TOKEN_FALSE:
1873 type = "boolean";
1874 break;
1875 case JSON_TOKEN_NULL:
1876 type = "null";
1877 break;
1878 default:
1879 elog(ERROR, "unexpected json token: %d", lex.token_type);
1880 }
1881
1883}
#define ARR_NDIM(a)
Definition array.h:290
#define PG_GETARG_ARRAYTYPE_P(n)
Definition array.h:263
#define DatumGetArrayTypeP(X)
Definition array.h:261
#define ARR_ELEMTYPE(a)
Definition array.h:292
#define ARR_DIMS(a)
Definition array.h:294
void deconstruct_array_builtin(const ArrayType *array, Oid elmtype, Datum **elemsp, bool **nullsp, int *nelemsp)
void deconstruct_array(const ArrayType *array, Oid elmtype, int elmlen, bool elmbyval, char elmalign, Datum **elemsp, bool **nullsp, int *nelemsp)
int ArrayGetNItems(int ndim, const int *dims)
Definition arrayutils.c:57
void EncodeTimeOnly(struct pg_tm *tm, fsec_t fsec, bool print_tz, int tz, int style, char *str)
Definition datetime.c:4435
void j2date(int jd, int *year, int *month, int *day)
Definition datetime.c:322
void EncodeDateTime(struct pg_tm *tm, fsec_t fsec, bool print_tz, int tz, const char *tzn, int style, char *str)
Definition datetime.c:4465
void EncodeDateOnly(struct pg_tm *tm, int style, char *str)
Definition datetime.c:4350
void EncodeSpecialTimestamp(Timestamp dt, char *str)
Definition timestamp.c:1578
int timestamp2tm(Timestamp dt, int *tzp, struct pg_tm *tm, fsec_t *fsec, const char **tzn, pg_tz *attimezone)
Definition timestamp.c:1901
#define CStringGetTextDatum(s)
Definition builtins.h:98
#define TextDatumGetCString(d)
Definition builtins.h:99
#define NameStr(name)
Definition c.h:837
#define unconstify(underlying_type, expr)
Definition c.h:1327
uint8_t uint8
Definition c.h:616
#define VARHDRSZ
Definition c.h:783
#define Assert(condition)
Definition c.h:945
#define pg_attribute_always_inline
Definition c.h:299
int16_t int16
Definition c.h:613
int32_t int32
Definition c.h:614
uint32_t uint32
Definition c.h:618
size_t Size
Definition c.h:691
int64 Timestamp
Definition timestamp.h:38
int64 TimestampTz
Definition timestamp.h:39
int32 fsec_t
Definition timestamp.h:41
#define USECS_PER_SEC
Definition timestamp.h:134
#define TIMESTAMP_NOT_FINITE(j)
Definition timestamp.h:169
#define POSTGRES_EPOCH_JDATE
Definition timestamp.h:235
int timetz2tm(TimeTzADT *time, struct pg_tm *tm, fsec_t *fsec, int *tzp)
Definition date.c:2491
int time2tm(TimeADT time, struct pg_tm *tm, fsec_t *fsec)
Definition date.c:1576
void EncodeSpecialDate(DateADT dt, char *str)
Definition date.c:295
#define DATE_NOT_FINITE(j)
Definition date.h:49
static TimeTzADT * DatumGetTimeTzADTP(Datum X)
Definition date.h:72
int32 DateADT
Definition date.h:21
static DateADT DatumGetDateADT(Datum X)
Definition date.h:60
static TimeADT DatumGetTimeADT(Datum X)
Definition date.h:66
int64 TimeADT
Definition date.h:23
void * hash_search(HTAB *hashp, const void *keyPtr, HASHACTION action, bool *foundPtr)
Definition dynahash.c:952
HTAB * hash_create(const char *tabname, int64 nelem, const HASHCTL *info, int flags)
Definition dynahash.c:358
struct typedefs * types
Definition ecpg.c:30
Datum arg
Definition elog.c:1322
int errcode(int sqlerrcode)
Definition elog.c:874
int errhint(const char *fmt,...) pg_attribute_printf(1
#define ERROR
Definition elog.h:39
#define elog(elevel,...)
Definition elog.h:226
#define ereport(elevel,...)
Definition elog.h:150
#define palloc_object(type)
Definition fe_memutils.h:74
char * OidOutputFunctionCall(Oid functionId, Datum val)
Definition fmgr.c:1764
varlena * pg_detoast_datum_packed(varlena *datum)
Definition fmgr.c:1830
Oid get_fn_expr_argtype(FmgrInfo *flinfo, int argnum)
Definition fmgr.c:1876
#define OidFunctionCall1(functionId, arg1)
Definition fmgr.h:722
#define PG_GETARG_TEXT_PP(n)
Definition fmgr.h:310
#define PG_RETURN_BYTEA_P(x)
Definition fmgr.h:373
#define DatumGetHeapTupleHeader(X)
Definition fmgr.h:296
#define DatumGetTextPP(X)
Definition fmgr.h:293
#define PG_GETARG_POINTER(n)
Definition fmgr.h:277
#define PG_RETURN_CSTRING(x)
Definition fmgr.h:364
#define PG_ARGISNULL(n)
Definition fmgr.h:209
#define PG_GETARG_DATUM(n)
Definition fmgr.h:268
#define PG_GETARG_CSTRING(n)
Definition fmgr.h:278
#define PG_RETURN_NULL()
Definition fmgr.h:346
#define PG_RETURN_TEXT_P(x)
Definition fmgr.h:374
#define PG_GETARG_BOOL(n)
Definition fmgr.h:274
#define PG_RETURN_DATUM(x)
Definition fmgr.h:354
#define PG_RETURN_POINTER(x)
Definition fmgr.h:363
#define PG_FUNCTION_ARGS
Definition fmgr.h:193
int extract_variadic_args(FunctionCallInfo fcinfo, int variadic_start, bool convert_unknown, Datum **args, Oid **types, bool **nulls)
Definition funcapi.c:2011
uint32 hash_bytes_uint32(uint32 k)
Definition hashfn.c:610
uint32 hash_bytes(const unsigned char *k, int keylen)
Definition hashfn.c:146
const char * str
@ HASH_ENTER
Definition hsearch.h:114
#define HASH_CONTEXT
Definition hsearch.h:102
#define HASH_ELEM
Definition hsearch.h:95
#define HASH_COMPARE
Definition hsearch.h:99
#define HASH_FUNCTION
Definition hsearch.h:98
static Datum heap_getattr(HeapTuple tup, int attnum, TupleDesc tupleDesc, bool *isnull)
static int32 HeapTupleHeaderGetTypMod(const HeapTupleHeaderData *tup)
static uint32 HeapTupleHeaderGetDatumLength(const HeapTupleHeaderData *tup)
static Oid HeapTupleHeaderGetTypeId(const HeapTupleHeaderData *tup)
#define MAXDATELEN
Definition datetime.h:200
#define nitems(x)
Definition indent.h:31
long val
Definition informix.c:689
static struct @174 value
int b
Definition isn.c:74
int i
Definition isn.c:77
Datum row_to_json(PG_FUNCTION_ARGS)
Definition json.c:663
static JsonParseErrorType json_unique_object_start(void *_state)
Definition json.c:1724
static uint32 json_unique_hash(const void *key, Size keysize)
Definition json.c:870
Datum json_build_object_noargs(PG_FUNCTION_ARGS)
Definition json.c:1308
void escape_json_text(StringInfo buf, const text *txt)
Definition json.c:1706
char * JsonEncodeDateTime(char *buf, Datum value, Oid typid, const int *tzp)
Definition json.c:309
static int json_unique_hash_match(const void *key1, const void *key2, Size keysize)
Definition json.c:881
static void json_unique_check_init(JsonUniqueCheckState *cxt)
Definition json.c:902
static text * catenate_stringinfo_string(StringInfo buffer, const char *addon)
Definition json.c:1179
struct HTAB * JsonUniqueCheckState
Definition json.c:42
Datum json_agg_strict_transfn(PG_FUNCTION_ARGS)
Definition json.c:840
static pg_attribute_always_inline void escape_json_char(StringInfo buf, char c)
Definition json.c:1532
Datum json_in(PG_FUNCTION_ARGS)
Definition json.c:106
static StringInfo json_unique_builder_get_throwawaybuf(JsonUniqueBuilderState *cxt)
Definition json.c:948
static bool json_unique_check_key(JsonUniqueCheckState *cxt, const char *key, int object_id)
Definition json.c:928
Datum json_out(PG_FUNCTION_ARGS)
Definition json.c:125
Datum json_agg_transfn(PG_FUNCTION_ARGS)
Definition json.c:831
Datum to_json(PG_FUNCTION_ARGS)
Definition json.c:708
Datum json_build_array_worker(int nargs, const Datum *args, const bool *nulls, const Oid *types, bool absent_on_null)
Definition json.c:1314
Datum json_object_agg_finalfn(PG_FUNCTION_ARGS)
Definition json.c:1156
Datum row_to_json_pretty(PG_FUNCTION_ARGS)
Definition json.c:679
Datum json_send(PG_FUNCTION_ARGS)
Definition json.c:137
static Datum json_agg_transfn_worker(FunctionCallInfo fcinfo, bool absent_on_null)
Definition json.c:749
void escape_json_with_len(StringInfo buf, const char *str, int len)
Definition json.c:1601
static void datum_to_json_internal(Datum val, bool is_null, StringInfo result, JsonTypeCategory tcategory, Oid outfuncoid, bool key_scalar)
Definition json.c:178
Datum array_to_json_pretty(PG_FUNCTION_ARGS)
Definition json.c:646
static void array_dim_to_json(StringInfo result, int dim, int ndims, int *dims, const Datum *vals, const bool *nulls, int *valcount, JsonTypeCategory tcategory, Oid outfuncoid, bool use_line_feeds)
Definition json.c:430
Datum json_object_agg_unique_transfn(PG_FUNCTION_ARGS)
Definition json.c:1138
Datum json_object_two_arg(PG_FUNCTION_ARGS)
Definition json.c:1460
Datum json_object_agg_unique_strict_transfn(PG_FUNCTION_ARGS)
Definition json.c:1147
Datum json_build_array_noargs(PG_FUNCTION_ARGS)
Definition json.c:1364
Datum json_object_agg_transfn(PG_FUNCTION_ARGS)
Definition json.c:1120
Datum array_to_json(PG_FUNCTION_ARGS)
Definition json.c:630
Datum json_object_agg_strict_transfn(PG_FUNCTION_ARGS)
Definition json.c:1129
Datum json_build_array(PG_FUNCTION_ARGS)
Definition json.c:1344
static void json_unique_builder_init(JsonUniqueBuilderState *cxt)
Definition json.c:920
Datum json_build_object_worker(int nargs, const Datum *args, const bool *nulls, const Oid *types, bool absent_on_null, bool unique_keys)
Definition json.c:1194
#define ESCAPE_JSON_FLUSH_AFTER
Definition json.c:1592
static void add_json(Datum val, bool is_null, StringInfo result, Oid val_type, bool key_scalar)
Definition json.c:602
Datum json_agg_finalfn(PG_FUNCTION_ARGS)
Definition json.c:849
void composite_to_json(Datum composite, StringInfo result, bool use_line_feeds)
Definition json.c:521
Datum json_recv(PG_FUNCTION_ARGS)
Definition json.c:151
Datum datum_to_json(Datum val, JsonTypeCategory tcategory, Oid outfuncoid)
Definition json.c:732
bool json_validate(text *json, bool check_unique_keys, bool throw_error)
Definition json.c:1782
Datum json_typeof(PG_FUNCTION_ARGS)
Definition json.c:1844
void escape_json(StringInfo buf, const char *str)
Definition json.c:1572
static Datum json_object_agg_transfn_worker(FunctionCallInfo fcinfo, bool absent_on_null, bool unique_keys)
Definition json.c:972
Datum json_object(PG_FUNCTION_ARGS)
Definition json.c:1376
Datum json_build_object(PG_FUNCTION_ARGS)
Definition json.c:1288
bool to_json_is_immutable(Oid typoid)
Definition json.c:696
static JsonParseErrorType json_unique_object_field_start(void *_state, char *field, bool isnull)
Definition json.c:1757
static void array_to_json_internal(Datum array, StringInfo result, bool use_line_feeds)
Definition json.c:473
static JsonParseErrorType json_unique_object_end(void *_state)
Definition json.c:1742
JsonParseErrorType pg_parse_json(JsonLexContext *lex, const JsonSemAction *sem)
Definition jsonapi.c:744
JsonLexContext * makeJsonLexContextCstringLen(JsonLexContext *lex, const char *json, size_t len, int encoding, bool need_escapes)
Definition jsonapi.c:392
const JsonSemAction nullSemAction
Definition jsonapi.c:287
JsonParseErrorType json_lex(JsonLexContext *lex)
Definition jsonapi.c:1588
void freeJsonLexContext(JsonLexContext *lex)
Definition jsonapi.c:687
JsonParseErrorType
Definition jsonapi.h:35
@ JSON_SUCCESS
Definition jsonapi.h:36
@ JSON_TOKEN_FALSE
Definition jsonapi.h:29
@ JSON_TOKEN_TRUE
Definition jsonapi.h:28
@ JSON_TOKEN_NULL
Definition jsonapi.h:30
@ JSON_TOKEN_OBJECT_START
Definition jsonapi.h:22
@ JSON_TOKEN_NUMBER
Definition jsonapi.h:21
@ JSON_TOKEN_STRING
Definition jsonapi.h:20
@ JSON_TOKEN_ARRAY_START
Definition jsonapi.h:24
JsonLexContext * makeJsonLexContext(JsonLexContext *lex, text *json, bool need_escapes)
Definition jsonfuncs.c:543
void json_check_mutability(Oid typoid, bool is_jsonb, bool *has_mutable)
Definition jsonfuncs.c:6078
void json_categorize_type(Oid typoid, bool is_jsonb, JsonTypeCategory *tcategory, Oid *outfuncoid)
Definition jsonfuncs.c:5967
void json_errsave_error(JsonParseErrorType error, JsonLexContext *lex, Node *escontext)
Definition jsonfuncs.c:644
bool pg_parse_json_or_errsave(JsonLexContext *lex, const JsonSemAction *sem, Node *escontext)
Definition jsonfuncs.c:522
#define pg_parse_json_or_ereport(lex, sem)
Definition jsonfuncs.h:47
JsonTypeCategory
Definition jsonfuncs.h:69
@ JSONTYPE_JSON
Definition jsonfuncs.h:76
@ JSONTYPE_NULL
Definition jsonfuncs.h:70
@ JSONTYPE_TIMESTAMP
Definition jsonfuncs.h:74
@ JSONTYPE_NUMERIC
Definition jsonfuncs.h:72
@ JSONTYPE_DATE
Definition jsonfuncs.h:73
@ JSONTYPE_BOOL
Definition jsonfuncs.h:71
@ JSONTYPE_CAST
Definition jsonfuncs.h:80
@ JSONTYPE_COMPOSITE
Definition jsonfuncs.h:79
@ JSONTYPE_ARRAY
Definition jsonfuncs.h:78
@ JSONTYPE_TIMESTAMPTZ
Definition jsonfuncs.h:75
static struct pg_tm tm
Definition localtime.c:104
void get_typlenbyvalalign(Oid typid, int16 *typlen, bool *typbyval, char *typalign)
Definition lsyscache.c:2491
int GetDatabaseEncoding(void)
Definition mbutils.c:1389
char * MemoryContextStrdup(MemoryContext context, const char *string)
Definition mcxt.c:1768
char * pstrdup(const char *in)
Definition mcxt.c:1781
void pfree(void *pointer)
Definition mcxt.c:1616
void * palloc(Size size)
Definition mcxt.c:1387
MemoryContext CurrentMemoryContext
Definition mcxt.c:160
#define USE_XSD_DATES
Definition miscadmin.h:240
int AggCheckCallContext(FunctionCallInfo fcinfo, MemoryContext *aggcontext)
Definition nodeAgg.c:4609
static char * errmsg
static MemoryContext MemoryContextSwitchTo(MemoryContext context)
Definition palloc.h:124
NameData attname
FormData_pg_attribute * Form_pg_attribute
static const struct exclude_list_item skip[]
const void size_t len
static char buf[DEFAULT_XLOG_SEG_SIZE]
char typalign
Definition pg_type.h:178
long date
Definition pgtypes_date.h:9
int64 timestamp
static bool DatumGetBool(Datum X)
Definition postgres.h:100
static Datum PointerGetDatum(const void *X)
Definition postgres.h:342
uint64_t Datum
Definition postgres.h:70
static Pointer DatumGetPointer(Datum X)
Definition postgres.h:332
#define InvalidOid
unsigned int Oid
void pq_sendtext(StringInfo buf, const char *str, int slen)
Definition pqformat.c:172
char * pq_getmsgtext(StringInfo msg, int rawbytes, int *nbytes)
Definition pqformat.c:545
void pq_begintypsend(StringInfo buf)
Definition pqformat.c:325
bytea * pq_endtypsend(StringInfo buf)
Definition pqformat.c:345
char * c
static int fb(int x)
tree ctl
Definition radixtree.h:1838
static unsigned hash(unsigned *uv, int n)
Definition rege_dfa.c:715
static bool vector8_has_le(const Vector8 v, const uint8 c)
Definition simd.h:214
static void vector8_load(Vector8 *v, const uint8 *s)
Definition simd.h:94
uint64 Vector8
Definition simd.h:47
static bool vector8_has(const Vector8 v, const uint8 c)
Definition simd.h:163
void check_stack_depth(void)
Definition stack_depth.c:95
struct StringInfoData * StringInfo
Definition string.h:15
StringInfo makeStringInfo(void)
Definition stringinfo.c:72
void appendStringInfo(StringInfo str, const char *fmt,...)
Definition stringinfo.c:145
void enlargeStringInfo(StringInfo str, int needed)
Definition stringinfo.c:337
void appendBinaryStringInfo(StringInfo str, const void *data, int datalen)
Definition stringinfo.c:281
void appendStringInfoString(StringInfo str, const char *s)
Definition stringinfo.c:230
void appendStringInfoChar(StringInfo str, char ch)
Definition stringinfo.c:242
void initStringInfo(StringInfo str)
Definition stringinfo.c:97
#define appendStringInfoCharMacro(str, ch)
Definition stringinfo.h:231
FmgrInfo * flinfo
Definition fmgr.h:87
JsonUniqueBuilderState unique_check
Definition json.c:86
JsonTypeCategory key_category
Definition json.c:82
Oid val_output_func
Definition json.c:85
StringInfo str
Definition json.c:81
Oid key_output_func
Definition json.c:83
JsonTypeCategory val_category
Definition json.c:84
JsonTokenType token_type
Definition jsonapi.h:109
JsonUniqueCheckState check
Definition json.c:72
StringInfoData skipped_keys
Definition json.c:73
MemoryContext mcxt
Definition json.c:74
const char * key
Definition json.c:47
JsonUniqueCheckState check
Definition json.c:63
JsonUniqueStackEntry * stack
Definition json.c:64
JsonLexContext * lex
Definition json.c:62
struct JsonUniqueStackEntry * parent
Definition json.c:55
Definition pgtime.h:35
int tm_mday
Definition pgtime.h:39
int tm_mon
Definition pgtime.h:40
int tm_isdst
Definition pgtime.h:44
int tm_year
Definition pgtime.h:41
Definition c.h:778
#define ReleaseTupleDesc(tupdesc)
Definition tupdesc.h:238
static FormData_pg_attribute * TupleDescAttr(TupleDesc tupdesc, int i)
Definition tupdesc.h:178
TupleDesc lookup_rowtype_tupdesc(Oid type_id, int32 typmod)
Definition typcache.c:1947
static Timestamp DatumGetTimestamp(Datum X)
Definition timestamp.h:28
static TimestampTz DatumGetTimestampTz(Datum X)
Definition timestamp.h:34
static Size VARSIZE_ANY_EXHDR(const void *PTR)
Definition varatt.h:472
static char * VARDATA(const void *PTR)
Definition varatt.h:305
static char * VARDATA_ANY(const void *PTR)
Definition varatt.h:486
static void SET_VARSIZE(void *PTR, Size len)
Definition varatt.h:432
text * cstring_to_text_with_len(const char *s, int len)
Definition varlena.c:196
text * cstring_to_text(const char *s)
Definition varlena.c:184
const char * type