PostgreSQL Source Code (git master)
json.c
1/*-------------------------------------------------------------------------
2 *
3 * json.c
4 * JSON data type support.
5 *
6 * Portions Copyright (c) 1996-2025, PostgreSQL Global Development Group
7 * Portions Copyright (c) 1994, Regents of the University of California
8 *
9 * IDENTIFICATION
10 * src/backend/utils/adt/json.c
11 *
12 *-------------------------------------------------------------------------
13 */
14#include "postgres.h"
15
16#include "catalog/pg_proc.h"
17#include "catalog/pg_type.h"
18#include "common/hashfn.h"
19#include "funcapi.h"
20#include "libpq/pqformat.h"
21#include "miscadmin.h"
22#include "port/simd.h"
23#include "utils/array.h"
24#include "utils/builtins.h"
25#include "utils/date.h"
26#include "utils/datetime.h"
27#include "utils/fmgroids.h"
28#include "utils/json.h"
29#include "utils/jsonfuncs.h"
30#include "utils/lsyscache.h"
31#include "utils/typcache.h"
32
33
34/*
35 * Support for fast key uniqueness checking.
36 *
37 * We maintain a hash table of used keys in JSON objects for fast detection
38 * of duplicates.
39 */
40/* Common context for key uniqueness check */
41typedef struct HTAB *JsonUniqueCheckState; /* hash table for key names */
42
43/* Hash entry for JsonUniqueCheckState */
44typedef struct JsonUniqueHashEntry
45{
46 const char *key;
47 int key_len;
48 int object_id;
49} JsonUniqueHashEntry;
50
51/* Stack element for key uniqueness check during JSON parsing */
52typedef struct JsonUniqueStackEntry
53{
54 struct JsonUniqueStackEntry *parent;
55 int object_id;
56} JsonUniqueStackEntry;
57
58/* Context struct for key uniqueness check during JSON parsing */
59typedef struct JsonUniqueParsingState
60{
61 JsonLexContext *lex;
62 JsonUniqueCheckState check;
63 JsonUniqueStackEntry *stack;
64 int id_counter;
65 bool unique;
66} JsonUniqueParsingState;
67
68/* Context struct for key uniqueness check during JSON building */
69typedef struct JsonUniqueBuilderState
70{
71 JsonUniqueCheckState check; /* unique check */
72 StringInfoData skipped_keys; /* skipped keys with NULL values */
73 MemoryContext mcxt; /* context for saving skipped keys */
74} JsonUniqueBuilderState;
75
76
77/* State struct for JSON aggregation */
78typedef struct JsonAggState
79{
80 StringInfo str;
81 JsonTypeCategory key_category;
82 Oid key_output_func;
83 JsonTypeCategory val_category;
84 Oid val_output_func;
85 JsonUniqueBuilderState unique_check;
86} JsonAggState;
87
88static void composite_to_json(Datum composite, StringInfo result,
89 bool use_line_feeds);
90static void array_dim_to_json(StringInfo result, int dim, int ndims, int *dims,
91 Datum *vals, bool *nulls, int *valcount,
92 JsonTypeCategory tcategory, Oid outfuncoid,
93 bool use_line_feeds);
94static void array_to_json_internal(Datum array, StringInfo result,
95 bool use_line_feeds);
96static void datum_to_json_internal(Datum val, bool is_null, StringInfo result,
97 JsonTypeCategory tcategory, Oid outfuncoid,
98 bool key_scalar);
99static void add_json(Datum val, bool is_null, StringInfo result,
100 Oid val_type, bool key_scalar);
101static text *catenate_stringinfo_string(StringInfo buffer, const char *addon);
102
103/*
104 * Input.
105 */
106Datum
107json_in(PG_FUNCTION_ARGS)
108{
109 char *json = PG_GETARG_CSTRING(0);
110 text *result = cstring_to_text(json);
111 JsonLexContext lex;
112
113 /* validate it */
114 makeJsonLexContext(&lex, result, false);
115 if (!pg_parse_json_or_errsave(&lex, &nullSemAction, fcinfo->context))
116 PG_RETURN_NULL();
117
118 /* Internal representation is the same as text */
119 PG_RETURN_TEXT_P(result);
120}
121
122/*
123 * Output.
124 */
125Datum
126json_out(PG_FUNCTION_ARGS)
127{
128 /* we needn't detoast because text_to_cstring will handle that */
129 Datum txt = PG_GETARG_DATUM(0);
130
131 PG_RETURN_CSTRING(TextDatumGetCString(txt));
132}
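/*
 * Editor's illustration (not part of the original source): because the
 * internal representation is the same as the input text, a json value
 * round-trips through json_in()/json_out() unchanged, whitespace included.
 * A psql sketch of the expected behaviour:
 *
 *   SELECT ' {"a": 1} '::json;
 *   -- expected:  {"a": 1}   (original spacing preserved)
 */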
133
134/*
135 * Binary send.
136 */
137Datum
138json_send(PG_FUNCTION_ARGS)
139{
140 text *t = PG_GETARG_TEXT_PP(0);
141 StringInfoData buf;
142
143 pq_begintypsend(&buf);
144 pq_sendtext(&buf, VARDATA_ANY(t), VARSIZE_ANY_EXHDR(t));
145 PG_RETURN_BYTEA_P(pq_endtypsend(&buf));
146}
147
148/*
149 * Binary receive.
150 */
151Datum
152json_recv(PG_FUNCTION_ARGS)
153{
154 StringInfo buf = (StringInfo) PG_GETARG_POINTER(0);
155 char *str;
156 int nbytes;
157 JsonLexContext lex;
158
159 str = pq_getmsgtext(buf, buf->len - buf->cursor, &nbytes);
160
161 /* Validate it. */
162 makeJsonLexContextCstringLen(&lex, str, nbytes, GetDatabaseEncoding(),
163 false);
164 pg_parse_json_or_ereport(&lex, &nullSemAction);
165
166 PG_RETURN_TEXT_P(cstring_to_text_with_len(str, nbytes));
167}
168
169/*
170 * Turn a Datum into JSON text, appending the string to "result".
171 *
172 * tcategory and outfuncoid are from a previous call to json_categorize_type,
173 * except that if is_null is true then they can be invalid.
174 *
175 * If key_scalar is true, the value is being printed as a key, so insist
176 * it's of an acceptable type, and force it to be quoted.
177 */
178static void
179datum_to_json_internal(Datum val, bool is_null, StringInfo result,
180 JsonTypeCategory tcategory, Oid outfuncoid,
181 bool key_scalar)
182{
183 char *outputstr;
184 text *jsontext;
185
186 check_stack_depth();
187
188 /* callers are expected to ensure that null keys are not passed in */
189 Assert(!(key_scalar && is_null));
190
191 if (is_null)
192 {
193 appendBinaryStringInfo(result, "null", strlen("null"));
194 return;
195 }
196
197 if (key_scalar &&
198 (tcategory == JSONTYPE_ARRAY ||
199 tcategory == JSONTYPE_COMPOSITE ||
200 tcategory == JSONTYPE_JSON ||
201 tcategory == JSONTYPE_CAST))
202 ereport(ERROR,
203 (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
204 errmsg("key value must be scalar, not array, composite, or json")));
205
206 switch (tcategory)
207 {
208 case JSONTYPE_ARRAY:
209 array_to_json_internal(val, result, false);
210 break;
211 case JSONTYPE_COMPOSITE:
212 composite_to_json(val, result, false);
213 break;
214 case JSONTYPE_BOOL:
215 if (key_scalar)
216 appendStringInfoChar(result, '"');
217 if (DatumGetBool(val))
218 appendBinaryStringInfo(result, "true", strlen("true"));
219 else
220 appendBinaryStringInfo(result, "false", strlen("false"));
221 if (key_scalar)
222 appendStringInfoChar(result, '"');
223 break;
224 case JSONTYPE_NUMERIC:
225 outputstr = OidOutputFunctionCall(outfuncoid, val);
226
227 /*
228 * Don't quote a non-key if it's a valid JSON number (i.e., not
229 * "Infinity", "-Infinity", or "NaN"). Since we know this is a
230 * numeric data type's output, we simplify and open-code the
231 * validation for better performance.
232 */
233 if (!key_scalar &&
234 ((*outputstr >= '0' && *outputstr <= '9') ||
235 (*outputstr == '-' &&
236 (outputstr[1] >= '0' && outputstr[1] <= '9'))))
237 appendStringInfoString(result, outputstr);
238 else
239 {
240 appendStringInfoChar(result, '"');
241 appendStringInfoString(result, outputstr);
242 appendStringInfoChar(result, '"');
243 }
244 pfree(outputstr);
245 break;
246 case JSONTYPE_DATE:
247 {
248 char buf[MAXDATELEN + 1];
249
250 JsonEncodeDateTime(buf, val, DATEOID, NULL);
251 appendStringInfoChar(result, '"');
252 appendStringInfoString(result, buf);
253 appendStringInfoChar(result, '"');
254 }
255 break;
256 case JSONTYPE_TIMESTAMP:
257 {
258 char buf[MAXDATELEN + 1];
259
260 JsonEncodeDateTime(buf, val, TIMESTAMPOID, NULL);
261 appendStringInfoChar(result, '"');
262 appendStringInfoString(result, buf);
263 appendStringInfoChar(result, '"');
264 }
265 break;
266 case JSONTYPE_TIMESTAMPTZ:
267 {
268 char buf[MAXDATELEN + 1];
269
270 JsonEncodeDateTime(buf, val, TIMESTAMPTZOID, NULL);
271 appendStringInfoChar(result, '"');
272 appendStringInfoString(result, buf);
273 appendStringInfoChar(result, '"');
274 }
275 break;
276 case JSONTYPE_JSON:
277 /* JSON and JSONB output will already be escaped */
278 outputstr = OidOutputFunctionCall(outfuncoid, val);
279 appendStringInfoString(result, outputstr);
280 pfree(outputstr);
281 break;
282 case JSONTYPE_CAST:
283 /* outfuncoid refers to a cast function, not an output function */
284 jsontext = DatumGetTextPP(OidFunctionCall1(outfuncoid, val));
285 appendBinaryStringInfo(result, VARDATA_ANY(jsontext),
286 VARSIZE_ANY_EXHDR(jsontext));
287 pfree(jsontext);
288 break;
289 default:
290 /* special-case text types to save useless palloc/memcpy cycles */
291 if (outfuncoid == F_TEXTOUT || outfuncoid == F_VARCHAROUT ||
292 outfuncoid == F_BPCHAROUT)
293 escape_json_text(result, (text *) DatumGetPointer(val));
294 else
295 {
296 outputstr = OidOutputFunctionCall(outfuncoid, val);
297 escape_json(result, outputstr);
298 pfree(outputstr);
299 }
300 break;
301 }
302}
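/*
 * Editor's illustration (not part of the original source): the rules above
 * are visible from SQL. Plain numerics stay unquoted, values that are not
 * valid JSON numbers (such as NaN) get quoted, and scalar keys are always
 * quoted:
 *
 *   SELECT to_json(1.5::numeric);         -- expected: 1.5
 *   SELECT to_json('NaN'::numeric);       -- expected: "NaN"
 *   SELECT json_build_object(true, 1);    -- expected: {"true" : 1}
 */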
303
304/*
305 * Encode 'value' of datetime type 'typid' into JSON string in ISO format using
306 * optionally preallocated buffer 'buf'. Optional 'tzp' determines time-zone
307 * offset (in seconds) in which we want to show timestamptz.
308 */
309char *
310JsonEncodeDateTime(char *buf, Datum value, Oid typid, const int *tzp)
311{
312 if (!buf)
313 buf = palloc(MAXDATELEN + 1);
314
315 switch (typid)
316 {
317 case DATEOID:
318 {
319 DateADT date;
320 struct pg_tm tm;
321
322 date = DatumGetDateADT(value);
323
324 /* Same as date_out(), but forcing DateStyle */
325 if (DATE_NOT_FINITE(date))
326 EncodeSpecialDate(date, buf);
327 else
328 {
329 j2date(date + POSTGRES_EPOCH_JDATE,
330 &(tm.tm_year), &(tm.tm_mon), &(tm.tm_mday));
331 EncodeDateOnly(&tm, USE_XSD_DATES, buf);
332 }
333 }
334 break;
335 case TIMEOID:
336 {
337 TimeADT time = DatumGetTimeADT(value);
338 struct pg_tm tt,
339 *tm = &tt;
340 fsec_t fsec;
341
342 /* Same as time_out(), but forcing DateStyle */
343 time2tm(time, tm, &fsec);
344 EncodeTimeOnly(tm, fsec, false, 0, USE_XSD_DATES, buf);
345 }
346 break;
347 case TIMETZOID:
348 {
349 TimeTzADT *time = DatumGetTimeTzADTP(value);
350 struct pg_tm tt,
351 *tm = &tt;
352 fsec_t fsec;
353 int tz;
354
355 /* Same as timetz_out(), but forcing DateStyle */
356 timetz2tm(time, tm, &fsec, &tz);
357 EncodeTimeOnly(tm, fsec, true, tz, USE_XSD_DATES, buf);
358 }
359 break;
360 case TIMESTAMPOID:
361 {
362 Timestamp timestamp;
363 struct pg_tm tm;
364 fsec_t fsec;
365
366 timestamp = DatumGetTimestamp(value);
367 /* Same as timestamp_out(), but forcing DateStyle */
368 if (TIMESTAMP_NOT_FINITE(timestamp))
369 EncodeSpecialTimestamp(timestamp, buf);
370 else if (timestamp2tm(timestamp, NULL, &tm, &fsec, NULL, NULL) == 0)
371 EncodeDateTime(&tm, fsec, false, 0, NULL, USE_XSD_DATES, buf);
372 else
373 ereport(ERROR,
374 (errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE),
375 errmsg("timestamp out of range")));
376 }
377 break;
378 case TIMESTAMPTZOID:
379 {
380 TimestampTz timestamp;
381 struct pg_tm tm;
382 int tz;
383 fsec_t fsec;
384 const char *tzn = NULL;
385
386 timestamp = DatumGetTimestampTz(value);
387
388 /*
389 * If a time zone is specified, we apply the time-zone shift,
390 * convert timestamptz to pg_tm as if it were without a time
391 * zone, and then use the specified time zone for converting
392 * the timestamp into a string.
393 */
394 if (tzp)
395 {
396 tz = *tzp;
397 timestamp -= (TimestampTz) tz * USECS_PER_SEC;
398 }
399
400 /* Same as timestamptz_out(), but forcing DateStyle */
401 if (TIMESTAMP_NOT_FINITE(timestamp))
402 EncodeSpecialTimestamp(timestamp, buf);
403 else if (timestamp2tm(timestamp, tzp ? NULL : &tz, &tm, &fsec,
404 tzp ? NULL : &tzn, NULL) == 0)
405 {
406 if (tzp)
407 tm.tm_isdst = 1; /* set time-zone presence flag */
408
409 EncodeDateTime(&tm, fsec, true, tz, tzn, USE_XSD_DATES, buf);
410 }
411 else
412 ereport(ERROR,
413 (errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE),
414 errmsg("timestamp out of range")));
415 }
416 break;
417 default:
418 elog(ERROR, "unknown jsonb value datetime type oid %u", typid);
419 return NULL;
420 }
421
422 return buf;
423}
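/*
 * Editor's illustration (not part of the original source): because
 * USE_XSD_DATES is forced here, datetime values are rendered in ISO 8601
 * form regardless of the session's DateStyle setting, e.g.:
 *
 *   SET DateStyle = 'German, DMY';
 *   SELECT to_json(date '2025-01-15');   -- expected: "2025-01-15"
 */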
424
425/*
426 * Process a single dimension of an array.
427 * If it's the innermost dimension, output the values, otherwise call
428 * ourselves recursively to process the next dimension.
429 */
430static void
431array_dim_to_json(StringInfo result, int dim, int ndims, int *dims, Datum *vals,
432 bool *nulls, int *valcount, JsonTypeCategory tcategory,
433 Oid outfuncoid, bool use_line_feeds)
434{
435 int i;
436 const char *sep;
437
438 Assert(dim < ndims);
439
440 sep = use_line_feeds ? ",\n " : ",";
441
442 appendStringInfoChar(result, '[');
443
444 for (i = 1; i <= dims[dim]; i++)
445 {
446 if (i > 1)
447 appendStringInfoString(result, sep);
448
449 if (dim + 1 == ndims)
450 {
451 datum_to_json_internal(vals[*valcount], nulls[*valcount],
452 result, tcategory,
453 outfuncoid, false);
454 (*valcount)++;
455 }
456 else
457 {
458 /*
459 * Do we want line feeds on inner dimensions of arrays? For now
460 * we'll say no.
461 */
462 array_dim_to_json(result, dim + 1, ndims, dims, vals, nulls,
463 valcount, tcategory, outfuncoid, false);
464 }
465 }
466
467 appendStringInfoChar(result, ']');
468}
469
470/*
471 * Turn an array into JSON.
472 */
473static void
474array_to_json_internal(Datum array, StringInfo result, bool use_line_feeds)
475{
476 ArrayType *v = DatumGetArrayTypeP(array);
477 Oid element_type = ARR_ELEMTYPE(v);
478 int *dim;
479 int ndim;
480 int nitems;
481 int count = 0;
482 Datum *elements;
483 bool *nulls;
484 int16 typlen;
485 bool typbyval;
486 char typalign;
487 JsonTypeCategory tcategory;
488 Oid outfuncoid;
489
490 ndim = ARR_NDIM(v);
491 dim = ARR_DIMS(v);
492 nitems = ArrayGetNItems(ndim, dim);
493
494 if (nitems <= 0)
495 {
496 appendStringInfoString(result, "[]");
497 return;
498 }
499
500 get_typlenbyvalalign(element_type,
501 &typlen, &typbyval, &typalign);
502
503 json_categorize_type(element_type, false,
504 &tcategory, &outfuncoid);
505
506 deconstruct_array(v, element_type, typlen, typbyval,
507 typalign, &elements, &nulls,
508 &nitems);
509
510 array_dim_to_json(result, 0, ndim, dim, elements, nulls, &count, tcategory,
511 outfuncoid, use_line_feeds);
512
513 pfree(elements);
514 pfree(nulls);
515}
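/*
 * Editor's illustration (not part of the original source): each array
 * dimension becomes a nested JSON array, e.g.:
 *
 *   SELECT array_to_json('{{1,2},{3,4}}'::int[]);
 *   -- expected: [[1,2],[3,4]]
 */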
516
517/*
518 * Turn a composite / record into JSON.
519 */
520static void
521composite_to_json(Datum composite, StringInfo result, bool use_line_feeds)
522{
523 HeapTupleHeader td;
524 Oid tupType;
525 int32 tupTypmod;
526 TupleDesc tupdesc;
527 HeapTupleData tmptup,
528 *tuple;
529 int i;
530 bool needsep = false;
531 const char *sep;
532 int seplen;
533
534 /*
535 * We can avoid expensive strlen() calls by precalculating the separator
536 * length.
537 */
538 sep = use_line_feeds ? ",\n " : ",";
539 seplen = use_line_feeds ? strlen(",\n ") : strlen(",");
540
541 td = DatumGetHeapTupleHeader(composite);
542
543 /* Extract rowtype info and find a tupdesc */
544 tupType = HeapTupleHeaderGetTypeId(td);
545 tupTypmod = HeapTupleHeaderGetTypMod(td);
546 tupdesc = lookup_rowtype_tupdesc(tupType, tupTypmod);
547
548 /* Build a temporary HeapTuple control structure */
549 tmptup.t_len = HeapTupleHeaderGetDatumLength(td);
550 tmptup.t_data = td;
551 tuple = &tmptup;
552
553 appendStringInfoChar(result, '{');
554
555 for (i = 0; i < tupdesc->natts; i++)
556 {
557 Datum val;
558 bool isnull;
559 char *attname;
560 JsonTypeCategory tcategory;
561 Oid outfuncoid;
562 Form_pg_attribute att = TupleDescAttr(tupdesc, i);
563
564 if (att->attisdropped)
565 continue;
566
567 if (needsep)
568 appendBinaryStringInfo(result, sep, seplen);
569 needsep = true;
570
571 attname = NameStr(att->attname);
572 escape_json(result, attname);
573 appendStringInfoChar(result, ':');
574
575 val = heap_getattr(tuple, i + 1, tupdesc, &isnull);
576
577 if (isnull)
578 {
579 tcategory = JSONTYPE_NULL;
580 outfuncoid = InvalidOid;
581 }
582 else
583 json_categorize_type(att->atttypid, false, &tcategory,
584 &outfuncoid);
585
586 datum_to_json_internal(val, isnull, result, tcategory, outfuncoid,
587 false);
588 }
589
590 appendStringInfoChar(result, '}');
591 ReleaseTupleDesc(tupdesc);
592}
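/*
 * Editor's illustration (not part of the original source): column names
 * become object keys, with dropped columns omitted, e.g.:
 *
 *   SELECT row_to_json(row(1, 'foo'));
 *   -- expected: {"f1":1,"f2":"foo"}
 */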
593
594/*
595 * Append JSON text for "val" to "result".
596 *
597 * This is just a thin wrapper around datum_to_json. If the same type will be
598 * printed many times, avoid using this; better to do the json_categorize_type
599 * lookups only once.
600 */
601static void
602add_json(Datum val, bool is_null, StringInfo result,
603 Oid val_type, bool key_scalar)
604{
605 JsonTypeCategory tcategory;
606 Oid outfuncoid;
607
608 if (val_type == InvalidOid)
609 ereport(ERROR,
610 (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
611 errmsg("could not determine input data type")));
612
613 if (is_null)
614 {
615 tcategory = JSONTYPE_NULL;
616 outfuncoid = InvalidOid;
617 }
618 else
619 json_categorize_type(val_type, false,
620 &tcategory, &outfuncoid);
621
622 datum_to_json_internal(val, is_null, result, tcategory, outfuncoid,
623 key_scalar);
624}
625
626/*
627 * SQL function array_to_json(row)
628 */
629Datum
630array_to_json(PG_FUNCTION_ARGS)
631{
632 Datum array = PG_GETARG_DATUM(0);
633 StringInfo result;
634
635 result = makeStringInfo();
636
637 array_to_json_internal(array, result, false);
638
639 PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len));
640}
641
642/*
643 * SQL function array_to_json(row, prettybool)
644 */
645Datum
646array_to_json_pretty(PG_FUNCTION_ARGS)
647{
648 Datum array = PG_GETARG_DATUM(0);
649 bool use_line_feeds = PG_GETARG_BOOL(1);
650 StringInfo result;
651
652 result = makeStringInfo();
653
654 array_to_json_internal(array, result, use_line_feeds);
655
656 PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len));
657}
658
659/*
660 * SQL function row_to_json(row)
661 */
662Datum
663row_to_json(PG_FUNCTION_ARGS)
664{
665 Datum array = PG_GETARG_DATUM(0);
666 StringInfo result;
667
668 result = makeStringInfo();
669
670 composite_to_json(array, result, false);
671
672 PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len));
673}
674
675/*
676 * SQL function row_to_json(row, prettybool)
677 */
678Datum
679row_to_json_pretty(PG_FUNCTION_ARGS)
680{
681 Datum array = PG_GETARG_DATUM(0);
682 bool use_line_feeds = PG_GETARG_BOOL(1);
683 StringInfo result;
684
685 result = makeStringInfo();
686
687 composite_to_json(array, result, use_line_feeds);
688
689 PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len));
690}
691
692/*
693 * Is the given type immutable when coming out of a JSON context?
694 *
695 * At present, datetimes are all considered mutable, because they
696 * depend on timezone. XXX we should also drill down into objects
697 * and arrays, but do not.
698 */
699bool
700to_json_is_immutable(Oid typoid)
701{
702 JsonTypeCategory tcategory;
703 Oid outfuncoid;
704
705 json_categorize_type(typoid, false, &tcategory, &outfuncoid);
706
707 switch (tcategory)
708 {
709 case JSONTYPE_BOOL:
710 case JSONTYPE_JSON:
711 case JSONTYPE_JSONB:
712 case JSONTYPE_NULL:
713 return true;
714
715 case JSONTYPE_DATE:
716 case JSONTYPE_TIMESTAMP:
717 case JSONTYPE_TIMESTAMPTZ:
718 return false;
719
720 case JSONTYPE_ARRAY:
721 return false; /* TODO recurse into elements */
722
723 case JSONTYPE_COMPOSITE:
724 return false; /* TODO recurse into fields */
725
726 case JSONTYPE_NUMERIC:
727 case JSONTYPE_CAST:
728 case JSONTYPE_OTHER:
729 return func_volatile(outfuncoid) == PROVOLATILE_IMMUTABLE;
730 }
731
732 return false; /* not reached */
733}
734
735/*
736 * SQL function to_json(anyvalue)
737 */
738Datum
739to_json(PG_FUNCTION_ARGS)
740{
741 Datum val = PG_GETARG_DATUM(0);
742 Oid val_type = get_fn_expr_argtype(fcinfo->flinfo, 0);
743 JsonTypeCategory tcategory;
744 Oid outfuncoid;
745
746 if (val_type == InvalidOid)
747 ereport(ERROR,
748 (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
749 errmsg("could not determine input data type")));
750
751 json_categorize_type(val_type, false,
752 &tcategory, &outfuncoid);
753
754 PG_RETURN_DATUM(datum_to_json(val, tcategory, outfuncoid));
755}
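/*
 * Editor's illustration (not part of the original source): to_json() applies
 * the categorization above to any argument type, quoting and escaping
 * ordinary scalars, e.g.:
 *
 *   SELECT to_json('Fred said "Hi."'::text);
 *   -- expected: "Fred said \"Hi.\""
 */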
756
757/*
758 * Turn a Datum into JSON text.
759 *
760 * tcategory and outfuncoid are from a previous call to json_categorize_type.
761 */
762Datum
763datum_to_json(Datum val, JsonTypeCategory tcategory, Oid outfuncoid)
764{
765 StringInfo result = makeStringInfo();
766
767 datum_to_json_internal(val, false, result, tcategory, outfuncoid,
768 false);
769
770 return PointerGetDatum(cstring_to_text_with_len(result->data, result->len));
771}
772
773/*
774 * json_agg transition function
775 *
776 * aggregate input column as a json array value.
777 */
778static Datum
779json_agg_transfn_worker(FunctionCallInfo fcinfo, bool absent_on_null)
780{
781 MemoryContext aggcontext,
782 oldcontext;
783 JsonAggState *state;
784 Datum val;
785
786 if (!AggCheckCallContext(fcinfo, &aggcontext))
787 {
788 /* cannot be called directly because of internal-type argument */
789 elog(ERROR, "json_agg_transfn called in non-aggregate context");
790 }
791
792 if (PG_ARGISNULL(0))
793 {
794 Oid arg_type = get_fn_expr_argtype(fcinfo->flinfo, 1);
795
796 if (arg_type == InvalidOid)
797 ereport(ERROR,
798 (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
799 errmsg("could not determine input data type")));
800
801 /*
802 * Make this state object in a context where it will persist for the
803 * duration of the aggregate call. MemoryContextSwitchTo is only
804 * needed the first time, as the StringInfo routines make sure they
805 * use the right context to enlarge the object if necessary.
806 */
807 oldcontext = MemoryContextSwitchTo(aggcontext);
808 state = (JsonAggState *) palloc(sizeof(JsonAggState));
809 state->str = makeStringInfo();
810 MemoryContextSwitchTo(oldcontext);
811
812 appendStringInfoChar(state->str, '[');
813 json_categorize_type(arg_type, false, &state->val_category,
814 &state->val_output_func);
815 }
816 else
817 {
818 state = (JsonAggState *) PG_GETARG_POINTER(0);
819 }
820
821 if (absent_on_null && PG_ARGISNULL(1))
822 PG_RETURN_POINTER(state);
823
824 if (state->str->len > 1)
825 appendStringInfoString(state->str, ", ");
826
827 /* fast path for NULLs */
828 if (PG_ARGISNULL(1))
829 {
830 datum_to_json_internal((Datum) 0, true, state->str, JSONTYPE_NULL,
831 InvalidOid, false);
832 PG_RETURN_POINTER(state);
833 }
834
835 val = PG_GETARG_DATUM(1);
836
837 /* add some whitespace if structured type and not first item */
838 if (!PG_ARGISNULL(0) && state->str->len > 1 &&
839 (state->val_category == JSONTYPE_ARRAY ||
840 state->val_category == JSONTYPE_COMPOSITE))
841 {
842 appendStringInfoString(state->str, "\n ");
843 }
844
845 datum_to_json_internal(val, false, state->str, state->val_category,
846 state->val_output_func, false);
847
848 /*
849 * The transition type for json_agg() is declared to be "internal", which
850 * is a pass-by-value type the same size as a pointer. So we can safely
851 * pass the JsonAggState pointer through nodeAgg.c's machinations.
852 */
853 PG_RETURN_POINTER(state);
854}
855
856
857/*
858 * json_agg aggregate function
859 */
860Datum
861json_agg_transfn(PG_FUNCTION_ARGS)
862{
863 return json_agg_transfn_worker(fcinfo, false);
864}
865
866/*
867 * json_agg_strict aggregate function
868 */
869Datum
870json_agg_strict_transfn(PG_FUNCTION_ARGS)
871{
872 return json_agg_transfn_worker(fcinfo, true);
873}
874
875/*
876 * json_agg final function
877 */
878Datum
879json_agg_finalfn(PG_FUNCTION_ARGS)
880{
881 JsonAggState *state;
882
883 /* cannot be called directly because of internal-type argument */
884 Assert(AggCheckCallContext(fcinfo, NULL));
885
886 state = PG_ARGISNULL(0) ?
887 NULL :
888 (JsonAggState *) PG_GETARG_POINTER(0);
889
890 /* NULL result for no rows in, as is standard with aggregates */
891 if (state == NULL)
892 PG_RETURN_NULL();
893
894 /* Else return state with appropriate array terminator added */
895 PG_RETURN_TEXT_P(catenate_stringinfo_string(state->str, "]"));
896}
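/*
 * Editor's illustration (not part of the original source): json_agg() keeps
 * SQL NULLs as JSON nulls, while json_agg_strict() (absent_on_null) drops
 * them, e.g.:
 *
 *   SELECT json_agg(x), json_agg_strict(x)
 *   FROM (VALUES (1), (NULL), (3)) AS t(x);
 *   -- expected: [1, null, 3]   and   [1, 3]
 */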
897
898/* Functions implementing hash table for key uniqueness check */
899static uint32
900json_unique_hash(const void *key, Size keysize)
901{
904
905 hash ^= hash_bytes((const unsigned char *) entry->key, entry->key_len);
906
907 return DatumGetUInt32(hash);
908}
909
910static int
911json_unique_hash_match(const void *key1, const void *key2, Size keysize)
912{
913 const JsonUniqueHashEntry *entry1 = (const JsonUniqueHashEntry *) key1;
914 const JsonUniqueHashEntry *entry2 = (const JsonUniqueHashEntry *) key2;
915
916 if (entry1->object_id != entry2->object_id)
917 return entry1->object_id > entry2->object_id ? 1 : -1;
918
919 if (entry1->key_len != entry2->key_len)
920 return entry1->key_len > entry2->key_len ? 1 : -1;
921
922 return strncmp(entry1->key, entry2->key, entry1->key_len);
923}
924
925/*
926 * Uniqueness detection support.
927 *
928 * In order to detect uniqueness during building or parsing of a JSON
929 * object, we maintain a hash table of key names already seen.
930 */
931static void
932json_unique_check_init(JsonUniqueCheckState *cxt)
933{
934 HASHCTL ctl;
935
936 memset(&ctl, 0, sizeof(ctl));
937 ctl.keysize = sizeof(JsonUniqueHashEntry);
938 ctl.entrysize = sizeof(JsonUniqueHashEntry);
939 ctl.hcxt = CurrentMemoryContext;
940 ctl.hash = json_unique_hash;
941 ctl.match = json_unique_hash_match;
942
943 *cxt = hash_create("json object hashtable",
944 32,
945 &ctl,
946 HASH_ELEM | HASH_CONTEXT | HASH_FUNCTION | HASH_COMPARE);
947}
948
949static void
950json_unique_builder_init(JsonUniqueBuilderState *cxt)
951{
952 json_unique_check_init(&cxt->check);
953 cxt->mcxt = CurrentMemoryContext;
954 cxt->skipped_keys.data = NULL;
955}
956
957static bool
958json_unique_check_key(JsonUniqueCheckState *cxt, const char *key, int object_id)
959{
961 bool found;
962
963 entry.key = key;
964 entry.key_len = strlen(key);
965 entry.object_id = object_id;
966
967 (void) hash_search(*cxt, &entry, HASH_ENTER, &found);
968
969 return !found;
970}
971
972/*
973 * On-demand initialization of a throwaway StringInfo. This is used to
974 * read a key name that we don't need to store in the output object, for
975 * duplicate key detection when the value is NULL.
976 */
977static StringInfo
978json_unique_builder_get_throwawaybuf(JsonUniqueBuilderState *cxt)
979{
980 StringInfo out = &cxt->skipped_keys;
981
982 if (!out->data)
983 {
984 MemoryContext oldcxt = MemoryContextSwitchTo(cxt->mcxt);
985
986 initStringInfo(out);
987 MemoryContextSwitchTo(oldcxt);
988 }
989 else
990 /* Just reset the string to empty */
991 out->len = 0;
992
993 return out;
994}
995
996/*
997 * json_object_agg transition function.
998 *
999 * aggregate two input columns as a single json object value.
1000 */
1001static Datum
1002json_object_agg_transfn_worker(FunctionCallInfo fcinfo,
1003 bool absent_on_null, bool unique_keys)
1004{
1005 MemoryContext aggcontext,
1006 oldcontext;
1007 JsonAggState *state;
1008 StringInfo out;
1009 Datum arg;
1010 bool skip;
1011 int key_offset;
1012
1013 if (!AggCheckCallContext(fcinfo, &aggcontext))
1014 {
1015 /* cannot be called directly because of internal-type argument */
1016 elog(ERROR, "json_object_agg_transfn called in non-aggregate context");
1017 }
1018
1019 if (PG_ARGISNULL(0))
1020 {
1021 Oid arg_type;
1022
1023 /*
1024 * Make the StringInfo in a context where it will persist for the
1025 * duration of the aggregate call. Switching context is only needed
1026 * for this initial step, as the StringInfo and dynahash routines make
1027 * sure they use the right context to enlarge the object if necessary.
1028 */
1029 oldcontext = MemoryContextSwitchTo(aggcontext);
1030 state = (JsonAggState *) palloc(sizeof(JsonAggState));
1031 state->str = makeStringInfo();
1032 if (unique_keys)
1033 json_unique_builder_init(&state->unique_check);
1034 else
1035 memset(&state->unique_check, 0, sizeof(state->unique_check));
1036 MemoryContextSwitchTo(oldcontext);
1037
1038 arg_type = get_fn_expr_argtype(fcinfo->flinfo, 1);
1039
1040 if (arg_type == InvalidOid)
1041 ereport(ERROR,
1042 (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
1043 errmsg("could not determine data type for argument %d", 1)));
1044
1045 json_categorize_type(arg_type, false, &state->key_category,
1046 &state->key_output_func);
1047
1048 arg_type = get_fn_expr_argtype(fcinfo->flinfo, 2);
1049
1050 if (arg_type == InvalidOid)
1051 ereport(ERROR,
1052 (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
1053 errmsg("could not determine data type for argument %d", 2)));
1054
1055 json_categorize_type(arg_type, false, &state->val_category,
1056 &state->val_output_func);
1057
1058 appendStringInfoString(state->str, "{ ");
1059 }
1060 else
1061 {
1062 state = (JsonAggState *) PG_GETARG_POINTER(0);
1063 }
1064
1065 /*
1066 * Note: since json_object_agg() is declared as taking type "any", the
1067 * parser will not do any type conversion on unknown-type literals (that
1068 * is, undecorated strings or NULLs). Such values will arrive here as
1069 * type UNKNOWN, which fortunately does not matter to us, since
1070 * unknownout() works fine.
1071 */
1072
1073 if (PG_ARGISNULL(1))
1074 ereport(ERROR,
1075 (errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED),
1076 errmsg("null value not allowed for object key")));
1077
1078 /* Skip null values if absent_on_null */
1079 skip = absent_on_null && PG_ARGISNULL(2);
1080
1081 if (skip)
1082 {
1083 /*
1084 * We got a NULL value and we're not storing those; if we're not
1085 * testing key uniqueness, we're done. If we are, use the throwaway
1086 * buffer to store the key name so that we can check it.
1087 */
1088 if (!unique_keys)
1089 PG_RETURN_POINTER(state);
1090
1091 out = json_unique_builder_get_throwawaybuf(&state->unique_check);
1092 }
1093 else
1094 {
1095 out = state->str;
1096
1097 /*
1098 * Append comma delimiter only if we have already output some fields
1099 * after the initial string "{ ".
1100 */
1101 if (out->len > 2)
1102 appendStringInfoString(out, ", ");
1103 }
1104
1105 arg = PG_GETARG_DATUM(1);
1106
1107 key_offset = out->len;
1108
1109 datum_to_json_internal(arg, false, out, state->key_category,
1110 state->key_output_func, true);
1111
1112 if (unique_keys)
1113 {
1114 /*
1115 * Copy the key first, instead of pointing into the buffer. It will be
1116 * added to the hash table, but the buffer may get reallocated as
1117 * we're appending more data to it. That would invalidate pointers to
1118 * keys in the current buffer.
1119 */
1120 const char *key = MemoryContextStrdup(aggcontext,
1121 &out->data[key_offset]);
1122
1123 if (!json_unique_check_key(&state->unique_check.check, key, 0))
1124 ereport(ERROR,
1125 errcode(ERRCODE_DUPLICATE_JSON_OBJECT_KEY_VALUE),
1126 errmsg("duplicate JSON object key value: %s", key));
1127
1128 if (skip)
1129 PG_RETURN_POINTER(state);
1130 }
1131
1132 appendStringInfoString(state->str, " : ");
1133
1134 if (PG_ARGISNULL(2))
1135 arg = (Datum) 0;
1136 else
1137 arg = PG_GETARG_DATUM(2);
1138
1139 datum_to_json_internal(arg, PG_ARGISNULL(2), state->str,
1140 state->val_category,
1141 state->val_output_func, false);
1142
1143 PG_RETURN_POINTER(state);
1144}
1145
1146/*
1147 * json_object_agg aggregate function
1148 */
1149Datum
1150json_object_agg_transfn(PG_FUNCTION_ARGS)
1151{
1152 return json_object_agg_transfn_worker(fcinfo, false, false);
1153}
1154
1155/*
1156 * json_object_agg_strict aggregate function
1157 */
1158Datum
1159json_object_agg_strict_transfn(PG_FUNCTION_ARGS)
1160{
1161 return json_object_agg_transfn_worker(fcinfo, true, false);
1162}
1163
1164/*
1165 * json_object_agg_unique aggregate function
1166 */
1167Datum
1168json_object_agg_unique_transfn(PG_FUNCTION_ARGS)
1169{
1170 return json_object_agg_transfn_worker(fcinfo, false, true);
1171}
1172
1173/*
1174 * json_object_agg_unique_strict aggregate function
1175 */
1176Datum
1177json_object_agg_unique_strict_transfn(PG_FUNCTION_ARGS)
1178{
1179 return json_object_agg_transfn_worker(fcinfo, true, true);
1180}
1181
1182/*
1183 * json_object_agg final function.
1184 */
1185Datum
1186json_object_agg_finalfn(PG_FUNCTION_ARGS)
1187{
1188 JsonAggState *state;
1189
1190 /* cannot be called directly because of internal-type argument */
1191 Assert(AggCheckCallContext(fcinfo, NULL));
1192
1193 state = PG_ARGISNULL(0) ? NULL : (JsonAggState *) PG_GETARG_POINTER(0);
1194
1195 /* NULL result for no rows in, as is standard with aggregates */
1196 if (state == NULL)
1197 PG_RETURN_NULL();
1198
1199 /* Else return state with appropriate object terminator added */
1200 PG_RETURN_TEXT_P(catenate_stringinfo_string(state->str, " }"));
1201}
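/*
 * Editor's illustration (not part of the original source): the object
 * aggregate pairs its two arguments per input row, e.g.:
 *
 *   SELECT json_object_agg(k, v)
 *   FROM (VALUES ('a', 1), ('b', 2)) AS t(k, v);
 *   -- expected: { "a" : 1, "b" : 2 }
 *
 * The *_unique variants instead raise "duplicate JSON object key value"
 * when the same key appears twice.
 */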
1202
1203/*
1204 * Helper function for aggregates: return given StringInfo's contents plus
1205 * specified trailing string, as a text datum. We need this because aggregate
1206 * final functions are not allowed to modify the aggregate state.
1207 */
1208static text *
1209catenate_stringinfo_string(StringInfo buffer, const char *addon)
1210{
1211 /* custom version of cstring_to_text_with_len */
1212 int buflen = buffer->len;
1213 int addlen = strlen(addon);
1214 text *result = (text *) palloc(buflen + addlen + VARHDRSZ);
1215
1216 SET_VARSIZE(result, buflen + addlen + VARHDRSZ);
1217 memcpy(VARDATA(result), buffer->data, buflen);
1218 memcpy(VARDATA(result) + buflen, addon, addlen);
1219
1220 return result;
1221}
1222
1223Datum
1224json_build_object_worker(int nargs, const Datum *args, const bool *nulls, const Oid *types,
1225 bool absent_on_null, bool unique_keys)
1226{
1227 int i;
1228 const char *sep = "";
1229 StringInfo result;
1230 JsonUniqueBuilderState unique_check;
1231
1232 if (nargs % 2 != 0)
1233 ereport(ERROR,
1234 (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
1235 errmsg("argument list must have even number of elements"),
1236 /* translator: %s is a SQL function name */
1237 errhint("The arguments of %s must consist of alternating keys and values.",
1238 "json_build_object()")));
1239
1240 result = makeStringInfo();
1241
1242 appendStringInfoChar(result, '{');
1243
1244 if (unique_keys)
1245 json_unique_builder_init(&unique_check);
1246
1247 for (i = 0; i < nargs; i += 2)
1248 {
1249 StringInfo out;
1250 bool skip;
1251 int key_offset;
1252
1253 /* Skip null values if absent_on_null */
1254 skip = absent_on_null && nulls[i + 1];
1255
1256 if (skip)
1257 {
1258 /* If key uniqueness check is needed we must save skipped keys */
1259 if (!unique_keys)
1260 continue;
1261
1262 out = json_unique_builder_get_throwawaybuf(&unique_check);
1263 }
1264 else
1265 {
1266 appendStringInfoString(result, sep);
1267 sep = ", ";
1268 out = result;
1269 }
1270
1271 /* process key */
1272 if (nulls[i])
1273 ereport(ERROR,
1274 (errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED),
1275 errmsg("null value not allowed for object key")));
1276
1277 /* save key offset before appending it */
1278 key_offset = out->len;
1279
1280 add_json(args[i], false, out, types[i], true);
1281
1282 if (unique_keys)
1283 {
1284 /*
1285 * check key uniqueness after key appending
1286 *
1287 * Copy the key first, instead of pointing into the buffer. It
1288 * will be added to the hash table, but the buffer may get
1289 * reallocated as we're appending more data to it. That would
1290 * invalidate pointers to keys in the current buffer.
1291 */
1292 const char *key = pstrdup(&out->data[key_offset]);
1293
1294 if (!json_unique_check_key(&unique_check.check, key, 0))
1295 ereport(ERROR,
1296 errcode(ERRCODE_DUPLICATE_JSON_OBJECT_KEY_VALUE),
1297 errmsg("duplicate JSON object key value: %s", key));
1298
1299 if (skip)
1300 continue;
1301 }
1302
1303 appendStringInfoString(result, " : ");
1304
1305 /* process value */
1306 add_json(args[i + 1], nulls[i + 1], result, types[i + 1], false);
1307 }
1308
1309 appendStringInfoChar(result, '}');
1310
1311 return PointerGetDatum(cstring_to_text_with_len(result->data, result->len));
1312}
1313
1314/*
1315 * SQL function json_build_object(variadic "any")
1316 */
1317Datum
1318json_build_object(PG_FUNCTION_ARGS)
1319{
1320 Datum *args;
1321 bool *nulls;
1322 Oid *types;
1323
1324 /* build argument values to build the object */
1325 int nargs = extract_variadic_args(fcinfo, 0, true,
1326 &args, &types, &nulls);
1327
1328 if (nargs < 0)
1329 PG_RETURN_NULL();
1330
1331 PG_RETURN_DATUM(json_build_object_worker(nargs, args, nulls, types, false, false));
1332}
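/*
 * Editor's illustration (not part of the original source): arguments are
 * taken as alternating keys and values, e.g.:
 *
 *   SELECT json_build_object('a', 1, 'b', true);
 *   -- expected: {"a" : 1, "b" : true}
 */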
1333
1334/*
1335 * degenerate case of json_build_object where it gets 0 arguments.
1336 */
1337Datum
1338json_build_object_noargs(PG_FUNCTION_ARGS)
1339{
1340 PG_RETURN_TEXT_P(cstring_to_text_with_len("{}", 2));
1341}
1342
1343Datum
1344json_build_array_worker(int nargs, const Datum *args, const bool *nulls, const Oid *types,
1345 bool absent_on_null)
1346{
1347 int i;
1348 const char *sep = "";
1349 StringInfo result;
1350
1351 result = makeStringInfo();
1352
1353 appendStringInfoChar(result, '[');
1354
1355 for (i = 0; i < nargs; i++)
1356 {
1357 if (absent_on_null && nulls[i])
1358 continue;
1359
1360 appendStringInfoString(result, sep);
1361 sep = ", ";
1362 add_json(args[i], nulls[i], result, types[i], false);
1363 }
1364
1365 appendStringInfoChar(result, ']');
1366
1367 return PointerGetDatum(cstring_to_text_with_len(result->data, result->len));
1368}
1369
1370/*
1371 * SQL function json_build_array(variadic "any")
1372 */
1373Datum
1374json_build_array(PG_FUNCTION_ARGS)
1375{
1376 Datum *args;
1377 bool *nulls;
1378 Oid *types;
1379
1380 /* build argument values to build the object */
1381 int nargs = extract_variadic_args(fcinfo, 0, true,
1382 &args, &types, &nulls);
1383
1384 if (nargs < 0)
1385 PG_RETURN_NULL();
1386
1387 PG_RETURN_DATUM(json_build_array_worker(nargs, args, nulls, types, false));
1388}
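/*
 * Editor's illustration (not part of the original source):
 *
 *   SELECT json_build_array(1, 'two', NULL, true);
 *   -- expected: [1, "two", null, true]
 */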
1389
1390/*
1391 * degenerate case of json_build_array where it gets 0 arguments.
1392 */
1393Datum
1394json_build_array_noargs(PG_FUNCTION_ARGS)
1395{
1396 PG_RETURN_TEXT_P(cstring_to_text_with_len("[]", 2));
1397}
1398
1399/*
1400 * SQL function json_object(text[])
1401 *
1402 * take a one or two dimensional array of text as key/value pairs
1403 * for a json object.
1404 */
1405Datum
1406json_object(PG_FUNCTION_ARGS)
1407{
1408 ArrayType *in_array = PG_GETARG_ARRAYTYPE_P(0);
1409 int ndims = ARR_NDIM(in_array);
1410 StringInfoData result;
1411 Datum *in_datums;
1412 bool *in_nulls;
1413 int in_count,
1414 count,
1415 i;
1416 text *rval;
1417
1418 switch (ndims)
1419 {
1420 case 0:
1421 PG_RETURN_DATUM(CStringGetTextDatum("{}"));
1422 break;
1423
1424 case 1:
1425 if ((ARR_DIMS(in_array)[0]) % 2)
1426 ereport(ERROR,
1427 (errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
1428 errmsg("array must have even number of elements")));
1429 break;
1430
1431 case 2:
1432 if ((ARR_DIMS(in_array)[1]) != 2)
1433 ereport(ERROR,
1434 (errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
1435 errmsg("array must have two columns")));
1436 break;
1437
1438 default:
1439 ereport(ERROR,
1440 (errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
1441 errmsg("wrong number of array subscripts")));
1442 }
1443
1444 deconstruct_array_builtin(in_array, TEXTOID, &in_datums, &in_nulls, &in_count);
1445
1446 count = in_count / 2;
1447
1448 initStringInfo(&result);
1449
1450 appendStringInfoChar(&result, '{');
1451
1452 for (i = 0; i < count; ++i)
1453 {
1454 if (in_nulls[i * 2])
1455 ereport(ERROR,
1456 (errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED),
1457 errmsg("null value not allowed for object key")));
1458
1459 if (i > 0)
1460 appendStringInfoString(&result, ", ");
1461 escape_json_text(&result, (text *) DatumGetPointer(in_datums[i * 2]));
1462 appendStringInfoString(&result, " : ");
1463 if (in_nulls[i * 2 + 1])
1464 appendStringInfoString(&result, "null");
1465 else
1466 {
1467 escape_json_text(&result,
1468 (text *) DatumGetPointer(in_datums[i * 2 + 1]));
1469 }
1470 }
1471
1472 appendStringInfoChar(&result, '}');
1473
1474 pfree(in_datums);
1475 pfree(in_nulls);
1476
1477 rval = cstring_to_text_with_len(result.data, result.len);
1478 pfree(result.data);
1479
1480 PG_RETURN_TEXT_P(rval);
1481}
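/*
 * Editor's illustration (not part of the original source): both the flat and
 * the two-column array forms produce the same object, with all values kept
 * as JSON strings, e.g.:
 *
 *   SELECT json_object('{a,1,b,2}');
 *   SELECT json_object('{{a,1},{b,2}}');
 *   -- expected (both): {"a" : "1", "b" : "2"}
 */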
1482
1483/*
1484 * SQL function json_object(text[], text[])
1485 *
1486 * take separate key and value arrays of text to construct a json object
1487 * pairwise.
1488 */
1489Datum
1490json_object_two_arg(PG_FUNCTION_ARGS)
1491{
1492 ArrayType *key_array = PG_GETARG_ARRAYTYPE_P(0);
1493 ArrayType *val_array = PG_GETARG_ARRAYTYPE_P(1);
1494 int nkdims = ARR_NDIM(key_array);
1495 int nvdims = ARR_NDIM(val_array);
1496 StringInfoData result;
1497 Datum *key_datums,
1498 *val_datums;
1499 bool *key_nulls,
1500 *val_nulls;
1501 int key_count,
1502 val_count,
1503 i;
1504 text *rval;
1505
1506 if (nkdims > 1 || nkdims != nvdims)
1507 ereport(ERROR,
1508 (errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
1509 errmsg("wrong number of array subscripts")));
1510
1511 if (nkdims == 0)
1512 PG_RETURN_DATUM(CStringGetTextDatum("{}"));
1513
1514 deconstruct_array_builtin(key_array, TEXTOID, &key_datums, &key_nulls, &key_count);
1515 deconstruct_array_builtin(val_array, TEXTOID, &val_datums, &val_nulls, &val_count);
1516
1517 if (key_count != val_count)
1518 ereport(ERROR,
1519 (errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
1520 errmsg("mismatched array dimensions")));
1521
1522 initStringInfo(&result);
1523
1524 appendStringInfoChar(&result, '{');
1525
1526 for (i = 0; i < key_count; ++i)
1527 {
1528 if (key_nulls[i])
1529 ereport(ERROR,
1530 (errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED),
1531 errmsg("null value not allowed for object key")));
1532
1533 if (i > 0)
1534 appendStringInfoString(&result, ", ");
1535 escape_json_text(&result, (text *) DatumGetPointer(key_datums[i]));
1536 appendStringInfoString(&result, " : ");
1537 if (val_nulls[i])
1538 appendStringInfoString(&result, "null");
1539 else
1540 escape_json_text(&result,
1541 (text *) DatumGetPointer(val_datums[i]));
1542 }
1543
1544 appendStringInfoChar(&result, '}');
1545
1546 pfree(key_datums);
1547 pfree(key_nulls);
1548 pfree(val_datums);
1549 pfree(val_nulls);
1550
1551 rval = cstring_to_text_with_len(result.data, result.len);
1552 pfree(result.data);
1553
1554 PG_RETURN_TEXT_P(rval);
1555}
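/*
 * Editor's illustration (not part of the original source): keys and values
 * are paired element by element, e.g.:
 *
 *   SELECT json_object('{a,b}', '{1,2}');
 *   -- expected: {"a" : "1", "b" : "2"}
 */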
1556
1557/*
1558 * escape_json_char
1559 * Inline helper function for escape_json* functions
1560 */
1561static pg_attribute_always_inline void
1562escape_json_char(StringInfo buf, char c)
1563{
1564 switch (c)
1565 {
1566 case '\b':
1567 appendStringInfoString(buf, "\\b");
1568 break;
1569 case '\f':
1570 appendStringInfoString(buf, "\\f");
1571 break;
1572 case '\n':
1573 appendStringInfoString(buf, "\\n");
1574 break;
1575 case '\r':
1576 appendStringInfoString(buf, "\\r");
1577 break;
1578 case '\t':
1579 appendStringInfoString(buf, "\\t");
1580 break;
1581 case '"':
1582 appendStringInfoString(buf, "\\\"");
1583 break;
1584 case '\\':
1585 appendStringInfoString(buf, "\\\\");
1586 break;
1587 default:
1588 if ((unsigned char) c < ' ')
1589 appendStringInfo(buf, "\\u%04x", (int) c);
1590 else
1591 appendStringInfoCharMacro(buf, c);
1592 break;
1593 }
1594}
1595
1596/*
1597 * escape_json
1598 * Produce a JSON string literal, properly escaping the NUL-terminated
1599 * cstring.
1600 */
1601void
1602escape_json(StringInfo buf, const char *str)
1603{
1604 appendStringInfoCharMacro(buf, '"');
1605
1606 for (; *str != '\0'; str++)
1607 escape_json_char(buf, *str);
1608
1609 appendStringInfoCharMacro(buf, '"');
1610}
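/*
 * Editor's illustration (not part of the original source): the escaping
 * applied by escape_json_char() is what makes text values JSON-safe, e.g.:
 *
 *   SELECT to_json(E'line1\nline2 "quoted"'::text);
 *   -- expected: "line1\nline2 \"quoted\""
 */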
1611
1612/*
1613 * Define the number of bytes that escape_json_with_len will look ahead in the
1614 * input string before flushing the input string to the destination buffer.
1615 * Looking ahead too far could result in cachelines being evicted that will
1616 * need to be reloaded in order to perform the appendBinaryStringInfo call.
1617 * Smaller values will result in a larger number of calls to
1618 * appendBinaryStringInfo and introduce additional function call overhead.
1619 * Values larger than the size of L1d cache will likely result in worse
1620 * performance.
1621 */
1622#define ESCAPE_JSON_FLUSH_AFTER 512
1623
1624/*
1625 * escape_json_with_len
1626 * Produce a JSON string literal, properly escaping the possibly not
1627 * NUL-terminated characters in 'str'. 'len' defines the number of bytes
1628 * from 'str' to process.
1629 */
1630void
1631escape_json_with_len(StringInfo buf, const char *str, int len)
1632{
1633 int vlen;
1634
1635 Assert(len >= 0);
1636
1637 /*
1638 * Since we know the minimum length we'll need to append, let's just
1639 * enlarge the buffer now rather than incrementally making more space when
1640 * we run out. Add two extra bytes for the enclosing quotes.
1641 */
1642 enlargeStringInfo(buf, len + 2);
1643
1644 /*
1645 * Figure out how many bytes to process using SIMD. Round 'len' down to
1646 * the previous multiple of sizeof(Vector8), assuming that's a power-of-2.
1647 */
1648 vlen = len & (int) (~(sizeof(Vector8) - 1));
1649
1649
1650 appendStringInfoCharMacro(buf, '"');
1651
1652 for (int i = 0, copypos = 0;;)
1653 {
1654 /*
1655 * To speed this up, try searching sizeof(Vector8) bytes at once for
1656 * special characters that we need to escape. When we find one, we
1657 * fall out of the Vector8 loop and copy the portion we've vector
1658 * searched and then we process sizeof(Vector8) bytes one byte at a
1659 * time. Once done, come back and try doing vector searching again.
1660 * We'll also process any remaining bytes at the tail end of the
1661 * string byte-by-byte. This optimization assumes that most chunks of
1662 * sizeof(Vector8) bytes won't contain any special characters.
1663 */
1664 for (; i < vlen; i += sizeof(Vector8))
1665 {
1666 Vector8 chunk;
1667
1668 vector8_load(&chunk, (const uint8 *) &str[i]);
1669
1670 /*
1671 * Break on anything less than ' ' or if we find a '"' or '\\'.
1672 * Those need special handling. That's done in the per-byte loop.
1673 */
1674 if (vector8_has_le(chunk, (unsigned char) 0x1F) ||
1675 vector8_has(chunk, (unsigned char) '"') ||
1676 vector8_has(chunk, (unsigned char) '\\'))
1677 break;
1678
1679#ifdef ESCAPE_JSON_FLUSH_AFTER
1680
1681 /*
1682 * Flush what's been checked so far out to the destination buffer
1683 * every so often to avoid having to re-read cachelines when
1684 * escaping large strings.
1685 */
1686 if (i - copypos >= ESCAPE_JSON_FLUSH_AFTER)
1687 {
1688 appendBinaryStringInfo(buf, &str[copypos], i - copypos);
1689 copypos = i;
1690 }
1691#endif
1692 }
1693
1694 /*
1695 * Write to the destination up to the point that we've vector searched
1696 * so far. Do this only when switching into per-byte mode rather than
1697 * once every sizeof(Vector8) bytes.
1698 */
1699 if (copypos < i)
1700 {
1701 appendBinaryStringInfo(buf, &str[copypos], i - copypos);
1702 copypos = i;
1703 }
1704
1705 /*
1706 * Per-byte loop for Vector8s containing special chars and for
1707 * processing the tail of the string.
1708 */
1709 for (int b = 0; b < sizeof(Vector8); b++)
1710 {
1711 /* check if we've finished */
1712 if (i == len)
1713 goto done;
1714
1715 Assert(i < len);
1716
1718 }
1719
1720 copypos = i;
1721 /* We're not done yet. Try the vector search again. */
1722 }
1723
1724done:
1725 appendStringInfoCharMacro(buf, '"');
1726}
1727
1728/*
1729 * escape_json_text
1730 * Append 'txt' onto 'buf' and escape using escape_json_with_len.
1731 *
1732 * This is more efficient than calling text_to_cstring and appending the
1733 * result as that could require an additional palloc and memcpy.
1734 */
1735void
1736escape_json_text(StringInfo buf, const text *txt)
1737{
1738 /* must cast away the const, unfortunately */
1739 text *tunpacked = pg_detoast_datum_packed(unconstify(text *, txt));
1740 int len = VARSIZE_ANY_EXHDR(tunpacked);
1741 char *str;
1742
1743 str = VARDATA_ANY(tunpacked);
1744
1745 escape_json_with_len(buf, str, len);
1746
1747 /* pfree any detoasted values */
1748 if (tunpacked != txt)
1749 pfree(tunpacked);
1750}
1751
1752/* Semantic actions for key uniqueness check */
1753static JsonParseErrorType
1754json_unique_object_start(void *_state)
1755{
1756 JsonUniqueParsingState *state = _state;
1757 JsonUniqueStackEntry *entry;
1758
1759 if (!state->unique)
1760 return JSON_SUCCESS;
1761
1762 /* push object entry to stack */
1763 entry = palloc(sizeof(*entry));
1764 entry->object_id = state->id_counter++;
1765 entry->parent = state->stack;
1766 state->stack = entry;
1767
1768 return JSON_SUCCESS;
1769}
1770
1771static JsonParseErrorType
1772json_unique_object_end(void *_state)
1773{
1774 JsonUniqueParsingState *state = _state;
1775 JsonUniqueStackEntry *entry;
1776
1777 if (!state->unique)
1778 return JSON_SUCCESS;
1779
1780 entry = state->stack;
1781 state->stack = entry->parent; /* pop object from stack */
1782 pfree(entry);
1783 return JSON_SUCCESS;
1784}
1785
1786static JsonParseErrorType
1787json_unique_object_field_start(void *_state, char *field, bool isnull)
1788{
1789 JsonUniqueParsingState *state = _state;
1790 JsonUniqueStackEntry *entry;
1791
1792 if (!state->unique)
1793 return JSON_SUCCESS;
1794
1795 /* find key collision in the current object */
1796 if (json_unique_check_key(&state->check, field, state->stack->object_id))
1797 return JSON_SUCCESS;
1798
1799 state->unique = false;
1800
1801 /* pop all objects entries */
1802 while ((entry = state->stack))
1803 {
1804 state->stack = entry->parent;
1805 pfree(entry);
1806 }
1807 return JSON_SUCCESS;
1808}
1809
1810/* Validate JSON text and additionally check key uniqueness */
1811bool
1812json_validate(text *json, bool check_unique_keys, bool throw_error)
1813{
1814 JsonLexContext lex;
1815 JsonSemAction uniqueSemAction = {0};
1816 JsonUniqueParsingState state;
1817 JsonParseErrorType result;
1818
1819 makeJsonLexContext(&lex, json, check_unique_keys);
1820
1821 if (check_unique_keys)
1822 {
1823 state.lex = &lex;
1824 state.stack = NULL;
1825 state.id_counter = 0;
1826 state.unique = true;
1827 json_unique_check_init(&state.check);
1828
1829 uniqueSemAction.semstate = &state;
1830 uniqueSemAction.object_start = json_unique_object_start;
1831 uniqueSemAction.object_field_start = json_unique_object_field_start;
1832 uniqueSemAction.object_end = json_unique_object_end;
1833 }
1834
1835 result = pg_parse_json(&lex, check_unique_keys ? &uniqueSemAction : &nullSemAction);
1836
1837 if (result != JSON_SUCCESS)
1838 {
1839 if (throw_error)
1840 json_errsave_error(result, &lex, NULL);
1841
1842 return false; /* invalid json */
1843 }
1844
1845 if (check_unique_keys && !state.unique)
1846 {
1847 if (throw_error)
1848 ereport(ERROR,
1849 (errcode(ERRCODE_DUPLICATE_JSON_OBJECT_KEY_VALUE),
1850 errmsg("duplicate JSON object key value")));
1851
1852 return false; /* not unique keys */
1853 }
1854
1855 if (check_unique_keys)
1856 freeJsonLexContext(&lex);
1857
1858 return true; /* ok */
1859}
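/*
 * Editor's illustration (not part of the original source): this function
 * backs, among other things, the SQL IS JSON ... WITH UNIQUE KEYS predicate
 * (assuming a server version that supports it), e.g.:
 *
 *   SELECT '{"a": 1, "b": 2}' IS JSON WITH UNIQUE KEYS;   -- expected: t
 *   SELECT '{"a": 1, "a": 2}' IS JSON WITH UNIQUE KEYS;   -- expected: f
 */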
1860
1861/*
1862 * SQL function json_typeof(json) -> text
1863 *
1864 * Returns the type of the outermost JSON value as TEXT. Possible types are
1865 * "object", "array", "string", "number", "boolean", and "null".
1866 *
1867 * Performs a single call to json_lex() to get the first token of the supplied
1868 * value. This initial token uniquely determines the value's type. As our
1869 * input must already have been validated by json_in() or json_recv(), the
1870 * initial token should never be JSON_TOKEN_OBJECT_END, JSON_TOKEN_ARRAY_END,
1871 * JSON_TOKEN_COLON, JSON_TOKEN_COMMA, or JSON_TOKEN_END.
1872 */
1873Datum
1874json_typeof(PG_FUNCTION_ARGS)
1875{
1876 text *json = PG_GETARG_TEXT_PP(0);
1877 JsonLexContext lex;
1878 char *type;
1879 JsonParseErrorType result;
1880
1881 /* Lex exactly one token from the input and check its type. */
1882 makeJsonLexContext(&lex, json, false);
1883 result = json_lex(&lex);
1884 if (result != JSON_SUCCESS)
1885 json_errsave_error(result, &lex, NULL);
1886
1887 switch (lex.token_type)
1888 {
1889 case JSON_TOKEN_OBJECT_START:
1890 type = "object";
1891 break;
1892 case JSON_TOKEN_ARRAY_START:
1893 type = "array";
1894 break;
1895 case JSON_TOKEN_STRING:
1896 type = "string";
1897 break;
1898 case JSON_TOKEN_NUMBER:
1899 type = "number";
1900 break;
1901 case JSON_TOKEN_TRUE:
1902 case JSON_TOKEN_FALSE:
1903 type = "boolean";
1904 break;
1905 case JSON_TOKEN_NULL:
1906 type = "null";
1907 break;
1908 default:
1909 elog(ERROR, "unexpected json token: %d", lex.token_type);
1910 }
1911
1912 PG_RETURN_TEXT_P(cstring_to_text(type));
1913}
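/*
 * Editor's illustration (not part of the original source):
 *
 *   SELECT json_typeof('-123.4'), json_typeof('null'), json_typeof('[1,2]');
 *   -- expected: number | null | array
 */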