author     Andrew Dunstan <andrew@dunslane.net>    2012-02-03 12:11:16 -0500
committer  Andrew Dunstan <andrew@dunslane.net>    2012-02-03 12:11:16 -0500
commit     39909d1d39ae57c3a655fc7010e394e26b90fec9 (patch)
tree       7eff6f5605be63418574a53b085ed1fd420cd223
parent     69e9768e7b183d4b276d0e067a5a0000689580eb (diff)
download   postgresql-39909d1d39ae57c3a655fc7010e394e26b90fec9.tar.gz
           postgresql-39909d1d39ae57c3a655fc7010e394e26b90fec9.zip
Add array_to_json and row_to_json functions.
Also move the escape_json function from explain.c to json.c where it seems to belong.

Andrew Dunstan, Reviewed by Abhijit Menon-Sen.
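For quick orientation, a minimal usage sketch of the two new SQL-level functions; the calls and results below are taken directly from the documentation table and regression tests included in this patch:

  SELECT array_to_json('{{1,5},{99,100}}'::int[]);
   -- [[1,5],[99,100]]

  SELECT row_to_json(row(1,'foo'));
   -- {"f1":1,"f2":"foo"}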
-rw-r--r--doc/src/sgml/func.sgml59
-rw-r--r--src/backend/commands/explain.c47
-rw-r--r--src/backend/utils/adt/json.c354
-rw-r--r--src/include/catalog/pg_proc.h8
-rw-r--r--src/include/utils/json.h5
-rw-r--r--src/test/regress/expected/json.out111
-rw-r--r--src/test/regress/sql/json.sql43
7 files changed, 579 insertions, 48 deletions
diff --git a/doc/src/sgml/func.sgml b/doc/src/sgml/func.sgml
index ec140040843..22adcb8c5a0 100644
--- a/doc/src/sgml/func.sgml
+++ b/doc/src/sgml/func.sgml
@@ -9617,6 +9617,65 @@ table2-mapping
</sect2>
</sect1>
+ <sect1 id="functions-json">
+ <title>JSON Functions</title>
+
+ <indexterm zone="datatype-json">
+ <primary>JSON</primary>
+ <secondary>Functions and operators</secondary>
+ </indexterm>
+
+ <para>
+ This section describes the functions that are available for creating
+ JSON data (see <xref linkend="datatype-json">).
+ </para>
+
+ <table id="functions-json-table">
+ <title>JSON Support Functions</title>
+ <tgroup cols="4">
+ <thead>
+ <row>
+ <entry>Function</entry>
+ <entry>Description</entry>
+ <entry>Example</entry>
+ <entry>Example Result</entry>
+ </row>
+ </thead>
+ <tbody>
+ <row>
+ <entry>
+ <indexterm>
+ <primary>array_to_json</primary>
+ </indexterm>
+ <literal>array_to_json(anyarray [, pretty_bool])</literal>
+ </entry>
+ <entry>
+ Returns the array as JSON. A Postgres multi-dimensional array
+ becomes a JSON array of arrays. Line feeds will be added between
+ dimension 1 elements if pretty_bool is true.
+ </entry>
+ <entry><literal>array_to_json('{{1,5},{99,100}}'::int[])</literal></entry>
+ <entry><literal>[[1,5],[99,100]]</literal></entry>
+ </row>
+ <row>
+ <entry>
+ <indexterm>
+ <primary>row_to_json</primary>
+ </indexterm>
+ <literal>row_to_json(record [, pretty_bool])</literal>
+ </entry>
+ <entry>
+ Returns the row as JSON. Line feeds will be added between level
+ 1 elements if pretty_bool is true.
+ </entry>
+ <entry><literal>row_to_json(row(1,'foo'))</literal></entry>
+ <entry><literal>{"f1":1,"f2":"foo"}</literal></entry>
+ </row>
+ </tbody>
+ </tgroup>
+ </table>
+
+ </sect1>
<sect1 id="functions-sequence">
<title>Sequence Manipulation Functions</title>
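As the documentation above notes, the optional pretty_bool argument only inserts line feeds between top-level elements (dimension 1 of an array, level 1 of a row). A short illustration, with the query and expected output copied from the regression tests added later in this patch:

  SELECT array_to_json(array_agg(q), true)
    FROM (SELECT x AS b, x * 2 AS c FROM generate_series(1,3) x) q;
   -- [{"f1":1,"f2":2},
   --  {"f1":2,"f2":4},
   --  {"f1":3,"f2":6}]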
diff --git a/src/backend/commands/explain.c b/src/backend/commands/explain.c
index e35ac590301..e57580e8add 100644
--- a/src/backend/commands/explain.c
+++ b/src/backend/commands/explain.c
@@ -24,6 +24,7 @@
#include "rewrite/rewriteHandler.h"
#include "tcop/tcopprot.h"
#include "utils/builtins.h"
+#include "utils/json.h"
#include "utils/lsyscache.h"
#include "utils/rel.h"
#include "utils/snapmgr.h"
@@ -99,7 +100,6 @@ static void ExplainDummyGroup(const char *objtype, const char *labelname,
static void ExplainXMLTag(const char *tagname, int flags, ExplainState *es);
static void ExplainJSONLineEnding(ExplainState *es);
static void ExplainYAMLLineStarting(ExplainState *es);
-static void escape_json(StringInfo buf, const char *str);
static void escape_yaml(StringInfo buf, const char *str);
@@ -2319,51 +2319,6 @@ ExplainYAMLLineStarting(ExplainState *es)
}
/*
- * Produce a JSON string literal, properly escaping characters in the text.
- */
-static void
-escape_json(StringInfo buf, const char *str)
-{
- const char *p;
-
- appendStringInfoCharMacro(buf, '\"');
- for (p = str; *p; p++)
- {
- switch (*p)
- {
- case '\b':
- appendStringInfoString(buf, "\\b");
- break;
- case '\f':
- appendStringInfoString(buf, "\\f");
- break;
- case '\n':
- appendStringInfoString(buf, "\\n");
- break;
- case '\r':
- appendStringInfoString(buf, "\\r");
- break;
- case '\t':
- appendStringInfoString(buf, "\\t");
- break;
- case '"':
- appendStringInfoString(buf, "\\\"");
- break;
- case '\\':
- appendStringInfoString(buf, "\\\\");
- break;
- default:
- if ((unsigned char) *p < ' ')
- appendStringInfo(buf, "\\u%04x", (int) *p);
- else
- appendStringInfoCharMacro(buf, *p);
- break;
- }
- }
- appendStringInfoCharMacro(buf, '\"');
-}
-
-/*
 * YAML is a superset of JSON; unfortunately, the YAML quoting rules are
* ridiculously complicated -- as documented in sections 5.3 and 7.3.3 of
* http://yaml.org/spec/1.2/spec.html -- so we chose to just quote everything.
diff --git a/src/backend/utils/adt/json.c b/src/backend/utils/adt/json.c
index cbb81d1bf37..60addf2871f 100644
--- a/src/backend/utils/adt/json.c
+++ b/src/backend/utils/adt/json.c
@@ -13,11 +13,17 @@
*/
#include "postgres.h"
+#include "catalog/pg_type.h"
+#include "executor/spi.h"
#include "lib/stringinfo.h"
#include "libpq/pqformat.h"
#include "mb/pg_wchar.h"
+#include "parser/parse_coerce.h"
+#include "utils/array.h"
#include "utils/builtins.h"
+#include "utils/lsyscache.h"
#include "utils/json.h"
+#include "utils/typcache.h"
typedef enum
{
@@ -72,8 +78,11 @@ static void json_lex_number(JsonLexContext *lex, char *s);
static void report_parse_error(JsonParseStack *stack, JsonLexContext *lex);
static void report_invalid_token(JsonLexContext *lex);
static char *extract_mb_char(char *s);
-
-extern Datum json_in(PG_FUNCTION_ARGS);
+static void composite_to_json(Datum composite, StringInfo result, bool use_line_feeds);
+static void array_dim_to_json(StringInfo result, int dim, int ndims,int * dims,
+ Datum *vals, int * valcount, TYPCATEGORY tcategory,
+ Oid typoutputfunc, bool use_line_feeds);
+static void array_to_json_internal(Datum array, StringInfo result, bool use_line_feeds);
/*
* Input.
@@ -663,3 +672,344 @@ extract_mb_char(char *s)
return res;
}
+
+/*
+ * Turn a scalar Datum into JSON. Hand off a non-scalar datum to
+ * composite_to_json or array_to_json_internal as appropriate.
+ */
+static inline void
+datum_to_json(Datum val, StringInfo result, TYPCATEGORY tcategory,
+ Oid typoutputfunc)
+{
+
+ char *outputstr;
+
+ if (val == (Datum) NULL)
+ {
+ appendStringInfoString(result,"null");
+ return;
+ }
+
+ switch (tcategory)
+ {
+ case TYPCATEGORY_ARRAY:
+ array_to_json_internal(val, result, false);
+ break;
+ case TYPCATEGORY_COMPOSITE:
+ composite_to_json(val, result, false);
+ break;
+ case TYPCATEGORY_BOOLEAN:
+ if (DatumGetBool(val))
+ appendStringInfoString(result,"true");
+ else
+ appendStringInfoString(result,"false");
+ break;
+ case TYPCATEGORY_NUMERIC:
+ outputstr = OidOutputFunctionCall(typoutputfunc, val);
+ /*
+ * Don't call escape_json here. Numeric output should
+ * be a valid JSON number and JSON numbers shouldn't
+ * be quoted.
+ */
+ appendStringInfoString(result, outputstr);
+ pfree(outputstr);
+ break;
+ default:
+ outputstr = OidOutputFunctionCall(typoutputfunc, val);
+ escape_json(result, outputstr);
+ pfree(outputstr);
+ }
+}
+
+/*
+ * Process a single dimension of an array.
+ * If it's the innermost dimension, output the values, otherwise call
+ * ourselves recursively to process the next dimension.
+ */
+static void
+array_dim_to_json(StringInfo result, int dim, int ndims,int * dims, Datum *vals,
+ int * valcount, TYPCATEGORY tcategory, Oid typoutputfunc,
+ bool use_line_feeds)
+{
+
+ int i;
+ char *sep;
+
+ Assert(dim < ndims);
+
+ sep = use_line_feeds ? ",\n " : ",";
+
+ appendStringInfoChar(result, '[');
+
+ for (i = 1; i <= dims[dim]; i++)
+ {
+ if (i > 1)
+ appendStringInfoString(result,sep);
+
+ if (dim + 1 == ndims)
+ {
+ datum_to_json(vals[*valcount],result,tcategory,typoutputfunc);
+ (*valcount)++;
+ }
+ else
+ {
+ /*
+ * Do we want line feeds on inner dimensions of arrays?
+ * For now we'll say no.
+ */
+ array_dim_to_json(result, dim+1, ndims, dims, vals, valcount,
+ tcategory,typoutputfunc,false);
+ }
+ }
+
+ appendStringInfoChar(result, ']');
+}
+
+/*
+ * Turn an array into JSON.
+ */
+static void
+array_to_json_internal(Datum array, StringInfo result, bool use_line_feeds)
+{
+ ArrayType *v = DatumGetArrayTypeP(array);
+ Oid element_type = ARR_ELEMTYPE(v);
+ int *dim;
+ int ndim;
+ int nitems;
+ int count = 0;
+ Datum *elements;
+ bool *nulls;
+
+ int16 typlen;
+ bool typbyval;
+ char typalign,
+ typdelim;
+ Oid typioparam;
+ Oid typoutputfunc;
+ TYPCATEGORY tcategory;
+
+ ndim = ARR_NDIM(v);
+ dim = ARR_DIMS(v);
+ nitems = ArrayGetNItems(ndim, dim);
+
+ if (nitems <= 0)
+ {
+ appendStringInfoString(result,"[]");
+ return;
+ }
+
+ get_type_io_data(element_type, IOFunc_output,
+ &typlen, &typbyval, &typalign,
+ &typdelim, &typioparam, &typoutputfunc);
+
+ deconstruct_array(v, element_type, typlen, typbyval,
+ typalign, &elements, &nulls,
+ &nitems);
+
+ /* can't have an array of arrays, so this is the only special case here */
+ if (element_type == RECORDOID)
+ tcategory = TYPCATEGORY_COMPOSITE;
+ else
+ tcategory = TypeCategory(element_type);
+
+ array_dim_to_json(result, 0, ndim, dim, elements, &count, tcategory,
+ typoutputfunc, use_line_feeds);
+
+ pfree(elements);
+ pfree(nulls);
+}
+
+/*
+ * Turn a composite / record into JSON.
+ */
+static void
+composite_to_json(Datum composite, StringInfo result, bool use_line_feeds)
+{
+ HeapTupleHeader td;
+ Oid tupType;
+ int32 tupTypmod;
+ TupleDesc tupdesc;
+ HeapTupleData tmptup, *tuple;
+ int i;
+ bool needsep = false;
+ char *sep;
+
+ sep = use_line_feeds ? ",\n " : ",";
+
+ td = DatumGetHeapTupleHeader(composite);
+
+ /* Extract rowtype info and find a tupdesc */
+ tupType = HeapTupleHeaderGetTypeId(td);
+ tupTypmod = HeapTupleHeaderGetTypMod(td);
+ tupdesc = lookup_rowtype_tupdesc(tupType, tupTypmod);
+
+ /* Build a temporary HeapTuple control structure */
+ tmptup.t_len = HeapTupleHeaderGetDatumLength(td);
+ tmptup.t_data = td;
+ tuple = &tmptup;
+
+ appendStringInfoChar(result,'{');
+
+ for (i = 0; i < tupdesc->natts; i++)
+ {
+ Datum val, origval;
+ bool isnull;
+ char *attname;
+ TYPCATEGORY tcategory;
+ Oid typoutput;
+ bool typisvarlena;
+
+ if (tupdesc->attrs[i]->attisdropped)
+ continue;
+
+ if (needsep)
+ appendStringInfoString(result,sep);
+ needsep = true;
+
+ attname = NameStr(tupdesc->attrs[i]->attname);
+ escape_json(result,attname);
+ appendStringInfoChar(result,':');
+
+ origval = heap_getattr(tuple, i + 1, tupdesc, &isnull);
+
+ if (tupdesc->attrs[i]->atttypid == RECORDARRAYOID)
+ tcategory = TYPCATEGORY_ARRAY;
+ else if (tupdesc->attrs[i]->atttypid == RECORDOID)
+ tcategory = TYPCATEGORY_COMPOSITE;
+ else
+ tcategory = TypeCategory(tupdesc->attrs[i]->atttypid);
+
+ getTypeOutputInfo(tupdesc->attrs[i]->atttypid,
+ &typoutput, &typisvarlena);
+
+ /*
+ * If we have a toasted datum, forcibly detoast it here to avoid memory
+ * leakage inside the type's output routine.
+ */
+ if (typisvarlena && ! isnull)
+ val = PointerGetDatum(PG_DETOAST_DATUM(origval));
+ else
+ val = origval;
+
+ datum_to_json(val, result, tcategory, typoutput);
+
+ /* Clean up detoasted copy, if any */
+ if (val != origval)
+ pfree(DatumGetPointer(val));
+ }
+
+ appendStringInfoChar(result,'}');
+ ReleaseTupleDesc(tupdesc);
+}
+
+/*
+ * SQL function array_to_json(array)
+ */
+extern Datum
+array_to_json(PG_FUNCTION_ARGS)
+{
+ Datum array = PG_GETARG_DATUM(0);
+ StringInfo result;
+
+ result = makeStringInfo();
+
+ array_to_json_internal(array, result, false);
+
+ PG_RETURN_TEXT_P(cstring_to_text(result->data));
+};
+
+/*
+ * SQL function array_to_json(array, prettybool)
+ */
+extern Datum
+array_to_json_pretty(PG_FUNCTION_ARGS)
+{
+ Datum array = PG_GETARG_DATUM(0);
+ bool use_line_feeds = PG_GETARG_BOOL(1);
+ StringInfo result;
+
+ result = makeStringInfo();
+
+ array_to_json_internal(array, result, use_line_feeds);
+
+ PG_RETURN_TEXT_P(cstring_to_text(result->data));
+};
+
+/*
+ * SQL function row_to_json(row)
+ */
+extern Datum
+row_to_json(PG_FUNCTION_ARGS)
+{
+ Datum array = PG_GETARG_DATUM(0);
+ StringInfo result;
+
+ result = makeStringInfo();
+
+ composite_to_json(array, result, false);
+
+ PG_RETURN_TEXT_P(cstring_to_text(result->data));
+};
+
+/*
+ * SQL function row_to_json(row, prettybool)
+ */
+extern Datum
+row_to_json_pretty(PG_FUNCTION_ARGS)
+{
+ Datum array = PG_GETARG_DATUM(0);
+ bool use_line_feeds = PG_GETARG_BOOL(1);
+ StringInfo result;
+
+ result = makeStringInfo();
+
+ composite_to_json(array, result, use_line_feeds);
+
+ PG_RETURN_TEXT_P(cstring_to_text(result->data));
+};
+
+/*
+ * Produce a JSON string literal, properly escaping characters in the text.
+ */
+void
+escape_json(StringInfo buf, const char *str)
+{
+ const char *p;
+
+ appendStringInfoCharMacro(buf, '\"');
+ for (p = str; *p; p++)
+ {
+ switch (*p)
+ {
+ case '\b':
+ appendStringInfoString(buf, "\\b");
+ break;
+ case '\f':
+ appendStringInfoString(buf, "\\f");
+ break;
+ case '\n':
+ appendStringInfoString(buf, "\\n");
+ break;
+ case '\r':
+ appendStringInfoString(buf, "\\r");
+ break;
+ case '\t':
+ appendStringInfoString(buf, "\\t");
+ break;
+ case '"':
+ appendStringInfoString(buf, "\\\"");
+ break;
+ case '\\':
+ appendStringInfoString(buf, "\\\\");
+ break;
+ default:
+ if ((unsigned char) *p < ' ')
+ appendStringInfo(buf, "\\u%04x", (int) *p);
+ else
+ appendStringInfoCharMacro(buf, *p);
+ break;
+ }
+ }
+ appendStringInfoCharMacro(buf, '\"');
+}
+
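A note on the type handling in datum_to_json above: booleans are emitted as true/false, anything in the numeric type category is emitted unquoted, a NULL datum becomes the JSON literal null, and all other values go through their type output function and escape_json. A small sketch of the resulting behaviour; this example and its output are illustrative only and are not part of the regression tests (the f1..f3 names are the defaults for an anonymous row):

  SELECT row_to_json(row(1.5, true, NULL::text));
   -- expected output: {"f1":1.5,"f2":true,"f3":null}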
diff --git a/src/include/catalog/pg_proc.h b/src/include/catalog/pg_proc.h
index 5dc6d05478f..8fc4ddb4b56 100644
--- a/src/include/catalog/pg_proc.h
+++ b/src/include/catalog/pg_proc.h
@@ -4031,6 +4031,14 @@ DATA(insert OID = 323 ( json_recv PGNSP PGUID 12 1 0 0 0 f f f t f s 1 0 11
DESCR("I/O");
DATA(insert OID = 324 ( json_send PGNSP PGUID 12 1 0 0 0 f f f t f s 1 0 17 "114" _null_ _null_ _null_ _null_ json_send _null_ _null_ _null_ ));
DESCR("I/O");
+DATA(insert OID = 3153 ( array_to_json PGNSP PGUID 12 1 0 0 0 f f f t f s 1 0 114 "2277" _null_ _null_ _null_ _null_ array_to_json _null_ _null_ _null_ ));
+DESCR("map array to json");
+DATA(insert OID = 3154 ( array_to_json PGNSP PGUID 12 1 0 0 0 f f f t f s 2 0 114 "2277 16" _null_ _null_ _null_ _null_ array_to_json_pretty _null_ _null_ _null_ ));
+DESCR("map array to json with optional pretty printing");
+DATA(insert OID = 3155 ( row_to_json PGNSP PGUID 12 1 0 0 0 f f f t f s 1 0 114 "2249" _null_ _null_ _null_ _null_ row_to_json _null_ _null_ _null_ ));
+DESCR("map row to json");
+DATA(insert OID = 3156 ( row_to_json PGNSP PGUID 12 1 0 0 0 f f f t f s 2 0 114 "2249 16" _null_ _null_ _null_ _null_ row_to_json_pretty _null_ _null_ _null_ ));
+DESCR("map row to json with optional pretty printing");
/* uuid */
DATA(insert OID = 2952 ( uuid_in PGNSP PGUID 12 1 0 0 0 f f f t f i 1 0 2950 "2275" _null_ _null_ _null_ _null_ uuid_in _null_ _null_ _null_ ));
diff --git a/src/include/utils/json.h b/src/include/utils/json.h
index ee87fd61d7c..415787b458d 100644
--- a/src/include/utils/json.h
+++ b/src/include/utils/json.h
@@ -20,5 +20,10 @@ extern Datum json_in(PG_FUNCTION_ARGS);
extern Datum json_out(PG_FUNCTION_ARGS);
extern Datum json_recv(PG_FUNCTION_ARGS);
extern Datum json_send(PG_FUNCTION_ARGS);
+extern Datum array_to_json(PG_FUNCTION_ARGS);
+extern Datum array_to_json_pretty(PG_FUNCTION_ARGS);
+extern Datum row_to_json(PG_FUNCTION_ARGS);
+extern Datum row_to_json_pretty(PG_FUNCTION_ARGS);
+extern void escape_json(StringInfo buf, const char *str);
#endif /* XML_H */
diff --git a/src/test/regress/expected/json.out b/src/test/regress/expected/json.out
index 5ef65f7c972..f2148bf362f 100644
--- a/src/test/regress/expected/json.out
+++ b/src/test/regress/expected/json.out
@@ -256,3 +256,114 @@ ERROR: invalid input syntax for type json: " "
LINE 1: SELECT ' '::json;
^
DETAIL: The input string ended unexpectedly.
+--constructors
+-- array_to_json
+SELECT array_to_json(array(select 1 as a));
+ array_to_json
+---------------
+ [1]
+(1 row)
+
+SELECT array_to_json(array_agg(q),false) from (select x as b, x * 2 as c from generate_series(1,3) x) q;
+ array_to_json
+---------------------------------------------------
+ [{"f1":1,"f2":2},{"f1":2,"f2":4},{"f1":3,"f2":6}]
+(1 row)
+
+SELECT array_to_json(array_agg(q),true) from (select x as b, x * 2 as c from generate_series(1,3) x) q;
+ array_to_json
+-------------------
+ [{"f1":1,"f2":2},+
+ {"f1":2,"f2":4},+
+ {"f1":3,"f2":6}]
+(1 row)
+
+SELECT array_to_json(array_agg(q),false)
+ FROM ( SELECT $$a$$ || x AS b, y AS c,
+ ARRAY[ROW(x.*,ARRAY[1,2,3]),
+ ROW(y.*,ARRAY[4,5,6])] AS z
+ FROM generate_series(1,2) x,
+ generate_series(4,5) y) q;
+ array_to_json
+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+ [{"b":"a1","c":4,"z":[{"f1":1,"f2":[1,2,3]},{"f1":4,"f2":[4,5,6]}]},{"b":"a1","c":5,"z":[{"f1":1,"f2":[1,2,3]},{"f1":5,"f2":[4,5,6]}]},{"b":"a2","c":4,"z":[{"f1":2,"f2":[1,2,3]},{"f1":4,"f2":[4,5,6]}]},{"b":"a2","c":5,"z":[{"f1":2,"f2":[1,2,3]},{"f1":5,"f2":[4,5,6]}]}]
+(1 row)
+
+SELECT array_to_json(array_agg(x),false) from generate_series(5,10) x;
+ array_to_json
+----------------
+ [5,6,7,8,9,10]
+(1 row)
+
+SELECT array_to_json('{{1,5},{99,100}}'::int[]);
+ array_to_json
+------------------
+ [[1,5],[99,100]]
+(1 row)
+
+-- row_to_json
+SELECT row_to_json(row(1,'foo'));
+ row_to_json
+---------------------
+ {"f1":1,"f2":"foo"}
+(1 row)
+
+SELECT row_to_json(q)
+FROM (SELECT $$a$$ || x AS b,
+ y AS c,
+ ARRAY[ROW(x.*,ARRAY[1,2,3]),
+ ROW(y.*,ARRAY[4,5,6])] AS z
+ FROM generate_series(1,2) x,
+ generate_series(4,5) y) q;
+ row_to_json
+--------------------------------------------------------------------
+ {"b":"a1","c":4,"z":[{"f1":1,"f2":[1,2,3]},{"f1":4,"f2":[4,5,6]}]}
+ {"b":"a1","c":5,"z":[{"f1":1,"f2":[1,2,3]},{"f1":5,"f2":[4,5,6]}]}
+ {"b":"a2","c":4,"z":[{"f1":2,"f2":[1,2,3]},{"f1":4,"f2":[4,5,6]}]}
+ {"b":"a2","c":5,"z":[{"f1":2,"f2":[1,2,3]},{"f1":5,"f2":[4,5,6]}]}
+(4 rows)
+
+SELECT row_to_json(q,true)
+FROM (SELECT $$a$$ || x AS b,
+ y AS c,
+ ARRAY[ROW(x.*,ARRAY[1,2,3]),
+ ROW(y.*,ARRAY[4,5,6])] AS z
+ FROM generate_series(1,2) x,
+ generate_series(4,5) y) q;
+ row_to_json
+-----------------------------------------------------
+ {"b":"a1", +
+ "c":4, +
+ "z":[{"f1":1,"f2":[1,2,3]},{"f1":4,"f2":[4,5,6]}]}
+ {"b":"a1", +
+ "c":5, +
+ "z":[{"f1":1,"f2":[1,2,3]},{"f1":5,"f2":[4,5,6]}]}
+ {"b":"a2", +
+ "c":4, +
+ "z":[{"f1":2,"f2":[1,2,3]},{"f1":4,"f2":[4,5,6]}]}
+ {"b":"a2", +
+ "c":5, +
+ "z":[{"f1":2,"f2":[1,2,3]},{"f1":5,"f2":[4,5,6]}]}
+(4 rows)
+
+CREATE TEMP TABLE rows AS
+SELECT x, 'txt' || x as y
+FROM generate_series(1,3) AS x;
+SELECT row_to_json(q,true)
+FROM rows q;
+ row_to_json
+--------------
+ {"x":1, +
+ "y":"txt1"}
+ {"x":2, +
+ "y":"txt2"}
+ {"x":3, +
+ "y":"txt3"}
+(3 rows)
+
+SELECT row_to_json(row((select array_agg(x) as d from generate_series(5,10) x)),false);
+ row_to_json
+-----------------------
+ {"f1":[5,6,7,8,9,10]}
+(1 row)
+
diff --git a/src/test/regress/sql/json.sql b/src/test/regress/sql/json.sql
index 440398b9b9f..61273555aae 100644
--- a/src/test/regress/sql/json.sql
+++ b/src/test/regress/sql/json.sql
@@ -54,3 +54,46 @@ SELECT 'truf'::json; -- ERROR, not a keyword
SELECT 'trues'::json; -- ERROR, not a keyword
SELECT ''::json; -- ERROR, no value
SELECT ' '::json; -- ERROR, no value
+
+--constructors
+-- array_to_json
+
+SELECT array_to_json(array(select 1 as a));
+SELECT array_to_json(array_agg(q),false) from (select x as b, x * 2 as c from generate_series(1,3) x) q;
+SELECT array_to_json(array_agg(q),true) from (select x as b, x * 2 as c from generate_series(1,3) x) q;
+SELECT array_to_json(array_agg(q),false)
+ FROM ( SELECT $$a$$ || x AS b, y AS c,
+ ARRAY[ROW(x.*,ARRAY[1,2,3]),
+ ROW(y.*,ARRAY[4,5,6])] AS z
+ FROM generate_series(1,2) x,
+ generate_series(4,5) y) q;
+SELECT array_to_json(array_agg(x),false) from generate_series(5,10) x;
+SELECT array_to_json('{{1,5},{99,100}}'::int[]);
+
+-- row_to_json
+SELECT row_to_json(row(1,'foo'));
+
+SELECT row_to_json(q)
+FROM (SELECT $$a$$ || x AS b,
+ y AS c,
+ ARRAY[ROW(x.*,ARRAY[1,2,3]),
+ ROW(y.*,ARRAY[4,5,6])] AS z
+ FROM generate_series(1,2) x,
+ generate_series(4,5) y) q;
+
+SELECT row_to_json(q,true)
+FROM (SELECT $$a$$ || x AS b,
+ y AS c,
+ ARRAY[ROW(x.*,ARRAY[1,2,3]),
+ ROW(y.*,ARRAY[4,5,6])] AS z
+ FROM generate_series(1,2) x,
+ generate_series(4,5) y) q;
+
+CREATE TEMP TABLE rows AS
+SELECT x, 'txt' || x as y
+FROM generate_series(1,3) AS x;
+
+SELECT row_to_json(q,true)
+FROM rows q;
+
+SELECT row_to_json(row((select array_agg(x) as d from generate_series(5,10) x)),false);