mirror of https://git.postgresql.org/git/postgresql.git (synced 2025-03-07 19:47:50 +08:00)
New json functions.
json_build_array() and json_build_object() allow the construction of arbitrarily complex JSON trees. json_object() turns a one- or two-dimensional array, or two separate arrays, into a JSON object of name/value pairs, similarly to the hstore() function. json_object_agg() aggregates its two arguments into a single JSON object of name/value pairs. Catalog version bumped. Andrew Dunstan, reviewed by Marko Tiikkaja.
This commit is contained in:
parent 9132b189bf
commit 105639900b
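For orientation, here is a minimal psql-style sketch of the new functions, using the same calls that appear in the documentation and regression tests added by this commit; the table t(k, v) in the last line is hypothetical:

    -- build a JSON array / object from a variadic argument list
    SELECT json_build_array(1, 2, '3', 4, 5);            -- [1, 2, "3", 4, 5]
    SELECT json_build_object('foo', 1, 'bar', 2);        -- {"foo" : 1, "bar" : 2}

    -- build a JSON object from a text array, or from separate key and value arrays
    SELECT json_object('{a, 1, b, "def", c, 3.5}');      -- {"a" : "1", "b" : "def", "c" : "3.5"}
    SELECT json_object('{a, b}', '{1,2}');               -- {"a" : "1", "b" : "2"}

    -- aggregate two columns into a single JSON object of name/value pairs
    SELECT json_object_agg(k, v) FROM t;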
@ -10300,6 +10300,137 @@ table2-mapping
<entry><literal>json_typeof('-123.4')</literal></entry>
<entry><literal>number</literal></entry>
</row>
<row>
<entry>
<indexterm>
<primary>json_build_array</primary>
</indexterm>
<literal>json_build_array(VARIADIC "any")</literal>
</entry>
<entry><type>json</type></entry>
<entry>
Builds a heterogeneously typed JSON array out of a variadic argument list.
</entry>
<entry><literal>SELECT json_build_array(1,2,'3',4,5);</literal></entry>
<entry>
<programlisting>
 json_build_array
-------------------
 [1, 2, "3", 4, 5]
</programlisting>
</entry>
</row>
<row>
<entry>
<indexterm>
<primary>json_build_object</primary>
</indexterm>
<literal>json_build_object(VARIADIC "any")</literal>
</entry>
<entry><type>json</type></entry>
<entry>
Builds a JSON object out of a variadic argument list. By convention, the object is
constructed out of alternating name/value arguments.
</entry>
<entry><literal>SELECT json_build_object('foo',1,'bar',2);</literal></entry>
<entry>
<programlisting>
   json_build_object
------------------------
 {"foo" : 1, "bar" : 2}
</programlisting>
</entry>
</row>
<row>
<entry>
<indexterm>
<primary>json_object</primary>
</indexterm>
<literal>json_object(text[])</literal>
</entry>
<entry><type>json</type></entry>
<entry>
Builds a JSON object out of a text array. The array must have either
exactly one dimension with an even number of members, in which case
they are taken as alternating name/value pairs, or two dimensions
such that each inner array has exactly two elements, which
are taken as a name/value pair.
</entry>
<entry><literal>select * from json_object('{a, 1, b, "def", c, 3.5}')</literal> or <literal>select * from json_object('{{a, 1},{b, "def"},{c, 3.5}}')</literal></entry>
<entry>
<programlisting>
              json_object
---------------------------------------
 {"a" : "1", "b" : "def", "c" : "3.5"}
</programlisting>
</entry>
</row>
<row>
<entry>
<literal>json_object(keys text[], values text[])</literal>
</entry>
<entry><type>json</type></entry>
<entry>
The two-argument form of json_object takes keys and values pairwise from two separate
arrays. In all other respects it is identical to the one-argument form.
</entry>
<entry><literal>select * from json_object('{a, b}', '{1,2}');</literal></entry>
<entry>
<programlisting>
      json_object
------------------------
 {"a" : "1", "b" : "2"}
</programlisting>
</entry>
</row>
<row>
<entry>
<indexterm>
<primary>json_to_record</primary>
</indexterm>
<literal>json_to_record(json, nested_as_text bool)</literal>
</entry>
<entry><type>record</type></entry>
<entry>
json_to_record returns an arbitrary record from a JSON object. As with all functions
returning <type>record</type>, the caller must explicitly define the structure of the record
when making the call. The input JSON must be an object, not a scalar or an array.
If nested_as_text is true, the function coerces nested complex elements to text.
Also, see notes below on columns and types.
</entry>
<entry><literal>select * from json_to_record('{"a":1,"b":[1,2,3],"c":"bar"}',true) as x(a int, b text, d text)</literal></entry>
<entry>
<programlisting>
 a |    b    | d
---+---------+---
 1 | [1,2,3] |
</programlisting>
</entry>
</row>
<row>
<entry>
<indexterm>
<primary>json_to_recordset</primary>
</indexterm>
<literal>json_to_recordset(json, nested_as_text bool)</literal>
</entry>
<entry><type>setof record</type></entry>
<entry>
json_to_recordset returns an arbitrary set of records from a JSON array of objects. As with
json_to_record, the structure of the record must be explicitly defined when making the
call. nested_as_text works as with json_to_record.
</entry>
<entry><literal>select * from json_to_recordset('[{"a":1,"b":"foo"},{"a":"2","c":"bar"}]',true) as x(a int, b text);</literal></entry>
<entry>
<programlisting>
 a |  b
---+-----
 1 | foo
 2 |
</programlisting>
</entry>
</row>
</tbody>
</tgroup>
</table>
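To make the json_to_record and json_to_recordset entries above concrete, the regression tests added later in this commit show how JSON keys are matched to the declared columns: a declared column missing from the JSON comes back NULL, and a JSON key with no matching column is dropped:

    select * from json_to_record('{"a":1,"b":"foo","c":"bar"}',true)
        as x(a int, b text, d text);
    --  a |  b  | d
    -- ---+-----+---
    --  1 | foo |

    select * from json_to_recordset('[{"a":1,"b":"foo","d":false},{"a":2,"b":"bar","c":true}]',false)
        as x(a int, b text, c boolean);
    --  a |  b  | c
    -- ---+-----+---
    --  1 | foo |
    --  2 | bar | t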
@ -10324,6 +10455,17 @@ table2-mapping
</para>
</note>

<note>
<para>
In json_to_record and json_to_recordset, type coercion from the JSON is
"best effort" and may not result in desired values for some types. JSON
elements are matched to identical field names in the record definition,
and elements which do not exist in the JSON will simply be NULL. JSON
elements which are not defined in the record template will
be omitted from the output.
</para>
</note>

<note>
<para>
The <xref linkend="hstore"> extension has a cast from <type>hstore</type> to
@ -11772,6 +11914,22 @@ NULL baz</literallayout>(3 rows)</entry>
<entry>aggregates records as a JSON array of objects</entry>
</row>

<row>
<entry>
<indexterm>
<primary>json_object_agg</primary>
</indexterm>
<function>json_object_agg(<replaceable class="parameter">expression</replaceable>)</function>
</entry>
<entry>
<type>("any", "any")</type>
</entry>
<entry>
<type>json</type>
</entry>
<entry>aggregates name/value pairs as a JSON object</entry>
</row>

<row>
<entry>
<indexterm>
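As a usage sketch for the new aggregate documented above, the regression tests added below aggregate a small table into a single nested object (output abbreviated here):

    CREATE TEMP TABLE foo (serial_num int, name text, type text);
    INSERT INTO foo VALUES (847001,'t15','GE1043');
    INSERT INTO foo VALUES (847002,'t16','GE1043');
    INSERT INTO foo VALUES (847003,'sub-alpha','GESS90');

    SELECT json_build_object('turbines',
               json_object_agg(serial_num, json_build_object('name', name, 'type', type)))
    FROM foo;
    -- {"turbines" : { "847001" : {"name" : "t15", "type" : "GE1043"}, ... }}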
@ -68,6 +68,10 @@ static void array_dim_to_json(StringInfo result, int dim, int ndims, int *dims,
|
||||
bool use_line_feeds);
|
||||
static void array_to_json_internal(Datum array, StringInfo result,
|
||||
bool use_line_feeds);
|
||||
static void datum_to_json(Datum val, bool is_null, StringInfo result,
|
||||
TYPCATEGORY tcategory, Oid typoutputfunc, bool key_scalar);
|
||||
static void add_json(Datum val, bool is_null, StringInfo result,
|
||||
Oid val_type, bool key_scalar);
|
||||
|
||||
/* the null action object used for pure validation */
|
||||
static JsonSemAction nullSemAction =
|
||||
@ -1219,7 +1223,7 @@ extract_mb_char(char *s)
|
||||
*/
|
||||
static void
|
||||
datum_to_json(Datum val, bool is_null, StringInfo result,
|
||||
TYPCATEGORY tcategory, Oid typoutputfunc)
|
||||
TYPCATEGORY tcategory, Oid typoutputfunc, bool key_scalar)
|
||||
{
|
||||
char *outputstr;
|
||||
text *jsontext;
|
||||
@ -1241,24 +1245,32 @@ datum_to_json(Datum val, bool is_null, StringInfo result,
|
||||
composite_to_json(val, result, false);
|
||||
break;
|
||||
case TYPCATEGORY_BOOLEAN:
|
||||
if (DatumGetBool(val))
|
||||
appendStringInfoString(result, "true");
|
||||
if (!key_scalar)
|
||||
appendStringInfoString(result, DatumGetBool(val) ? "true" : "false");
|
||||
else
|
||||
appendStringInfoString(result, "false");
|
||||
escape_json(result, DatumGetBool(val) ? "true" : "false");
|
||||
break;
|
||||
case TYPCATEGORY_NUMERIC:
|
||||
outputstr = OidOutputFunctionCall(typoutputfunc, val);
|
||||
|
||||
/*
|
||||
* Don't call escape_json here if it's a valid JSON number.
|
||||
*/
|
||||
dummy_lex.input = *outputstr == '-' ? outputstr + 1 : outputstr;
|
||||
dummy_lex.input_length = strlen(dummy_lex.input);
|
||||
json_lex_number(&dummy_lex, dummy_lex.input, &numeric_error);
|
||||
if (!numeric_error)
|
||||
appendStringInfoString(result, outputstr);
|
||||
else
|
||||
if (key_scalar)
|
||||
{
|
||||
/* always quote keys */
|
||||
escape_json(result, outputstr);
|
||||
}
|
||||
else
|
||||
{
|
||||
/*
|
||||
* Don't call escape_json for a non-key if it's a valid JSON
|
||||
* number.
|
||||
*/
|
||||
dummy_lex.input = *outputstr == '-' ? outputstr + 1 : outputstr;
|
||||
dummy_lex.input_length = strlen(dummy_lex.input);
|
||||
json_lex_number(&dummy_lex, dummy_lex.input, &numeric_error);
|
||||
if (!numeric_error)
|
||||
appendStringInfoString(result, outputstr);
|
||||
else
|
||||
escape_json(result, outputstr);
|
||||
}
|
||||
pfree(outputstr);
|
||||
break;
|
||||
case TYPCATEGORY_JSON:
|
||||
@ -1276,6 +1288,10 @@ datum_to_json(Datum val, bool is_null, StringInfo result,
|
||||
break;
|
||||
default:
|
||||
outputstr = OidOutputFunctionCall(typoutputfunc, val);
|
||||
if (key_scalar && *outputstr == '\0')
|
||||
ereport(ERROR,
|
||||
(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
|
||||
errmsg("key value must not be empty")));
|
||||
escape_json(result, outputstr);
|
||||
pfree(outputstr);
|
||||
break;
|
||||
@ -1309,7 +1325,7 @@ array_dim_to_json(StringInfo result, int dim, int ndims, int *dims, Datum *vals,
|
||||
if (dim + 1 == ndims)
|
||||
{
|
||||
datum_to_json(vals[*valcount], nulls[*valcount], result, tcategory,
|
||||
typoutputfunc);
|
||||
typoutputfunc, false);
|
||||
(*valcount)++;
|
||||
}
|
||||
else
|
||||
@ -1490,13 +1506,75 @@ composite_to_json(Datum composite, StringInfo result, bool use_line_feeds)
|
||||
else
|
||||
tcategory = TypeCategory(tupdesc->attrs[i]->atttypid);
|
||||
|
||||
datum_to_json(val, isnull, result, tcategory, typoutput);
|
||||
datum_to_json(val, isnull, result, tcategory, typoutput, false);
|
||||
}
|
||||
|
||||
appendStringInfoChar(result, '}');
|
||||
ReleaseTupleDesc(tupdesc);
|
||||
}
|
||||
|
||||
/*
|
||||
* append Json for orig_val to result. If it's a field key, make sure it's
|
||||
* of an acceptable type and is quoted.
|
||||
*/
|
||||
static void
|
||||
add_json(Datum val, bool is_null, StringInfo result, Oid val_type, bool key_scalar)
|
||||
{
|
||||
TYPCATEGORY tcategory;
|
||||
Oid typoutput;
|
||||
bool typisvarlena;
|
||||
Oid castfunc = InvalidOid;
|
||||
|
||||
if (val_type == InvalidOid)
|
||||
ereport(ERROR,
|
||||
(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
|
||||
errmsg("could not determine input data type")));
|
||||
|
||||
|
||||
getTypeOutputInfo(val_type, &typoutput, &typisvarlena);
|
||||
|
||||
if (val_type > FirstNormalObjectId)
|
||||
{
|
||||
HeapTuple tuple;
|
||||
Form_pg_cast castForm;
|
||||
|
||||
tuple = SearchSysCache2(CASTSOURCETARGET,
|
||||
ObjectIdGetDatum(val_type),
|
||||
ObjectIdGetDatum(JSONOID));
|
||||
if (HeapTupleIsValid(tuple))
|
||||
{
|
||||
castForm = (Form_pg_cast) GETSTRUCT(tuple);
|
||||
|
||||
if (castForm->castmethod == COERCION_METHOD_FUNCTION)
|
||||
castfunc = typoutput = castForm->castfunc;
|
||||
|
||||
ReleaseSysCache(tuple);
|
||||
}
|
||||
}
|
||||
|
||||
if (castfunc != InvalidOid)
|
||||
tcategory = TYPCATEGORY_JSON_CAST;
|
||||
else if (val_type == RECORDARRAYOID)
|
||||
tcategory = TYPCATEGORY_ARRAY;
|
||||
else if (val_type == RECORDOID)
|
||||
tcategory = TYPCATEGORY_COMPOSITE;
|
||||
else if (val_type == JSONOID)
|
||||
tcategory = TYPCATEGORY_JSON;
|
||||
else
|
||||
tcategory = TypeCategory(val_type);
|
||||
|
||||
if (key_scalar &&
|
||||
(tcategory == TYPCATEGORY_ARRAY ||
|
||||
tcategory == TYPCATEGORY_COMPOSITE ||
|
||||
tcategory == TYPCATEGORY_JSON ||
|
||||
tcategory == TYPCATEGORY_JSON_CAST))
|
||||
ereport(ERROR,
|
||||
(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
|
||||
errmsg("key value must be scalar, not array, composite or json")));
|
||||
|
||||
datum_to_json(val, is_null, result, tcategory, typoutput, key_scalar);
|
||||
}
|
||||
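The key_scalar checks in add_json above are what drive the key-handling behavior exercised in the regression tests added below; for quick reference:

    SELECT json_build_object(1,2);                      -- {"1" : 2}   (keys are always quoted)
    SELECT json_build_object(null,2);                   -- ERROR:  arg 1: key cannot be null
    SELECT json_build_object(json '{"a":1,"b":2}', 3);  -- ERROR:  key value must be scalar, not array, composite or json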
|
||||
/*
|
||||
* SQL function array_to_json(row)
|
||||
*/
|
||||
@ -1616,7 +1694,7 @@ to_json(PG_FUNCTION_ARGS)
|
||||
else
|
||||
tcategory = TypeCategory(val_type);
|
||||
|
||||
datum_to_json(val, false, result, tcategory, typoutput);
|
||||
datum_to_json(val, false, result, tcategory, typoutput, false);
|
||||
|
||||
PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len));
|
||||
}
|
||||
@ -1672,7 +1750,7 @@ json_agg_transfn(PG_FUNCTION_ARGS)
|
||||
if (PG_ARGISNULL(1))
|
||||
{
|
||||
val = (Datum) 0;
|
||||
datum_to_json(val, true, state, 0, InvalidOid);
|
||||
datum_to_json(val, true, state, 0, InvalidOid, false);
|
||||
PG_RETURN_POINTER(state);
|
||||
}
|
||||
|
||||
@ -1716,7 +1794,7 @@ json_agg_transfn(PG_FUNCTION_ARGS)
|
||||
appendStringInfoString(state, "\n ");
|
||||
}
|
||||
|
||||
datum_to_json(val, false, state, tcategory, typoutput);
|
||||
datum_to_json(val, false, state, tcategory, typoutput, false);
|
||||
|
||||
/*
|
||||
* The transition type for array_agg() is declared to be "internal", which
|
||||
@ -1747,6 +1825,467 @@ json_agg_finalfn(PG_FUNCTION_ARGS)
|
||||
PG_RETURN_TEXT_P(cstring_to_text_with_len(state->data, state->len));
|
||||
}
|
||||
|
||||
/*
|
||||
* json_object_agg transition function.
|
||||
*
|
||||
* aggregate two input columns as a single json value.
|
||||
*/
|
||||
Datum
|
||||
json_object_agg_transfn(PG_FUNCTION_ARGS)
|
||||
{
|
||||
Oid val_type;
|
||||
MemoryContext aggcontext,
|
||||
oldcontext;
|
||||
StringInfo state;
|
||||
Datum arg;
|
||||
|
||||
if (!AggCheckCallContext(fcinfo, &aggcontext))
|
||||
{
|
||||
/* cannot be called directly because of internal-type argument */
|
||||
elog(ERROR, "json_agg_transfn called in non-aggregate context");
|
||||
}
|
||||
|
||||
if (PG_ARGISNULL(0))
|
||||
{
|
||||
/*
|
||||
* Make this StringInfo in a context where it will persist for the
|
||||
* duration of the aggregate call. It's only needed for this initial
|
||||
* piece, as the StringInfo routines make sure they use the right
|
||||
* context to enlarge the object if necessary.
|
||||
*/
|
||||
oldcontext = MemoryContextSwitchTo(aggcontext);
|
||||
state = makeStringInfo();
|
||||
MemoryContextSwitchTo(oldcontext);
|
||||
|
||||
appendStringInfoString(state, "{ ");
|
||||
}
|
||||
else
|
||||
{
|
||||
state = (StringInfo) PG_GETARG_POINTER(0);
|
||||
appendStringInfoString(state, ", ");
|
||||
}
|
||||
|
||||
if (PG_ARGISNULL(1))
|
||||
ereport(ERROR,
|
||||
(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
|
||||
errmsg("field name must not be null")));
|
||||
|
||||
|
||||
val_type = get_fn_expr_argtype(fcinfo->flinfo, 1);
|
||||
|
||||
/*
|
||||
* turn a constant (more or less literal) value that's of unknown type
|
||||
* into text. Unknowns come in as a cstring pointer.
|
||||
*/
|
||||
if (val_type == UNKNOWNOID && get_fn_expr_arg_stable(fcinfo->flinfo, 1))
|
||||
{
|
||||
val_type = TEXTOID;
|
||||
arg = CStringGetTextDatum(PG_GETARG_POINTER(1));
|
||||
}
|
||||
else
|
||||
{
|
||||
arg = PG_GETARG_DATUM(1);
|
||||
}
|
||||
|
||||
if (val_type == InvalidOid || val_type == UNKNOWNOID)
|
||||
ereport(ERROR,
|
||||
(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
|
||||
errmsg("arg 1: could not determine data type")));
|
||||
|
||||
add_json(arg, false, state, val_type, true);
|
||||
|
||||
appendStringInfoString(state, " : ");
|
||||
|
||||
val_type = get_fn_expr_argtype(fcinfo->flinfo, 2);
|
||||
/* see comments above */
|
||||
if (val_type == UNKNOWNOID && get_fn_expr_arg_stable(fcinfo->flinfo, 2))
|
||||
{
|
||||
val_type = TEXTOID;
|
||||
if (PG_ARGISNULL(2))
|
||||
arg = (Datum) 0;
|
||||
else
|
||||
arg = CStringGetTextDatum(PG_GETARG_POINTER(2));
|
||||
}
|
||||
else
|
||||
{
|
||||
arg = PG_GETARG_DATUM(2);
|
||||
}
|
||||
|
||||
if (val_type == InvalidOid || val_type == UNKNOWNOID)
|
||||
ereport(ERROR,
|
||||
(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
|
||||
errmsg("arg 2: could not determine data type")));
|
||||
|
||||
add_json(arg, PG_ARGISNULL(2), state, val_type, false);
|
||||
|
||||
PG_RETURN_POINTER(state);
|
||||
}
|
||||
|
||||
/*
|
||||
* json_object_agg final function.
|
||||
*
|
||||
*/
|
||||
Datum
|
||||
json_object_agg_finalfn(PG_FUNCTION_ARGS)
|
||||
{
|
||||
StringInfo state;
|
||||
|
||||
/* cannot be called directly because of internal-type argument */
|
||||
Assert(AggCheckCallContext(fcinfo, NULL));
|
||||
|
||||
state = PG_ARGISNULL(0) ? NULL : (StringInfo) PG_GETARG_POINTER(0);
|
||||
|
||||
if (state == NULL)
|
||||
PG_RETURN_TEXT_P(cstring_to_text("{}"));
|
||||
|
||||
appendStringInfoString(state, " }");
|
||||
|
||||
PG_RETURN_TEXT_P(cstring_to_text_with_len(state->data, state->len));
|
||||
}
|
||||
|
||||
/*
|
||||
* SQL function json_build_object(variadic "any")
|
||||
*/
|
||||
Datum
|
||||
json_build_object(PG_FUNCTION_ARGS)
|
||||
{
|
||||
int nargs = PG_NARGS();
|
||||
int i;
|
||||
Datum arg;
|
||||
char *sep = "";
|
||||
StringInfo result;
|
||||
Oid val_type;
|
||||
|
||||
|
||||
if (nargs % 2 != 0)
|
||||
ereport(ERROR,
|
||||
(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
|
||||
errmsg("invalid number or arguments: object must be matched key value pairs")));
|
||||
|
||||
result = makeStringInfo();
|
||||
|
||||
appendStringInfoChar(result, '{');
|
||||
|
||||
for (i = 0; i < nargs; i += 2)
|
||||
{
|
||||
|
||||
/* process key */
|
||||
|
||||
if (PG_ARGISNULL(i))
|
||||
ereport(ERROR,
|
||||
(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
|
||||
errmsg("arg %d: key cannot be null", i + 1)));
|
||||
val_type = get_fn_expr_argtype(fcinfo->flinfo, i);
|
||||
|
||||
/*
|
||||
* turn a constant (more or less literal) value that's of unknown type
|
||||
* into text. Unknowns come in as a cstring pointer.
|
||||
*/
|
||||
if (val_type == UNKNOWNOID && get_fn_expr_arg_stable(fcinfo->flinfo, i))
|
||||
{
|
||||
val_type = TEXTOID;
|
||||
if (PG_ARGISNULL(i))
|
||||
arg = (Datum) 0;
|
||||
else
|
||||
arg = CStringGetTextDatum(PG_GETARG_POINTER(i));
|
||||
}
|
||||
else
|
||||
{
|
||||
arg = PG_GETARG_DATUM(i);
|
||||
}
|
||||
if (val_type == InvalidOid || val_type == UNKNOWNOID)
|
||||
ereport(ERROR,
|
||||
(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
|
||||
errmsg("arg %d: could not determine data type", i + 1)));
|
||||
appendStringInfoString(result, sep);
|
||||
sep = ", ";
|
||||
add_json(arg, false, result, val_type, true);
|
||||
|
||||
appendStringInfoString(result, " : ");
|
||||
|
||||
/* process value */
|
||||
|
||||
val_type = get_fn_expr_argtype(fcinfo->flinfo, i + 1);
|
||||
/* see comments above */
|
||||
if (val_type == UNKNOWNOID && get_fn_expr_arg_stable(fcinfo->flinfo, i + 1))
|
||||
{
|
||||
val_type = TEXTOID;
|
||||
if (PG_ARGISNULL(i + 1))
|
||||
arg = (Datum) 0;
|
||||
else
|
||||
arg = CStringGetTextDatum(PG_GETARG_POINTER(i + 1));
|
||||
}
|
||||
else
|
||||
{
|
||||
arg = PG_GETARG_DATUM(i + 1);
|
||||
}
|
||||
if (val_type == InvalidOid || val_type == UNKNOWNOID)
|
||||
ereport(ERROR,
|
||||
(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
|
||||
errmsg("arg %d: could not determine data type", i + 2)));
|
||||
add_json(arg, PG_ARGISNULL(i + 1), result, val_type, false);
|
||||
|
||||
}
|
||||
appendStringInfoChar(result, '}');
|
||||
|
||||
PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len));
|
||||
|
||||
}
|
||||
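As a concrete illustration of the key/value pairing loop above, the regression tests added below include:

    SELECT json_build_object('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
    -- {"a" : 1, "b" : 1.2, "c" : true, "d" : null, "e" : {"x": 3, "y": [1,2,3]}}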
|
||||
/*
|
||||
* degenerate case of json_build_object where it gets 0 arguments.
|
||||
*/
|
||||
Datum
|
||||
json_build_object_noargs(PG_FUNCTION_ARGS)
|
||||
{
|
||||
PG_RETURN_TEXT_P(cstring_to_text_with_len("{}", 2));
|
||||
}
|
||||
|
||||
/*
|
||||
* SQL function json_build_array(variadic "any")
|
||||
*/
|
||||
Datum
|
||||
json_build_array(PG_FUNCTION_ARGS)
|
||||
{
|
||||
int nargs = PG_NARGS();
|
||||
int i;
|
||||
Datum arg;
|
||||
char *sep = "";
|
||||
StringInfo result;
|
||||
Oid val_type;
|
||||
|
||||
|
||||
result = makeStringInfo();
|
||||
|
||||
appendStringInfoChar(result, '[');
|
||||
|
||||
for (i = 0; i < nargs; i++)
|
||||
{
|
||||
val_type = get_fn_expr_argtype(fcinfo->flinfo, i);
|
||||
arg = PG_GETARG_DATUM(i + 1);
|
||||
/* see comments in json_build_object above */
|
||||
if (val_type == UNKNOWNOID && get_fn_expr_arg_stable(fcinfo->flinfo, i))
|
||||
{
|
||||
val_type = TEXTOID;
|
||||
if (PG_ARGISNULL(i))
|
||||
arg = (Datum) 0;
|
||||
else
|
||||
arg = CStringGetTextDatum(PG_GETARG_POINTER(i));
|
||||
}
|
||||
else
|
||||
{
|
||||
arg = PG_GETARG_DATUM(i);
|
||||
}
|
||||
if (val_type == InvalidOid || val_type == UNKNOWNOID)
|
||||
ereport(ERROR,
|
||||
(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
|
||||
errmsg("arg %d: could not determine data type", i + 1)));
|
||||
appendStringInfoString(result, sep);
|
||||
sep = ", ";
|
||||
add_json(arg, PG_ARGISNULL(i), result, val_type, false);
|
||||
}
|
||||
appendStringInfoChar(result, ']');
|
||||
|
||||
PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len));
|
||||
|
||||
}
|
||||
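The corresponding array-building function above is exercised the same way in the regression tests added below:

    SELECT json_build_array('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
    -- ["a", 1, "b", 1.2, "c", true, "d", null, "e", {"x": 3, "y": [1,2,3]}]
    SELECT json_build_array();   -- []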
|
||||
/*
|
||||
* degenerate case of json_build_array where it gets 0 arguments.
|
||||
*/
|
||||
Datum
|
||||
json_build_array_noargs(PG_FUNCTION_ARGS)
|
||||
{
|
||||
PG_RETURN_TEXT_P(cstring_to_text_with_len("[]", 2));
|
||||
}
|
||||
|
||||
/*
|
||||
* SQL function json_object(text[])
|
||||
*
|
||||
* take a one or two dimensional array of text as name/value pairs
|
||||
* for a json object.
|
||||
*
|
||||
*/
|
||||
Datum
|
||||
json_object(PG_FUNCTION_ARGS)
|
||||
{
|
||||
ArrayType *in_array = PG_GETARG_ARRAYTYPE_P(0);
|
||||
int ndims = ARR_NDIM(in_array);
|
||||
StringInfoData result;
|
||||
Datum *in_datums;
|
||||
bool *in_nulls;
|
||||
int in_count,
|
||||
count,
|
||||
i;
|
||||
text *rval;
|
||||
char *v;
|
||||
|
||||
switch (ndims)
|
||||
{
|
||||
case 0:
|
||||
PG_RETURN_DATUM(CStringGetTextDatum("{}"));
|
||||
break;
|
||||
|
||||
case 1:
|
||||
if ((ARR_DIMS(in_array)[0]) % 2)
|
||||
ereport(ERROR,
|
||||
(errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
|
||||
errmsg("array must have even number of elements")));
|
||||
break;
|
||||
|
||||
case 2:
|
||||
if ((ARR_DIMS(in_array)[1]) != 2)
|
||||
ereport(ERROR,
|
||||
(errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
|
||||
errmsg("array must have two columns")));
|
||||
break;
|
||||
|
||||
default:
|
||||
ereport(ERROR,
|
||||
(errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
|
||||
errmsg("wrong number of array subscripts")));
|
||||
}
|
||||
|
||||
deconstruct_array(in_array,
|
||||
TEXTOID, -1, false, 'i',
|
||||
&in_datums, &in_nulls, &in_count);
|
||||
|
||||
count = in_count / 2;
|
||||
|
||||
initStringInfo(&result);
|
||||
|
||||
appendStringInfoChar(&result, '{');
|
||||
|
||||
for (i = 0; i < count; ++i)
|
||||
{
|
||||
if (in_nulls[i * 2])
|
||||
ereport(ERROR,
|
||||
(errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED),
|
||||
errmsg("null value not allowed for object key")));
|
||||
|
||||
v = TextDatumGetCString(in_datums[i * 2]);
|
||||
if (v[0] == '\0')
|
||||
ereport(ERROR,
|
||||
(errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED),
|
||||
errmsg("empty value not allowed for object key")));
|
||||
if (i > 0)
|
||||
appendStringInfoString(&result, ", ");
|
||||
escape_json(&result, v);
|
||||
appendStringInfoString(&result, " : ");
|
||||
pfree(v);
|
||||
if (in_nulls[i * 2 + 1])
|
||||
appendStringInfoString(&result, "null");
|
||||
else
|
||||
{
|
||||
v = TextDatumGetCString(in_datums[i * 2 + 1]);
|
||||
escape_json(&result, v);
|
||||
pfree(v);
|
||||
}
|
||||
}
|
||||
|
||||
appendStringInfoChar(&result, '}');
|
||||
|
||||
pfree(in_datums);
|
||||
pfree(in_nulls);
|
||||
|
||||
rval = cstring_to_text_with_len(result.data, result.len);
|
||||
pfree(result.data);
|
||||
|
||||
PG_RETURN_TEXT_P(rval);
|
||||
|
||||
}
|
||||
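Both accepted array shapes funnel through the ndims switch above; from the regression tests added below:

    SELECT json_object('{a,1,b,2,3,NULL,"d e f","a b c"}');
    -- {"a" : "1", "b" : "2", "3" : null, "d e f" : "a b c"}
    SELECT json_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');   -- same result
    SELECT json_object('{a,b,c}');   -- ERROR:  array must have even number of elements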
|
||||
/*
|
||||
* SQL function json_object(text[], text[])
|
||||
*
|
||||
* take separate name and value arrays of text to construct a json object
|
||||
* pairwise.
|
||||
*/
|
||||
Datum
|
||||
json_object_two_arg(PG_FUNCTION_ARGS)
|
||||
{
|
||||
ArrayType *key_array = PG_GETARG_ARRAYTYPE_P(0);
|
||||
ArrayType *val_array = PG_GETARG_ARRAYTYPE_P(1);
|
||||
int nkdims = ARR_NDIM(key_array);
|
||||
int nvdims = ARR_NDIM(val_array);
|
||||
StringInfoData result;
|
||||
Datum *key_datums,
|
||||
*val_datums;
|
||||
bool *key_nulls,
|
||||
*val_nulls;
|
||||
int key_count,
|
||||
val_count,
|
||||
i;
|
||||
text *rval;
|
||||
char *v;
|
||||
|
||||
if (nkdims > 1 || nkdims != nvdims)
|
||||
ereport(ERROR,
|
||||
(errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
|
||||
errmsg("wrong number of array subscripts")));
|
||||
|
||||
if (nkdims == 0)
|
||||
PG_RETURN_DATUM(CStringGetTextDatum("{}"));
|
||||
|
||||
deconstruct_array(key_array,
|
||||
TEXTOID, -1, false, 'i',
|
||||
&key_datums, &key_nulls, &key_count);
|
||||
|
||||
deconstruct_array(val_array,
|
||||
TEXTOID, -1, false, 'i',
|
||||
&val_datums, &val_nulls, &val_count);
|
||||
|
||||
if (key_count != val_count)
|
||||
ereport(ERROR,
|
||||
(errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
|
||||
errmsg("mismatched array dimensions")));
|
||||
|
||||
initStringInfo(&result);
|
||||
|
||||
appendStringInfoChar(&result, '{');
|
||||
|
||||
for (i = 0; i < key_count; ++i)
|
||||
{
|
||||
if (key_nulls[i])
|
||||
ereport(ERROR,
|
||||
(errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED),
|
||||
errmsg("null value not allowed for object key")));
|
||||
|
||||
v = TextDatumGetCString(key_datums[i]);
|
||||
if (v[0] == '\0')
|
||||
ereport(ERROR,
|
||||
(errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED),
|
||||
errmsg("empty value not allowed for object key")));
|
||||
if (i > 0)
|
||||
appendStringInfoString(&result, ", ");
|
||||
escape_json(&result, v);
|
||||
appendStringInfoString(&result, " : ");
|
||||
pfree(v);
|
||||
if (val_nulls[i])
|
||||
appendStringInfoString(&result, "null");
|
||||
else
|
||||
{
|
||||
v = TextDatumGetCString(val_datums[i]);
|
||||
escape_json(&result, v);
|
||||
pfree(v);
|
||||
}
|
||||
}
|
||||
|
||||
appendStringInfoChar(&result, '}');
|
||||
|
||||
pfree(key_datums);
|
||||
pfree(key_nulls);
|
||||
pfree(val_datums);
|
||||
pfree(val_nulls);
|
||||
|
||||
rval = cstring_to_text_with_len(result.data, result.len);
|
||||
pfree(result.data);
|
||||
|
||||
PG_RETURN_TEXT_P(rval);
|
||||
|
||||
}
|
||||
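And the pairwise two-array form, including the error raised by the key_count != val_count check above (examples from the regression tests added below):

    select json_object('{a,b,c,"d e f"}','{1,2,3,"a b c"}');
    -- {"a" : "1", "b" : "2", "c" : "3", "d e f" : "a b c"}
    select json_object('{a,b,c,"d e f",g}','{1,2,3,"a b c"}');   -- ERROR:  mismatched array dimensions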
|
||||
|
||||
/*
|
||||
* Produce a JSON string literal, properly escaping characters in the text.
|
||||
*/
|
||||
|
@ -75,6 +75,10 @@ static void elements_scalar(void *state, char *token, JsonTokenType tokentype);
|
||||
/* turn a json object into a hash table */
|
||||
static HTAB *get_json_object_as_hash(text *json, char *funcname, bool use_json_as_text);
|
||||
|
||||
/* common worker for populate_record and to_record */
|
||||
static inline Datum populate_record_worker(PG_FUNCTION_ARGS,
|
||||
bool have_record_arg);
|
||||
|
||||
/* semantic action functions for get_json_object_as_hash */
|
||||
static void hash_object_field_start(void *state, char *fname, bool isnull);
|
||||
static void hash_object_field_end(void *state, char *fname, bool isnull);
|
||||
@ -90,6 +94,10 @@ static void populate_recordset_object_end(void *state);
|
||||
static void populate_recordset_array_start(void *state);
|
||||
static void populate_recordset_array_element_start(void *state, bool isnull);
|
||||
|
||||
/* worker function for populate_recordset and to_recordset */
|
||||
static inline Datum populate_recordset_worker(PG_FUNCTION_ARGS,
|
||||
bool have_record_arg);
|
||||
|
||||
/* search type classification for json_get* functions */
|
||||
typedef enum
|
||||
{
|
||||
@ -1216,11 +1224,22 @@ elements_scalar(void *state, char *token, JsonTokenType tokentype)
|
||||
Datum
|
||||
json_populate_record(PG_FUNCTION_ARGS)
|
||||
{
|
||||
Oid argtype = get_fn_expr_argtype(fcinfo->flinfo, 0);
|
||||
return populate_record_worker(fcinfo, true);
|
||||
}
|
||||
|
||||
Datum
|
||||
json_to_record(PG_FUNCTION_ARGS)
|
||||
{
|
||||
return populate_record_worker(fcinfo, false);
|
||||
}
|
||||
|
||||
static inline Datum
|
||||
populate_record_worker(PG_FUNCTION_ARGS, bool have_record_arg)
|
||||
{
|
||||
text *json;
|
||||
bool use_json_as_text;
|
||||
HTAB *json_hash;
|
||||
HeapTupleHeader rec;
|
||||
HeapTupleHeader rec = NULL;
|
||||
Oid tupType;
|
||||
int32 tupTypmod;
|
||||
TupleDesc tupdesc;
|
||||
@ -1234,54 +1253,75 @@ json_populate_record(PG_FUNCTION_ARGS)
|
||||
char fname[NAMEDATALEN];
|
||||
JsonHashEntry *hashentry;
|
||||
|
||||
use_json_as_text = PG_ARGISNULL(2) ? false : PG_GETARG_BOOL(2);
|
||||
|
||||
if (!type_is_rowtype(argtype))
|
||||
ereport(ERROR,
|
||||
(errcode(ERRCODE_DATATYPE_MISMATCH),
|
||||
errmsg("first argument of json_populate_record must be a row type")));
|
||||
|
||||
if (PG_ARGISNULL(0))
|
||||
if (have_record_arg)
|
||||
{
|
||||
if (PG_ARGISNULL(1))
|
||||
PG_RETURN_NULL();
|
||||
Oid argtype = get_fn_expr_argtype(fcinfo->flinfo, 0);
|
||||
|
||||
rec = NULL;
|
||||
use_json_as_text = PG_ARGISNULL(2) ? false : PG_GETARG_BOOL(2);
|
||||
|
||||
/*
|
||||
* have no tuple to look at, so the only source of type info is the
|
||||
* argtype. The lookup_rowtype_tupdesc call below will error out if we
|
||||
* don't have a known composite type oid here.
|
||||
*/
|
||||
tupType = argtype;
|
||||
tupTypmod = -1;
|
||||
if (!type_is_rowtype(argtype))
|
||||
ereport(ERROR,
|
||||
(errcode(ERRCODE_DATATYPE_MISMATCH),
|
||||
errmsg("first argument of json_populate_record must be a row type")));
|
||||
|
||||
if (PG_ARGISNULL(0))
|
||||
{
|
||||
if (PG_ARGISNULL(1))
|
||||
PG_RETURN_NULL();
|
||||
|
||||
/*
|
||||
* have no tuple to look at, so the only source of type info is
|
||||
* the argtype. The lookup_rowtype_tupdesc call below will error
|
||||
* out if we don't have a known composite type oid here.
|
||||
*/
|
||||
tupType = argtype;
|
||||
tupTypmod = -1;
|
||||
}
|
||||
else
|
||||
{
|
||||
rec = PG_GETARG_HEAPTUPLEHEADER(0);
|
||||
|
||||
if (PG_ARGISNULL(1))
|
||||
PG_RETURN_POINTER(rec);
|
||||
|
||||
/* Extract type info from the tuple itself */
|
||||
tupType = HeapTupleHeaderGetTypeId(rec);
|
||||
tupTypmod = HeapTupleHeaderGetTypMod(rec);
|
||||
}
|
||||
|
||||
json = PG_GETARG_TEXT_P(1);
|
||||
}
|
||||
else
|
||||
{
|
||||
rec = PG_GETARG_HEAPTUPLEHEADER(0);
|
||||
/* json_to_record case */
|
||||
|
||||
if (PG_ARGISNULL(1))
|
||||
PG_RETURN_POINTER(rec);
|
||||
use_json_as_text = PG_ARGISNULL(1) ? false : PG_GETARG_BOOL(1);
|
||||
|
||||
/* Extract type info from the tuple itself */
|
||||
tupType = HeapTupleHeaderGetTypeId(rec);
|
||||
tupTypmod = HeapTupleHeaderGetTypMod(rec);
|
||||
if (PG_ARGISNULL(0))
|
||||
PG_RETURN_NULL();
|
||||
|
||||
json = PG_GETARG_TEXT_P(0);
|
||||
|
||||
get_call_result_type(fcinfo, NULL, &tupdesc);
|
||||
}
|
||||
|
||||
json = PG_GETARG_TEXT_P(1);
|
||||
json_hash = get_json_object_as_hash(json, "json_populate_record",
|
||||
use_json_as_text);
|
||||
|
||||
json_hash = get_json_object_as_hash(json, "json_populate_record", use_json_as_text);
|
||||
|
||||
/*
|
||||
* if the input json is empty, we can only skip the rest if we were passed
|
||||
* in a non-null record, since otherwise there may be issues with domain
|
||||
* nulls.
|
||||
*/
|
||||
if (hash_get_num_entries(json_hash) == 0 && rec)
|
||||
PG_RETURN_POINTER(rec);
|
||||
if (have_record_arg)
|
||||
{
|
||||
/*
|
||||
* if the input json is empty, we can only skip the rest if we were
|
||||
* passed in a non-null record, since otherwise there may be issues
|
||||
* with domain nulls.
|
||||
*/
|
||||
if (hash_get_num_entries(json_hash) == 0 && rec)
|
||||
PG_RETURN_POINTER(rec);
|
||||
|
||||
|
||||
tupdesc = lookup_rowtype_tupdesc(tupType, tupTypmod);
|
||||
tupdesc = lookup_rowtype_tupdesc(tupType, tupTypmod);
|
||||
}
|
||||
|
||||
ncolumns = tupdesc->natts;
|
||||
|
||||
if (rec)
|
||||
@ -1310,8 +1350,8 @@ json_populate_record(PG_FUNCTION_ARGS)
|
||||
my_extra->record_typmod = 0;
|
||||
}
|
||||
|
||||
if (my_extra->record_type != tupType ||
|
||||
my_extra->record_typmod != tupTypmod)
|
||||
if (have_record_arg && (my_extra->record_type != tupType ||
|
||||
my_extra->record_typmod != tupTypmod))
|
||||
{
|
||||
MemSet(my_extra, 0,
|
||||
sizeof(RecordIOData) - sizeof(ColumnIOData)
|
||||
@ -1561,7 +1601,22 @@ hash_scalar(void *state, char *token, JsonTokenType tokentype)
|
||||
Datum
|
||||
json_populate_recordset(PG_FUNCTION_ARGS)
|
||||
{
|
||||
Oid argtype = get_fn_expr_argtype(fcinfo->flinfo, 0);
|
||||
return populate_recordset_worker(fcinfo, true);
|
||||
}
|
||||
|
||||
Datum
|
||||
json_to_recordset(PG_FUNCTION_ARGS)
|
||||
{
|
||||
return populate_recordset_worker(fcinfo, false);
|
||||
}
|
||||
|
||||
/*
|
||||
* common worker for json_populate_recordset() and json_to_recordset()
|
||||
*/
|
||||
static inline Datum
|
||||
populate_recordset_worker(PG_FUNCTION_ARGS, bool have_record_arg)
|
||||
{
|
||||
Oid argtype;
|
||||
text *json;
|
||||
bool use_json_as_text;
|
||||
ReturnSetInfo *rsi;
|
||||
@ -1576,12 +1631,23 @@ json_populate_recordset(PG_FUNCTION_ARGS)
|
||||
JsonSemAction *sem;
|
||||
PopulateRecordsetState *state;
|
||||
|
||||
use_json_as_text = PG_ARGISNULL(2) ? false : PG_GETARG_BOOL(2);
|
||||
if (have_record_arg)
|
||||
{
|
||||
argtype = get_fn_expr_argtype(fcinfo->flinfo, 0);
|
||||
|
||||
if (!type_is_rowtype(argtype))
|
||||
ereport(ERROR,
|
||||
(errcode(ERRCODE_DATATYPE_MISMATCH),
|
||||
errmsg("first argument of json_populate_recordset must be a row type")));
|
||||
use_json_as_text = PG_ARGISNULL(2) ? false : PG_GETARG_BOOL(2);
|
||||
|
||||
if (!type_is_rowtype(argtype))
|
||||
ereport(ERROR,
|
||||
(errcode(ERRCODE_DATATYPE_MISMATCH),
|
||||
errmsg("first argument of json_populate_recordset must be a row type")));
|
||||
}
|
||||
else
|
||||
{
|
||||
argtype = InvalidOid;
|
||||
|
||||
use_json_as_text = PG_ARGISNULL(1) ? false : PG_GETARG_BOOL(1);
|
||||
}
|
||||
|
||||
rsi = (ReturnSetInfo *) fcinfo->resultinfo;
|
||||
|
||||
@ -1618,15 +1684,27 @@ json_populate_recordset(PG_FUNCTION_ARGS)
|
||||
MemoryContextSwitchTo(old_cxt);
|
||||
|
||||
/* if the json is null send back an empty set */
|
||||
if (PG_ARGISNULL(1))
|
||||
PG_RETURN_NULL();
|
||||
if (have_record_arg)
|
||||
{
|
||||
if (PG_ARGISNULL(1))
|
||||
PG_RETURN_NULL();
|
||||
|
||||
json = PG_GETARG_TEXT_P(1);
|
||||
json = PG_GETARG_TEXT_P(1);
|
||||
|
||||
if (PG_ARGISNULL(0))
|
||||
rec = NULL;
|
||||
if (PG_ARGISNULL(0))
|
||||
rec = NULL;
|
||||
else
|
||||
rec = PG_GETARG_HEAPTUPLEHEADER(0);
|
||||
}
|
||||
else
|
||||
rec = PG_GETARG_HEAPTUPLEHEADER(0);
|
||||
{
|
||||
if (PG_ARGISNULL(0))
|
||||
PG_RETURN_NULL();
|
||||
|
||||
json = PG_GETARG_TEXT_P(0);
|
||||
|
||||
rec = NULL;
|
||||
}
|
||||
|
||||
tupType = tupdesc->tdtypeid;
|
||||
tupTypmod = tupdesc->tdtypmod;
|
||||
|
@ -53,6 +53,6 @@
|
||||
*/
|
||||
|
||||
/* yyyymmddN */
|
||||
#define CATALOG_VERSION_NO 201401211
|
||||
#define CATALOG_VERSION_NO 201401281
|
||||
|
||||
#endif
|
||||
|
@ -258,6 +258,7 @@ DATA(insert ( 3545 n 0 bytea_string_agg_transfn bytea_string_agg_finalfn 0 2281
|
||||
|
||||
/* json */
|
||||
DATA(insert ( 3175 n 0 json_agg_transfn json_agg_finalfn 0 2281 0 _null_ ));
|
||||
DATA(insert ( 3197 n 0 json_object_agg_transfn json_object_agg_finalfn 0 2281 0 _null_ ));
|
||||
|
||||
/* ordered-set and hypothetical-set aggregates */
|
||||
DATA(insert ( 3972 o 1 ordered_set_transition percentile_disc_final 0 2281 0 _null_ ));
|
||||
|
@ -4134,6 +4134,24 @@ DATA(insert OID = 3174 ( json_agg_finalfn PGNSP PGUID 12 1 0 0 0 f f f f f f i
|
||||
DESCR("json aggregate final function");
|
||||
DATA(insert OID = 3175 ( json_agg PGNSP PGUID 12 1 0 0 0 t f f f f f i 1 0 114 "2283" _null_ _null_ _null_ _null_ aggregate_dummy _null_ _null_ _null_ ));
|
||||
DESCR("aggregate input into json");
|
||||
DATA(insert OID = 3180 ( json_object_agg_transfn PGNSP PGUID 12 1 0 0 0 f f f f f f i 3 0 2281 "2281 2276 2276" _null_ _null_ _null_ _null_ json_object_agg_transfn _null_ _null_ _null_ ));
|
||||
DESCR("json object aggregate transition function");
|
||||
DATA(insert OID = 3196 ( json_object_agg_finalfn PGNSP PGUID 12 1 0 0 0 f f f f f f i 1 0 114 "2281" _null_ _null_ _null_ _null_ json_object_agg_finalfn _null_ _null_ _null_ ));
|
||||
DESCR("json object aggregate final function");
|
||||
DATA(insert OID = 3197 ( json_object_agg PGNSP PGUID 12 1 0 0 0 t f f f f f i 2 0 114 "2276 2276" _null_ _null_ _null_ _null_ aggregate_dummy _null_ _null_ _null_ ));
|
||||
DESCR("aggregate input into a json object");
|
||||
DATA(insert OID = 3198 ( json_build_array PGNSP PGUID 12 1 0 2276 0 f f f f f f i 1 0 114 "2276" "{2276}" "{v}" _null_ _null_ json_build_array _null_ _null_ _null_ ));
|
||||
DESCR("build a json array from any inputs");
|
||||
DATA(insert OID = 3199 ( json_build_array PGNSP PGUID 12 1 0 0 0 f f f f f f i 0 0 114 "" _null_ _null_ _null_ _null_ json_build_array_noargs _null_ _null_ _null_ ));
|
||||
DESCR("build an empty json array");
|
||||
DATA(insert OID = 3200 ( json_build_object PGNSP PGUID 12 1 0 2276 0 f f f f f f i 1 0 114 "2276" "{2276}" "{v}" _null_ _null_ json_build_object _null_ _null_ _null_ ));
|
||||
DESCR("build a json object from pairwise key/value inputs");
|
||||
DATA(insert OID = 3201 ( json_build_object PGNSP PGUID 12 1 0 0 0 f f f f f f i 0 0 114 "" _null_ _null_ _null_ _null_ json_build_object_noargs _null_ _null_ _null_ ));
|
||||
DESCR("build an empty json object");
|
||||
DATA(insert OID = 3202 ( json_object PGNSP PGUID 12 1 0 0 0 f f f f t f s 1 0 114 "1009" _null_ _null_ _null_ _null_ json_object _null_ _null_ _null_ ));
|
||||
DESCR("map text arrayof key value pais to json object");
|
||||
DATA(insert OID = 3203 ( json_object PGNSP PGUID 12 1 0 0 0 f f f f t f s 2 0 114 "1009 1009" _null_ _null_ _null_ _null_ json_object_two_arg _null_ _null_ _null_ ));
|
||||
DESCR("map text arrayof key value pais to json object");
|
||||
DATA(insert OID = 3176 ( to_json PGNSP PGUID 12 1 0 0 0 f f f f t f s 1 0 114 "2283" _null_ _null_ _null_ _null_ to_json _null_ _null_ _null_ ));
|
||||
DESCR("map input to json");
|
||||
|
||||
@ -4161,6 +4179,10 @@ DATA(insert OID = 3960 ( json_populate_record PGNSP PGUID 12 1 0 0 0 f f f f
|
||||
DESCR("get record fields from a json object");
|
||||
DATA(insert OID = 3961 ( json_populate_recordset PGNSP PGUID 12 1 100 0 0 f f f f f t s 3 0 2283 "2283 114 16" _null_ _null_ _null_ _null_ json_populate_recordset _null_ _null_ _null_ ));
|
||||
DESCR("get set of records with fields from a json array of objects");
|
||||
DATA(insert OID = 3204 ( json_to_record PGNSP PGUID 12 1 0 0 0 f f f f f f s 2 0 2249 "114 16" _null_ _null_ _null_ _null_ json_to_record _null_ _null_ _null_ ));
|
||||
DESCR("get record fields from a json object");
|
||||
DATA(insert OID = 3205 ( json_to_recordset PGNSP PGUID 12 1 100 0 0 f f f f f t s 2 0 2249 "114 16" _null_ _null_ _null_ _null_ json_to_recordset _null_ _null_ _null_ ));
|
||||
DESCR("get set of records with fields from a json array of objects");
|
||||
DATA(insert OID = 3968 ( json_typeof PGNSP PGUID 12 1 0 0 0 f f f f t f i 1 0 25 "114" _null_ _null_ _null_ _null_ json_typeof _null_ _null_ _null_ ));
|
||||
DESCR("get the type of a json value");
|
||||
|
||||
|
@ -31,6 +31,17 @@ extern Datum to_json(PG_FUNCTION_ARGS);
|
||||
extern Datum json_agg_transfn(PG_FUNCTION_ARGS);
|
||||
extern Datum json_agg_finalfn(PG_FUNCTION_ARGS);
|
||||
|
||||
extern Datum json_object_agg_finalfn(PG_FUNCTION_ARGS);
|
||||
extern Datum json_object_agg_transfn(PG_FUNCTION_ARGS);
|
||||
|
||||
extern Datum json_build_object(PG_FUNCTION_ARGS);
|
||||
extern Datum json_build_object_noargs(PG_FUNCTION_ARGS);
|
||||
extern Datum json_build_array(PG_FUNCTION_ARGS);
|
||||
extern Datum json_build_array_noargs(PG_FUNCTION_ARGS);
|
||||
|
||||
extern Datum json_object(PG_FUNCTION_ARGS);
|
||||
extern Datum json_object_two_arg(PG_FUNCTION_ARGS);
|
||||
|
||||
extern void escape_json(StringInfo buf, const char *str);
|
||||
|
||||
extern Datum json_typeof(PG_FUNCTION_ARGS);
|
||||
@ -49,5 +60,7 @@ extern Datum json_each_text(PG_FUNCTION_ARGS);
|
||||
extern Datum json_array_elements(PG_FUNCTION_ARGS);
|
||||
extern Datum json_populate_record(PG_FUNCTION_ARGS);
|
||||
extern Datum json_populate_recordset(PG_FUNCTION_ARGS);
|
||||
extern Datum json_to_record(PG_FUNCTION_ARGS);
|
||||
extern Datum json_to_recordset(PG_FUNCTION_ARGS);
|
||||
|
||||
#endif /* JSON_H */
|
||||
|
@ -991,3 +991,129 @@ select value, json_typeof(value)
|
||||
|
|
||||
(11 rows)
|
||||
|
||||
-- json_build_array, json_build_object, json_object_agg
|
||||
SELECT json_build_array('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
|
||||
json_build_array
|
||||
-----------------------------------------------------------------------
|
||||
["a", 1, "b", 1.2, "c", true, "d", null, "e", {"x": 3, "y": [1,2,3]}]
|
||||
(1 row)
|
||||
|
||||
SELECT json_build_object('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
|
||||
json_build_object
|
||||
----------------------------------------------------------------------------
|
||||
{"a" : 1, "b" : 1.2, "c" : true, "d" : null, "e" : {"x": 3, "y": [1,2,3]}}
|
||||
(1 row)
|
||||
|
||||
SELECT json_build_object(
|
||||
'a', json_build_object('b',false,'c',99),
|
||||
'd', json_build_object('e',array[9,8,7]::int[],
|
||||
'f', (select row_to_json(r) from ( select relkind, oid::regclass as name from pg_class where relname = 'pg_class') r)));
|
||||
json_build_object
|
||||
-------------------------------------------------------------------------------------------------
|
||||
{"a" : {"b" : false, "c" : 99}, "d" : {"e" : [9,8,7], "f" : {"relkind":"r","name":"pg_class"}}}
|
||||
(1 row)
|
||||
|
||||
-- empty objects/arrays
|
||||
SELECT json_build_array();
|
||||
json_build_array
|
||||
------------------
|
||||
[]
|
||||
(1 row)
|
||||
|
||||
SELECT json_build_object();
|
||||
json_build_object
|
||||
-------------------
|
||||
{}
|
||||
(1 row)
|
||||
|
||||
-- make sure keys are quoted
|
||||
SELECT json_build_object(1,2);
|
||||
json_build_object
|
||||
-------------------
|
||||
{"1" : 2}
|
||||
(1 row)
|
||||
|
||||
-- keys must be scalar and not null
|
||||
SELECT json_build_object(null,2);
|
||||
ERROR: arg 1: key cannot be null
|
||||
SELECT json_build_object(r,2) FROM (SELECT 1 AS a, 2 AS b) r;
|
||||
ERROR: key value must be scalar, not array, composite or json
|
||||
SELECT json_build_object(json '{"a":1,"b":2}', 3);
|
||||
ERROR: key value must be scalar, not array, composite or json
|
||||
SELECT json_build_object('{1,2,3}'::int[], 3);
|
||||
ERROR: key value must be scalar, not array, composite or json
|
||||
CREATE TEMP TABLE foo (serial_num int, name text, type text);
|
||||
INSERT INTO foo VALUES (847001,'t15','GE1043');
|
||||
INSERT INTO foo VALUES (847002,'t16','GE1043');
|
||||
INSERT INTO foo VALUES (847003,'sub-alpha','GESS90');
|
||||
SELECT json_build_object('turbines',json_object_agg(serial_num,json_build_object('name',name,'type',type)))
|
||||
FROM foo;
|
||||
json_build_object
|
||||
-------------------------------------------------------------------------------------------------------------------------------------------------------------------------
|
||||
{"turbines" : { "847001" : {"name" : "t15", "type" : "GE1043"}, "847002" : {"name" : "t16", "type" : "GE1043"}, "847003" : {"name" : "sub-alpha", "type" : "GESS90"} }}
|
||||
(1 row)
|
||||
|
||||
-- json_object
|
||||
-- one dimension
|
||||
SELECT json_object('{a,1,b,2,3,NULL,"d e f","a b c"}');
|
||||
json_object
|
||||
-------------------------------------------------------
|
||||
{"a" : "1", "b" : "2", "3" : null, "d e f" : "a b c"}
|
||||
(1 row)
|
||||
|
||||
-- same but with two dimensions
|
||||
SELECT json_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
|
||||
json_object
|
||||
-------------------------------------------------------
|
||||
{"a" : "1", "b" : "2", "3" : null, "d e f" : "a b c"}
|
||||
(1 row)
|
||||
|
||||
-- odd number error
|
||||
SELECT json_object('{a,b,c}');
|
||||
ERROR: array must have even number of elements
|
||||
-- one column error
|
||||
SELECT json_object('{{a},{b}}');
|
||||
ERROR: array must have two columns
|
||||
-- too many columns error
|
||||
SELECT json_object('{{a,b,c},{b,c,d}}');
|
||||
ERROR: array must have two columns
|
||||
-- too many dimensions error
|
||||
SELECT json_object('{{{a,b},{c,d}},{{b,c},{d,e}}}');
|
||||
ERROR: wrong number of array subscripts
|
||||
--two argument form of json_object
|
||||
select json_object('{a,b,c,"d e f"}','{1,2,3,"a b c"}');
|
||||
json_object
|
||||
------------------------------------------------------
|
||||
{"a" : "1", "b" : "2", "c" : "3", "d e f" : "a b c"}
|
||||
(1 row)
|
||||
|
||||
-- too many dimensions
|
||||
SELECT json_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}', '{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
|
||||
ERROR: wrong number of array subscripts
|
||||
-- mismatched dimensions
|
||||
select json_object('{a,b,c,"d e f",g}','{1,2,3,"a b c"}');
|
||||
ERROR: mismatched array dimensions
|
||||
select json_object('{a,b,c,"d e f"}','{1,2,3,"a b c",g}');
|
||||
ERROR: mismatched array dimensions
|
||||
-- null key error
|
||||
select json_object('{a,b,NULL,"d e f"}','{1,2,3,"a b c"}');
|
||||
ERROR: null value not allowed for object key
|
||||
-- empty key error
|
||||
select json_object('{a,b,"","d e f"}','{1,2,3,"a b c"}');
|
||||
ERROR: empty value not allowed for object key
|
||||
-- json_to_record and json_to_recordset
|
||||
select * from json_to_record('{"a":1,"b":"foo","c":"bar"}',true)
|
||||
as x(a int, b text, d text);
|
||||
a | b | d
|
||||
---+-----+---
|
||||
1 | foo |
|
||||
(1 row)
|
||||
|
||||
select * from json_to_recordset('[{"a":1,"b":"foo","d":false},{"a":2,"b":"bar","c":true}]',false)
|
||||
as x(a int, b text, c boolean);
|
||||
a | b | c
|
||||
---+-----+---
|
||||
1 | foo |
|
||||
2 | bar | t
|
||||
(2 rows)
|
||||
|
||||
|
@ -987,3 +987,129 @@ select value, json_typeof(value)
|
||||
|
|
||||
(11 rows)
|
||||
|
||||
-- json_build_array, json_build_object, json_object_agg
|
||||
SELECT json_build_array('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
|
||||
json_build_array
|
||||
-----------------------------------------------------------------------
|
||||
["a", 1, "b", 1.2, "c", true, "d", null, "e", {"x": 3, "y": [1,2,3]}]
|
||||
(1 row)
|
||||
|
||||
SELECT json_build_object('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
|
||||
json_build_object
|
||||
----------------------------------------------------------------------------
|
||||
{"a" : 1, "b" : 1.2, "c" : true, "d" : null, "e" : {"x": 3, "y": [1,2,3]}}
|
||||
(1 row)
|
||||
|
||||
SELECT json_build_object(
|
||||
'a', json_build_object('b',false,'c',99),
|
||||
'd', json_build_object('e',array[9,8,7]::int[],
|
||||
'f', (select row_to_json(r) from ( select relkind, oid::regclass as name from pg_class where relname = 'pg_class') r)));
|
||||
json_build_object
|
||||
-------------------------------------------------------------------------------------------------
|
||||
{"a" : {"b" : false, "c" : 99}, "d" : {"e" : [9,8,7], "f" : {"relkind":"r","name":"pg_class"}}}
|
||||
(1 row)
|
||||
|
||||
-- empty objects/arrays
|
||||
SELECT json_build_array();
|
||||
json_build_array
|
||||
------------------
|
||||
[]
|
||||
(1 row)
|
||||
|
||||
SELECT json_build_object();
|
||||
json_build_object
|
||||
-------------------
|
||||
{}
|
||||
(1 row)
|
||||
|
||||
-- make sure keys are quoted
|
||||
SELECT json_build_object(1,2);
|
||||
json_build_object
|
||||
-------------------
|
||||
{"1" : 2}
|
||||
(1 row)
|
||||
|
||||
-- keys must be scalar and not null
|
||||
SELECT json_build_object(null,2);
|
||||
ERROR: arg 1: key cannot be null
|
||||
SELECT json_build_object(r,2) FROM (SELECT 1 AS a, 2 AS b) r;
|
||||
ERROR: key value must be scalar, not array, composite or json
|
||||
SELECT json_build_object(json '{"a":1,"b":2}', 3);
|
||||
ERROR: key value must be scalar, not array, composite or json
|
||||
SELECT json_build_object('{1,2,3}'::int[], 3);
|
||||
ERROR: key value must be scalar, not array, composite or json
|
||||
CREATE TEMP TABLE foo (serial_num int, name text, type text);
|
||||
INSERT INTO foo VALUES (847001,'t15','GE1043');
|
||||
INSERT INTO foo VALUES (847002,'t16','GE1043');
|
||||
INSERT INTO foo VALUES (847003,'sub-alpha','GESS90');
|
||||
SELECT json_build_object('turbines',json_object_agg(serial_num,json_build_object('name',name,'type',type)))
|
||||
FROM foo;
|
||||
json_build_object
|
||||
-------------------------------------------------------------------------------------------------------------------------------------------------------------------------
|
||||
{"turbines" : { "847001" : {"name" : "t15", "type" : "GE1043"}, "847002" : {"name" : "t16", "type" : "GE1043"}, "847003" : {"name" : "sub-alpha", "type" : "GESS90"} }}
|
||||
(1 row)
|
||||
|
||||
-- json_object
|
||||
-- one dimension
|
||||
SELECT json_object('{a,1,b,2,3,NULL,"d e f","a b c"}');
|
||||
json_object
|
||||
-------------------------------------------------------
|
||||
{"a" : "1", "b" : "2", "3" : null, "d e f" : "a b c"}
|
||||
(1 row)
|
||||
|
||||
-- same but with two dimensions
|
||||
SELECT json_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
|
||||
json_object
|
||||
-------------------------------------------------------
|
||||
{"a" : "1", "b" : "2", "3" : null, "d e f" : "a b c"}
|
||||
(1 row)
|
||||
|
||||
-- odd number error
|
||||
SELECT json_object('{a,b,c}');
|
||||
ERROR: array must have even number of elements
|
||||
-- one column error
|
||||
SELECT json_object('{{a},{b}}');
|
||||
ERROR: array must have two columns
|
||||
-- too many columns error
|
||||
SELECT json_object('{{a,b,c},{b,c,d}}');
|
||||
ERROR: array must have two columns
|
||||
-- too many dimensions error
|
||||
SELECT json_object('{{{a,b},{c,d}},{{b,c},{d,e}}}');
|
||||
ERROR: wrong number of array subscripts
|
||||
--two argument form of json_object
|
||||
select json_object('{a,b,c,"d e f"}','{1,2,3,"a b c"}');
|
||||
json_object
|
||||
------------------------------------------------------
|
||||
{"a" : "1", "b" : "2", "c" : "3", "d e f" : "a b c"}
|
||||
(1 row)
|
||||
|
||||
-- too many dimensions
|
||||
SELECT json_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}', '{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
|
||||
ERROR: wrong number of array subscripts
|
||||
-- mismatched dimensions
|
||||
select json_object('{a,b,c,"d e f",g}','{1,2,3,"a b c"}');
|
||||
ERROR: mismatched array dimensions
|
||||
select json_object('{a,b,c,"d e f"}','{1,2,3,"a b c",g}');
|
||||
ERROR: mismatched array dimensions
|
||||
-- null key error
|
||||
select json_object('{a,b,NULL,"d e f"}','{1,2,3,"a b c"}');
|
||||
ERROR: null value not allowed for object key
|
||||
-- empty key error
|
||||
select json_object('{a,b,"","d e f"}','{1,2,3,"a b c"}');
|
||||
ERROR: empty value not allowed for object key
|
||||
-- json_to_record and json_to_recordset
|
||||
select * from json_to_record('{"a":1,"b":"foo","c":"bar"}',true)
|
||||
as x(a int, b text, d text);
|
||||
a | b | d
|
||||
---+-----+---
|
||||
1 | foo |
|
||||
(1 row)
|
||||
|
||||
select * from json_to_recordset('[{"a":1,"b":"foo","d":false},{"a":2,"b":"bar","c":true}]',false)
|
||||
as x(a int, b text, c boolean);
|
||||
a | b | c
|
||||
---+-----+---
|
||||
1 | foo |
|
||||
2 | bar | t
|
||||
(2 rows)
|
||||
|
||||
|
@ -325,3 +325,90 @@ select value, json_typeof(value)
|
||||
(json '{}'),
|
||||
(NULL::json))
|
||||
as data(value);
|
||||
|
||||
-- json_build_array, json_build_object, json_object_agg
|
||||
|
||||
SELECT json_build_array('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
|
||||
|
||||
SELECT json_build_object('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
|
||||
|
||||
SELECT json_build_object(
|
||||
'a', json_build_object('b',false,'c',99),
|
||||
'd', json_build_object('e',array[9,8,7]::int[],
|
||||
'f', (select row_to_json(r) from ( select relkind, oid::regclass as name from pg_class where relname = 'pg_class') r)));
|
||||
|
||||
|
||||
-- empty objects/arrays
|
||||
SELECT json_build_array();
|
||||
|
||||
SELECT json_build_object();
|
||||
|
||||
-- make sure keys are quoted
|
||||
SELECT json_build_object(1,2);
|
||||
|
||||
-- keys must be scalar and not null
|
||||
SELECT json_build_object(null,2);
|
||||
|
||||
SELECT json_build_object(r,2) FROM (SELECT 1 AS a, 2 AS b) r;
|
||||
|
||||
SELECT json_build_object(json '{"a":1,"b":2}', 3);
|
||||
|
||||
SELECT json_build_object('{1,2,3}'::int[], 3);
|
||||
|
||||
CREATE TEMP TABLE foo (serial_num int, name text, type text);
|
||||
INSERT INTO foo VALUES (847001,'t15','GE1043');
|
||||
INSERT INTO foo VALUES (847002,'t16','GE1043');
|
||||
INSERT INTO foo VALUES (847003,'sub-alpha','GESS90');
|
||||
|
||||
SELECT json_build_object('turbines',json_object_agg(serial_num,json_build_object('name',name,'type',type)))
|
||||
FROM foo;
|
||||
|
||||
-- json_object
|
||||
|
||||
-- one dimension
|
||||
SELECT json_object('{a,1,b,2,3,NULL,"d e f","a b c"}');
|
||||
|
||||
-- same but with two dimensions
|
||||
SELECT json_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
|
||||
|
||||
-- odd number error
|
||||
SELECT json_object('{a,b,c}');
|
||||
|
||||
-- one column error
|
||||
SELECT json_object('{{a},{b}}');
|
||||
|
||||
-- too many columns error
|
||||
SELECT json_object('{{a,b,c},{b,c,d}}');
|
||||
|
||||
-- too many dimensions error
|
||||
SELECT json_object('{{{a,b},{c,d}},{{b,c},{d,e}}}');
|
||||
|
||||
--two argument form of json_object
|
||||
|
||||
select json_object('{a,b,c,"d e f"}','{1,2,3,"a b c"}');
|
||||
|
||||
-- too many dimensions
|
||||
SELECT json_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}', '{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
|
||||
|
||||
-- mismatched dimensions
|
||||
|
||||
select json_object('{a,b,c,"d e f",g}','{1,2,3,"a b c"}');
|
||||
|
||||
select json_object('{a,b,c,"d e f"}','{1,2,3,"a b c",g}');
|
||||
|
||||
-- null key error
|
||||
|
||||
select json_object('{a,b,NULL,"d e f"}','{1,2,3,"a b c"}');
|
||||
|
||||
-- empty key error
|
||||
|
||||
select json_object('{a,b,"","d e f"}','{1,2,3,"a b c"}');
|
||||
|
||||
|
||||
-- json_to_record and json_to_recordset
|
||||
|
||||
select * from json_to_record('{"a":1,"b":"foo","c":"bar"}',true)
|
||||
as x(a int, b text, d text);
|
||||
|
||||
select * from json_to_recordset('[{"a":1,"b":"foo","d":false},{"a":2,"b":"bar","c":true}]',false)
|
||||
as x(a int, b text, c boolean);
|
||||
|