Here is a patch for the new json functions I mentioned a couple of months ago. These are:

    json_to_record
    json_to_recordset
    json_object
    json_build_array
    json_build_object
    json_object_agg

So far there are no docs, but the way these work is illustrated in the regression tests - I hope to have docs within a few days.

cheers

andrew
diff --git a/src/backend/utils/adt/json.c b/src/backend/utils/adt/json.c
index af8ddc6..9b9d11f 100644
--- a/src/backend/utils/adt/json.c
+++ b/src/backend/utils/adt/json.c
@@ -68,6 +68,11 @@ static void array_dim_to_json(StringInfo result, int dim, int ndims, int *dims,
 				  bool use_line_feeds);
 static void array_to_json_internal(Datum array, StringInfo result,
 					   bool use_line_feeds);
+static void
+datum_to_json(Datum val, bool is_null, StringInfo result,
+			  TYPCATEGORY tcategory, Oid typoutputfunc, bool key_scalar);
+static void add_json(Datum orig_val, bool is_null, StringInfo result, 
+					 Oid val_type, bool key_scalar);
 
 /* the null action object used for pure validation */
 static JsonSemAction nullSemAction =
@@ -1217,7 +1222,7 @@ extract_mb_char(char *s)
  */
 static void
 datum_to_json(Datum val, bool is_null, StringInfo result,
-			  TYPCATEGORY tcategory, Oid typoutputfunc)
+			  TYPCATEGORY tcategory, Oid typoutputfunc, bool key_scalar)
 {
 	char	   *outputstr;
 	text	   *jsontext;
@@ -1239,23 +1244,31 @@ datum_to_json(Datum val, bool is_null, StringInfo result,
 			composite_to_json(val, result, false);
 			break;
 		case TYPCATEGORY_BOOLEAN:
-			if (DatumGetBool(val))
-				appendStringInfoString(result, "true");
+			if (!key_scalar)
+				appendStringInfoString(result, DatumGetBool(val) ? "true" : "false");
 			else
-				appendStringInfoString(result, "false");
+				escape_json(result, DatumGetBool(val) ? "true" : "false");
 			break;
 		case TYPCATEGORY_NUMERIC:
 			outputstr = OidOutputFunctionCall(typoutputfunc, val);
-			/*
-			 * Don't call escape_json here if it's a valid JSON number.
-			 */
-			dummy_lex.input = *outputstr == '-' ? outputstr + 1 : outputstr;
-			dummy_lex.input_length = strlen(dummy_lex.input);
-			json_lex_number(&dummy_lex, dummy_lex.input, &numeric_error);
-			if (! numeric_error)
-				appendStringInfoString(result, outputstr);
-			else
+			if (key_scalar)
+			{
+				/* always quote keys */
 				escape_json(result, outputstr);
+			}
+			else
+			{
+				/*
+				 * Don't call escape_json for a non-key if it's a valid JSON number.
+				 */
+				dummy_lex.input = *outputstr == '-' ? outputstr + 1 : outputstr;
+				dummy_lex.input_length = strlen(dummy_lex.input);
+				json_lex_number(&dummy_lex, dummy_lex.input, &numeric_error);
+				if (! numeric_error)
+					appendStringInfoString(result, outputstr);
+				else
+					escape_json(result, outputstr);
+			}
 			pfree(outputstr);
 			break;
 		case TYPCATEGORY_JSON:
@@ -1273,6 +1286,10 @@ datum_to_json(Datum val, bool is_null, StringInfo result,
 			break;
 		default:
 			outputstr = OidOutputFunctionCall(typoutputfunc, val);
+			if (key_scalar && *outputstr == '\0')
+				ereport(ERROR,
+						(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+						 errmsg("key value must not be empty")));
 			escape_json(result, outputstr);
 			pfree(outputstr);
 			break;
@@ -1306,7 +1323,7 @@ array_dim_to_json(StringInfo result, int dim, int ndims, int *dims, Datum *vals,
 		if (dim + 1 == ndims)
 		{
 			datum_to_json(vals[*valcount], nulls[*valcount], result, tcategory,
-						  typoutputfunc);
+						  typoutputfunc, false);
 			(*valcount)++;
 		}
 		else
@@ -1487,13 +1504,85 @@ composite_to_json(Datum composite, StringInfo result, bool use_line_feeds)
 		else
 			tcategory = TypeCategory(tupdesc->attrs[i]->atttypid);
 
-		datum_to_json(val, isnull, result, tcategory, typoutput);
+		datum_to_json(val, isnull, result, tcategory, typoutput, false);
 	}
 
 	appendStringInfoChar(result, '}');
 	ReleaseTupleDesc(tupdesc);
 }
 
+/*
+ * Append the json representation of one datum to result.
+ *
+ * Looks up the output function and type category for val_type, detoasts
+ * varlena inputs to avoid leakage inside the output routine, and hands off
+ * to datum_to_json.  If a user-defined type has a function-based cast to
+ * json, that cast function is used in place of the output function.
+ *
+ * key_scalar is true when the value will be an object key, in which case
+ * it must be a scalar (enforced here) and non-empty (enforced in
+ * datum_to_json).
+ *
+ * NOTE(review): TYPCATEGORY_JSON_CAST must be defined, and handled in
+ * datum_to_json by invoking castfunc — neither is visible in this patch's
+ * hunks; verify both are present elsewhere in the patch.
+ */
+static void
+add_json(Datum orig_val, bool is_null, StringInfo result, Oid val_type,
+		 bool key_scalar)
+{
+	Datum		val;
+	TYPCATEGORY tcategory;
+	Oid			typoutput;
+	bool		typisvarlena;
+	Oid			castfunc = InvalidOid;
+
+	if (val_type == InvalidOid)
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+				 errmsg("could not determine input data type")));
+
+	getTypeOutputInfo(val_type, &typoutput, &typisvarlena);
+
+	/*
+	 * Only user-defined types can have a cast to json; built-in types are
+	 * handled directly by datum_to_json, so don't bother with a syscache
+	 * lookup for them.
+	 */
+	if (val_type > FirstNormalObjectId)
+	{
+		HeapTuple	tuple;
+		Form_pg_cast castForm;
+
+		tuple = SearchSysCache2(CASTSOURCETARGET,
+								ObjectIdGetDatum(val_type),
+								ObjectIdGetDatum(JSONOID));
+		if (HeapTupleIsValid(tuple))
+		{
+			castForm = (Form_pg_cast) GETSTRUCT(tuple);
+
+			if (castForm->castmethod == COERCION_METHOD_FUNCTION)
+				castfunc = typoutput = castForm->castfunc;
+
+			ReleaseSysCache(tuple);
+		}
+	}
+
+	if (castfunc != InvalidOid)
+		tcategory = TYPCATEGORY_JSON_CAST;
+	else if (val_type == RECORDARRAYOID)
+		tcategory = TYPCATEGORY_ARRAY;
+	else if (val_type == RECORDOID)
+		tcategory = TYPCATEGORY_COMPOSITE;
+	else if (val_type == JSONOID)
+		tcategory = TYPCATEGORY_JSON;
+	else
+		tcategory = TypeCategory(val_type);
+
+	/* object keys must be scalar */
+	if (key_scalar &&
+		(tcategory == TYPCATEGORY_ARRAY ||
+		 tcategory == TYPCATEGORY_COMPOSITE ||
+		 tcategory == TYPCATEGORY_JSON ||
+		 tcategory == TYPCATEGORY_JSON_CAST))
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+				 errmsg("key value must be scalar, not array, composite or json")));
+
+	/*
+	 * If we have a toasted datum, forcibly detoast it here to avoid memory
+	 * leakage inside the type's output routine.
+	 */
+	if (typisvarlena && orig_val != (Datum) 0)
+		val = PointerGetDatum(PG_DETOAST_DATUM(orig_val));
+	else
+		val = orig_val;
+
+	datum_to_json(val, is_null, result, tcategory, typoutput, key_scalar);
+
+	/* Clean up detoasted copy, if any */
+	if (val != orig_val)
+		pfree(DatumGetPointer(val));
+}
+
 /*
  * SQL function array_to_json(row)
  */
@@ -1613,7 +1702,7 @@ to_json(PG_FUNCTION_ARGS)
 	else
 		tcategory = TypeCategory(val_type);
 
-	datum_to_json(val, false, result, tcategory, typoutput);
+	datum_to_json(val, false, result, tcategory, typoutput, false);
 
 	PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len));
 }
@@ -1669,7 +1758,7 @@ json_agg_transfn(PG_FUNCTION_ARGS)
 	if (PG_ARGISNULL(1))
 	{
 		val = (Datum) 0;
-		datum_to_json(val, true, state, 0, InvalidOid);
+		datum_to_json(val, true, state, 0, InvalidOid, false);
 		PG_RETURN_POINTER(state);
 	}
 
@@ -1713,7 +1802,7 @@ json_agg_transfn(PG_FUNCTION_ARGS)
 		appendStringInfoString(state, "\n ");
 	}
 
-	datum_to_json(val, false, state, tcategory, typoutput);
+	datum_to_json(val, false, state, tcategory, typoutput, false);
 
 	/*
 	 * The transition type for array_agg() is declared to be "internal", which
@@ -1745,6 +1834,452 @@ json_agg_finalfn(PG_FUNCTION_ARGS)
 }
 
 /*
+ * json_object_agg transition function.
+ *
+ * aggregate two input columns as a single json value.
+ */
+Datum
+json_object_agg_transfn(PG_FUNCTION_ARGS)
+{
+	Oid			val_type;
+	MemoryContext aggcontext,
+				oldcontext;
+	StringInfo	state;
+	Datum		arg;
+
+	if (!AggCheckCallContext(fcinfo, &aggcontext))
+	{
+		/* cannot be called directly because of internal-type argument */
+		elog(ERROR, "json_object_agg_transfn called in non-aggregate context");
+	}
+
+	if (PG_ARGISNULL(0))
+	{
+		/*
+		 * Make this StringInfo in a context where it will persist for the
+		 * duration of the aggregate call.  It's only needed for this initial
+		 * piece, as the StringInfo routines make sure they use the right
+		 * context to enlarge the object if necessary.
+		 */
+		oldcontext = MemoryContextSwitchTo(aggcontext);
+		state = makeStringInfo();
+		MemoryContextSwitchTo(oldcontext);
+
+		appendStringInfoString(state, "{ ");
+	}
+	else
+	{
+		state = (StringInfo) PG_GETARG_POINTER(0);
+		appendStringInfoString(state, ", ");
+	}
+
+	/* json object field names must not be null */
+	if (PG_ARGISNULL(1))
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+				 errmsg("field name must not be null")));
+
+	val_type = get_fn_expr_argtype(fcinfo->flinfo, 1);
+
+	/*
+	 * turn a constant (more or less literal) value that's of unknown type
+	 * into text. Unknowns come in as a cstring pointer.
+	 */
+	if (val_type == UNKNOWNOID && get_fn_expr_arg_stable(fcinfo->flinfo, 1))
+	{
+		val_type = TEXTOID;
+		arg = CStringGetTextDatum(PG_GETARG_POINTER(1));
+	}
+	else
+		arg = PG_GETARG_DATUM(1);
+
+	if (val_type == InvalidOid || val_type == UNKNOWNOID)
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+				 errmsg("arg 1: could not determine data type")));
+
+	/* the key: must be a non-null scalar */
+	add_json(arg, false, state, val_type, true);
+
+	appendStringInfoString(state, " : ");
+
+	val_type = get_fn_expr_argtype(fcinfo->flinfo, 2);
+	/* see comments above */
+	if (val_type == UNKNOWNOID && get_fn_expr_arg_stable(fcinfo->flinfo, 2))
+	{
+		val_type = TEXTOID;
+		if (PG_ARGISNULL(2))
+			arg = (Datum) 0;
+		else
+			arg = CStringGetTextDatum(PG_GETARG_POINTER(2));
+	}
+	else
+		arg = PG_GETARG_DATUM(2);
+
+	if (val_type == InvalidOid || val_type == UNKNOWNOID)
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+				 errmsg("arg 2: could not determine data type")));
+
+	/* the value: any type, nulls allowed */
+	add_json(arg, PG_ARGISNULL(2), state, val_type, false);
+
+	PG_RETURN_POINTER(state);
+}
+
+/*
+ * json_object_agg final function.
+ *
+ * Close the object built up by json_object_agg_transfn.  If no rows were
+ * aggregated (the transition value is null), return an empty object rather
+ * than SQL NULL.
+ */
+Datum
+json_object_agg_finalfn(PG_FUNCTION_ARGS)
+{
+    StringInfo  state;
+
+    /* cannot be called directly because of internal-type argument */
+    Assert(AggCheckCallContext(fcinfo, NULL));
+
+    state = PG_ARGISNULL(0) ? NULL : (StringInfo) PG_GETARG_POINTER(0);
+
+    /* no rows aggregated: produce an empty object */
+    if (state == NULL)
+        PG_RETURN_TEXT_P(cstring_to_text("{}"));
+
+    appendStringInfoString(state, " }");
+
+    PG_RETURN_TEXT_P(cstring_to_text_with_len(state->data, state->len));
+}
+
+/*
+ * SQL function json_build_object(variadic "any")
+ *
+ * Build a json object out of a variadic argument list consisting of
+ * alternating keys and values.  Keys must be non-null scalars; values may
+ * be of any type, including null.
+ */
+Datum
+json_build_object(PG_FUNCTION_ARGS)
+{
+	int			nargs = PG_NARGS();
+	int			i;
+	Datum		arg;
+	char	   *sep = "";
+	StringInfo	result;
+	Oid			val_type;
+
+	if (nargs % 2 != 0)
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+				 errmsg("invalid number of arguments: object must be matched key value pairs")));
+
+	result = makeStringInfo();
+
+	appendStringInfoChar(result, '{');
+
+	for (i = 0; i < nargs; i += 2)
+	{
+		/* process key */
+
+		if (PG_ARGISNULL(i))
+			ereport(ERROR,
+					(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+					 errmsg("arg %d: key cannot be null", i + 1)));
+		val_type = get_fn_expr_argtype(fcinfo->flinfo, i);
+
+		/*
+		 * turn a constant (more or less literal) value that's of unknown
+		 * type into text. Unknowns come in as a cstring pointer.
+		 */
+		if (val_type == UNKNOWNOID && get_fn_expr_arg_stable(fcinfo->flinfo, i))
+		{
+			val_type = TEXTOID;
+			/* the key was already checked non-null above */
+			arg = CStringGetTextDatum(PG_GETARG_POINTER(i));
+		}
+		else
+			arg = PG_GETARG_DATUM(i);
+		if (val_type == InvalidOid || val_type == UNKNOWNOID)
+			ereport(ERROR,
+					(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+					 errmsg("arg %d: could not determine data type", i + 1)));
+		appendStringInfoString(result, sep);
+		sep = ", ";
+		add_json(arg, false, result, val_type, true);
+
+		appendStringInfoString(result, " : ");
+
+		/* process value */
+
+		val_type = get_fn_expr_argtype(fcinfo->flinfo, i + 1);
+		/* see comments above */
+		if (val_type == UNKNOWNOID && get_fn_expr_arg_stable(fcinfo->flinfo, i + 1))
+		{
+			val_type = TEXTOID;
+			if (PG_ARGISNULL(i + 1))
+				arg = (Datum) 0;
+			else
+				arg = CStringGetTextDatum(PG_GETARG_POINTER(i + 1));
+		}
+		else
+			arg = PG_GETARG_DATUM(i + 1);
+		if (val_type == InvalidOid || val_type == UNKNOWNOID)
+			ereport(ERROR,
+					(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+					 errmsg("arg %d: could not determine data type", i + 2)));
+		add_json(arg, PG_ARGISNULL(i + 1), result, val_type, false);
+	}
+	appendStringInfoChar(result, '}');
+
+	PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len));
+}
+
+/*
+ * degenerate case of json_build_object where it gets 0 arguments:
+ * return an empty object.
+ */
+Datum
+json_build_object_noargs(PG_FUNCTION_ARGS)
+{
+	PG_RETURN_TEXT_P(cstring_to_text_with_len("{}", 2));
+}
+
+/*
+ * SQL function json_build_array(variadic "any")
+ *
+ * Build a json array out of a variadic argument list.  Elements may be of
+ * any type, including null.
+ */
+Datum
+json_build_array(PG_FUNCTION_ARGS)
+{
+	int			nargs = PG_NARGS();
+	int			i;
+	Datum		arg;
+	char	   *sep = "";
+	StringInfo	result;
+	Oid			val_type;
+
+	result = makeStringInfo();
+
+	appendStringInfoChar(result, '[');
+
+	for (i = 0; i < nargs; i++)
+	{
+		val_type = get_fn_expr_argtype(fcinfo->flinfo, i);
+		/* see comments in json_build_object above */
+		if (val_type == UNKNOWNOID && get_fn_expr_arg_stable(fcinfo->flinfo, i))
+		{
+			val_type = TEXTOID;
+			if (PG_ARGISNULL(i))
+				arg = (Datum) 0;
+			else
+				arg = CStringGetTextDatum(PG_GETARG_POINTER(i));
+		}
+		else
+			arg = PG_GETARG_DATUM(i);
+		if (val_type == InvalidOid || val_type == UNKNOWNOID)
+			ereport(ERROR,
+					(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+					 errmsg("arg %d: could not determine data type", i + 1)));
+		appendStringInfoString(result, sep);
+		sep = ", ";
+		add_json(arg, PG_ARGISNULL(i), result, val_type, false);
+	}
+	appendStringInfoChar(result, ']');
+
+	PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len));
+}
+
+/*
+ * degenerate case of json_build_array where it gets 0 arguments:
+ * return an empty array.
+ */
+Datum
+json_build_array_noargs(PG_FUNCTION_ARGS)
+{
+	PG_RETURN_TEXT_P(cstring_to_text_with_len("[]", 2));
+}
+
+/*
+ * SQL function json_object(text[])
+ *
+ * Take a one- or two-dimensional text array of key/value pairs and return
+ * a json object.  A 1-D array is taken as alternating keys and values and
+ * must have an even number of elements; a 2-D array must have exactly two
+ * columns per row (key, value).  Keys must be non-null and non-empty.
+ * Values are emitted as json strings (no type inference); null values
+ * become json null.
+ */
+Datum
+json_object(PG_FUNCTION_ARGS)
+{
+	ArrayType  *in_array = PG_GETARG_ARRAYTYPE_P(0);
+	int			ndims = ARR_NDIM(in_array);
+	StringInfoData result;
+	Datum	   *in_datums;
+	bool	   *in_nulls;
+	int			in_count,
+				count,
+				i;
+	text	   *rval;
+	char	   *v;
+
+	/* validate the array shape before deconstructing it */
+	switch (ndims)
+	{
+		case 0:
+			PG_RETURN_DATUM(CStringGetTextDatum("{}"));
+			break;
+
+		case 1:
+			if ((ARR_DIMS(in_array)[0]) % 2)
+				ereport(ERROR,
+						(errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
+						 errmsg("array must have even number of elements")));
+			break;
+
+		case 2:
+			if ((ARR_DIMS(in_array)[1]) != 2)
+				ereport(ERROR,
+						(errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
+						 errmsg("array must have two columns")));
+			break;
+
+		default:
+			ereport(ERROR,
+					(errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
+					 errmsg("wrong number of array subscripts")));
+	}
+
+	/* 2-D arrays flatten out here; pairs are consecutive either way */
+	deconstruct_array(in_array,
+					  TEXTOID, -1, false, 'i',
+					  &in_datums, &in_nulls, &in_count);
+
+	count = in_count / 2;
+
+	initStringInfo(&result);
+
+	appendStringInfoChar(&result, '{');
+
+	for (i = 0; i < count; ++i)
+	{
+		if (in_nulls[i * 2])
+			ereport(ERROR,
+					(errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED),
+					 errmsg("null value not allowed for object key")));
+
+		v = TextDatumGetCString(in_datums[i * 2]);
+		if (v[0] == '\0')
+			ereport(ERROR,
+					(errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED),
+					 errmsg("empty value not allowed for object key")));
+		if (i > 0)
+			appendStringInfoString(&result, ", ");
+		escape_json(&result, v);
+		appendStringInfoString(&result, " : ");
+		pfree(v);
+		/* null values become json null; non-nulls are quoted strings */
+		if (in_nulls[i * 2 + 1])
+			appendStringInfoString(&result, "null");
+		else
+		{
+			v = TextDatumGetCString(in_datums[i * 2 + 1]);
+			escape_json(&result, v);
+			pfree(v);
+		}
+	}
+
+	appendStringInfoChar(&result, '}');
+
+	pfree(in_datums);
+	pfree(in_nulls);
+
+	rval = cstring_to_text_with_len(result.data, result.len);
+	pfree(result.data);
+
+	PG_RETURN_TEXT_P(rval);
+
+}
+
+/*
+ * SQL function json_object(text[], text[])
+ *
+ * Take separate key and value text arrays of equal length and return a
+ * json object mapping each key to the corresponding value.  Both arrays
+ * must be one-dimensional (note: unlike the one-argument form, 2-D input
+ * is rejected here).  Keys must be non-null and non-empty; null values
+ * become json null, and non-null values are emitted as json strings.
+ */
+Datum
+json_object_two_arg(PG_FUNCTION_ARGS)
+{
+	ArrayType  *key_array = PG_GETARG_ARRAYTYPE_P(0);
+	ArrayType  *val_array = PG_GETARG_ARRAYTYPE_P(1);
+	int			nkdims = ARR_NDIM(key_array);
+	int			nvdims = ARR_NDIM(val_array);
+	StringInfoData result;
+	Datum	   *key_datums, *val_datums;
+	bool	   *key_nulls, *val_nulls;
+	int			key_count, val_count,
+				count,
+				i;
+	text	   *rval;
+	char	   *v;
+
+	if (nkdims > 1 || nkdims != nvdims)
+		ereport(ERROR,
+				(errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
+				 errmsg("wrong number of array subscripts")));
+
+	/* both arrays empty: empty object */
+	if (nkdims == 0)
+		PG_RETURN_DATUM(CStringGetTextDatum("{}"));
+	
+	deconstruct_array(key_array,
+					  TEXTOID, -1, false, 'i',
+					  &key_datums, &key_nulls, &key_count);
+
+	deconstruct_array(val_array,
+					  TEXTOID, -1, false, 'i',
+					  &val_datums, &val_nulls, &val_count);
+
+	if (key_count != val_count)
+		ereport(ERROR,
+				(errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
+				 errmsg("mismatched array dimensions")));
+
+	initStringInfo(&result);
+
+	appendStringInfoChar(&result, '{');
+
+	for (i = 0; i < key_count; ++i)
+	{
+		if (key_nulls[i])
+			ereport(ERROR,
+					(errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED),
+					 errmsg("null value not allowed for object key")));
+
+		v = TextDatumGetCString(key_datums[i]);
+		if (v[0] == '\0')
+			ereport(ERROR,
+					(errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED),
+					 errmsg("empty value not allowed for object key")));
+		if (i > 0)
+			appendStringInfoString(&result, ", ");
+		escape_json(&result, v);
+		appendStringInfoString(&result, " : ");
+		pfree(v);
+		/* null values become json null; non-nulls are quoted strings */
+		if (val_nulls[i])
+			appendStringInfoString(&result, "null");
+		else
+		{
+			v = TextDatumGetCString(val_datums[i]);
+			escape_json(&result, v);
+			pfree(v);
+		}
+	}
+
+	appendStringInfoChar(&result, '}');
+
+	pfree(key_datums);
+	pfree(key_nulls);
+	pfree(val_datums);
+	pfree(val_nulls);
+
+	rval = cstring_to_text_with_len(result.data, result.len);
+	pfree(result.data);
+
+	PG_RETURN_TEXT_P(rval);
+
+}
+
+
+/*
  * Produce a JSON string literal, properly escaping characters in the text.
  */
 void
diff --git a/src/backend/utils/adt/jsonfuncs.c b/src/backend/utils/adt/jsonfuncs.c
index bcb9354..faf7280 100644
--- a/src/backend/utils/adt/jsonfuncs.c
+++ b/src/backend/utils/adt/jsonfuncs.c
@@ -1928,3 +1928,269 @@ populate_recordset_object_field_end(void *state, char *fname, bool isnull)
 		hashentry->val = _state->saved_scalar;
 	}
 }
+
+
+/*
+ * SQL function json_to_record
+ *
+ * turn json into a record of the type specified in the call
+ *
+ * The function is specified to return the pseudotype RECORD, so the
+ * concrete type used has to be supplied by the caller.
+ *
+ * Modified from json_populate_record above to get the type from the call
+ * instead of from an argument.
+ *
+ * The json is decomposed into a hash table, in which each field in the
+ * record is then looked up by name.
+ */
+Datum
+json_to_record(PG_FUNCTION_ARGS)
+{
+	text	   *json;
+	bool		use_json_as_text;
+	HTAB	   *json_hash;
+	TupleDesc	tupdesc;
+	HeapTuple	rettuple;
+	RecordIOData *my_extra;
+	int			ncolumns;
+	int			i;
+	Datum	   *values;
+	bool	   *nulls;
+	char		fname[NAMEDATALEN];
+	JsonHashEntry *hashentry;
+
+	use_json_as_text = PG_ARGISNULL(1) ? false : PG_GETARG_BOOL(1);
+
+	/* the concrete record type must be determinable from the call site */
+	if (get_call_result_type(fcinfo, NULL, &tupdesc) != TYPEFUNC_COMPOSITE)
+		ereport(ERROR,
+				(errcode(ERRCODE_FEATURE_NOT_SUPPORTED),
+				 errmsg("function returning record called in context "
+						"that cannot accept type record")));
+
+	/* null json input yields a null record */
+	if (PG_ARGISNULL(0))
+		PG_RETURN_NULL();
+
+	json = PG_GETARG_TEXT_P(0);
+
+	json_hash = get_json_object_as_hash(json, "json_to_record", use_json_as_text);
+	ncolumns = tupdesc->natts;
+
+	/*
+	 * We arrange to look up the needed I/O info just once per series of
+	 * calls, assuming the record type doesn't change underneath us.
+	 */
+	my_extra = (RecordIOData *) fcinfo->flinfo->fn_extra;
+	if (my_extra == NULL ||
+		my_extra->ncolumns != ncolumns)
+	{
+		fcinfo->flinfo->fn_extra =
+			MemoryContextAlloc(fcinfo->flinfo->fn_mcxt,
+							   sizeof(RecordIOData) - sizeof(ColumnIOData)
+							   + ncolumns * sizeof(ColumnIOData));
+		my_extra = (RecordIOData *) fcinfo->flinfo->fn_extra;
+		/* zero the whole struct first, then fill in the header fields */
+		MemSet(my_extra, 0,
+			   sizeof(RecordIOData) - sizeof(ColumnIOData)
+			   + ncolumns * sizeof(ColumnIOData));
+		my_extra->record_type = InvalidOid;
+		my_extra->record_typmod = 0;
+		my_extra->ncolumns = ncolumns;
+	}
+
+	values = (Datum *) palloc(ncolumns * sizeof(Datum));
+	nulls = (bool *) palloc(ncolumns * sizeof(bool));
+
+	/* start with all fields null; filled in from the hash below */
+	for (i = 0; i < ncolumns; ++i)
+	{
+		values[i] = (Datum) 0;
+		nulls[i] = true;
+	}
+
+	for (i = 0; i < ncolumns; ++i)
+	{
+		ColumnIOData *column_info = &my_extra->columns[i];
+		Oid			column_type = tupdesc->attrs[i]->atttypid;
+		char	   *value;
+
+		/* Ignore dropped columns in datatype */
+		if (tupdesc->attrs[i]->attisdropped)
+		{
+			nulls[i] = true;
+			continue;
+		}
+
+		memset(fname, 0, NAMEDATALEN);
+		strncpy(fname, NameStr(tupdesc->attrs[i]->attname), NAMEDATALEN);
+		hashentry = hash_search(json_hash, fname, HASH_FIND, NULL);
+
+		/*
+		 * Prepare to convert the column value from text
+		 */
+		if (column_info->column_type != column_type)
+		{
+			getTypeInputInfo(column_type,
+							 &column_info->typiofunc,
+							 &column_info->typioparam);
+			fmgr_info_cxt(column_info->typiofunc, &column_info->proc,
+						  fcinfo->flinfo->fn_mcxt);
+			column_info->column_type = column_type;
+		}
+		if (hashentry == NULL || hashentry->isnull)
+		{
+			/*
+			 * need InputFunctionCall to happen even for nulls, so that domain
+			 * checks are done
+			 */
+			values[i] = InputFunctionCall(&column_info->proc, NULL,
+										  column_info->typioparam,
+										  tupdesc->attrs[i]->atttypmod);
+			nulls[i] = true;
+		}
+		else
+		{
+			value = hashentry->val;
+
+			values[i] = InputFunctionCall(&column_info->proc, value,
+										  column_info->typioparam,
+										  tupdesc->attrs[i]->atttypmod);
+			nulls[i] = false;
+		}
+	}
+
+	rettuple = heap_form_tuple(tupdesc, values, nulls);
+
+	ReleaseTupleDesc(tupdesc);
+
+	PG_RETURN_DATUM(HeapTupleGetDatum(rettuple));
+}
+
+
+/*
+ * SQL function json_to_recordset
+ *
+ * set fields in a set of records from the argument json,
+ * which must be an array of objects.
+ *
+ * adapted from json_populate_recordset above, but gets
+ * the type from the call instead of from an argument.
+ *
+ * Results are materialized into a tuplestore; the actual per-object work
+ * is done by the populate_recordset_* parse callbacks.
+ */
+Datum
+json_to_recordset(PG_FUNCTION_ARGS)
+{
+	text	   *json;
+	bool		use_json_as_text;
+	ReturnSetInfo *rsi;
+	MemoryContext old_cxt;
+	Oid			tupType;
+	int32		tupTypmod;
+	HeapTupleHeader rec;
+	TupleDesc	tupdesc;
+	RecordIOData *my_extra;
+	int			ncolumns;
+	JsonLexContext *lex;
+	JsonSemAction *sem;
+	PopulateRecordsetState *state;
+
+	use_json_as_text = PG_ARGISNULL(1) ? false : PG_GETARG_BOOL(1);
+
+	rsi = (ReturnSetInfo *) fcinfo->resultinfo;
+
+	/* we only support materialize mode */
+	if (!rsi || !IsA(rsi, ReturnSetInfo) ||
+		(rsi->allowedModes & SFRM_Materialize) == 0 ||
+		rsi->expectedDesc == NULL)
+		ereport(ERROR,
+				(errcode(ERRCODE_FEATURE_NOT_SUPPORTED),
+				 errmsg("set-valued function called in context that "
+						"cannot accept a set")));
+
+
+	rsi->returnMode = SFRM_Materialize;
+
+	/*
+	 * get the tupdesc from the result set info - it must be a record type
+	 * because we already checked that arg1 is a record type.
+	 */
+	(void) get_call_result_type(fcinfo, NULL, &tupdesc);
+
+	state = palloc0(sizeof(PopulateRecordsetState));
+	sem = palloc0(sizeof(JsonSemAction));
+
+
+	/* make these in a sufficiently long-lived memory context */
+	old_cxt = MemoryContextSwitchTo(rsi->econtext->ecxt_per_query_memory);
+
+	state->ret_tdesc = CreateTupleDescCopy(tupdesc);
+	BlessTupleDesc(state->ret_tdesc);
+	state->tuple_store =
+		tuplestore_begin_heap(rsi->allowedModes & SFRM_Materialize_Random,
+							  false, work_mem);
+
+	MemoryContextSwitchTo(old_cxt);
+
+	/*
+	 * if the json is null send back an empty set
+	 *
+	 * NOTE(review): this check could be made before the tuplestore above is
+	 * created, avoiding the wasted setup work — confirm and hoist.
+	 */
+	if (PG_ARGISNULL(0))
+		PG_RETURN_NULL();
+
+	json = PG_GETARG_TEXT_P(0);
+
+	/* no existing record to merge fields into */
+	rec = NULL;
+
+	tupType = tupdesc->tdtypeid;
+	tupTypmod = tupdesc->tdtypmod;
+	ncolumns = tupdesc->natts;
+
+	lex = makeJsonLexContext(json, true);
+
+	/*
+	 * We arrange to look up the needed I/O info just once per series of
+	 * calls, assuming the record type doesn't change underneath us.
+	 */
+	my_extra = (RecordIOData *) fcinfo->flinfo->fn_extra;
+	if (my_extra == NULL ||
+		my_extra->ncolumns != ncolumns)
+	{
+		fcinfo->flinfo->fn_extra =
+			MemoryContextAlloc(fcinfo->flinfo->fn_mcxt,
+							   sizeof(RecordIOData) - sizeof(ColumnIOData)
+							   + ncolumns * sizeof(ColumnIOData));
+		my_extra = (RecordIOData *) fcinfo->flinfo->fn_extra;
+		my_extra->record_type = InvalidOid;
+		my_extra->record_typmod = 0;
+	}
+
+	if (my_extra->record_type != tupType ||
+		my_extra->record_typmod != tupTypmod)
+	{
+		MemSet(my_extra, 0,
+			   sizeof(RecordIOData) - sizeof(ColumnIOData)
+			   + ncolumns * sizeof(ColumnIOData));
+		my_extra->record_type = tupType;
+		my_extra->record_typmod = tupTypmod;
+		my_extra->ncolumns = ncolumns;
+	}
+
+	/* wire up the parse callbacks that do the per-object work */
+	sem->semstate = (void *) state;
+	sem->array_start = populate_recordset_array_start;
+	sem->array_element_start = populate_recordset_array_element_start;
+	sem->scalar = populate_recordset_scalar;
+	sem->object_field_start = populate_recordset_object_field_start;
+	sem->object_field_end = populate_recordset_object_field_end;
+	sem->object_start = populate_recordset_object_start;
+	sem->object_end = populate_recordset_object_end;
+
+	state->lex = lex;
+
+	state->my_extra = my_extra;
+	state->rec = rec;
+	state->use_json_as_text = use_json_as_text;
+	state->fn_mcxt = fcinfo->flinfo->fn_mcxt;
+
+	/* parse; each completed object is appended to the tuplestore */
+	pg_parse_json(lex, sem);
+
+	rsi->setResult = state->tuple_store;
+	rsi->setDesc = state->ret_tdesc;
+
+	PG_RETURN_NULL();
+
+}
+
diff --git a/src/include/catalog/pg_aggregate.h b/src/include/catalog/pg_aggregate.h
index f0eaf60..7f7349a 100644
--- a/src/include/catalog/pg_aggregate.h
+++ b/src/include/catalog/pg_aggregate.h
@@ -258,6 +258,7 @@ DATA(insert ( 3545	n 0 bytea_string_agg_transfn	bytea_string_agg_finalfn	0	2281
 
 /* json */
 DATA(insert ( 3175	n 0 json_agg_transfn	json_agg_finalfn	0	2281	0	_null_ ));
+DATA(insert ( 3197	n 0 json_object_agg_transfn	json_object_agg_finalfn	0	2281	0	_null_ ));
 
 /* ordered-set and hypothetical-set aggregates */
 DATA(insert ( 3972	o 1 ordered_set_transition			percentile_disc_final					0	2281	0	_null_ ));
diff --git a/src/include/catalog/pg_proc.h b/src/include/catalog/pg_proc.h
index 05d7ba5..fce4917 100644
--- a/src/include/catalog/pg_proc.h
+++ b/src/include/catalog/pg_proc.h
@@ -4130,6 +4130,24 @@ DATA(insert OID = 3174 (  json_agg_finalfn	 PGNSP PGUID 12 1 0 0 0 f f f f f f i
 DESCR("json aggregate final function");
 DATA(insert OID = 3175 (  json_agg		   PGNSP PGUID 12 1 0 0 0 t f f f f f i 1 0 114 "2283" _null_ _null_ _null_ _null_ aggregate_dummy _null_ _null_ _null_ ));
 DESCR("aggregate input into json");
+DATA(insert OID = 3195 (  json_object_agg_transfn	 PGNSP PGUID 12 1 0 0 0 f f f f f f i 3 0 2281 "2281 2276 2276" _null_ _null_ _null_ _null_ json_object_agg_transfn _null_ _null_ _null_ ));
+DESCR("json object aggregate transition function");
+DATA(insert OID = 3196 (  json_object_agg_finalfn	 PGNSP PGUID 12 1 0 0 0 f f f f f f i 1 0 114 "2281" _null_ _null_ _null_ _null_ json_object_agg_finalfn _null_ _null_ _null_ ));
+DESCR("json object aggregate final function");
+DATA(insert OID = 3197 (  json_object_agg		   PGNSP PGUID 12 1 0 0 0 t f f f f f i 2 0 114 "2276 2276" _null_ _null_ _null_ _null_ aggregate_dummy _null_ _null_ _null_ ));
+DESCR("aggregate input into a json object");
+DATA(insert OID = 3198 (  json_build_array	   PGNSP PGUID 12 1 0 2276 0 f f f f f f i 1 0 114 "2276" "{2276}" "{v}" _null_ _null_ json_build_array _null_ _null_ _null_ ));
+DESCR("build a json array from any inputs");
+DATA(insert OID = 3199 (  json_build_array	   PGNSP PGUID 12 1 0 0 0 f f f f f f i 0 0 114  "" _null_ _null_ _null_ _null_ json_build_array_noargs _null_ _null_ _null_ ));
+DESCR("build an empty json array");
+DATA(insert OID = 3200 (  json_build_object	   PGNSP PGUID 12 1 0 2276 0 f f f f f f i 1 0 114 "2276" "{2276}" "{v}" _null_ _null_ json_build_object _null_ _null_ _null_ ));
+DESCR("build a json object from pairwise key/value inputs");
+DATA(insert OID = 3201 (  json_build_object	   PGNSP PGUID 12 1 0 0 0 f f f f f f i 0 0 114  "" _null_ _null_ _null_ _null_ json_build_object_noargs _null_ _null_ _null_ ));
+DESCR("build an empty json object");
+DATA(insert OID = 3202 (  json_object    PGNSP PGUID 12 1 0 0 0 f f f f t f s 1 0 114 "1009" _null_ _null_ _null_ _null_ json_object _null_ _null_ _null_ ));
+DESCR("map text array of key value pairs to json object");
+DATA(insert OID = 3203 (  json_object    PGNSP PGUID 12 1 0 0 0 f f f f t f s 2 0 114 "1009 1009" _null_ _null_ _null_ _null_ json_object_two_arg _null_ _null_ _null_ ));
+DESCR("map text array of key value pairs to json object");
 DATA(insert OID = 3176 (  to_json	   PGNSP PGUID 12 1 0 0 0 f f f f t f s 1 0 114 "2283" _null_ _null_ _null_ _null_ to_json _null_ _null_ _null_ ));
 DESCR("map input to json");
 
@@ -4163,6 +4181,10 @@ DATA(insert OID = 3960 (  json_populate_record	   PGNSP PGUID 12 1 0 0 0 f f f f
 DESCR("get record fields from a json object");
 DATA(insert OID = 3961 (  json_populate_recordset  PGNSP PGUID 12 1 100 0 0 f f f f f t s 3 0 2283 "2283 114 16" _null_ _null_ _null_ _null_ json_populate_recordset _null_ _null_ _null_ ));
 DESCR("get set of records with fields from a json array of objects");
+DATA(insert OID = 3204 (  json_to_record	   PGNSP PGUID 12 1 0 0 0 f f f f f f s 2 0 2249 "114 16" _null_ _null_ _null_ _null_ json_to_record _null_ _null_ _null_ ));
+DESCR("get record fields from a json object");
+DATA(insert OID = 3205 (  json_to_recordset  PGNSP PGUID 12 1 100 0 0 f f f f f t s 2 0 2249 "114 16" _null_ _null_ _null_ _null_ json_to_recordset _null_ _null_ _null_ ));
+DESCR("get set of records with fields from a json array of objects");
 DATA(insert OID = 3968 (  json_typeof              PGNSP PGUID 12 1 0 0 0 f f f f t f i 1 0 25 "114" _null_ _null_ _null_ _null_ json_typeof _null_ _null_ _null_ ));
 DESCR("get the type of a json value");
 
diff --git a/src/include/utils/json.h b/src/include/utils/json.h
index 62fd7c7..223f07d 100644
--- a/src/include/utils/json.h
+++ b/src/include/utils/json.h
@@ -31,6 +31,17 @@ extern Datum to_json(PG_FUNCTION_ARGS);
 extern Datum json_agg_transfn(PG_FUNCTION_ARGS);
 extern Datum json_agg_finalfn(PG_FUNCTION_ARGS);
 
+extern Datum json_object_agg_finalfn(PG_FUNCTION_ARGS);
+extern Datum json_object_agg_transfn(PG_FUNCTION_ARGS);
+
+extern Datum json_build_object(PG_FUNCTION_ARGS);
+extern Datum json_build_object_noargs(PG_FUNCTION_ARGS);
+extern Datum json_build_array(PG_FUNCTION_ARGS);
+extern Datum json_build_array_noargs(PG_FUNCTION_ARGS);
+
+extern Datum json_object(PG_FUNCTION_ARGS);
+extern Datum json_object_two_arg(PG_FUNCTION_ARGS);
+
 extern void escape_json(StringInfo buf, const char *str);
 
 extern Datum json_typeof(PG_FUNCTION_ARGS);
@@ -49,5 +60,7 @@ extern Datum json_each_text(PG_FUNCTION_ARGS);
 extern Datum json_array_elements(PG_FUNCTION_ARGS);
 extern Datum json_populate_record(PG_FUNCTION_ARGS);
 extern Datum json_populate_recordset(PG_FUNCTION_ARGS);
+extern Datum json_to_record(PG_FUNCTION_ARGS);
+extern Datum json_to_recordset(PG_FUNCTION_ARGS);
 
 #endif   /* JSON_H */
diff --git a/src/test/regress/expected/json.out b/src/test/regress/expected/json.out
index a8c45b3..4e2dae8 100644
--- a/src/test/regress/expected/json.out
+++ b/src/test/regress/expected/json.out
@@ -991,3 +991,129 @@ select value, json_typeof(value)
                       | 
 (11 rows)
 
+-- json_build_array, json_build_object, json_object_agg
+SELECT json_build_array('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
+                           json_build_array                            
+-----------------------------------------------------------------------
+ ["a", 1, "b", 1.2, "c", true, "d", null, "e", {"x": 3, "y": [1,2,3]}]
+(1 row)
+
+SELECT json_build_object('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
+                             json_build_object                              
+----------------------------------------------------------------------------
+ {"a" : 1, "b" : 1.2, "c" : true, "d" : null, "e" : {"x": 3, "y": [1,2,3]}}
+(1 row)
+
+SELECT json_build_object( 
+       'a', json_build_object('b',false,'c',99), 
+       'd', json_build_object('e',array[9,8,7]::int[],
+           'f', (select row_to_json(r) from ( select relkind, oid::regclass as name from pg_class where relname = 'pg_class') r)));
+                                        json_build_object                                        
+-------------------------------------------------------------------------------------------------
+ {"a" : {"b" : false, "c" : 99}, "d" : {"e" : [9,8,7], "f" : {"relkind":"r","name":"pg_class"}}}
+(1 row)
+
+-- empty objects/arrays
+SELECT json_build_array();
+ json_build_array 
+------------------
+ []
+(1 row)
+
+SELECT json_build_object();
+ json_build_object 
+-------------------
+ {}
+(1 row)
+
+-- make sure keys are quoted
+SELECT json_build_object(1,2);
+ json_build_object 
+-------------------
+ {"1" : 2}
+(1 row)
+
+-- keys must be scalar and not null
+SELECT json_build_object(null,2);
+ERROR:  arg 1: key cannot be null
+SELECT json_build_object(r,2) FROM (SELECT 1 AS a, 2 AS b) r;
+ERROR:  key value must be scalar, not array, composite or json
+SELECT json_build_object(json '{"a":1,"b":2}', 3);
+ERROR:  key value must be scalar, not array, composite or json
+SELECT json_build_object('{1,2,3}'::int[], 3);
+ERROR:  key value must be scalar, not array, composite or json
+CREATE TEMP TABLE foo (serial_num int, name text, type text);
+INSERT INTO foo VALUES (847001,'t15','GE1043');
+INSERT INTO foo VALUES (847002,'t16','GE1043');
+INSERT INTO foo VALUES (847003,'sub-alpha','GESS90');
+SELECT json_build_object('turbines',json_object_agg(serial_num,json_build_object('name',name,'type',type)))
+FROM foo;
+                                                                            json_build_object                                                                            
+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+ {"turbines" : { "847001" : {"name" : "t15", "type" : "GE1043"}, "847002" : {"name" : "t16", "type" : "GE1043"}, "847003" : {"name" : "sub-alpha", "type" : "GESS90"} }}
+(1 row)
+
+-- json_object
+-- one dimension
+SELECT json_object('{a,1,b,2,3,NULL,"d e f","a b c"}');
+                      json_object                      
+-------------------------------------------------------
+ {"a" : "1", "b" : "2", "3" : null, "d e f" : "a b c"}
+(1 row)
+
+-- same but with two dimensions
+SELECT json_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
+                      json_object                      
+-------------------------------------------------------
+ {"a" : "1", "b" : "2", "3" : null, "d e f" : "a b c"}
+(1 row)
+
+-- odd number error
+SELECT json_object('{a,b,c}');
+ERROR:  array must have even number of elements
+-- one column error
+SELECT json_object('{{a},{b}}');
+ERROR:  array must have two columns
+-- too many columns error
+SELECT json_object('{{a,b,c},{b,c,d}}');
+ERROR:  array must have two columns
+-- too many dimensions error
+SELECT json_object('{{{a,b},{c,d}},{{b,c},{d,e}}}');
+ERROR:  wrong number of array subscripts
+--two argument form of json_object
+select json_object('{a,b,c,"d e f"}','{1,2,3,"a b c"}');
+                     json_object                      
+------------------------------------------------------
+ {"a" : "1", "b" : "2", "c" : "3", "d e f" : "a b c"}
+(1 row)
+
+-- too many dimensions
+SELECT json_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}', '{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
+ERROR:  wrong number of array subscripts
+-- mismatched dimensions
+select json_object('{a,b,c,"d e f",g}','{1,2,3,"a b c"}');
+ERROR:  mismatched array dimensions
+select json_object('{a,b,c,"d e f"}','{1,2,3,"a b c",g}');
+ERROR:  mismatched array dimensions
+-- null key error
+select json_object('{a,b,NULL,"d e f"}','{1,2,3,"a b c"}');
+ERROR:  null value not allowed for object key
+-- empty key error
+select json_object('{a,b,"","d e f"}','{1,2,3,"a b c"}');
+ERROR:  empty value not allowed for object key
+-- json_to_record and json_to_recordset
+select * from json_to_record('{"a":1,"b":"foo","c":"bar"}',true) 
+    as x(a int, b text, d text);
+ a |  b  | d 
+---+-----+---
+ 1 | foo | 
+(1 row)
+
+select * from json_to_recordset('[{"a":1,"b":"foo","d":false},{"a":2,"b":"bar","c":true}]',false) 
+    as x(a int, b text, c boolean);
+ a |  b  | c 
+---+-----+---
+ 1 | foo | 
+ 2 | bar | t
+(2 rows)
+
diff --git a/src/test/regress/sql/json.sql b/src/test/regress/sql/json.sql
index cd7782c..742a0be 100644
--- a/src/test/regress/sql/json.sql
+++ b/src/test/regress/sql/json.sql
@@ -325,3 +325,90 @@ select value, json_typeof(value)
                (json '{}'),
                (NULL::json))
       as data(value);
+
+-- json_build_array, json_build_object, json_object_agg
+
+SELECT json_build_array('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
+
+SELECT json_build_object('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
+
+SELECT json_build_object( 
+       'a', json_build_object('b',false,'c',99), 
+       'd', json_build_object('e',array[9,8,7]::int[],
+           'f', (select row_to_json(r) from ( select relkind, oid::regclass as name from pg_class where relname = 'pg_class') r)));
+
+
+-- empty objects/arrays
+SELECT json_build_array();
+
+SELECT json_build_object();
+
+-- make sure keys are quoted
+SELECT json_build_object(1,2);
+
+-- keys must be scalar and not null
+SELECT json_build_object(null,2);
+
+SELECT json_build_object(r,2) FROM (SELECT 1 AS a, 2 AS b) r;
+
+SELECT json_build_object(json '{"a":1,"b":2}', 3);
+
+SELECT json_build_object('{1,2,3}'::int[], 3);
+
+CREATE TEMP TABLE foo (serial_num int, name text, type text);
+INSERT INTO foo VALUES (847001,'t15','GE1043');
+INSERT INTO foo VALUES (847002,'t16','GE1043');
+INSERT INTO foo VALUES (847003,'sub-alpha','GESS90');
+
+SELECT json_build_object('turbines',json_object_agg(serial_num,json_build_object('name',name,'type',type)))
+FROM foo;
+
+-- json_object
+
+-- one dimension
+SELECT json_object('{a,1,b,2,3,NULL,"d e f","a b c"}');
+
+-- same but with two dimensions
+SELECT json_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
+
+-- odd number error
+SELECT json_object('{a,b,c}');
+
+-- one column error
+SELECT json_object('{{a},{b}}');
+
+-- too many columns error
+SELECT json_object('{{a,b,c},{b,c,d}}');
+
+-- too many dimensions error
+SELECT json_object('{{{a,b},{c,d}},{{b,c},{d,e}}}');
+
+--two argument form of json_object
+
+select json_object('{a,b,c,"d e f"}','{1,2,3,"a b c"}');
+
+-- too many dimensions
+SELECT json_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}', '{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
+
+-- mismatched dimensions
+
+select json_object('{a,b,c,"d e f",g}','{1,2,3,"a b c"}');
+
+select json_object('{a,b,c,"d e f"}','{1,2,3,"a b c",g}');
+
+-- null key error
+
+select json_object('{a,b,NULL,"d e f"}','{1,2,3,"a b c"}');
+
+-- empty key error
+
+select json_object('{a,b,"","d e f"}','{1,2,3,"a b c"}');
+
+
+-- json_to_record and json_to_recordset
+
+select * from json_to_record('{"a":1,"b":"foo","c":"bar"}',true) 
+    as x(a int, b text, d text);
+
+select * from json_to_recordset('[{"a":1,"b":"foo","d":false},{"a":2,"b":"bar","c":true}]',false) 
+    as x(a int, b text, c boolean);
-- 
Sent via pgsql-hackers mailing list (pgsql-hackers@postgresql.org)
To make changes to your subscription:
http://www.postgresql.org/mailpref/pgsql-hackers

Reply via email to