diff --git a/contrib/pg_stat_statements/pg_stat_statements.c b/contrib/pg_stat_statements/pg_stat_statements.c index 3de8333be2..201645b53a 100644 --- a/contrib/pg_stat_statements/pg_stat_statements.c +++ b/contrib/pg_stat_statements/pg_stat_statements.c @@ -2805,6 +2805,17 @@ JumbleExpr(pgssJumbleState *jstate, Node *node) JumbleExpr(jstate, (Node *) conf->exclRelTlist); } break; + case T_JsonExpr: + { + JsonExpr *jexpr = (JsonExpr *) node; + + APP_JUMB(jexpr->op); + JumbleExpr(jstate, jexpr->raw_expr); + JumbleExpr(jstate, (Node *) jexpr->passing.values); + JumbleExpr(jstate, jexpr->on_empty.default_expr); + JumbleExpr(jstate, jexpr->on_error.default_expr); + } + break; case T_List: foreach(temp, (List *) node) { @@ -2876,9 +2887,11 @@ JumbleExpr(pgssJumbleState *jstate, Node *node) { TableFunc *tablefunc = (TableFunc *) node; + APP_JUMB(tablefunc->functype); JumbleExpr(jstate, tablefunc->docexpr); JumbleExpr(jstate, tablefunc->rowexpr); JumbleExpr(jstate, (Node *) tablefunc->colexprs); + JumbleExpr(jstate, (Node *) tablefunc->colvalexprs); } break; case T_TableSampleClause: diff --git a/src/backend/Makefile b/src/backend/Makefile index aab676dbbd..acdba65a4f 100644 --- a/src/backend/Makefile +++ b/src/backend/Makefile @@ -139,6 +139,9 @@ storage/lmgr/lwlocknames.h: storage/lmgr/generate-lwlocknames.pl storage/lmgr/lw utils/errcodes.h: utils/generate-errcodes.pl utils/errcodes.txt $(MAKE) -C utils errcodes.h +utils/adt/jsonpath_gram.h: utils/adt/jsonpath_gram.y + $(MAKE) -C utils/adt jsonpath_gram.h + # see explanation in parser/Makefile utils/fmgrprotos.h: utils/fmgroids.h ; @@ -169,7 +172,7 @@ submake-schemapg: .PHONY: generated-headers -generated-headers: $(top_builddir)/src/include/parser/gram.h $(top_builddir)/src/include/catalog/schemapg.h $(top_builddir)/src/include/storage/lwlocknames.h $(top_builddir)/src/include/utils/errcodes.h $(top_builddir)/src/include/utils/fmgroids.h $(top_builddir)/src/include/utils/fmgrprotos.h $(top_builddir)/src/include/utils/probes.h +generated-headers: $(top_builddir)/src/include/parser/gram.h $(top_builddir)/src/include/catalog/schemapg.h $(top_builddir)/src/include/storage/lwlocknames.h $(top_builddir)/src/include/utils/errcodes.h $(top_builddir)/src/include/utils/fmgroids.h $(top_builddir)/src/include/utils/fmgrprotos.h $(top_builddir)/src/include/utils/probes.h $(top_builddir)/src/include/utils/jsonpath_gram.h $(top_builddir)/src/include/parser/gram.h: parser/gram.h prereqdir=`cd '$(dir $<)' >/dev/null && pwd` && \ @@ -186,6 +189,11 @@ $(top_builddir)/src/include/storage/lwlocknames.h: storage/lmgr/lwlocknames.h cd '$(dir $@)' && rm -f $(notdir $@) && \ $(LN_S) "$$prereqdir/$(notdir $<)" . +$(top_builddir)/src/include/utils/jsonpath_gram.h: utils/adt/jsonpath_gram.h + prereqdir=`cd '$(dir $<)' >/dev/null && pwd` && \ + cd '$(dir $@)' && rm -f $(notdir $@) && \ + $(LN_S) "$$prereqdir/$(notdir $<)" . 
+ $(top_builddir)/src/include/utils/errcodes.h: utils/errcodes.h prereqdir=`cd '$(dir $<)' >/dev/null && pwd` && \ cd '$(dir $@)' && rm -f $(notdir $@) && \ @@ -220,6 +228,7 @@ distprep: $(MAKE) -C replication repl_gram.c repl_scanner.c syncrep_gram.c syncrep_scanner.c $(MAKE) -C storage/lmgr lwlocknames.h $(MAKE) -C utils fmgrtab.c fmgroids.h fmgrprotos.h errcodes.h + $(MAKE) -C utils/adt jsonpath_gram.c jsonpath_gram.h jsonpath_scan.c $(MAKE) -C utils/misc guc-file.c $(MAKE) -C utils/sort qsort_tuple.c @@ -308,6 +317,7 @@ endif clean: rm -f $(LOCALOBJS) postgres$(X) $(POSTGRES_IMP) \ $(top_builddir)/src/include/parser/gram.h \ + $(top_builddir)/src/include/utils/jsonpath_gram.h \ $(top_builddir)/src/include/catalog/schemapg.h \ $(top_builddir)/src/include/storage/lwlocknames.h \ $(top_builddir)/src/include/utils/fmgroids.h \ @@ -344,6 +354,7 @@ maintainer-clean: distclean utils/fmgrtab.c \ utils/errcodes.h \ utils/misc/guc-file.c \ + utils/adt/jsonpath_gram.h \ utils/sort/qsort_tuple.c diff --git a/src/backend/commands/explain.c b/src/backend/commands/explain.c index 8f7062cd6e..3559c5510a 100644 --- a/src/backend/commands/explain.c +++ b/src/backend/commands/explain.c @@ -2752,7 +2752,9 @@ ExplainTargetRel(Plan *plan, Index rti, ExplainState *es) break; case T_TableFuncScan: Assert(rte->rtekind == RTE_TABLEFUNC); - objectname = "xmltable"; + objectname = rte->tablefunc ? + rte->tablefunc->functype == TFT_XMLTABLE ? + "xmltable" : "json_table" : NULL; objecttag = "Table Function Name"; break; case T_ValuesScan: diff --git a/src/backend/executor/execExpr.c b/src/backend/executor/execExpr.c index e0839616e1..f0f8560dd8 100644 --- a/src/backend/executor/execExpr.c +++ b/src/backend/executor/execExpr.c @@ -43,6 +43,7 @@ #include "optimizer/planner.h" #include "pgstat.h" #include "utils/builtins.h" +#include "utils/jsonpath.h" #include "utils/lsyscache.h" #include "utils/typcache.h" @@ -2033,6 +2034,79 @@ ExecInitExprRec(Expr *node, PlanState *parent, ExprState *state, break; } + case T_JsonExpr: + { + JsonExpr *jexpr = castNode(JsonExpr, node); + ListCell *argexprlc; + ListCell *argnamelc; + + scratch.opcode = EEOP_JSONEXPR; + scratch.d.jsonexpr.jsexpr = jexpr; + + scratch.d.jsonexpr.raw_expr = + palloc(sizeof(*scratch.d.jsonexpr.raw_expr)); + + ExecInitExprRec((Expr *) jexpr->raw_expr, parent, state, + &scratch.d.jsonexpr.raw_expr->value, + &scratch.d.jsonexpr.raw_expr->isnull); + + scratch.d.jsonexpr.formatted_expr = + ExecInitExpr((Expr *) jexpr->formatted_expr, parent); + + scratch.d.jsonexpr.result_expr = + ExecInitExpr((Expr *) jexpr->result_expr, parent); + + scratch.d.jsonexpr.default_on_empty = + ExecInitExpr((Expr *) jexpr->on_empty.default_expr, parent); + + scratch.d.jsonexpr.default_on_error = + ExecInitExpr((Expr *) jexpr->on_error.default_expr, parent); + + if (jexpr->coerce_via_io || jexpr->omit_quotes) + { + Oid typinput; + + /* lookup the result type's input function */ + getTypeInputInfo(jexpr->returning.typid, &typinput, + &scratch.d.jsonexpr.input.typioparam); + fmgr_info(typinput, &scratch.d.jsonexpr.input.func); + } + + scratch.d.jsonexpr.args = NIL; + + forboth(argexprlc, jexpr->passing.values, + argnamelc, jexpr->passing.names) + { + Expr *argexpr = (Expr *) lfirst(argexprlc); + Value *argname = (Value *) lfirst(argnamelc); + JsonPathVariableEvalContext *var = palloc(sizeof(*var)); + + var->var.varName = cstring_to_text(argname->val.str); + var->var.typid = exprType((Node *) argexpr); + var->var.typmod = exprTypmod((Node *) argexpr); + var->var.cb = EvalJsonPathVar; 
+					var->var.cb_arg = var;
+					var->estate = ExecInitExpr(argexpr, parent);
+					var->econtext = NULL;
+					var->mcxt = NULL;
+					var->evaluated = false;
+					var->value = (Datum) 0;
+					var->isnull = true;
+
+					scratch.d.jsonexpr.args =
+						lappend(scratch.d.jsonexpr.args, var);
+				}
+
+				scratch.d.jsonexpr.cache = NULL;
+
+				memset(&scratch.d.jsonexpr.scalar, 0,
+					   sizeof(scratch.d.jsonexpr.scalar));
+
+				ExprEvalPushStep(state, &scratch);
+			}
+			break;
+
+
 		default:
 			elog(ERROR, "unrecognized node type: %d",
 				 (int) nodeTag(node));
diff --git a/src/backend/executor/execExprInterp.c b/src/backend/executor/execExprInterp.c
index a0f537b706..9fa9923076 100644
--- a/src/backend/executor/execExprInterp.c
+++ b/src/backend/executor/execExprInterp.c
@@ -63,11 +63,16 @@
 #include "executor/nodeSubplan.h"
 #include "funcapi.h"
 #include "miscadmin.h"
+#include "nodes/makefuncs.h"
 #include "nodes/nodeFuncs.h"
 #include "parser/parsetree.h"
+#include "parser/parse_expr.h"
 #include "pgstat.h"
 #include "utils/builtins.h"
 #include "utils/date.h"
+#include "utils/jsonapi.h"
+#include "utils/jsonb.h"
+#include "utils/jsonpath.h"
 #include "utils/lsyscache.h"
 #include "utils/timestamp.h"
 #include "utils/typcache.h"
@@ -367,6 +372,7 @@ ExecInterpExpr(ExprState *state, ExprContext *econtext, bool *isnull)
 		&&CASE_EEOP_WINDOW_FUNC,
 		&&CASE_EEOP_SUBPLAN,
 		&&CASE_EEOP_ALTERNATIVE_SUBPLAN,
+		&&CASE_EEOP_JSONEXPR,
 		&&CASE_EEOP_LAST
 	};
 
@@ -1535,6 +1541,13 @@ ExecInterpExpr(ExprState *state, ExprContext *econtext, bool *isnull)
 			EEO_NEXT();
 		}
 
+		EEO_CASE(EEOP_JSONEXPR)
+		{
+			/* too complex for an inline implementation */
+			ExecEvalJson(state, op, econtext);
+			EEO_NEXT();
+		}
+
 		EEO_CASE(EEOP_LAST)
 		{
 			/* unreachable */
@@ -3628,3 +3641,458 @@ ExecEvalWholeRowVar(ExprState *state, ExprEvalStep *op, ExprContext *econtext)
 	*op->resvalue = PointerGetDatum(dtuple);
 	*op->resnull = false;
 }
+
+/*
+ * Evaluate an expression, substituting the specified value into its CaseTestExpr nodes.
+ */
+Datum
+ExecEvalExprPassingCaseValue(ExprState *estate, ExprContext *econtext,
+							 bool *isnull,
+							 Datum caseval_datum, bool caseval_isnull)
+{
+	Datum		res;
+	Datum		save_datum = econtext->caseValue_datum;
+	bool		save_isNull = econtext->caseValue_isNull;
+
+	econtext->caseValue_datum = caseval_datum;
+	econtext->caseValue_isNull = caseval_isnull;
+
+	PG_TRY();
+	{
+		res = ExecEvalExpr(estate, econtext, isnull);
+	}
+	PG_CATCH();
+	{
+		econtext->caseValue_datum = save_datum;
+		econtext->caseValue_isNull = save_isNull;
+
+		PG_RE_THROW();
+	}
+	PG_END_TRY();
+
+	econtext->caseValue_datum = save_datum;
+	econtext->caseValue_isNull = save_isNull;
+
+	return res;
+}
+
+/*
+ * Evaluate a JSON error/empty behavior result.
+ */
+static Datum
+ExecEvalJsonBehavior(ExprContext *econtext, JsonBehavior *behavior,
+					 ExprState *default_estate, bool is_jsonb, bool *is_null)
+{
+	*is_null = false;
+
+	switch (behavior->btype)
+	{
+		case JSON_BEHAVIOR_EMPTY_ARRAY:
+			return is_jsonb ?
+				JsonbPGetDatum(JsonbMakeEmptyArray()) :
+				PointerGetDatum(cstring_to_text("[]"));
+
+		case JSON_BEHAVIOR_EMPTY_OBJECT:
+			return is_jsonb ?
+				JsonbPGetDatum(JsonbMakeEmptyObject()) :
+				PointerGetDatum(cstring_to_text("{}"));
+
+		case JSON_BEHAVIOR_TRUE:
+			return BoolGetDatum(true);
+
+		case JSON_BEHAVIOR_FALSE:
+			return BoolGetDatum(false);
+
+		case JSON_BEHAVIOR_NULL:
+		case JSON_BEHAVIOR_UNKNOWN:
+		case JSON_BEHAVIOR_EMPTY:
+			*is_null = true;
+			return (Datum) 0;
+
+		case JSON_BEHAVIOR_DEFAULT:
+			return ExecEvalExpr(default_estate, econtext, is_null);
+
+		default:
+			elog(ERROR, "unrecognized SQL/JSON behavior %d", behavior->btype);
+			return (Datum) 0;
+	}
+}
+
+/*
+ * Evaluate a coercion of a JSON item to the target type.
+ */
+static Datum
+ExecEvalJsonExprCoercion(ExprEvalStep *op, ExprContext *econtext,
+						 Datum res, bool *isNull, bool isJsonb)
+{
+	JsonExpr   *jexpr = op->d.jsonexpr.jsexpr;
+	Jsonb	   *jb = *isNull || !isJsonb ? NULL : DatumGetJsonbP(res);
+	Json	   *js = *isNull || isJsonb ? NULL : DatumGetJsonP(res);
+
+	if (jexpr->coerce_via_io ||
+		(jexpr->omit_quotes && !*isNull &&
+		 (isJsonb ? JB_ROOT_IS_SCALAR(jb) : JsonContainerIsScalar(&js->root))))
+	{
+		/* strip quotes and call typinput function */
+		char	   *str = *isNull ? NULL :
+			(isJsonb ? JsonbUnquote(jb) : JsonUnquote(js));
+
+		res = InputFunctionCall(&op->d.jsonexpr.input.func, str,
+								op->d.jsonexpr.input.typioparam,
+								jexpr->returning.typmod);
+	}
+	else if (op->d.jsonexpr.result_expr)
+		res = ExecEvalExprPassingCaseValue(op->d.jsonexpr.result_expr, econtext,
+										   isNull, res, *isNull);
+	else if (jexpr->coerce_via_populate)
+		res = json_populate_type(res, isJsonb ? JSONBOID : JSONOID,
+								 jexpr->returning.typid,
+								 jexpr->returning.typmod,
+								 &op->d.jsonexpr.cache,
+								 econtext->ecxt_per_query_memory,
+								 isNull);
+	/* else no coercion, simply return item */
+
+	return res;
+}
+
+/*
+ * Evaluate a JSON path variable, caching the computed value.
+ */
+Datum
+EvalJsonPathVar(void *cxt, bool *isnull)
+{
+	JsonPathVariableEvalContext *ecxt = cxt;
+
+	if (!ecxt->evaluated)
+	{
+		MemoryContext oldcxt = ecxt->mcxt ?
+			MemoryContextSwitchTo(ecxt->mcxt) : NULL;
+
+		ecxt->value = ExecEvalExpr(ecxt->estate, ecxt->econtext, &ecxt->isnull);
+		ecxt->evaluated = true;
+
+		if (oldcxt)
+			MemoryContextSwitchTo(oldcxt);
+	}
+
+	*isnull = ecxt->isnull;
+	return ecxt->value;
+}
+
+/*
+ * Prepare SQL/JSON item coercion to the output type. Returns a datum of the
+ * corresponding SQL type and a pointer to the coercion state.
+ */
+Datum
+ExecPrepareJsonItemCoercion(JsonbValue *item, bool is_jsonb,
+							JsonReturning *returning,
+							struct JsonScalarCoercions *coercions,
+							MemoryContext mcxt,
+							struct JsonScalarCoercionExprState **pcestate)
+{
+	struct JsonScalarCoercionExprState *cestate;
+	Datum		res;
+	Oid			typid;
+	JsonbValue	jbvbuf;
+
+	if (item->type == jbvBinary)
+	{
+		if (JsonContainerIsScalar(item->val.binary.data))
+			item = is_jsonb
+				?
JsonbExtractScalar(item->val.binary.data, &jbvbuf) + : JsonExtractScalar((JsonContainer *) item->val.binary.data, + &jbvbuf); + } + + /* get coercion state reference and datum of the corresponding SQL type */ + switch (item->type) + { + case jbvNull: + cestate = &coercions->null; + typid = UNKNOWNOID; + res = (Datum) 0; + break; + + case jbvString: + cestate = &coercions->string; + typid = TEXTOID; + res = PointerGetDatum( + cstring_to_text_with_len(item->val.string.val, + item->val.string.len)); + break; + + case jbvNumeric: + cestate = &coercions->numeric; + typid = NUMERICOID; + res = NumericGetDatum(item->val.numeric); + break; + + case jbvBool: + cestate = &coercions->boolean; + typid = BOOLOID; + res = BoolGetDatum(item->val.boolean); + break; + + case jbvDatetime: + res = item->val.datetime.value; + typid = item->val.datetime.typid; + switch (item->val.datetime.typid) + { + case DATEOID: + cestate = &coercions->date; + break; + case TIMEOID: + cestate = &coercions->time; + break; + case TIMETZOID: + cestate = &coercions->timetz; + break; + case TIMESTAMPOID: + cestate = &coercions->timestamp; + break; + case TIMESTAMPTZOID: + cestate = &coercions->timestamptz; + break; + default: + elog(ERROR, "unexpected jsonb datetime type oid %d", + item->val.datetime.typid); + return (Datum) 0; + } + break; + + case jbvArray: + case jbvObject: + case jbvBinary: + cestate = &coercions->composite; + if (is_jsonb) + { + Jsonb *jb = JsonbValueToJsonb(item); + + res = JsonbPGetDatum(jb); + typid = JSONBOID; + } + else + { + Json *js = JsonbValueToJson(item); + + res = JsonPGetDatum(js); + typid = JSONOID; + } + break; + + default: + elog(ERROR, "unexpected jsonb value type %d", item->type); + return (Datum) 0; + } + + /* on-demand initialization of coercion state */ + if (!cestate->initialized) + { + MemoryContext oldCxt = MemoryContextSwitchTo(mcxt); + Node *expr; + + if (item->type == jbvNull) + { + expr = (Node *) makeNullConst(UNKNOWNOID, -1, InvalidOid); + } + else + { + CaseTestExpr *placeholder = makeNode(CaseTestExpr); + + placeholder->typeId = typid; + placeholder->typeMod = -1; + placeholder->collation = InvalidOid; + + expr = (Node *) placeholder; + } + + cestate->result_expr = + coerceJsonExpr(NULL, expr, returning, + &cestate->coerce_via_io, + &cestate->coerce_via_populate); + + cestate->result_expr_state = + ExecInitExpr((Expr *) cestate->result_expr, NULL); + + MemoryContextSwitchTo(oldCxt); + + cestate->initialized = true; + } + + *pcestate = cestate; + + return res; +} + +/* ---------------------------------------------------------------- + * ExecEvalJson + * ---------------------------------------------------------------- + */ +void +ExecEvalJson(ExprState *state, ExprEvalStep *op, ExprContext *econtext) +{ + JsonExpr *jexpr = op->d.jsonexpr.jsexpr; + Datum item; + Datum res = (Datum) 0; + JsonPath *path; + ListCell *lc; + Oid formattedType = exprType(jexpr->formatted_expr ? 
+										 jexpr->formatted_expr :
+										 jexpr->raw_expr);
+	bool		isjsonb = formattedType == JSONBOID;
+	MemoryContext mcxt = CurrentMemoryContext;
+
+	*op->resnull = true;		/* until we get a result */
+	*op->resvalue = (Datum) 0;
+
+	if (op->d.jsonexpr.raw_expr->isnull)
+	{
+		/* execute domain checks for NULLs */
+		(void) ExecEvalJsonExprCoercion(op, econtext, res, op->resnull, isjsonb);
+		return;
+	}
+
+	item = op->d.jsonexpr.raw_expr->value;
+
+	path = DatumGetJsonPathP(jexpr->path_spec->constvalue);
+
+	/* reset JSON path variable contexts */
+	foreach(lc, op->d.jsonexpr.args)
+	{
+		JsonPathVariableEvalContext *var = lfirst(lc);
+
+		var->econtext = econtext;
+		var->evaluated = false;
+	}
+
+	PG_TRY();
+	{
+		bool		empty = false;
+
+		if (op->d.jsonexpr.formatted_expr)
+		{
+			bool		isnull;
+
+			item = ExecEvalExprPassingCaseValue(op->d.jsonexpr.formatted_expr,
+												econtext, &isnull, item, false);
+			if (isnull)
+			{
+				/* execute domain checks for NULLs */
+				(void) ExecEvalJsonExprCoercion(op, econtext, res, op->resnull,
+												isjsonb);
+				return;
+			}
+		}
+
+		switch (jexpr->op)
+		{
+			case IS_JSON_QUERY:
+				res = (isjsonb ? JsonbPathQuery : JsonPathQuery)
+					(item, path, jexpr->wrapper, &empty, op->d.jsonexpr.args);
+				*op->resnull = !DatumGetPointer(res);
+				break;
+
+			case IS_JSON_VALUE:
+				{
+					JsonbValue *jbv = (isjsonb ? JsonbPathValue : JsonPathValue)
+						(item, path, &empty, op->d.jsonexpr.args);
+					struct JsonScalarCoercionExprState *cestate;
+
+					if (!jbv)
+						break;
+
+					*op->resnull = false;
+
+					res = ExecPrepareJsonItemCoercion(jbv, isjsonb,
+													  &op->d.jsonexpr.jsexpr->returning,
+													  &op->d.jsonexpr.scalar,
+													  econtext->ecxt_per_query_memory,
+													  &cestate);
+
+					/* coerce item datum to the output type */
+					if (cestate->coerce_via_io ||
+						cestate->coerce_via_populate ||	/* ignored for scalar jsons */
+						jexpr->returning.typid == JSONOID ||
+						jexpr->returning.typid == JSONBOID)
+					{
+						/* use coercion from json[b] to the output type */
+						res = isjsonb
+							? JsonbPGetDatum(JsonbValueToJsonb(jbv))
+							: JsonPGetDatum(JsonbValueToJson(jbv));
+						res = ExecEvalJsonExprCoercion(op, econtext, res,
+													   op->resnull, isjsonb);
+					}
+					else if (cestate->result_expr_state)
+					{
+						res = ExecEvalExprPassingCaseValue(cestate->result_expr_state,
+														   econtext,
+														   op->resnull,
+														   res, false);
+					}
+					/* else no coercion */
+				}
+				break;
+
+			case IS_JSON_EXISTS:
+				res = BoolGetDatum((isjsonb ? JsonbPathExists : JsonPathExists)
+								   (item, path, op->d.jsonexpr.args));
+				*op->resnull = false;
+				break;
+
+			case IS_JSON_TABLE:
+				res = item;
+				*op->resnull = false;
+				break;
+
+			default:
+				elog(ERROR, "unrecognized SQL/JSON expression op %d",
+					 jexpr->op);
+				return;
+		}
+
+		if (empty)
+		{
+			if (jexpr->on_empty.btype == JSON_BEHAVIOR_ERROR)
+				ereport(ERROR,
+						(errcode(ERRCODE_NO_JSON_ITEM),
+						 errmsg("no SQL/JSON item")));
+
+			/* execute ON EMPTY behavior */
+			res = ExecEvalJsonBehavior(econtext, &jexpr->on_empty,
+									   op->d.jsonexpr.default_on_empty,
+									   isjsonb, op->resnull);
+		}
+
+		if (jexpr->op != IS_JSON_EXISTS &&
+			jexpr->op != IS_JSON_TABLE &&
+			(!empty ?
jexpr->op != IS_JSON_VALUE : + /* result is already coerced in DEFAULT behavior case */ + jexpr->on_empty.btype != JSON_BEHAVIOR_DEFAULT)) + res = ExecEvalJsonExprCoercion(op, econtext, res, op->resnull, + isjsonb); + } + PG_CATCH(); + { + if (jexpr->on_error.btype == JSON_BEHAVIOR_ERROR || + ERRCODE_TO_CATEGORY(geterrcode()) != ERRCODE_DATA_EXCEPTION) + PG_RE_THROW(); + + FlushErrorState(); + MemoryContextSwitchTo(mcxt); + + /* execute ON ERROR behavior */ + res = ExecEvalJsonBehavior(econtext, &jexpr->on_error, + op->d.jsonexpr.default_on_error, + isjsonb, op->resnull); + + if (jexpr->op != IS_JSON_EXISTS && + /* result is already coerced in DEFAULT behavior case */ + jexpr->on_error.btype != JSON_BEHAVIOR_DEFAULT) + res = ExecEvalJsonExprCoercion(op, econtext, res, op->resnull, + isjsonb); + } + PG_END_TRY(); + + *op->resvalue = res; +} diff --git a/src/backend/executor/nodeTableFuncscan.c b/src/backend/executor/nodeTableFuncscan.c index 165fae8c83..c657143575 100644 --- a/src/backend/executor/nodeTableFuncscan.c +++ b/src/backend/executor/nodeTableFuncscan.c @@ -23,11 +23,14 @@ #include "postgres.h" #include "nodes/execnodes.h" +#include "catalog/pg_type.h" #include "executor/executor.h" #include "executor/nodeTableFuncscan.h" #include "executor/tablefunc.h" #include "miscadmin.h" +#include "nodes/nodeFuncs.h" #include "utils/builtins.h" +#include "utils/jsonpath.h" #include "utils/lsyscache.h" #include "utils/memutils.h" #include "utils/xml.h" @@ -167,8 +170,10 @@ ExecInitTableFuncScan(TableFuncScan *node, EState *estate, int eflags) ExecAssignResultTypeFromTL(&scanstate->ss.ps); ExecAssignScanProjectionInfo(&scanstate->ss); - /* Only XMLTABLE is supported currently */ - scanstate->routine = &XmlTableRoutine; + /* Only XMLTABLE and JSON_TABLE are supported currently */ + scanstate->routine = + tf->functype == TFT_XMLTABLE ? &XmlTableRoutine : + exprType(tf->docexpr) == JSONBOID ? &JsonbTableRoutine : &JsonTableRoutine; scanstate->perValueCxt = AllocSetContextCreate(CurrentMemoryContext, @@ -371,14 +376,17 @@ tfuncInitialize(TableFuncScanState *tstate, ExprContext *econtext, Datum doc) routine->SetNamespace(tstate, ns_name, ns_uri); } - /* Install the row filter expression into the table builder context */ - value = ExecEvalExpr(tstate->rowexpr, econtext, &isnull); - if (isnull) - ereport(ERROR, - (errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED), - errmsg("row filter expression must not be null"))); + if (routine->SetRowFilter) + { + /* Install the row filter expression into the table builder context */ + value = ExecEvalExpr(tstate->rowexpr, econtext, &isnull); + if (isnull) + ereport(ERROR, + (errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED), + errmsg("row filter expression must not be null"))); - routine->SetRowFilter(tstate, TextDatumGetCString(value)); + routine->SetRowFilter(tstate, TextDatumGetCString(value)); + } /* * Install the column filter expressions into the table builder context. 
diff --git a/src/backend/lib/stringinfo.c b/src/backend/lib/stringinfo.c
index cb2026c3b2..060a19860d 100644
--- a/src/backend/lib/stringinfo.c
+++ b/src/backend/lib/stringinfo.c
@@ -306,3 +306,24 @@ enlargeStringInfo(StringInfo str, int needed)
 
 	str->maxlen = newlen;
 }
+
+/*
+ * alignStringInfoInt - align StringInfo to int by adding
+ * zero padding bytes
+ */
+void
+alignStringInfoInt(StringInfo buf)
+{
+	switch (INTALIGN(buf->len) - buf->len)
+	{
+		case 3:
+			appendStringInfoCharMacro(buf, 0);
+		case 2:
+			appendStringInfoCharMacro(buf, 0);
+		case 1:
+			appendStringInfoCharMacro(buf, 0);
+		default:
+			break;
+	}
+}
+
diff --git a/src/backend/nodes/copyfuncs.c b/src/backend/nodes/copyfuncs.c
index c1a83ca909..710679fbc3 100644
--- a/src/backend/nodes/copyfuncs.c
+++ b/src/backend/nodes/copyfuncs.c
@@ -1224,6 +1224,7 @@ _copyTableFunc(const TableFunc *from)
 {
 	TableFunc  *newnode = makeNode(TableFunc);
 
+	COPY_SCALAR_FIELD(functype);
 	COPY_NODE_FIELD(ns_uris);
 	COPY_NODE_FIELD(ns_names);
 	COPY_NODE_FIELD(docexpr);
@@ -1234,7 +1235,9 @@ _copyTableFunc(const TableFunc *from)
 	COPY_NODE_FIELD(colcollations);
 	COPY_NODE_FIELD(colexprs);
 	COPY_NODE_FIELD(coldefexprs);
+	COPY_NODE_FIELD(colvalexprs);
 	COPY_BITMAPSET_FIELD(notnulls);
+	COPY_NODE_FIELD(plan);
 	COPY_SCALAR_FIELD(ordinalitycol);
 	COPY_LOCATION_FIELD(location);
 
@@ -2122,6 +2125,341 @@ _copyOnConflictExpr(const OnConflictExpr *from)
 	return newnode;
 }
 
+/*
+ * _copyJsonKeyValue
+ */
+static JsonKeyValue *
+_copyJsonKeyValue(const JsonKeyValue *from)
+{
+	JsonKeyValue *newnode = makeNode(JsonKeyValue);
+
+	COPY_NODE_FIELD(key);
+	COPY_NODE_FIELD(value);
+
+	return newnode;
+}
+
+/*
+ * _copyJsonObjectCtor
+ */
+static JsonObjectCtor *
+_copyJsonObjectCtor(const JsonObjectCtor *from)
+{
+	JsonObjectCtor *newnode = makeNode(JsonObjectCtor);
+
+	COPY_NODE_FIELD(exprs);
+	COPY_NODE_FIELD(output);
+	COPY_SCALAR_FIELD(absent_on_null);
+	COPY_SCALAR_FIELD(unique);
+	COPY_LOCATION_FIELD(location);
+
+	return newnode;
+}
+
+/*
+ * _copyJsonObjectAgg
+ */
+static JsonObjectAgg *
+_copyJsonObjectAgg(const JsonObjectAgg *from)
+{
+	JsonObjectAgg *newnode = makeNode(JsonObjectAgg);
+
+	COPY_NODE_FIELD(ctor.output);
+	COPY_NODE_FIELD(ctor.agg_filter);
+	COPY_NODE_FIELD(ctor.agg_order);
+	COPY_NODE_FIELD(ctor.over);
+	COPY_LOCATION_FIELD(ctor.location);
+	COPY_NODE_FIELD(arg);
+	COPY_SCALAR_FIELD(absent_on_null);
+	COPY_SCALAR_FIELD(unique);
+
+	return newnode;
+}
+
+/*
+ * _copyJsonArrayCtor
+ */
+static JsonArrayCtor *
+_copyJsonArrayCtor(const JsonArrayCtor *from)
+{
+	JsonArrayCtor *newnode = makeNode(JsonArrayCtor);
+
+	COPY_NODE_FIELD(exprs);
+	COPY_NODE_FIELD(output);
+	COPY_SCALAR_FIELD(absent_on_null);
+	COPY_LOCATION_FIELD(location);
+
+	return newnode;
+}
+
+/*
+ * _copyJsonArrayAgg
+ */
+static JsonArrayAgg *
+_copyJsonArrayAgg(const JsonArrayAgg *from)
+{
+	JsonArrayAgg *newnode = makeNode(JsonArrayAgg);
+
+	COPY_NODE_FIELD(ctor.output);
+	COPY_NODE_FIELD(ctor.agg_filter);
+	COPY_NODE_FIELD(ctor.agg_order);
+	COPY_NODE_FIELD(ctor.over);
+	COPY_LOCATION_FIELD(ctor.location);
+	COPY_NODE_FIELD(arg);
+	COPY_SCALAR_FIELD(absent_on_null);
+
+	return newnode;
+}
+
+/*
+ * _copyJsonArrayQueryCtor
+ */
+static JsonArrayQueryCtor *
+_copyJsonArrayQueryCtor(const JsonArrayQueryCtor *from)
+{
+	JsonArrayQueryCtor *newnode = makeNode(JsonArrayQueryCtor);
+
+	COPY_NODE_FIELD(query);
+	COPY_NODE_FIELD(output);
+	COPY_SCALAR_FIELD(format);
+	COPY_SCALAR_FIELD(absent_on_null);
+	COPY_LOCATION_FIELD(location);
+
+	return newnode;
+}
+
+/*
+ * _copyJsonExpr
+ */
+static
JsonExpr * +_copyJsonExpr(const JsonExpr *from) +{ + JsonExpr *newnode = makeNode(JsonExpr); + + COPY_SCALAR_FIELD(op); + COPY_NODE_FIELD(raw_expr); + COPY_NODE_FIELD(formatted_expr); + COPY_NODE_FIELD(result_expr); + COPY_SCALAR_FIELD(coerce_via_io); + COPY_SCALAR_FIELD(coerce_via_io_collation); + COPY_SCALAR_FIELD(format); + COPY_NODE_FIELD(path_spec); + COPY_NODE_FIELD(passing.values); + COPY_NODE_FIELD(passing.names); + COPY_SCALAR_FIELD(returning); + COPY_SCALAR_FIELD(on_error); + COPY_NODE_FIELD(on_error.default_expr); + COPY_SCALAR_FIELD(on_empty); + COPY_NODE_FIELD(on_empty.default_expr); + COPY_SCALAR_FIELD(wrapper); + COPY_SCALAR_FIELD(omit_quotes); + COPY_LOCATION_FIELD(location); + + return newnode; +} + +/* + * _copyJsonFuncExpr + */ +static JsonFuncExpr * +_copyJsonFuncExpr(const JsonFuncExpr *from) +{ + JsonFuncExpr *newnode = makeNode(JsonFuncExpr); + + COPY_SCALAR_FIELD(op); + COPY_NODE_FIELD(common); + COPY_NODE_FIELD(output); + COPY_NODE_FIELD(on_empty); + COPY_NODE_FIELD(on_error); + COPY_SCALAR_FIELD(wrapper); + COPY_SCALAR_FIELD(omit_quotes); + COPY_LOCATION_FIELD(location); + + return newnode; +} + +/* + * _copyJsonIsPredicate + */ +static JsonIsPredicate * +_copyJsonIsPredicate(const JsonIsPredicate *from) +{ + JsonIsPredicate *newnode = makeNode(JsonIsPredicate); + + COPY_NODE_FIELD(expr); + COPY_SCALAR_FIELD(format); + COPY_SCALAR_FIELD(vtype); + COPY_SCALAR_FIELD(unique_keys); + + return newnode; +} + +/* + * _copyJsonBehavior + */ +static JsonBehavior * +_copyJsonBehavior(const JsonBehavior *from) +{ + JsonBehavior *newnode = makeNode(JsonBehavior); + + COPY_SCALAR_FIELD(btype); + COPY_NODE_FIELD(default_expr); + + return newnode; +} + +/* + * _copyJsonOutput + */ +static JsonOutput * +_copyJsonOutput(const JsonOutput *from) +{ + JsonOutput *newnode = makeNode(JsonOutput); + + COPY_NODE_FIELD(typename); + COPY_SCALAR_FIELD(returning); + + return newnode; +} + +/* + * _copyJsonCommon + */ +static JsonCommon * +_copyJsonCommon(const JsonCommon *from) +{ + JsonCommon *newnode = makeNode(JsonCommon); + + COPY_NODE_FIELD(expr); + COPY_STRING_FIELD(pathspec); + COPY_STRING_FIELD(pathname); + COPY_NODE_FIELD(passing); + COPY_LOCATION_FIELD(location); + + return newnode; +} + +/* + * _copyJsonValueExpr + */ +static JsonValueExpr * +_copyJsonValueExpr(const JsonValueExpr *from) +{ + JsonValueExpr *newnode = makeNode(JsonValueExpr); + + COPY_NODE_FIELD(expr); + COPY_SCALAR_FIELD(format); + + return newnode; +} + +/* + * _copyJsonArgument + */ +static JsonArgument * +_copyJsonArgument(const JsonArgument *from) +{ + JsonArgument *newnode = makeNode(JsonArgument); + + COPY_NODE_FIELD(val); + COPY_STRING_FIELD(name); + + return newnode; +} + +/* + * _copyJsonTable + */ +static JsonTable * +_copyJsonTable(const JsonTable *from) +{ + JsonTable *newnode = makeNode(JsonTable); + + COPY_NODE_FIELD(common); + COPY_NODE_FIELD(columns); + COPY_NODE_FIELD(plan); + COPY_NODE_FIELD(on_error); + COPY_NODE_FIELD(alias); + COPY_SCALAR_FIELD(location); + + return newnode; +} + +/* + * _copyJsonTableColumn + */ +static JsonTableColumn * +_copyJsonTableColumn(const JsonTableColumn *from) +{ + JsonTableColumn *newnode = makeNode(JsonTableColumn); + + COPY_SCALAR_FIELD(coltype); + COPY_STRING_FIELD(name); + COPY_NODE_FIELD(typename); + COPY_STRING_FIELD(pathspec); + COPY_STRING_FIELD(pathname); + COPY_SCALAR_FIELD(format); + COPY_SCALAR_FIELD(wrapper); + COPY_SCALAR_FIELD(omit_quotes); + COPY_NODE_FIELD(columns); + COPY_NODE_FIELD(on_empty); + COPY_NODE_FIELD(on_error); + 
COPY_SCALAR_FIELD(location); + + return newnode; +} + +/* + * _copyJsonTablePlan + */ +static JsonTablePlan * +_copyJsonTablePlan(const JsonTablePlan *from) +{ + JsonTablePlan *newnode = makeNode(JsonTablePlan); + + COPY_SCALAR_FIELD(plan_type); + COPY_SCALAR_FIELD(join_type); + COPY_STRING_FIELD(pathname); + COPY_NODE_FIELD(plan1); + COPY_NODE_FIELD(plan2); + COPY_SCALAR_FIELD(location); + + return newnode; +} + +/* + * _copyJsonTableParentNode + */ +static JsonTableParentNode * +_copyJsonTableParentNode(const JsonTableParentNode *from) +{ + JsonTableParentNode *newnode = makeNode(JsonTableParentNode); + + COPY_NODE_FIELD(path); + COPY_STRING_FIELD(name); + COPY_NODE_FIELD(child); + COPY_SCALAR_FIELD(outerJoin); + COPY_SCALAR_FIELD(colMin); + COPY_SCALAR_FIELD(colMax); + + return newnode; +} + +/* + * _copyJsonTableSiblingNode + */ +static JsonTableSiblingNode * +_copyJsonTableSiblingNode(const JsonTableSiblingNode *from) +{ + JsonTableSiblingNode *newnode = makeNode(JsonTableSiblingNode); + + COPY_NODE_FIELD(larg); + COPY_NODE_FIELD(rarg); + COPY_SCALAR_FIELD(cross); + + return newnode; +} + /* **************************************************************** * relation.h copy functions * @@ -4989,6 +5327,63 @@ copyObjectImpl(const void *from) case T_OnConflictExpr: retval = _copyOnConflictExpr(from); break; + case T_JsonKeyValue: + retval = _copyJsonKeyValue(from); + break; + case T_JsonObjectCtor: + retval = _copyJsonObjectCtor(from); + break; + case T_JsonObjectAgg: + retval = _copyJsonObjectAgg(from); + break; + case T_JsonArrayCtor: + retval = _copyJsonArrayCtor(from); + break; + case T_JsonArrayQueryCtor: + retval = _copyJsonArrayQueryCtor(from); + break; + case T_JsonArrayAgg: + retval = _copyJsonArrayAgg(from); + break; + case T_JsonFuncExpr: + retval = _copyJsonFuncExpr(from); + break; + case T_JsonIsPredicate: + retval = _copyJsonIsPredicate(from); + break; + case T_JsonExpr: + retval = _copyJsonExpr(from); + break; + case T_JsonCommon: + retval = _copyJsonCommon(from); + break; + case T_JsonOutput: + retval = _copyJsonOutput(from); + break; + case T_JsonBehavior: + retval = _copyJsonBehavior(from); + break; + case T_JsonValueExpr: + retval = _copyJsonValueExpr(from); + break; + case T_JsonArgument: + retval = _copyJsonArgument(from); + break; + case T_JsonTable: + retval = _copyJsonTable(from); + break; + case T_JsonTableColumn: + retval = _copyJsonTableColumn(from); + break; + case T_JsonTablePlan: + retval = _copyJsonTablePlan(from); + break; + case T_JsonTableParentNode: + retval = _copyJsonTableParentNode(from); + break; + case T_JsonTableSiblingNode: + retval = _copyJsonTableSiblingNode(from); + break; /* * RELATION NODES diff --git a/src/backend/nodes/equalfuncs.c b/src/backend/nodes/equalfuncs.c index 7a700018e7..3c5329b1c2 100644 --- a/src/backend/nodes/equalfuncs.c +++ b/src/backend/nodes/equalfuncs.c @@ -119,6 +119,7 @@ _equalRangeVar(const RangeVar *a, const RangeVar *b) static bool _equalTableFunc(const TableFunc *a, const TableFunc *b) { + COMPARE_SCALAR_FIELD(functype); COMPARE_NODE_FIELD(ns_uris); COMPARE_NODE_FIELD(ns_names); COMPARE_NODE_FIELD(docexpr); @@ -129,13 +130,38 @@ _equalTableFunc(const TableFunc *a, const TableFunc *b) COMPARE_NODE_FIELD(colcollations); COMPARE_NODE_FIELD(colexprs); COMPARE_NODE_FIELD(coldefexprs); + COMPARE_NODE_FIELD(colvalexprs); COMPARE_BITMAPSET_FIELD(notnulls); + COMPARE_NODE_FIELD(plan); COMPARE_SCALAR_FIELD(ordinalitycol); COMPARE_LOCATION_FIELD(location); return true; } +static bool +_equalJsonTableParentNode(const 
JsonTableParentNode *a, const JsonTableParentNode *b) +{ + COMPARE_NODE_FIELD(path); + COMPARE_STRING_FIELD(name); + COMPARE_NODE_FIELD(child); + COMPARE_SCALAR_FIELD(outerJoin); + COMPARE_SCALAR_FIELD(colMin); + COMPARE_SCALAR_FIELD(colMax); + + return true; +} + +static bool +_equalJsonTableSiblingNode(const JsonTableSiblingNode *a, const JsonTableSiblingNode *b) +{ + COMPARE_NODE_FIELD(larg); + COMPARE_NODE_FIELD(rarg); + COMPARE_SCALAR_FIELD(cross); + + return true; +} + static bool _equalIntoClause(const IntoClause *a, const IntoClause *b) { @@ -812,6 +838,40 @@ _equalOnConflictExpr(const OnConflictExpr *a, const OnConflictExpr *b) return true; } +/* + * _equalJsonExpr + */ +static bool +_equalJsonExpr(const JsonExpr *a, const JsonExpr *b) +{ + COMPARE_SCALAR_FIELD(op); + COMPARE_NODE_FIELD(raw_expr); + COMPARE_NODE_FIELD(formatted_expr); + COMPARE_NODE_FIELD(result_expr); + COMPARE_SCALAR_FIELD(coerce_via_io); + COMPARE_SCALAR_FIELD(coerce_via_io_collation); + COMPARE_SCALAR_FIELD(format.type); + COMPARE_SCALAR_FIELD(format.encoding); + COMPARE_LOCATION_FIELD(format.location); + COMPARE_NODE_FIELD(path_spec); + COMPARE_NODE_FIELD(passing.values); + COMPARE_NODE_FIELD(passing.names); + COMPARE_SCALAR_FIELD(returning.format.type); + COMPARE_SCALAR_FIELD(returning.format.encoding); + COMPARE_LOCATION_FIELD(returning.format.location); + COMPARE_SCALAR_FIELD(returning.typid); + COMPARE_SCALAR_FIELD(returning.typmod); + COMPARE_SCALAR_FIELD(on_error.btype); + COMPARE_NODE_FIELD(on_error.default_expr); + COMPARE_SCALAR_FIELD(on_empty.btype); + COMPARE_NODE_FIELD(on_empty.default_expr); + COMPARE_SCALAR_FIELD(wrapper); + COMPARE_SCALAR_FIELD(omit_quotes); + COMPARE_LOCATION_FIELD(location); + + return true; +} + /* * Stuff from relation.h */ @@ -3146,6 +3206,15 @@ equal(const void *a, const void *b) case T_JoinExpr: retval = _equalJoinExpr(a, b); break; + case T_JsonExpr: + retval = _equalJsonExpr(a, b); + break; + case T_JsonTableParentNode: + retval = _equalJsonTableParentNode(a, b); + break; + case T_JsonTableSiblingNode: + retval = _equalJsonTableSiblingNode(a, b); + break; /* * RELATION NODES diff --git a/src/backend/nodes/makefuncs.c b/src/backend/nodes/makefuncs.c index 7a676531ae..4b988315ac 100644 --- a/src/backend/nodes/makefuncs.c +++ b/src/backend/nodes/makefuncs.c @@ -20,6 +20,7 @@ #include "fmgr.h" #include "nodes/makefuncs.h" #include "nodes/nodeFuncs.h" +#include "utils/errcodes.h" #include "utils/lsyscache.h" @@ -628,3 +629,91 @@ makeVacuumRelation(RangeVar *relation, Oid oid, List *va_cols) v->va_cols = va_cols; return v; } + +/* + * makeJsonBehavior - + * creates a JsonBehavior node + */ +JsonBehavior * +makeJsonBehavior(JsonBehaviorType type, Node *default_expr) +{ + JsonBehavior *behavior = makeNode(JsonBehavior); + + behavior->btype = type; + behavior->default_expr = default_expr; + + return behavior; +} + +/* + * makeJsonTableJoinedPlan - + * creates a joined JsonTablePlan node + */ +Node * +makeJsonTableJoinedPlan(JsonTablePlanJoinType type, Node *plan1, Node *plan2, + int location) +{ + JsonTablePlan *n = makeNode(JsonTablePlan); + + n->plan_type = JSTP_JOINED; + n->join_type = type; + n->plan1 = castNode(JsonTablePlan, plan1); + n->plan2 = castNode(JsonTablePlan, plan2); + n->location = location; + + return (Node *) n; +} + +/* + * makeJsonEncoding - + * converts JSON encoding name to enum JsonEncoding + */ +JsonEncoding +makeJsonEncoding(char *name) +{ + if (!pg_strcasecmp(name, "utf8")) + return JS_ENC_UTF8; + if (!pg_strcasecmp(name, "utf16")) + return 
JS_ENC_UTF16; + if (!pg_strcasecmp(name, "utf32")) + return JS_ENC_UTF32; + + ereport(ERROR, + (errcode(ERRCODE_INVALID_PARAMETER_VALUE), + errmsg("unrecognized JSON encoding: %s", name))); + + return JS_ENC_DEFAULT; +} + +/* + * makeJsonKeyValue - + * creates a JsonKeyValue node + */ +Node * +makeJsonKeyValue(Node *key, Node *value) +{ + JsonKeyValue *n = makeNode(JsonKeyValue); + + n->key = (Expr *) key; + n->value = castNode(JsonValueExpr, value); + + return (Node *) n; +} + +/* + * makeJsonIsPredicate - + * creates a JsonIsPredicate node + */ +Node * +makeJsonIsPredicate(Node *expr, JsonFormat format, JsonValueType vtype, + bool unique_keys) +{ + JsonIsPredicate *n = makeNode(JsonIsPredicate); + + n->expr = expr; + n->format = format; + n->vtype = vtype; + n->unique_keys = unique_keys; + + return (Node *) n; +} diff --git a/src/backend/nodes/nodeFuncs.c b/src/backend/nodes/nodeFuncs.c index 8e6f27e153..f0992a25c7 100644 --- a/src/backend/nodes/nodeFuncs.c +++ b/src/backend/nodes/nodeFuncs.c @@ -259,6 +259,9 @@ exprType(const Node *expr) case T_PlaceHolderVar: type = exprType((Node *) ((const PlaceHolderVar *) expr)->phexpr); break; + case T_JsonExpr: + type = ((const JsonExpr *) expr)->returning.typid; + break; default: elog(ERROR, "unrecognized node type: %d", (int) nodeTag(expr)); type = InvalidOid; /* keep compiler quiet */ @@ -492,6 +495,8 @@ exprTypmod(const Node *expr) return ((const SetToDefault *) expr)->typeMod; case T_PlaceHolderVar: return exprTypmod((Node *) ((const PlaceHolderVar *) expr)->phexpr); + case T_JsonExpr: + return ((JsonExpr *) expr)->returning.typmod; default: break; } @@ -903,6 +908,18 @@ exprCollation(const Node *expr) case T_PlaceHolderVar: coll = exprCollation((Node *) ((const PlaceHolderVar *) expr)->phexpr); break; + case T_JsonExpr: + { + JsonExpr *jexpr = (JsonExpr *) expr; + + if (jexpr->result_expr) + coll = exprCollation(jexpr->result_expr); + else if (jexpr->coerce_via_io) + coll = jexpr->coerce_via_io_collation; + else + coll = InvalidOid; + } + break; default: elog(ERROR, "unrecognized node type: %d", (int) nodeTag(expr)); coll = InvalidOid; /* keep compiler quiet */ @@ -1104,6 +1121,18 @@ exprSetCollation(Node *expr, Oid collation) Assert(!OidIsValid(collation)); /* result is always an integer * type */ break; + case T_JsonExpr: + { + JsonExpr *jexpr = (JsonExpr *) expr; + + if (jexpr->result_expr) + exprSetCollation(jexpr->result_expr, collation); + else if (jexpr->coerce_via_io) + jexpr->coerce_via_io_collation = collation; + else + Assert(!OidIsValid(collation)); + } + break; default: elog(ERROR, "unrecognized node type: %d", (int) nodeTag(expr)); break; @@ -1544,6 +1573,15 @@ exprLocation(const Node *expr) case T_PartitionRangeDatum: loc = ((const PartitionRangeDatum *) expr)->location; break; + case T_JsonExpr: + { + const JsonExpr *jsexpr = (const JsonExpr *) expr; + + /* consider both function name and leftmost arg */ + loc = leftmostLoc(jsexpr->location, + exprLocation(jsexpr->raw_expr)); + } + break; default: /* for any other node type it's just unknown... 
*/ loc = -1; @@ -2216,6 +2254,27 @@ expression_tree_walker(Node *node, return true; if (walker(tf->coldefexprs, context)) return true; + if (walker(tf->colvalexprs, context)) + return true; + } + break; + case T_JsonExpr: + { + JsonExpr *jexpr = (JsonExpr *) node; + + if (walker(jexpr->raw_expr, context)) + return true; + if (walker(jexpr->formatted_expr, context)) + return true; + if (walker(jexpr->result_expr, context)) + return true; + if (walker(jexpr->passing.values, context)) + return true; + /* we assume walker doesn't care about passing.names */ + if (walker(jexpr->on_empty.default_expr, context)) + return true; + if (walker(jexpr->on_error.default_expr, context)) + return true; } break; default: @@ -3032,9 +3091,28 @@ expression_tree_mutator(Node *node, MUTATE(newnode->rowexpr, tf->rowexpr, Node *); MUTATE(newnode->colexprs, tf->colexprs, List *); MUTATE(newnode->coldefexprs, tf->coldefexprs, List *); + MUTATE(newnode->colvalexprs, tf->colvalexprs, List *); return (Node *) newnode; } break; + case T_JsonExpr: + { + JsonExpr *xexpr = (JsonExpr *) node; + JsonExpr *newnode; + + FLATCOPY(newnode, xexpr, JsonExpr); + MUTATE(newnode->raw_expr, xexpr->raw_expr, Node *); + MUTATE(newnode->formatted_expr, xexpr->formatted_expr, Node *); + MUTATE(newnode->result_expr, xexpr->result_expr, Node *); + MUTATE(newnode->passing.values, xexpr->passing.values, List *); + /* assume mutator does not care about passing.names */ + MUTATE(newnode->on_empty.default_expr, + xexpr->on_empty.default_expr, Node *); + MUTATE(newnode->on_error.default_expr, + xexpr->on_error.default_expr, Node *); + + return (Node *) newnode; + } default: elog(ERROR, "unrecognized node type: %d", (int) nodeTag(node)); @@ -3679,6 +3757,143 @@ raw_expression_tree_walker(Node *node, break; case T_CommonTableExpr: return walker(((CommonTableExpr *) node)->ctequery, context); + case T_JsonValueExpr: + return walker(((JsonValueExpr *) node)->expr, context); + case T_JsonOutput: + return walker(((JsonOutput *) node)->typename, context); + case T_JsonKeyValue: + { + JsonKeyValue *jkv = (JsonKeyValue *) node; + + if (walker(jkv->key, context)) + return true; + if (walker(jkv->value, context)) + return true; + } + break; + case T_JsonObjectCtor: + { + JsonObjectCtor *joc = (JsonObjectCtor *) node; + + if (walker(joc->output, context)) + return true; + if (walker(joc->exprs, context)) + return true; + } + break; + case T_JsonArrayCtor: + { + JsonArrayCtor *jac = (JsonArrayCtor *) node; + + if (walker(jac->output, context)) + return true; + if (walker(jac->exprs, context)) + return true; + } + break; + case T_JsonObjectAgg: + { + JsonObjectAgg *joa = (JsonObjectAgg *) node; + + if (walker(joa->ctor.output, context)) + return true; + if (walker(joa->ctor.agg_order, context)) + return true; + if (walker(joa->ctor.agg_filter, context)) + return true; + if (walker(joa->ctor.over, context)) + return true; + if (walker(joa->arg, context)) + return true; + } + break; + case T_JsonArrayAgg: + { + JsonArrayAgg *jaa = (JsonArrayAgg *) node; + + if (walker(jaa->ctor.output, context)) + return true; + if (walker(jaa->ctor.agg_order, context)) + return true; + if (walker(jaa->ctor.agg_filter, context)) + return true; + if (walker(jaa->ctor.over, context)) + return true; + if (walker(jaa->arg, context)) + return true; + } + break; + case T_JsonArrayQueryCtor: + { + JsonArrayQueryCtor *jaqc = (JsonArrayQueryCtor *) node; + + if (walker(jaqc->output, context)) + return true; + if (walker(jaqc->query, context)) + return true; + } + break; + case 
T_JsonIsPredicate: + return walker(((JsonIsPredicate *) node)->expr, context); + case T_JsonArgument: + return walker(((JsonArgument *) node)->val, context); + case T_JsonCommon: + { + JsonCommon *jc = (JsonCommon *) node; + + if (walker(jc->expr, context)) + return true; + if (walker(jc->passing, context)) + return true; + } + break; + case T_JsonBehavior: + { + JsonBehavior *jb = (JsonBehavior *) node; + + if (jb->btype == JSON_BEHAVIOR_DEFAULT && + walker(jb->default_expr, context)) + return true; + } + break; + case T_JsonFuncExpr: + { + JsonFuncExpr *jfe = (JsonFuncExpr *) node; + + if (walker(jfe->common, context)) + return true; + if (jfe->output && walker(jfe->output, context)) + return true; + if (walker(jfe->on_empty, context)) + return true; + if (walker(jfe->on_error, context)) + return true; + } + break; + case T_JsonTable: + { + JsonTable *jt = (JsonTable *) node; + + if (walker(jt->common, context)) + return true; + if (walker(jt->columns, context)) + return true; + } + break; + case T_JsonTableColumn: + { + JsonTableColumn *jtc = (JsonTableColumn *) node; + + if (walker(jtc->typename, context)) + return true; + if (walker(jtc->on_empty, context)) + return true; + if (walker(jtc->on_error, context)) + return true; + if (jtc->coltype == JTC_NESTED && walker(jtc->columns, context)) + return true; + } + break; default: elog(ERROR, "unrecognized node type: %d", (int) nodeTag(node)); diff --git a/src/backend/nodes/outfuncs.c b/src/backend/nodes/outfuncs.c index 43d62062bc..8d05db17bd 100644 --- a/src/backend/nodes/outfuncs.c +++ b/src/backend/nodes/outfuncs.c @@ -1043,6 +1043,7 @@ _outTableFunc(StringInfo str, const TableFunc *node) { WRITE_NODE_TYPE("TABLEFUNC"); + WRITE_ENUM_FIELD(functype, TableFuncType); WRITE_NODE_FIELD(ns_uris); WRITE_NODE_FIELD(ns_names); WRITE_NODE_FIELD(docexpr); @@ -1053,7 +1054,9 @@ _outTableFunc(StringInfo str, const TableFunc *node) WRITE_NODE_FIELD(colcollations); WRITE_NODE_FIELD(colexprs); WRITE_NODE_FIELD(coldefexprs); + WRITE_NODE_FIELD(colvalexprs); WRITE_BITMAPSET_FIELD(notnulls); + WRITE_NODE_FIELD(plan); WRITE_INT_FIELD(ordinalitycol); WRITE_LOCATION_FIELD(location); } @@ -1696,6 +1699,60 @@ _outOnConflictExpr(StringInfo str, const OnConflictExpr *node) WRITE_NODE_FIELD(exclRelTlist); } +static void +_outJsonExpr(StringInfo str, const JsonExpr *node) +{ + WRITE_NODE_TYPE("JSONEXPR"); + + WRITE_ENUM_FIELD(op, JsonExprOp); + WRITE_NODE_FIELD(raw_expr); + WRITE_NODE_FIELD(formatted_expr); + WRITE_NODE_FIELD(result_expr); + WRITE_BOOL_FIELD(coerce_via_io); + WRITE_OID_FIELD(coerce_via_io_collation); + WRITE_ENUM_FIELD(format.type, JsonFormatType); + WRITE_ENUM_FIELD(format.encoding, JsonEncoding); + WRITE_LOCATION_FIELD(format.location); + WRITE_NODE_FIELD(path_spec); + WRITE_NODE_FIELD(passing.values); + WRITE_NODE_FIELD(passing.names); + WRITE_ENUM_FIELD(returning.format.type, JsonFormatType); + WRITE_ENUM_FIELD(returning.format.encoding, JsonEncoding); + WRITE_LOCATION_FIELD(returning.format.location); + WRITE_OID_FIELD(returning.typid); + WRITE_INT_FIELD(returning.typmod); + WRITE_ENUM_FIELD(on_error.btype, JsonBehaviorType); + WRITE_NODE_FIELD(on_error.default_expr); + WRITE_ENUM_FIELD(on_empty.btype, JsonBehaviorType); + WRITE_NODE_FIELD(on_empty.default_expr); + WRITE_ENUM_FIELD(wrapper, JsonWrapper); + WRITE_BOOL_FIELD(omit_quotes); + WRITE_LOCATION_FIELD(location); +} + +static void +_outJsonTableParentNode(StringInfo str, const JsonTableParentNode *node) +{ + WRITE_NODE_TYPE("JSONTABPNODE"); + + WRITE_NODE_FIELD(path); + 
WRITE_STRING_FIELD(name); + WRITE_NODE_FIELD(child); + WRITE_BOOL_FIELD(outerJoin); + WRITE_INT_FIELD(colMin); + WRITE_INT_FIELD(colMax); +} + +static void +_outJsonTableSiblingNode(StringInfo str, const JsonTableSiblingNode *node) +{ + WRITE_NODE_TYPE("JSONTABSNODE"); + + WRITE_NODE_FIELD(larg); + WRITE_NODE_FIELD(rarg); + WRITE_BOOL_FIELD(cross); +} + /***************************************************************************** * * Stuff from relation.h. @@ -4236,6 +4293,15 @@ outNode(StringInfo str, const void *obj) case T_PartitionRangeDatum: _outPartitionRangeDatum(str, obj); break; + case T_JsonExpr: + _outJsonExpr(str, obj); + break; + case T_JsonTableParentNode: + _outJsonTableParentNode(str, obj); + break; + case T_JsonTableSiblingNode: + _outJsonTableSiblingNode(str, obj); + break; default: diff --git a/src/backend/nodes/readfuncs.c b/src/backend/nodes/readfuncs.c index ccb6a1f4ac..f241057326 100644 --- a/src/backend/nodes/readfuncs.c +++ b/src/backend/nodes/readfuncs.c @@ -474,6 +474,7 @@ _readTableFunc(void) { READ_LOCALS(TableFunc); + READ_ENUM_FIELD(functype, TableFuncType); READ_NODE_FIELD(ns_uris); READ_NODE_FIELD(ns_names); READ_NODE_FIELD(docexpr); @@ -484,7 +485,9 @@ _readTableFunc(void) READ_NODE_FIELD(colcollations); READ_NODE_FIELD(colexprs); READ_NODE_FIELD(coldefexprs); + READ_NODE_FIELD(colvalexprs); READ_BITMAPSET_FIELD(notnulls); + READ_NODE_FIELD(plan); READ_INT_FIELD(ordinalitycol); READ_LOCATION_FIELD(location); @@ -1323,6 +1326,69 @@ _readOnConflictExpr(void) READ_DONE(); } +/* + * _readJsonExpr + */ +static JsonExpr * +_readJsonExpr(void) +{ + READ_LOCALS(JsonExpr); + + READ_ENUM_FIELD(op, JsonExprOp); + READ_NODE_FIELD(raw_expr); + READ_NODE_FIELD(formatted_expr); + READ_NODE_FIELD(result_expr); + READ_BOOL_FIELD(coerce_via_io); + READ_OID_FIELD(coerce_via_io_collation); + READ_ENUM_FIELD(format.type, JsonFormatType); + READ_ENUM_FIELD(format.encoding, JsonEncoding); + READ_LOCATION_FIELD(format.location); + READ_NODE_FIELD(path_spec); + READ_NODE_FIELD(passing.values); + READ_NODE_FIELD(passing.names); + READ_ENUM_FIELD(returning.format.type, JsonFormatType); + READ_ENUM_FIELD(returning.format.encoding, JsonEncoding); + READ_LOCATION_FIELD(returning.format.location); + READ_OID_FIELD(returning.typid); + READ_INT_FIELD(returning.typmod); + READ_ENUM_FIELD(on_error.btype, JsonBehaviorType); + READ_NODE_FIELD(on_error.default_expr); + READ_ENUM_FIELD(on_empty.btype, JsonBehaviorType); + READ_NODE_FIELD(on_empty.default_expr); + READ_ENUM_FIELD(wrapper, JsonWrapper); + READ_BOOL_FIELD(omit_quotes); + READ_LOCATION_FIELD(location); + + READ_DONE(); +} + +static JsonTableParentNode * +_readJsonTableParentNode(void) +{ + READ_LOCALS(JsonTableParentNode); + + READ_NODE_FIELD(path); + READ_STRING_FIELD(name); + READ_NODE_FIELD(child); + READ_BOOL_FIELD(outerJoin); + READ_INT_FIELD(colMin); + READ_INT_FIELD(colMax); + + READ_DONE(); +} + +static JsonTableSiblingNode * +_readJsonTableSiblingNode(void) +{ + READ_LOCALS(JsonTableSiblingNode); + + READ_NODE_FIELD(larg); + READ_NODE_FIELD(rarg); + READ_BOOL_FIELD(cross); + + READ_DONE(); +} + /* * Stuff from parsenodes.h. 
*/ @@ -2666,6 +2732,12 @@ parseNodeString(void) return_value = _readPartitionBoundSpec(); else if (MATCH("PARTITIONRANGEDATUM", 19)) return_value = _readPartitionRangeDatum(); + else if (MATCH("JSONEXPR", 8)) + return_value = _readJsonExpr(); + else if (MATCH("JSONTABPNODE", 12)) + return_value = _readJsonTableParentNode(); + else if (MATCH("JSONTABSNODE", 12)) + return_value = _readJsonTableSiblingNode(); else { elog(ERROR, "badly formatted node string \"%.32s\"...", token); diff --git a/src/backend/optimizer/path/costsize.c b/src/backend/optimizer/path/costsize.c index 98fb16e85a..df0c02cd8c 100644 --- a/src/backend/optimizer/path/costsize.c +++ b/src/backend/optimizer/path/costsize.c @@ -3704,7 +3704,8 @@ cost_qual_eval_walker(Node *node, cost_qual_eval_context *context) IsA(node, SQLValueFunction) || IsA(node, XmlExpr) || IsA(node, CoerceToDomain) || - IsA(node, NextValueExpr)) + IsA(node, NextValueExpr) || + IsA(node, JsonExpr)) { /* Treat all these as having cost 1 */ context->total.per_tuple += cpu_operator_cost; diff --git a/src/backend/parser/gram.y b/src/backend/parser/gram.y index 4c83a63f7d..438d18f12b 100644 --- a/src/backend/parser/gram.y +++ b/src/backend/parser/gram.y @@ -211,6 +211,7 @@ static Node *makeRecursiveViewSelect(char *relname, List *aliases, Node *query); JoinType jtype; DropBehavior dbehavior; OnCommitAction oncommit; + JsonFormat jsformat; List *list; Node *node; Value *value; @@ -241,6 +242,12 @@ static Node *makeRecursiveViewSelect(char *relname, List *aliases, Node *query); PartitionSpec *partspec; PartitionBoundSpec *partboundspec; RoleSpec *rolespec; + JsonBehavior *jsbehavior; + struct { + JsonBehavior *on_empty; + JsonBehavior *on_error; + } on_behavior; + JsonQuotes js_quotes; } %type stmt schema_stmt @@ -581,6 +588,99 @@ static Node *makeRecursiveViewSelect(char *relname, List *aliases, Node *query); %type partbound_datum PartitionRangeDatum %type partbound_datum_list range_datum_list +%type json_value_expr + json_func_expr + json_value_func_expr + json_query_expr + json_exists_predicate + json_api_common_syntax + json_context_item + json_argument + json_output_clause_opt + json_value_constructor + json_object_constructor + json_object_constructor_args_opt + json_object_args + json_object_ctor_args_opt + json_object_func_args + json_array_constructor + json_name_and_value + json_aggregate_func + json_object_aggregate_constructor + json_array_aggregate_constructor + json_table + json_table_column_definition + json_table_ordinality_column_definition + json_table_regular_column_definition + json_table_formatted_column_definition + json_table_nested_columns + json_table_plan_clause_opt + json_table_specific_plan + json_table_plan + json_table_plan_simple + json_table_plan_parent_child + json_table_plan_outer + json_table_plan_inner + json_table_plan_sibling + json_table_plan_union + json_table_plan_cross + json_table_plan_primary + json_table_default_plan + +%type json_arguments + json_passing_clause_opt + json_table_columns_clause + json_table_column_definition_list + json_name_and_value_list + json_value_expr_list + json_array_aggregate_order_by_clause_opt + +%type json_returning_clause_opt + +%type json_path_specification + json_table_column_path_specification_clause_opt + json_table_path_name + json_as_path_name_clause_opt + +%type json_encoding + json_encoding_clause_opt + json_table_default_plan_choices + json_table_default_plan_inner_outer + json_table_default_plan_union_cross + json_wrapper_clause_opt + json_wrapper_behavior + 
json_conditional_or_unconditional_opt + json_predicate_type_constraint_opt + +%type json_format_clause_opt + json_representation + +%type json_behavior_error + json_behavior_null + json_behavior_true + json_behavior_false + json_behavior_unknown + json_behavior_empty + json_behavior_empty_array + json_behavior_empty_object + json_behavior_default + json_value_behavior + json_query_behavior + json_exists_error_behavior + json_exists_error_clause_opt + json_table_error_behavior + json_table_error_clause_opt + +%type json_value_on_behavior_clause_opt + json_query_on_behavior_clause_opt + +%type json_quotes_behavior + json_quotes_clause_opt + +%type json_key_uniqueness_constraint_opt + json_object_constructor_null_clause_opt + json_array_constructor_null_clause_opt + /* * Non-keyword token types. These are hard-wired into the "flex" lexer. * They must be listed first so that their numeric codes do not depend on @@ -603,7 +703,7 @@ static Node *makeRecursiveViewSelect(char *relname, List *aliases, Node *query); */ /* ordinary key words in alphabetical order */ -%token ABORT_P ABSOLUTE_P ACCESS ACTION ADD_P ADMIN AFTER +%token ABORT_P ABSENT ABSOLUTE_P ACCESS ACTION ADD_P ADMIN AFTER AGGREGATE ALL ALSO ALTER ALWAYS ANALYSE ANALYZE AND ANY ARRAY AS ASC ASSERTION ASSIGNMENT ASYMMETRIC AT ATTACH ATTRIBUTE AUTHORIZATION @@ -613,8 +713,8 @@ static Node *makeRecursiveViewSelect(char *relname, List *aliases, Node *query); CACHE CALLED CASCADE CASCADED CASE CAST CATALOG_P CHAIN CHAR_P CHARACTER CHARACTERISTICS CHECK CHECKPOINT CLASS CLOSE CLUSTER COALESCE COLLATE COLLATION COLUMN COLUMNS COMMENT COMMENTS COMMIT - COMMITTED CONCURRENTLY CONFIGURATION CONFLICT CONNECTION CONSTRAINT - CONSTRAINTS CONTENT_P CONTINUE_P CONVERSION_P COPY COST CREATE + COMMITTED CONCURRENTLY CONDITIONAL CONFIGURATION CONFLICT CONNECTION + CONSTRAINT CONSTRAINTS CONTENT_P CONTINUE_P CONVERSION_P COPY COST CREATE CROSS CSV CUBE CURRENT_P CURRENT_CATALOG CURRENT_DATE CURRENT_ROLE CURRENT_SCHEMA CURRENT_TIME CURRENT_TIMESTAMP CURRENT_USER CURSOR CYCLE @@ -624,12 +724,12 @@ static Node *makeRecursiveViewSelect(char *relname, List *aliases, Node *query); DETACH DICTIONARY DISABLE_P DISCARD DISTINCT DO DOCUMENT_P DOMAIN_P DOUBLE_P DROP - EACH ELSE ENABLE_P ENCODING ENCRYPTED END_P ENUM_P ESCAPE EVENT EXCEPT - EXCLUDE EXCLUDING EXCLUSIVE EXECUTE EXISTS EXPLAIN + EACH ELSE EMPTY_P ENABLE_P ENCODING ENCRYPTED END_P ENUM_P ERROR_P ESCAPE + EVENT EXCEPT EXCLUDE EXCLUDING EXCLUSIVE EXECUTE EXISTS EXPLAIN EXTENSION EXTERNAL EXTRACT FALSE_P FAMILY FETCH FILTER FIRST_P FLOAT_P FOLLOWING FOR - FORCE FOREIGN FORWARD FREEZE FROM FULL FUNCTION FUNCTIONS + FORCE FOREIGN FORMAT FORWARD FREEZE FROM FULL FUNCTION FUNCTIONS GENERATED GLOBAL GRANT GRANTED GREATEST GROUP_P GROUPING @@ -640,9 +740,10 @@ static Node *makeRecursiveViewSelect(char *relname, List *aliases, Node *query); INNER_P INOUT INPUT_P INSENSITIVE INSERT INSTEAD INT_P INTEGER INTERSECT INTERVAL INTO INVOKER IS ISNULL ISOLATION - JOIN + JOIN JSON JSON_ARRAY JSON_ARRAYAGG JSON_EXISTS JSON_OBJECT JSON_OBJECTAGG + JSON_QUERY JSON_TABLE JSON_VALUE - KEY + KEY KEYS KEEP LABEL LANGUAGE LARGE_P LAST_P LATERAL_P LEADING LEAKPROOF LEAST LEFT LEVEL LIKE LIMIT LISTEN LOAD LOCAL @@ -650,36 +751,36 @@ static Node *makeRecursiveViewSelect(char *relname, List *aliases, Node *query); MAPPING MATCH MATERIALIZED MAXVALUE METHOD MINUTE_P MINVALUE MODE MONTH_P MOVE - NAME_P NAMES NATIONAL NATURAL NCHAR NEW NEXT NO NONE + NAME_P NAMES NATIONAL NATURAL NCHAR NESTED NEW NEXT NO NONE NOT NOTHING NOTIFY NOTNULL 
NOWAIT NULL_P NULLIF NULLS_P NUMERIC - OBJECT_P OF OFF OFFSET OIDS OLD ON ONLY OPERATOR OPTION OPTIONS OR + OBJECT_P OF OFF OFFSET OIDS OLD OMIT ON ONLY OPERATOR OPTION OPTIONS OR ORDER ORDINALITY OUT_P OUTER_P OVER OVERLAPS OVERLAY OVERRIDING OWNED OWNER - PARALLEL PARSER PARTIAL PARTITION PASSING PASSWORD PLACING PLANS POLICY - POSITION PRECEDING PRECISION PRESERVE PREPARE PREPARED PRIMARY + PARALLEL PARSER PARTIAL PARTITION PASSING PASSWORD PATH PLACING PLAN PLANS + POLICY POSITION PRECEDING PRECISION PRESERVE PREPARE PREPARED PRIMARY PRIOR PRIVILEGES PROCEDURAL PROCEDURE PROGRAM PUBLICATION - QUOTE + QUOTE QUOTES RANGE READ REAL REASSIGN RECHECK RECURSIVE REF REFERENCES REFERENCING REFRESH REINDEX RELATIVE_P RELEASE RENAME REPEATABLE REPLACE REPLICA RESET RESTART RESTRICT RETURNING RETURNS REVOKE RIGHT ROLE ROLLBACK ROLLUP ROW ROWS RULE - SAVEPOINT SCHEMA SCHEMAS SCROLL SEARCH SECOND_P SECURITY SELECT SEQUENCE SEQUENCES - SERIALIZABLE SERVER SESSION SESSION_USER SET SETS SETOF SHARE SHOW - SIMILAR SIMPLE SKIP SMALLINT SNAPSHOT SOME SQL_P STABLE STANDALONE_P - START STATEMENT STATISTICS STDIN STDOUT STORAGE STRICT_P STRIP_P + SAVEPOINT SCALAR SCHEMA SCHEMAS SCROLL SEARCH SECOND_P SECURITY SELECT + SEQUENCE SEQUENCES SERIALIZABLE SERVER SESSION SESSION_USER SET SETS SETOF + SHARE SHOW SIMILAR SIMPLE SKIP SMALLINT SNAPSHOT SOME SQL_P STABLE STANDALONE_P + START STATEMENT STATISTICS STDIN STDOUT STORAGE STRICT_P STRING STRIP_P SUBSCRIPTION SUBSTRING SYMMETRIC SYSID SYSTEM_P TABLE TABLES TABLESAMPLE TABLESPACE TEMP TEMPLATE TEMPORARY TEXT_P THEN TIME TIMESTAMP TO TRAILING TRANSACTION TRANSFORM TREAT TRIGGER TRIM TRUE_P TRUNCATE TRUSTED TYPE_P TYPES_P - UNBOUNDED UNCOMMITTED UNENCRYPTED UNION UNIQUE UNKNOWN UNLISTEN UNLOGGED - UNTIL UPDATE USER USING + UNBOUNDED UNCOMMITTED UNCONDITIONAL UNENCRYPTED UNION UNIQUE UNKNOWN + UNLISTEN UNLOGGED UNTIL UPDATE USER USING VACUUM VALID VALIDATE VALIDATOR VALUE_P VALUES VARCHAR VARIADIC VARYING VERBOSE VERSION_P VIEW VIEWS VOLATILE @@ -703,11 +804,11 @@ static Node *makeRecursiveViewSelect(char *relname, List *aliases, Node *query); * as NOT, at least with respect to their left-hand subexpression. * NULLS_LA and WITH_LA are needed to make the grammar LALR(1). */ -%token NOT_LA NULLS_LA WITH_LA - +%token NOT_LA NULLS_LA WITH_LA WITH_LA_UNIQUE WITHOUT_LA /* Precedence: lowest to highest */ %nonassoc SET /* see relation_expr_opt_alias */ +%right FORMAT %left UNION EXCEPT %left INTERSECT %left OR @@ -745,6 +846,7 @@ static Node *makeRecursiveViewSelect(char *relname, List *aliases, Node *query); * blame any funny behavior of UNBOUNDED on the SQL standard, though. 
*/ %nonassoc UNBOUNDED /* ideally should have same precedence as IDENT */ +%nonassoc ERROR_P EMPTY_P DEFAULT ABSENT /* JSON error/empty behavior */ %nonassoc IDENT GENERATED NULL_P PARTITION RANGE ROWS PRECEDING FOLLOWING CUBE ROLLUP %left Op OPERATOR /* multi-character ops and user-defined operators */ %left '+' '-' @@ -769,6 +871,13 @@ static Node *makeRecursiveViewSelect(char *relname, List *aliases, Node *query); /* kluge to keep xml_whitespace_option from causing shift/reduce conflicts */ %right PRESERVE STRIP_P +%nonassoc json_table_column +%nonassoc NESTED +%left PATH + +%nonassoc empty_json_unique +%left WITHOUT WITH_LA_UNIQUE + %% /* @@ -11517,6 +11626,19 @@ table_ref: relation_expr opt_alias_clause $2->alias = $4; $$ = (Node *) $2; } + | json_table opt_alias_clause + { + JsonTable *jt = castNode(JsonTable, $1); + jt->alias = $2; + $$ = (Node *) jt; + } + | LATERAL_P json_table opt_alias_clause + { + JsonTable *jt = castNode(JsonTable, $2); + jt->alias = $3; + jt->lateral = true; + $$ = (Node *) jt; + } ; @@ -12020,6 +12142,8 @@ xmltable_column_option_el: { $$ = makeDefElem("is_not_null", (Node *) makeInteger(true), @1); } | NULL_P { $$ = makeDefElem("is_not_null", (Node *) makeInteger(false), @1); } + | PATH b_expr + { $$ = makeDefElem("path", $2, @1); } ; xml_namespace_list: @@ -12431,7 +12555,7 @@ ConstInterval: opt_timezone: WITH_LA TIME ZONE { $$ = TRUE; } - | WITHOUT TIME ZONE { $$ = FALSE; } + | WITHOUT_LA TIME ZONE { $$ = FALSE; } | /*EMPTY*/ { $$ = FALSE; } ; @@ -12932,6 +13056,48 @@ a_expr: c_expr { $$ = $1; } list_make1($1), @2), @2); } + | a_expr + IS JSON + json_predicate_type_constraint_opt + json_key_uniqueness_constraint_opt %prec IS + { + JsonFormat format = { JS_FORMAT_DEFAULT, JS_ENC_DEFAULT, -1 }; + $$ = makeJsonIsPredicate($1, format, $4, $5); + } + /* + * Required by standard, but it would conflict with expressions + * like: 'str' || format(...) + | a_expr + FORMAT json_representation + IS JSON + json_predicate_type_constraint_opt + json_key_uniqueness_constraint_opt %prec FORMAT + { + $3.location = @2; + $$ = makeJsonIsPredicate($1, $3, $6, $7); + } + */ + | a_expr + IS NOT JSON + json_predicate_type_constraint_opt + json_key_uniqueness_constraint_opt %prec IS + { + JsonFormat format = { JS_FORMAT_DEFAULT, JS_ENC_DEFAULT, -1 }; + $$ = makeNotExpr(makeJsonIsPredicate($1, format, $5, $6), @1); + } + /* + * Required by standard, but it would conflict with expressions + * like: 'str' || format(...) + | a_expr + FORMAT json_representation + IS NOT JSON + json_predicate_type_constraint_opt + json_key_uniqueness_constraint_opt %prec FORMAT + { + $3.location = @2; + $$ = makeNotExpr(makeJsonIsPredicate($1, $3, $7, $8), @1); + } + */ | DEFAULT { /* @@ -13024,6 +13190,25 @@ b_expr: c_expr } ; +json_predicate_type_constraint_opt: + VALUE_P { $$ = JS_TYPE_ANY; } + | ARRAY { $$ = JS_TYPE_ARRAY; } + | OBJECT_P { $$ = JS_TYPE_OBJECT; } + | SCALAR { $$ = JS_TYPE_SCALAR; } + | /* EMPTY */ { $$ = JS_TYPE_ANY; } + ; + +json_key_uniqueness_constraint_opt: + WITH_LA_UNIQUE UNIQUE opt_keys { $$ = TRUE; } + | WITHOUT UNIQUE opt_keys { $$ = FALSE; } + | /* EMPTY */ %prec empty_json_unique { $$ = FALSE; } + ; + +opt_keys: + KEYS { } + | /* EMPTY */ { } + ; + /* * Productions that can be used in both a_expr and b_expr. 
* @@ -13284,6 +13469,13 @@ func_expr: func_application within_group_clause filter_clause over_clause n->over = $4; $$ = (Node *) n; } + | json_aggregate_func filter_clause over_clause + { + JsonAggCtor *n = (JsonAggCtor *) $1; + n->agg_filter = $2; + n->over = $3; + $$ = (Node *) $1; + } | func_expr_common_subexpr { $$ = $1; } ; @@ -13297,6 +13489,7 @@ func_expr: func_application within_group_clause filter_clause over_clause func_expr_windowless: func_application { $$ = $1; } | func_expr_common_subexpr { $$ = $1; } + | json_aggregate_func { $$ = $1; } ; /* @@ -13518,6 +13711,8 @@ func_expr_common_subexpr: n->location = @1; $$ = (Node *)n; } + | json_func_expr + { $$ = $1; } ; /* @@ -14204,6 +14399,729 @@ opt_asymmetric: ASYMMETRIC | /*EMPTY*/ ; +/* SQL/JSON support */ +json_func_expr: + json_value_func_expr + | json_query_expr + | json_exists_predicate + | json_value_constructor + ; + + +json_value_func_expr: + JSON_VALUE '(' + json_api_common_syntax + json_returning_clause_opt + json_value_on_behavior_clause_opt + ')' + { + JsonFuncExpr *n = makeNode(JsonFuncExpr); + n->op = IS_JSON_VALUE; + n->common = (JsonCommon *) $3; + if ($4) + { + n->output = (JsonOutput *) makeNode(JsonOutput); + n->output->typename = $4; + n->output->returning.format.location = @4; + n->output->returning.format.type = JS_FORMAT_DEFAULT; + n->output->returning.format.encoding = JS_ENC_DEFAULT; + } + else + n->output = NULL; + n->on_empty = $5.on_empty; + n->on_error = $5.on_error; + n->location = @1; + $$ = (Node *) n; + } + ; + +json_api_common_syntax: + json_context_item ',' json_path_specification + json_as_path_name_clause_opt + json_passing_clause_opt + { + JsonCommon *n = makeNode(JsonCommon); + n->expr = (JsonValueExpr *) $1; + n->pathspec = $3; + n->pathname = $4; + n->passing = $5; + n->location = @1; + $$ = (Node *) n; + } + ; + +json_context_item: + json_value_expr { $$ = $1; } + ; + +json_path_specification: + Sconst { $$ = $1; } + ; + +json_as_path_name_clause_opt: + AS json_table_path_name { $$ = $2; } + | /* EMPTY */ { $$ = NULL; } + ; + +json_table_path_name: + name { $$ = $1; } + ; + +json_passing_clause_opt: + PASSING json_arguments { $$ = $2; } + | /* EMPTY */ { $$ = NIL; } + ; + +json_arguments: + json_argument { $$ = list_make1($1); } + | json_arguments ',' json_argument { $$ = lappend($1, $3); } + ; + +json_argument: + json_value_expr AS ColLabel + { + JsonArgument *n = makeNode(JsonArgument); + n->val = (JsonValueExpr *) $1; + n->name = $3; + $$ = (Node *) n; + } + ; + +json_value_expr: + a_expr json_format_clause_opt + { + JsonValueExpr *n = makeNode(JsonValueExpr); + n->expr = (Expr *) $1; + n->format = $2; + $$ = (Node *) n; + } + ; + +json_format_clause_opt: + FORMAT json_representation + { + $$ = $2; + $$.location = @1; + } + | /* EMPTY */ + { + $$.type = JS_FORMAT_DEFAULT; + $$.encoding = JS_ENC_DEFAULT; + $$.location = -1; + } + ; + +json_representation: + JSON json_encoding_clause_opt + { + $$.type = JS_FORMAT_JSON; + $$.encoding = $2; + } + /* | implementation_defined_JSON_representation_option (BSON, AVRO etc) */ + ; + +json_encoding_clause_opt: + ENCODING json_encoding { $$ = $2; } + | /* EMPTY */ { $$ = JS_ENC_DEFAULT; } + ; + +json_encoding: + name { $$ = makeJsonEncoding($1); } + /* + | UTF8 { $$ = JS_ENC_UTF8; } + | UTF16 { $$ = JS_ENC_UTF16; } + | UTF32 { $$ = JS_ENC_UTF32; } + */ + ; + +json_returning_clause_opt: + RETURNING Typename { $$ = $2; } + | /* EMPTY */ { $$ = NULL; } + ; + +json_behavior_error: + ERROR_P { $$ = makeJsonBehavior(JSON_BEHAVIOR_ERROR, NULL); } + ; + 
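+/*
+ * Illustrative example (not taken from this patch): these json_behavior_*
+ * productions back the ON EMPTY / ON ERROR clauses, e.g.
+ *
+ *     JSON_VALUE(js, '$.a' DEFAULT 0 ON EMPTY ERROR ON ERROR)
+ *
+ * where DEFAULT 0 is parsed via json_behavior_default and the trailing
+ * ERROR via json_behavior_error; js and '$.a' are arbitrary placeholders.
+ */
+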
+json_behavior_null: + NULL_P { $$ = makeJsonBehavior(JSON_BEHAVIOR_NULL, NULL); } + ; + +json_behavior_true: + TRUE_P { $$ = makeJsonBehavior(JSON_BEHAVIOR_TRUE, NULL); } + ; + +json_behavior_false: + FALSE_P { $$ = makeJsonBehavior(JSON_BEHAVIOR_FALSE, NULL); } + ; + +json_behavior_unknown: + UNKNOWN { $$ = makeJsonBehavior(JSON_BEHAVIOR_UNKNOWN, NULL); } + ; + +json_behavior_empty: + EMPTY_P { $$ = makeJsonBehavior(JSON_BEHAVIOR_EMPTY_OBJECT, NULL); } + ; + +json_behavior_empty_array: + EMPTY_P ARRAY { $$ = makeJsonBehavior(JSON_BEHAVIOR_EMPTY_ARRAY, NULL); } + ; + +json_behavior_empty_object: + EMPTY_P OBJECT_P { $$ = makeJsonBehavior(JSON_BEHAVIOR_EMPTY_OBJECT, NULL); } + ; + +json_behavior_default: + DEFAULT a_expr { $$ = makeJsonBehavior(JSON_BEHAVIOR_DEFAULT, $2); } + ; + + +json_value_behavior: + json_behavior_null + | json_behavior_error + | json_behavior_default + ; + +json_value_on_behavior_clause_opt: + json_value_behavior ON EMPTY_P + { $$.on_empty = $1; $$.on_error = NULL; } + | json_value_behavior ON EMPTY_P json_value_behavior ON ERROR_P + { $$.on_empty = $1; $$.on_error = $4; } + | json_value_behavior ON ERROR_P + { $$.on_empty = NULL; $$.on_error = $1; } + | /* EMPTY */ + { $$.on_empty = NULL; $$.on_error = NULL; } + ; + +json_query_expr: + JSON_QUERY '(' + json_api_common_syntax + json_output_clause_opt + json_wrapper_clause_opt + json_quotes_clause_opt + json_query_on_behavior_clause_opt + ')' + { + JsonFuncExpr *n = makeNode(JsonFuncExpr); + n->op = IS_JSON_QUERY; + n->common = (JsonCommon *) $3; + n->output = (JsonOutput *) $4; + n->wrapper = $5; + if (n->wrapper != JSW_NONE && $6 != JS_QUOTES_UNSPEC) + ereport(ERROR, + (errcode(ERRCODE_SYNTAX_ERROR), + errmsg("SQL/JSON QUOTES behavior shall not be specified when WITH WRAPPER is used"), + parser_errposition(@6))); + n->omit_quotes = $6 == JS_QUOTES_OMIT; + n->on_empty = $7.on_empty; + n->on_error = $7.on_error; + n->location = @1; + $$ = (Node *) n; + } + ; + +json_wrapper_clause_opt: + json_wrapper_behavior WRAPPER { $$ = $1; } + | /* EMPTY */ { $$ = 0; } + ; + +json_wrapper_behavior: + WITHOUT array_opt { $$ = JSW_NONE; } + | WITH json_conditional_or_unconditional_opt array_opt { $$ = $2; } + ; + +array_opt: + ARRAY { } + | /* EMPTY */ { } + ; + +json_conditional_or_unconditional_opt: + CONDITIONAL { $$ = JSW_CONDITIONAL; } + | UNCONDITIONAL { $$ = JSW_UNCONDITIONAL; } + | /* EMPTY */ { $$ = JSW_UNCONDITIONAL; } + ; + +json_quotes_clause_opt: + json_quotes_behavior QUOTES json_on_scalar_string_opt { $$ = $1; } + | /* EMPTY */ { $$ = JS_QUOTES_UNSPEC; } + ; + +json_quotes_behavior: + KEEP { $$ = JS_QUOTES_KEEP; } + | OMIT { $$ = JS_QUOTES_OMIT; } + ; + +json_on_scalar_string_opt: + ON SCALAR STRING { } + | /* EMPTY */ { } + ; + +json_query_behavior: + json_behavior_error + | json_behavior_null + | json_behavior_empty_array + | json_behavior_empty_object + ; + +json_query_on_behavior_clause_opt: + json_query_behavior ON EMPTY_P + { $$.on_empty = $1; $$.on_error = NULL; } + | json_query_behavior ON EMPTY_P json_query_behavior ON ERROR_P + { $$.on_empty = $1; $$.on_error = $4; } + | json_query_behavior ON ERROR_P + { $$.on_empty = NULL; $$.on_error = $1; } + | /* EMPTY */ + { $$.on_empty = NULL; $$.on_error = NULL; } + ; + +json_table: + JSON_TABLE '(' + json_api_common_syntax + json_table_columns_clause + json_table_plan_clause_opt + json_table_error_clause_opt + ')' + { + JsonTable *n = makeNode(JsonTable); + n->common = (JsonCommon *) $3; + n->columns = $4; + n->plan = (JsonTablePlan *) $5; + n->on_error = $6; + 
n->location = @1; + $$ = (Node *) n; + } + ; + +json_table_columns_clause: + COLUMNS '(' json_table_column_definition_list ')' { $$ = $3; } + ; + +json_table_column_definition_list: + json_table_column_definition + { $$ = list_make1($1); } + | json_table_column_definition_list ',' json_table_column_definition + { $$ = lappend($1, $3); } + ; + +json_table_column_definition: + json_table_ordinality_column_definition %prec json_table_column + | json_table_regular_column_definition %prec json_table_column + | json_table_formatted_column_definition %prec json_table_column + | json_table_nested_columns + ; + +json_table_ordinality_column_definition: + ColId FOR ORDINALITY + { + JsonTableColumn *n = makeNode(JsonTableColumn); + n->coltype = JTC_FOR_ORDINALITY; + n->name = $1; + n->location = @1; + $$ = (Node *) n; + } + ; + +json_table_regular_column_definition: + ColId Typename + json_table_column_path_specification_clause_opt + json_value_on_behavior_clause_opt + { + JsonTableColumn *n = makeNode(JsonTableColumn); + n->coltype = JTC_REGULAR; + n->name = $1; + n->typename = $2; + n->format.type = JS_FORMAT_DEFAULT; + n->format.encoding = JS_ENC_DEFAULT; + n->wrapper = JSW_NONE; + n->omit_quotes = false; + n->pathspec = $3; + n->on_empty = $4.on_empty; + n->on_error = $4.on_error; + n->location = @1; + $$ = (Node *) n; + } + ; + +json_table_error_behavior: + json_behavior_error + | json_behavior_empty + ; + +json_table_error_clause_opt: + json_table_error_behavior ON ERROR_P { $$ = $1; } + | /* EMPTY */ { $$ = NULL; } + ; + +json_table_column_path_specification_clause_opt: + PATH json_path_specification { $$ = $2; } + | /* EMPTY */ %prec json_table_column { $$ = NULL; } + ; + +json_table_formatted_column_definition: + ColId Typename FORMAT json_representation + json_table_column_path_specification_clause_opt + json_wrapper_clause_opt + json_quotes_clause_opt + json_query_on_behavior_clause_opt + { + JsonTableColumn *n = makeNode(JsonTableColumn); + n->coltype = JTC_FORMATTED; + n->name = $1; + n->typename = $2; + n->format = $4; + n->pathspec = $5; + n->wrapper = $6; + if (n->wrapper != JSW_NONE && $7 != JS_QUOTES_UNSPEC) + ereport(ERROR, + (errcode(ERRCODE_SYNTAX_ERROR), + errmsg("SQL/JSON QUOTES behavior shall not be specified when WITH WRAPPER is used"), + parser_errposition(@7))); + n->omit_quotes = $7 == JS_QUOTES_OMIT; + n->on_empty = $8.on_empty; + n->on_error = $8.on_error; + n->location = @1; + $$ = (Node *) n; + } + ; + +json_table_nested_columns: + NESTED path_opt json_path_specification + json_as_path_name_clause_opt + json_table_columns_clause + { + JsonTableColumn *n = makeNode(JsonTableColumn); + n->coltype = JTC_NESTED; + n->pathspec = $3; + n->pathname = $4; + n->columns = $5; + n->location = @1; + $$ = (Node *) n; + } + ; + +path_opt: + PATH { } + | /* EMPTY */ { } + ; + +json_table_plan_clause_opt: + json_table_specific_plan { $$ = $1; } + | json_table_default_plan { $$ = $1; } + | /* EMPTY */ { $$ = NULL; } + ; + +json_table_specific_plan: + PLAN '(' json_table_plan ')' { $$ = $3; } + ; + +json_table_plan: + json_table_plan_simple + | json_table_plan_parent_child + | json_table_plan_sibling + ; + +json_table_plan_simple: + json_table_path_name + { + JsonTablePlan *n = makeNode(JsonTablePlan); + n->plan_type = JSTP_SIMPLE; + n->pathname = $1; + n->location = @1; + $$ = (Node *) n; + } + ; + +json_table_plan_parent_child: + json_table_plan_outer + | json_table_plan_inner + ; + +json_table_plan_outer: + json_table_plan_simple OUTER_P json_table_plan_primary + { $$ = 
makeJsonTableJoinedPlan(JSTP_OUTER, $1, $3, @1); } + ; + +json_table_plan_inner: + json_table_plan_simple INNER_P json_table_plan_primary + { $$ = makeJsonTableJoinedPlan(JSTP_INNER, $1, $3, @1); } + ; + +json_table_plan_sibling: + json_table_plan_union + | json_table_plan_cross + ; + +json_table_plan_union: + json_table_plan_primary UNION json_table_plan_primary + { $$ = makeJsonTableJoinedPlan(JSTP_UNION, $1, $3, @1); } + | json_table_plan_union UNION json_table_plan_primary + { $$ = makeJsonTableJoinedPlan(JSTP_UNION, $1, $3, @1); } + ; + +json_table_plan_cross: + json_table_plan_primary CROSS json_table_plan_primary + { $$ = makeJsonTableJoinedPlan(JSTP_CROSS, $1, $3, @1); } + | json_table_plan_cross CROSS json_table_plan_primary + { $$ = makeJsonTableJoinedPlan(JSTP_CROSS, $1, $3, @1); } + ; + +json_table_plan_primary: + json_table_plan_simple { $$ = $1; } + | '(' json_table_plan ')' + { + castNode(JsonTablePlan, $2)->location = @1; + $$ = $2; + } + ; + +json_table_default_plan: + PLAN DEFAULT '(' json_table_default_plan_choices ')' + { + JsonTablePlan *n = makeNode(JsonTablePlan); + n->plan_type = JSTP_DEFAULT; + n->join_type = $4; + n->location = @1; + $$ = (Node *) n; + } + ; + +json_table_default_plan_choices: + json_table_default_plan_inner_outer { $$ = $1; } + | json_table_default_plan_inner_outer ',' + json_table_default_plan_union_cross { $$ = $1 | $3; } + | json_table_default_plan_union_cross { $$ = $1; } + | json_table_default_plan_union_cross ',' + json_table_default_plan_inner_outer { $$ = $1 | $3; } + ; + +json_table_default_plan_inner_outer: + INNER_P { $$ = JSTP_INNER; } + | OUTER_P { $$ = JSTP_OUTER; } + ; + +json_table_default_plan_union_cross: + UNION { $$ = JSTP_UNION; } + | CROSS { $$ = JSTP_CROSS; } + ; + +json_output_clause_opt: + RETURNING Typename json_format_clause_opt + { + JsonOutput *n = makeNode(JsonOutput); + n->typename = $2; + n->returning.format = $3; + $$ = (Node *) n; + } + | /* EMPTY */ { $$ = NULL; } + ; + +json_exists_predicate: + JSON_EXISTS '(' + json_api_common_syntax + json_exists_error_clause_opt + ')' + { + JsonFuncExpr *p = makeNode(JsonFuncExpr); + p->op = IS_JSON_EXISTS; + p->common = (JsonCommon *) $3; + p->on_error = $4; + p->location = @1; + $$ = (Node *) p; + } + ; + +json_exists_error_clause_opt: + json_exists_error_behavior ON ERROR_P { $$ = $1; } + | /* EMPTY */ { $$ = NULL; } + ; + +json_exists_error_behavior: + json_behavior_error + | json_behavior_true + | json_behavior_false + | json_behavior_unknown + ; + +json_value_constructor: + json_object_constructor + | json_array_constructor + ; + +json_object_constructor: + JSON_OBJECT '(' json_object_args ')' + { + $$ = $3; + } + ; + +json_object_args: + json_object_ctor_args_opt + | json_object_func_args + ; + +json_object_func_args: + func_arg_list + { + List *func = list_make1(makeString("json_object")); + $$ = (Node *) makeFuncCall(func, $1, @1); + } + ; + +json_object_ctor_args_opt: + json_object_constructor_args_opt json_output_clause_opt + { + JsonObjectCtor *n = (JsonObjectCtor *) $1; + n->output = (JsonOutput *) $2; + n->location = @1; + $$ = (Node *) n; + } + ; + +json_object_constructor_args_opt: + json_name_and_value_list + json_object_constructor_null_clause_opt + json_key_uniqueness_constraint_opt + { + JsonObjectCtor *n = makeNode(JsonObjectCtor); + n->exprs = $1; + n->absent_on_null = $2; + n->unique = $3; + $$ = (Node *) n; + } + | /* EMPTY */ + { + JsonObjectCtor *n = makeNode(JsonObjectCtor); + n->exprs = NULL; + n->absent_on_null = FALSE; + n->unique = FALSE; + $$ 
= (Node *) n; + } + ; + +json_name_and_value_list: + json_name_and_value + { $$ = list_make1($1); } + | json_name_and_value_list ',' json_name_and_value + { $$ = lappend($1, $3); } + ; + +json_name_and_value: +/* TODO + KEY c_expr VALUE_P json_value_expr %prec POSTFIXOP + { $$ = makeJsonKeyValue($2, $4); } + | +*/ + c_expr VALUE_P json_value_expr + { $$ = makeJsonKeyValue($1, $3); } + | + a_expr ':' json_value_expr + { $$ = makeJsonKeyValue($1, $3); } + ; + +json_object_constructor_null_clause_opt: + NULL_P ON NULL_P { $$ = FALSE; } + | ABSENT ON NULL_P { $$ = TRUE; } + | /* EMPTY */ { $$ = FALSE; } + ; + +json_array_constructor: + JSON_ARRAY '(' + json_value_expr_list + json_array_constructor_null_clause_opt + json_output_clause_opt + ')' + { + JsonArrayCtor *n = makeNode(JsonArrayCtor); + n->exprs = $3; + n->absent_on_null = $4; + n->output = (JsonOutput *) $5; + n->location = @1; + $$ = (Node *) n; + } + | JSON_ARRAY '(' + select_no_parens + /* json_format_clause_opt */ + /* json_array_constructor_null_clause_opt */ + json_output_clause_opt + ')' + { + JsonArrayQueryCtor *n = makeNode(JsonArrayQueryCtor); + n->query = $3; + /* n->format = $4; */ + n->absent_on_null = true /* $5 */; + n->output = (JsonOutput *) $4; + n->location = @1; + $$ = (Node *) n; + } + | JSON_ARRAY '(' + json_output_clause_opt + ')' + { + JsonArrayCtor *n = makeNode(JsonArrayCtor); + n->exprs = NIL; + n->output = (JsonOutput *) $3; + n->location = @1; + $$ = (Node *) n; + } + ; + +json_value_expr_list: + json_value_expr { $$ = list_make1($1); } + | json_value_expr_list ',' json_value_expr { $$ = lappend($1, $3);} + ; + +json_array_constructor_null_clause_opt: + NULL_P ON NULL_P { $$ = FALSE; } + | ABSENT ON NULL_P { $$ = TRUE; } + | /* EMPTY */ { $$ = TRUE; } + ; + +json_aggregate_func: + json_object_aggregate_constructor + | json_array_aggregate_constructor + ; + +json_object_aggregate_constructor: + JSON_OBJECTAGG '(' + json_name_and_value + json_object_constructor_null_clause_opt + json_key_uniqueness_constraint_opt + json_output_clause_opt + ')' + { + JsonObjectAgg *n = makeNode(JsonObjectAgg); + n->arg = (JsonKeyValue *) $3; + n->absent_on_null = $4; + n->unique = $5; + n->ctor.output = (JsonOutput *) $6; + n->ctor.agg_order = NULL; + n->ctor.location = @1; + $$ = (Node *) n; + } + ; + +json_array_aggregate_constructor: + JSON_ARRAYAGG '(' + json_value_expr + json_array_aggregate_order_by_clause_opt + json_array_constructor_null_clause_opt + json_output_clause_opt + ')' + { + JsonArrayAgg *n = makeNode(JsonArrayAgg); + n->arg = (JsonValueExpr *) $3; + n->ctor.agg_order = $4; + n->absent_on_null = $5; + n->ctor.output = (JsonOutput *) $6; + n->ctor.location = @1; + $$ = (Node *) n; + } + ; + +json_array_aggregate_order_by_clause_opt: + ORDER BY sortby_list { $$ = $3; } + | /* EMPTY */ { $$ = NIL; } + ; /***************************************************************************** * @@ -14597,6 +15515,7 @@ ColLabel: IDENT { $$ = $1; } */ unreserved_keyword: ABORT_P + | ABSENT | ABSOLUTE_P | ACCESS | ACTION @@ -14632,6 +15551,7 @@ unreserved_keyword: | COMMENTS | COMMIT | COMMITTED + | CONDITIONAL | CONFIGURATION | CONFLICT | CONNECTION @@ -14667,10 +15587,12 @@ unreserved_keyword: | DOUBLE_P | DROP | EACH + | EMPTY_P | ENABLE_P | ENCODING | ENCRYPTED | ENUM_P + | ERROR_P | ESCAPE | EVENT | EXCLUDE @@ -14714,7 +15636,10 @@ unreserved_keyword: | INSTEAD | INVOKER | ISOLATION + | JSON + | KEEP | KEY + | KEYS | LABEL | LANGUAGE | LARGE_P @@ -14740,6 +15665,7 @@ unreserved_keyword: | MOVE | NAME_P | NAMES + | NESTED 
| NEW | NEXT | NO @@ -14752,6 +15678,7 @@ unreserved_keyword: | OFF | OIDS | OLD + | OMIT | OPERATOR | OPTION | OPTIONS @@ -14766,6 +15693,8 @@ unreserved_keyword: | PARTITION | PASSING | PASSWORD + | PATH + | PLAN | PLANS | POLICY | PRECEDING @@ -14779,6 +15708,7 @@ unreserved_keyword: | PROGRAM | PUBLICATION | QUOTE + | QUOTES | RANGE | READ | REASSIGN @@ -14805,6 +15735,7 @@ unreserved_keyword: | ROWS | RULE | SAVEPOINT + | SCALAR | SCHEMA | SCHEMAS | SCROLL @@ -14852,6 +15783,7 @@ unreserved_keyword: | TYPES_P | UNBOUNDED | UNCOMMITTED + | UNCONDITIONAL | UNENCRYPTED | UNKNOWN | UNLISTEN @@ -14909,6 +15841,14 @@ col_name_keyword: | INT_P | INTEGER | INTERVAL + | JSON_ARRAY + | JSON_ARRAYAGG + | JSON_EXISTS + | JSON_OBJECT + | JSON_OBJECTAGG + | JSON_QUERY + | JSON_TABLE + | JSON_VALUE | LEAST | NATIONAL | NCHAR @@ -14923,6 +15863,7 @@ col_name_keyword: | ROW | SETOF | SMALLINT + | STRING | SUBSTRING | TIME | TIMESTAMP @@ -14960,6 +15901,7 @@ type_func_name_keyword: | CONCURRENTLY | CROSS | CURRENT_SCHEMA + | FORMAT | FREEZE | FULL | ILIKE diff --git a/src/backend/parser/parse_clause.c b/src/backend/parser/parse_clause.c index af99e65aa7..d9b122c307 100644 --- a/src/backend/parser/parse_clause.c +++ b/src/backend/parser/parse_clause.c @@ -43,10 +43,21 @@ #include "parser/parse_target.h" #include "parser/parse_type.h" #include "rewrite/rewriteManip.h" +#include "utils/builtins.h" #include "utils/guc.h" +#include "utils/json.h" #include "utils/lsyscache.h" #include "utils/rel.h" +/* Context for JSON_TABLE transformation */ +typedef struct JsonTableContext +{ + JsonTable *table; /* untransformed node */ + TableFunc *tablefunc; /* transformed node */ + List *pathNames; /* list of all path and columns names */ + int pathNameId; /* path name id counter */ + Oid contextItemTypid; /* type oid of context item (json/jsonb) */ +} JsonTableContext; /* Convenience macro for the most common makeNamespaceItem() case */ #define makeDefaultNSItem(rte) makeNamespaceItem(rte, true, true, false, true) @@ -96,6 +107,10 @@ static List *addTargetToGroupList(ParseState *pstate, TargetEntry *tle, static WindowClause *findWindowClause(List *wclist, const char *name); static Node *transformFrameOffset(ParseState *pstate, int frameOptions, Node *clause); +static JsonTableParentNode * transformJsonTableColumns(ParseState *pstate, + JsonTableContext *cxt, JsonTablePlan *plan, + List *columns, char *pathSpec, char **pathName, + int location); /* @@ -753,6 +768,8 @@ transformRangeTableFunc(ParseState *pstate, RangeTableFunc *rtf) Assert(!pstate->p_lateral_active); pstate->p_lateral_active = true; + tf->functype = TFT_XMLTABLE; + /* Transform and apply typecast to the row-generating expression ... */ Assert(rtf->rowexpr != NULL); tf->rowexpr = coerce_to_specific_type(pstate, @@ -1072,6 +1089,620 @@ getRTEForSpecialRelationTypes(ParseState *pstate, RangeVar *rv) return rte; } +/* + * Transform JSON_TABLE column + * - regular column into JSON_VALUE() + * - formatted column into JSON_QUERY() + */ +static Node * +transformJsonTableColumn(JsonTableColumn *jtc, Node *contextItemExpr, + List *passingArgs, bool errorOnError) +{ + JsonFuncExpr *jfexpr = makeNode(JsonFuncExpr); + JsonValueExpr *jvexpr = makeNode(JsonValueExpr); + JsonCommon *common = makeNode(JsonCommon); + JsonOutput *output = makeNode(JsonOutput); + + jfexpr->op = jtc->coltype == JTC_REGULAR ? 
IS_JSON_VALUE : IS_JSON_QUERY; + jfexpr->common = common; + jfexpr->output = output; + jfexpr->on_empty = jtc->on_empty; + jfexpr->on_error = jtc->on_error; + if (!jfexpr->on_error && errorOnError) + jfexpr->on_error = makeJsonBehavior(JSON_BEHAVIOR_ERROR, NULL); + jfexpr->omit_quotes = jtc->omit_quotes; + jfexpr->wrapper = jtc->wrapper; + jfexpr->location = jtc->location; + + output->typename = jtc->typename; + output->returning.format = jtc->format; + + common->pathname = NULL; + common->expr = jvexpr; + common->passing = passingArgs; + + if (jtc->pathspec) + common->pathspec = jtc->pathspec; + else + { + /* Construct default path as '$."column_name"' */ + StringInfoData path; + + initStringInfo(&path); + + appendStringInfoString(&path, "$."); + escape_json(&path, jtc->name); + + common->pathspec = path.data; + } + + jvexpr->expr = (Expr *) contextItemExpr; + jvexpr->format.type = JS_FORMAT_DEFAULT; + jvexpr->format.encoding = JS_ENC_DEFAULT; + + return (Node *) jfexpr; +} + +static bool +isJsonTablePathNameDuplicate(JsonTableContext *cxt, const char *pathname) +{ + ListCell *lc; + + foreach(lc, cxt->pathNames) + { + if (!strcmp(pathname, (const char *) lfirst(lc))) + return true; + } + + return false; +} + +/* Recursively register column name in the path name list. */ +static void +registerJsonTableColumn(JsonTableContext *cxt, char *colname) +{ + if (isJsonTablePathNameDuplicate(cxt, colname)) + ereport(ERROR, + (errcode(ERRCODE_DUPLICATE_ALIAS), + errmsg("duplicate JSON_TABLE column name: %s", colname), + errhint("JSON_TABLE path names and column names shall be " + "distinct from one another"))); + + cxt->pathNames = lappend(cxt->pathNames, colname); +} + +/* Recursively register all nested column names in the path name list. */ +static void +registerAllJsonTableColumns(JsonTableContext *cxt, List *columns) +{ + ListCell *lc; + + foreach(lc, columns) + { + JsonTableColumn *jtc = castNode(JsonTableColumn, lfirst(lc)); + + if (jtc->coltype == JTC_NESTED) + { + if (jtc->pathname) + registerJsonTableColumn(cxt, jtc->pathname); + + registerAllJsonTableColumns(cxt, jtc->columns); + } + else + { + registerJsonTableColumn(cxt, jtc->name); + } + } +} + +/* Generate a new unique JSON_TABLE path name. */ +static char * +generateJsonTablePathName(JsonTableContext *cxt) +{ + char namebuf[32]; + char *name = namebuf; + + do + { + snprintf(namebuf, sizeof(namebuf), "json_table_path_%d", + ++cxt->pathNameId); + } while (isJsonTablePathNameDuplicate(cxt, name)); + + name = pstrdup(name); + cxt->pathNames = lappend(cxt->pathNames, name); + + return name; +} + +/* Collect sibling path names from plan to the specified list. 
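+ *
+ * For example (illustrative only): given the plan fragment
+ * (p1 UNION p2) CROSS p3, the collected sibling path names are p1, p2, p3.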
+ */
+static void
+collectSiblingPathsInJsonTablePlan(JsonTablePlan *plan, List **paths)
+{
+    if (plan->plan_type == JSTP_SIMPLE)
+        *paths = lappend(*paths, plan->pathname);
+    else if (plan->plan_type == JSTP_JOINED)
+    {
+        if (plan->join_type == JSTP_INNER ||
+            plan->join_type == JSTP_OUTER)
+        {
+            Assert(plan->plan1->plan_type == JSTP_SIMPLE);
+            *paths = lappend(*paths, plan->plan1->pathname);
+        }
+        else if (plan->join_type == JSTP_CROSS ||
+                 plan->join_type == JSTP_UNION)
+        {
+            collectSiblingPathsInJsonTablePlan(plan->plan1, paths);
+            collectSiblingPathsInJsonTablePlan(plan->plan2, paths);
+        }
+        else
+            elog(ERROR, "invalid JSON_TABLE join type %d",
+                 plan->join_type);
+    }
+}
+
+/*
+ * Validate child JSON_TABLE plan by checking that:
+ * - all nested columns have path names specified
+ * - all nested columns have corresponding node in the sibling plan
+ * - plan does not contain duplicate or extra nodes
+ */
+static void
+validateJsonTableChildPlan(ParseState *pstate, JsonTablePlan *plan,
+                           List *columns)
+{
+    ListCell   *lc1;
+    List       *siblings = NIL;
+    int         nchilds = 0;
+
+    if (plan)
+        collectSiblingPathsInJsonTablePlan(plan, &siblings);
+
+    foreach(lc1, columns)
+    {
+        JsonTableColumn *jtc = castNode(JsonTableColumn, lfirst(lc1));
+
+        if (jtc->coltype == JTC_NESTED)
+        {
+            ListCell   *lc2;
+            bool        found = false;
+
+            if (!jtc->pathname)
+                ereport(ERROR,
+                        (errcode(ERRCODE_SYNTAX_ERROR),
+                         errmsg("nested JSON_TABLE columns shall contain "
+                                "explicit AS pathname specification if "
+                                "explicit PLAN clause is used"),
+                         parser_errposition(pstate, jtc->location)));
+
+            /* find nested path name in the list of sibling path names */
+            foreach(lc2, siblings)
+            {
+                if ((found = !strcmp(jtc->pathname, lfirst(lc2))))
+                    break;
+            }
+
+            if (!found)
+                ereport(ERROR,
+                        (errcode(ERRCODE_SYNTAX_ERROR),
+                         errmsg("invalid JSON_TABLE plan"),
+                         errdetail("plan node for nested path %s "
+                                   "was not found in plan", jtc->pathname),
+                         parser_errposition(pstate, jtc->location)));
+
+            nchilds++;
+        }
+    }
+
+    if (list_length(siblings) > nchilds)
+        ereport(ERROR,
+                (errcode(ERRCODE_SYNTAX_ERROR),
+                 errmsg("invalid JSON_TABLE plan"),
+                 errdetail("plan node contains some extra or "
+                           "duplicate sibling nodes"),
+                 parser_errposition(pstate, plan ? plan->location : -1)));
+}
+
+static JsonTableColumn *
+findNestedJsonTableColumn(List *columns, const char *pathname)
+{
+    ListCell   *lc;
+
+    foreach(lc, columns)
+    {
+        JsonTableColumn *jtc = castNode(JsonTableColumn, lfirst(lc));
+
+        if (jtc->coltype == JTC_NESTED &&
+            jtc->pathname &&
+            !strcmp(jtc->pathname, pathname))
+            return jtc;
+    }
+
+    return NULL;
+}
+
+static Node *
+transformNestedJsonTableColumn(ParseState *pstate, JsonTableContext *cxt,
+                               JsonTableColumn *jtc, JsonTablePlan *plan)
+{
+    JsonTableParentNode *node;
+    char       *pathname = jtc->pathname;
+
+    node = transformJsonTableColumns(pstate, cxt, plan,
+                                     jtc->columns, jtc->pathspec,
+                                     &pathname, jtc->location);
+    node->name = pstrdup(pathname);
+
+    return (Node *) node;
+}
+
+static Node *
+makeJsonTableSiblingJoin(bool cross, Node *lnode, Node *rnode)
+{
+    JsonTableSiblingNode *join = makeNode(JsonTableSiblingNode);
+
+    join->larg = lnode;
+    join->rarg = rnode;
+    join->cross = cross;
+
+    return (Node *) join;
+}
+
+/*
+ * Recursively transform child JSON_TABLE plan.
+ *
+ * Default plan is transformed into a cross/union join of its nested columns.
+ * Simple and outer/inner plans are transformed into a JsonTableParentNode by
+ * finding and transforming the corresponding nested column.
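+ * (For illustration only: a child plan written as p1 OUTER (p2 UNION p3)
+ * resolves the nested column whose AS path name is p1, while p2 and p3 are
+ * handled one level deeper.)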
+ * Sibling plans are recursively transformed into a JsonTableSiblingNode.
+ */
+static Node *
+transformJsonTableChildPlan(ParseState *pstate, JsonTableContext *cxt,
+                            JsonTablePlan *plan, List *columns)
+{
+    JsonTableColumn *jtc = NULL;
+
+    if (!plan || plan->plan_type == JSTP_DEFAULT)
+    {
+        /* unspecified or default plan */
+        Node       *res = NULL;
+        ListCell   *lc;
+        bool        cross = plan && (plan->join_type & JSTP_CROSS);
+
+        /* transform all nested columns into cross/union join */
+        foreach(lc, columns)
+        {
+            JsonTableColumn *jtc = castNode(JsonTableColumn, lfirst(lc));
+            Node       *node;
+
+            if (jtc->coltype != JTC_NESTED)
+                continue;
+
+            node = transformNestedJsonTableColumn(pstate, cxt, jtc, plan);
+
+            /* join transformed node with previous sibling nodes */
+            res = res ? makeJsonTableSiblingJoin(cross, res, node) : node;
+        }
+
+        return res;
+    }
+    else if (plan->plan_type == JSTP_SIMPLE)
+    {
+        jtc = findNestedJsonTableColumn(columns, plan->pathname);
+    }
+    else if (plan->plan_type == JSTP_JOINED)
+    {
+        if (plan->join_type == JSTP_INNER ||
+            plan->join_type == JSTP_OUTER)
+        {
+            Assert(plan->plan1->plan_type == JSTP_SIMPLE);
+            jtc = findNestedJsonTableColumn(columns, plan->plan1->pathname);
+        }
+        else
+        {
+            Node       *node1 =
+                transformJsonTableChildPlan(pstate, cxt, plan->plan1, columns);
+            Node       *node2 =
+                transformJsonTableChildPlan(pstate, cxt, plan->plan2, columns);
+
+            return makeJsonTableSiblingJoin(plan->join_type == JSTP_CROSS,
+                                            node1, node2);
+        }
+    }
+    else
+        elog(ERROR, "invalid JSON_TABLE plan type %d", plan->plan_type);
+
+    if (!jtc)
+        ereport(ERROR,
+                (errcode(ERRCODE_SYNTAX_ERROR),
+                 errmsg("invalid JSON_TABLE plan"),
+                 errdetail("path name %s was not found in nested columns list",
+                           plan->pathname),
+                 parser_errposition(pstate, plan->location)));
+
+    return transformNestedJsonTableColumn(pstate, cxt, jtc, plan);
+}
+
+/* Append transformed non-nested JSON_TABLE columns to the TableFunc node */
+static void
+appendJsonTableColumns(ParseState *pstate, JsonTableContext *cxt, List *columns)
+{
+    JsonTable  *jt = cxt->table;
+    TableFunc  *tf = cxt->tablefunc;
+    bool        errorOnError = jt->on_error &&
+        jt->on_error->btype == JSON_BEHAVIOR_ERROR;
+    ListCell   *col;
+
+    foreach(col, columns)
+    {
+        JsonTableColumn *rawc = castNode(JsonTableColumn, lfirst(col));
+        Oid         typid;
+        int32       typmod;
+        Node       *colexpr;
+
+        if (rawc->name)
+        {
+            /* make sure column names are unique */
+            ListCell   *colname;
+
+            foreach(colname, tf->colnames)
+                if (!strcmp(strVal(lfirst(colname)), rawc->name))
+                    ereport(ERROR,
+                            (errcode(ERRCODE_SYNTAX_ERROR),
+                             errmsg("column name \"%s\" is not unique",
+                                    rawc->name),
+                             parser_errposition(pstate, rawc->location)));
+
+            tf->colnames = lappend(tf->colnames,
+                                   makeString(pstrdup(rawc->name)));
+        }
+
+        /*
+         * Determine the type and typmod for the new column. FOR
+         * ORDINALITY columns are INTEGER by standard; the others are
+         * user-specified.
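+         * (For example, an illustrative column declared as id int PATH '$.id'
+         * gets its type from the JSON_VALUE() expression constructed for it
+         * below, i.e. integer.)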
+ */ + switch (rawc->coltype) + { + case JTC_FOR_ORDINALITY: + colexpr = NULL; + typid = INT4OID; + typmod = -1; + break; + + case JTC_REGULAR: + case JTC_FORMATTED: + { + Node *je; + CaseTestExpr *param = makeNode(CaseTestExpr); + + param->collation = InvalidOid; + param->typeId = cxt->contextItemTypid; + param->typeMod = -1; + + je = transformJsonTableColumn(rawc, (Node *) param, + NIL, errorOnError); + + colexpr = transformExpr(pstate, je, EXPR_KIND_FROM_FUNCTION); + assign_expr_collations(pstate, colexpr); + + typid = exprType(colexpr); + typmod = exprTypmod(colexpr); + break; + } + + case JTC_NESTED: + continue; + + default: + elog(ERROR, "unknown JSON_TABLE column type: %d", rawc->coltype); + break; + } + + tf->coltypes = lappend_oid(tf->coltypes, typid); + tf->coltypmods = lappend_int(tf->coltypmods, typmod); + tf->colcollations = lappend_oid(tf->colcollations, + type_is_collatable(typid) + ? DEFAULT_COLLATION_OID + : InvalidOid); + tf->colvalexprs = lappend(tf->colvalexprs, colexpr); + } +} + +/* + * Create transformed JSON_TABLE parent plan node by appending all non-nested + * columns to the TableFunc node and remembering their indices in the + * colvalexprs list. + */ +static JsonTableParentNode * +makeParentJsonTableNode(ParseState *pstate, JsonTableContext *cxt, + char *pathSpec, List *columns) +{ + JsonTableParentNode *node = makeNode(JsonTableParentNode); + + node->path = makeConst(JSONPATHOID, -1, InvalidOid, -1, + DirectFunctionCall1(jsonpath_in, + CStringGetDatum(pathSpec)), + false, false); + + /* save start of column range */ + node->colMin = list_length(cxt->tablefunc->colvalexprs); + + appendJsonTableColumns(pstate, cxt, columns); + + /* save end of column range */ + node->colMax = list_length(cxt->tablefunc->colvalexprs) - 1; + + node->errorOnError = + cxt->table->on_error && + cxt->table->on_error->btype == JSON_BEHAVIOR_ERROR; + + return node; +} + +static JsonTableParentNode * +transformJsonTableColumns(ParseState *pstate, JsonTableContext *cxt, + JsonTablePlan *plan, List *columns, + char *pathSpec, char **pathName, int location) +{ + JsonTableParentNode *node; + JsonTablePlan *childPlan; + bool defaultPlan = !plan || plan->plan_type == JSTP_DEFAULT; + + if (!*pathName) + { + if (cxt->table->plan) + ereport(ERROR, + (errcode(ERRCODE_SYNTAX_ERROR), + errmsg("invalid JSON_TABLE expression"), + errdetail("JSON_TABLE columns shall contain " + "explicit AS pathname specification if " + "explicit PLAN clause is used"), + parser_errposition(pstate, location))); + + *pathName = generateJsonTablePathName(cxt); + } + + if (defaultPlan) + childPlan = plan; + else + { + /* validate parent and child plans */ + JsonTablePlan *parentPlan = + plan->plan_type == JSTP_JOINED ? 
plan->plan1 : plan; + + if (strcmp(parentPlan->pathname, *pathName)) + ereport(ERROR, + (errcode(ERRCODE_SYNTAX_ERROR), + errmsg("invalid JSON_TABLE plan"), + errdetail("path name mismatch: expected %s but %s is given", + *pathName, parentPlan->pathname), + parser_errposition(pstate, plan->location))); + + if (plan->plan_type == JSTP_JOINED) + { + if (plan->join_type != JSTP_INNER && + plan->join_type != JSTP_OUTER) + ereport(ERROR, + (errcode(ERRCODE_SYNTAX_ERROR), + errmsg("invalid JSON_TABLE plan"), + errdetail("expected INNER or OUTER JSON_TABLE plan node"), + parser_errposition(pstate, plan->location))); + + childPlan = plan->plan2; + } + else + childPlan = NULL; + + validateJsonTableChildPlan(pstate, childPlan, columns); + } + + /* transform only non-nested columns */ + node = makeParentJsonTableNode(pstate, cxt, pathSpec, columns); + node->name = pstrdup(*pathName); + + if (childPlan || defaultPlan) + { + /* transform recursively nested columns */ + node->child = transformJsonTableChildPlan(pstate, cxt, childPlan, + columns); + if (node->child) + node->outerJoin = !plan || (plan->join_type & JSTP_OUTER); + /* else: default plan case, no children found */ + } + + return node; +} + +/* + * transformJsonTable - + * Transform a raw JsonTable into TableFunc. + * + * Transform the document-generating expression, the row-generating expression, + * the column-generating expressions, and the default value expressions. + */ +static RangeTblEntry * +transformJsonTable(ParseState *pstate, JsonTable *jt) +{ + JsonTableContext cxt; + TableFunc *tf = makeNode(TableFunc); + JsonFuncExpr *jfe = makeNode(JsonFuncExpr); + JsonCommon *jscommon; + JsonTablePlan *plan = jt->plan; + char *rootPathName = jt->common->pathname; + bool is_lateral; + + cxt.table = jt; + cxt.tablefunc = tf; + cxt.pathNames = NIL; + cxt.pathNameId = 0; + + if (rootPathName) + registerJsonTableColumn(&cxt, rootPathName); + + registerAllJsonTableColumns(&cxt, jt->columns); + + if (plan && plan->plan_type != JSTP_DEFAULT && !rootPathName) + { + /* Assign root path name and create corresponding plan node */ + JsonTablePlan *rootNode = makeNode(JsonTablePlan); + JsonTablePlan *rootPlan = (JsonTablePlan *) + makeJsonTableJoinedPlan(JSTP_OUTER, (Node *) rootNode, + (Node *) plan, jt->location); + + rootPathName = generateJsonTablePathName(&cxt); + + rootNode->plan_type = JSTP_SIMPLE; + rootNode->pathname = rootPathName; + + plan = rootPlan; + } + + jscommon = copyObject(jt->common); + jscommon->pathspec = pstrdup("$"); + + jfe->op = IS_JSON_TABLE; + jfe->common = jscommon; + jfe->on_error = jt->on_error; + jfe->location = jt->common->location; + + /* + * We make lateral_only names of this level visible, whether or not the + * RangeTableFunc is explicitly marked LATERAL. This is needed for SQL + * spec compliance and seems useful on convenience grounds for all + * functions in FROM. + * + * (LATERAL can't nest within a single pstate level, so we don't need + * save/restore logic here.) 
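+ *
+ * For example (illustrative), in
+ *     SELECT jt.* FROM t, JSON_TABLE(t.js, '$[*]' COLUMNS (a int)) jt
+ * the reference to t.js must resolve even though LATERAL is not written.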
+ */ + Assert(!pstate->p_lateral_active); + pstate->p_lateral_active = true; + + tf->functype = TFT_JSON_TABLE; + tf->docexpr = transformExpr(pstate, (Node *) jfe, EXPR_KIND_FROM_FUNCTION); + + cxt.contextItemTypid = exprType(tf->docexpr); + + tf->plan = (Node *) transformJsonTableColumns(pstate, &cxt, plan, + jt->columns, + jt->common->pathspec, + &rootPathName, + jt->common->location); + + tf->ordinalitycol = -1; /* undefine ordinality column number */ + tf->location = jt->location; + + pstate->p_lateral_active = false; + + /* + * Mark the RTE as LATERAL if the user said LATERAL explicitly, or if + * there are any lateral cross-references in it. + */ + is_lateral = jt->lateral || contain_vars_of_level((Node *) tf, 0); + + return addRangeTableEntryForTableFunc(pstate, + tf, jt->alias, is_lateral, true); +} + /* * transformFromClauseItem - * Transform a FROM-clause item, adding any required entries to the @@ -1204,6 +1835,31 @@ transformFromClauseItem(ParseState *pstate, Node *n, rte->tablesample = transformRangeTableSample(pstate, rts); return (Node *) rtr; } + else if (IsA(n, JsonTable)) + { + /* JsonTable is transformed into RangeSubselect */ + /* + JsonTable *jt = castNode(JsonTable, n); + RangeSubselect *subselect = transformJsonTable(pstate, jt); + + return transformFromClauseItem(pstate, (Node *) subselect, + top_rte, top_rti, namespace); + */ + RangeTblRef *rtr; + RangeTblEntry *rte; + int rtindex; + + rte = transformJsonTable(pstate, (JsonTable *) n); + /* assume new rte is at end */ + rtindex = list_length(pstate->p_rtable); + Assert(rte == rt_fetch(rtindex, pstate->p_rtable)); + *top_rte = rte; + *top_rti = rtindex; + *namespace = list_make1(makeDefaultNSItem(rte)); + rtr = makeNode(RangeTblRef); + rtr->rtindex = rtindex; + return (Node *) rtr; + } else if (IsA(n, JoinExpr)) { /* A newfangled join expression */ diff --git a/src/backend/parser/parse_expr.c b/src/backend/parser/parse_expr.c index 1aaa5244e6..848e525b44 100644 --- a/src/backend/parser/parse_expr.c +++ b/src/backend/parser/parse_expr.c @@ -15,6 +15,8 @@ #include "postgres.h" +#include "catalog/pg_aggregate.h" +#include "catalog/pg_proc.h" #include "catalog/pg_type.h" #include "commands/dbcommands.h" #include "miscadmin.h" @@ -35,6 +37,7 @@ #include "parser/parse_agg.h" #include "utils/builtins.h" #include "utils/date.h" +#include "utils/fmgroids.h" #include "utils/lsyscache.h" #include "utils/timestamp.h" #include "utils/xml.h" @@ -121,6 +124,15 @@ static Node *transformWholeRowRef(ParseState *pstate, RangeTblEntry *rte, static Node *transformIndirection(ParseState *pstate, A_Indirection *ind); static Node *transformTypeCast(ParseState *pstate, TypeCast *tc); static Node *transformCollateClause(ParseState *pstate, CollateClause *c); +static Node *transformJsonObjectCtor(ParseState *pstate, JsonObjectCtor *ctor); +static Node *transformJsonArrayCtor(ParseState *pstate, JsonArrayCtor *ctor); +static Node *transformJsonArrayQueryCtor(ParseState *pstate, + JsonArrayQueryCtor *ctor); +static Node *transformJsonObjectAgg(ParseState *pstate, JsonObjectAgg *agg); +static Node *transformJsonArrayAgg(ParseState *pstate, JsonArrayAgg *agg); +static Node *transformJsonIsPredicate(ParseState *pstate, JsonIsPredicate *p); +static Node *transformJsonFuncExpr(ParseState *pstate, JsonFuncExpr *p); +static Node *transformJsonValueExpr(ParseState *pstate, JsonValueExpr *jve); static Node *make_row_comparison_op(ParseState *pstate, List *opname, List *largs, List *rargs, int location); static Node *make_row_distinct_op(ParseState 
*pstate, List *opname, @@ -369,6 +381,38 @@ transformExprRecurse(ParseState *pstate, Node *expr) break; } + case T_JsonObjectCtor: + result = transformJsonObjectCtor(pstate, (JsonObjectCtor *) expr); + break; + + case T_JsonArrayCtor: + result = transformJsonArrayCtor(pstate, (JsonArrayCtor *) expr); + break; + + case T_JsonArrayQueryCtor: + result = transformJsonArrayQueryCtor(pstate, (JsonArrayQueryCtor *) expr); + break; + + case T_JsonObjectAgg: + result = transformJsonObjectAgg(pstate, (JsonObjectAgg *) expr); + break; + + case T_JsonArrayAgg: + result = transformJsonArrayAgg(pstate, (JsonArrayAgg *) expr); + break; + + case T_JsonIsPredicate: + result = transformJsonIsPredicate(pstate, (JsonIsPredicate *) expr); + break; + + case T_JsonFuncExpr: + result = transformJsonFuncExpr(pstate, (JsonFuncExpr *) expr); + break; + + case T_JsonValueExpr: + result = transformJsonValueExpr(pstate, (JsonValueExpr *) expr); + break; + default: /* should not reach here */ elog(ERROR, "unrecognized node type: %d", (int) nodeTag(expr)); @@ -3472,3 +3516,1094 @@ ParseExprKindName(ParseExprKind exprKind) } return "unrecognized expression kind"; } + +/* + * Make string Const node from JSON encoding name. + * + * UTF8 is default encoding. + */ +static Const * +getJsonEncodingConst(JsonFormat *format) +{ + JsonEncoding encoding; + const char *enc; + Name encname = palloc(sizeof(NameData)); + + if (!format || + format->type == JS_FORMAT_DEFAULT || + format->encoding == JS_ENC_DEFAULT) + encoding = JS_ENC_UTF8; + else + encoding = format->encoding; + + switch (encoding) + { + case JS_ENC_UTF16: + enc = "UTF16"; + break; + case JS_ENC_UTF32: + enc = "UTF32"; + break; + case JS_ENC_UTF8: + default: + enc = "UTF8"; + break; + } + + namestrcpy(encname, enc); + + return makeConst(NAMEOID, -1, InvalidOid, NAMEDATALEN, + NameGetDatum(encname), false, false); +} + +/* + * Make bytea => text conversion using specified JSON format encoding. + */ +static Node * +makeJsonByteaToTextConversion(Node *expr, JsonFormat *format, int location) +{ + Const *encoding = getJsonEncodingConst(format); + FuncExpr *fexpr = makeFuncExpr(F_PG_CONVERT_FROM, TEXTOID, + list_make2(expr, encoding), + InvalidOid, InvalidOid, + COERCE_EXPLICIT_CALL); + + fexpr->location = location; + + return (Node *) fexpr; +} + +static Node * +makeCaseTestExpr(Node *expr) +{ + CaseTestExpr *placeholder = makeNode(CaseTestExpr); + + placeholder->typeId = exprType(expr); + placeholder->typeMod = exprTypmod(expr); + placeholder->collation = exprCollation(expr); + + return (Node *) placeholder; +} + +/* + * Transform JSON value expression using specified input JSON format or + * default format otherwise. + */ +static Node * +transformJsonValueExprExt(ParseState *pstate, JsonValueExpr *ve, + JsonFormatType default_format, bool isarg, + Node **rawexpr) +{ + Node *expr = transformExprRecurse(pstate, (Node *) ve->expr); + JsonFormatType format; + Oid exprtype; + int location; + char typcategory; + bool typispreferred; + + if (exprType(expr) == UNKNOWNOID) + expr = coerce_to_specific_type(pstate, expr, TEXTOID, "JSON_VALUE_EXPR"); + + exprtype = exprType(expr); + location = exprLocation(expr); + + get_type_category_preferred(exprtype, &typcategory, &typispreferred); + + if (rawexpr) + { + /* + * Save a raw context item expression if it is needed for the isolation + * of error handling in the formatting stage. 
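+ * (The caller evaluates the saved raw expression separately; the formatted
+ * expression built below refers to it through a CaseTestExpr placeholder.)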
+ */ + *rawexpr = expr; + expr = makeCaseTestExpr(expr); + } + + if (ve->format.type != JS_FORMAT_DEFAULT) + { + if (ve->format.encoding != JS_ENC_DEFAULT && exprtype != BYTEAOID) + ereport(ERROR, + (errcode(ERRCODE_DATATYPE_MISMATCH), + errmsg("JSON ENCODING clause is only allowed for bytea input type"), + parser_errposition(pstate, ve->format.location))); + + if (exprtype == JSONOID || exprtype == JSONBOID) + { + format = JS_FORMAT_DEFAULT; /* do not format json[b] types */ + ereport(WARNING, + (errmsg("FORMAT JSON has no effect for json and jsonb types"))); + } + else + format = ve->format.type; + } + else if (isarg) + { + /* Pass SQL/JSON item types directly without conversion to json[b]. */ + switch (exprtype) + { + case TEXTOID: + case NUMERICOID: + case BOOLOID: + case INT2OID: + case INT4OID: + case INT8OID: + case FLOAT4OID: + case FLOAT8OID: + case DATEOID: + case TIMEOID: + case TIMETZOID: + case TIMESTAMPOID: + case TIMESTAMPTZOID: + return expr; + + default: + if (typcategory == TYPCATEGORY_STRING) + return coerce_to_specific_type(pstate, expr, TEXTOID, + "JSON_VALUE_EXPR"); + /* else convert argument to json[b] type */ + break; + } + + format = default_format; + } + else if (exprtype == JSONOID || exprtype == JSONBOID) + format = JS_FORMAT_DEFAULT; /* do not format json[b] types */ + else + format = default_format; + + if (format != JS_FORMAT_DEFAULT) + { + Oid targettype = format == JS_FORMAT_JSONB ? JSONBOID : JSONOID; + Node *coerced; + FuncExpr *fexpr; + + if (!isarg && exprtype != BYTEAOID && typcategory != TYPCATEGORY_STRING) + ereport(ERROR, + (errcode(ERRCODE_DATATYPE_MISMATCH), + errmsg(ve->format.type == JS_FORMAT_DEFAULT ? + "cannot use non-string types with implicit FORMAT JSON clause" : + "cannot use non-string types with explicit FORMAT JSON clause"), + parser_errposition(pstate, ve->format.location >= 0 ? + ve->format.location : location))); + + /* Convert encoded JSON text from bytea. */ + if (format == JS_FORMAT_JSON && exprtype == BYTEAOID) + { + expr = makeJsonByteaToTextConversion(expr, &ve->format, location); + exprtype = TEXTOID; + } + + /* Try to coerce to the target type. */ + coerced = coerce_to_target_type(pstate, expr, exprtype, + targettype, -1, + COERCION_EXPLICIT, + COERCE_EXPLICIT_CAST, + location); + + if (coerced) + return coerced; + + /* If coercion failed, use to_json()/to_jsonb() functions. */ + fexpr = makeFuncExpr(targettype == JSONOID ? F_TO_JSON : F_TO_JSONB, + targettype, list_make1(expr), + InvalidOid, InvalidOid, + COERCE_EXPLICIT_CALL); + fexpr->location = location; + + expr = (Node *) fexpr; + } + + return expr; +} + +/* + * Transform JSON value expression using FORMAT JSON by default. + */ +static Node * +transformJsonValueExpr(ParseState *pstate, JsonValueExpr *jve) +{ + return transformJsonValueExprExt(pstate, jve, JS_FORMAT_JSON, false, NULL); +} + +/* + * Transform JSON value expression using unspecified format by default. + */ +static Node * +transformJsonValueExprDefault(ParseState *pstate, JsonValueExpr *jve) +{ + return transformJsonValueExprExt(pstate, jve, JS_FORMAT_DEFAULT, false, NULL); +} + +/* + * Checks specified output format for its applicability to the target type. 
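+ *
+ * For example (illustrative), RETURNING bytea FORMAT JSON ENCODING UTF16
+ * is rejected here, since only UTF8 output encoding is supported.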
+ */ +static void +checkJsonOutputFormat(ParseState *pstate, JsonFormat *format, Oid targettype, + bool allow_format_for_non_strings) +{ + if (!allow_format_for_non_strings && + format->type != JS_FORMAT_DEFAULT && + (targettype != BYTEAOID && + targettype != JSONOID && + targettype != JSONBOID)) + { + char typcategory; + bool typispreferred; + + get_type_category_preferred(targettype, &typcategory, &typispreferred); + + if (typcategory != TYPCATEGORY_STRING) + ereport(ERROR, + (errcode(ERRCODE_FEATURE_NOT_SUPPORTED), + parser_errposition(pstate, format->location), + errmsg("cannot use JSON format with non-string output types"))); + } + + if (format->type == JS_FORMAT_JSON) + { + JsonEncoding enc = format->encoding != JS_ENC_DEFAULT ? + format->encoding : JS_ENC_UTF8; + + if (targettype != BYTEAOID && + format->encoding != JS_ENC_DEFAULT) + ereport(ERROR, + (errcode(ERRCODE_FEATURE_NOT_SUPPORTED), + parser_errposition(pstate, format->location), + errmsg("cannot set JSON encoding for non-bytea output types"))); + + if (enc != JS_ENC_UTF8) + ereport(ERROR, + (errcode(ERRCODE_FEATURE_NOT_SUPPORTED), + errmsg("unsupported JSON encoding"), + errhint("only UTF8 JSON encoding is supported"), + parser_errposition(pstate, format->location))); + } +} + +/* + * Transform JSON output clause. + * + * Assigns target type oid and modifier. + * Assigns default format or checks specified format for its applicability to + * the target type. + */ +static void +transformJsonOutput(ParseState *pstate, JsonOutput **poutput, bool allow_format) +{ + JsonOutput *output = *poutput; + + /* if output clause is not specified, make default clause value */ + if (!output) + { + output = makeNode(JsonOutput); + + output->returning.format.type = JS_FORMAT_DEFAULT; + output->returning.format.encoding = JS_ENC_DEFAULT; + output->returning.format.location = -1; + output->typename = NULL; + output->returning.typid = InvalidOid; + output->returning.typmod = -1; + + *poutput = output; + + return; + } + + typenameTypeIdAndMod(pstate, output->typename, + &output->returning.typid, &output->returning.typmod); + + if (output->typename->setof) + ereport(ERROR, + (errcode(ERRCODE_FEATURE_NOT_SUPPORTED), + errmsg("returning SETOF types is not supported in SQL/JSON functions"))); + + if (output->returning.format.type == JS_FORMAT_DEFAULT) + /* assign JSONB format when returning jsonb, or JSON format otherwise */ + output->returning.format.type = output->returning.typid == JSONBOID + ? JS_FORMAT_JSONB : JS_FORMAT_JSON; + else + checkJsonOutputFormat(pstate, &output->returning.format, + output->returning.typid, + allow_format); +} + +/* + * Coerce json[b]-valued function expression to the output type. + */ +static Node * +coerceJsonFuncExpr(ParseState *pstate, Node *expr, JsonReturning *returning, + bool report_error) +{ + Node *res; + int location; + Oid exprtype = exprType(expr); + + /* if output type is not specified or equals to function type, return */ + if (!OidIsValid(returning->typid) || returning->typid == exprtype) + return expr; + + location = exprLocation(expr); + + if (location < 0) + location = returning ? 
returning->format.location : -1; + + /* special case for RETURNING bytea FORMAT json */ + if (returning->format.type == JS_FORMAT_JSON && + returning->typid == BYTEAOID) + { + /* encode json text into bytea using pg_convert_to() */ + Node *texpr = coerce_to_specific_type(pstate, expr, TEXTOID, + "JSON_FUNCTION"); + Const *enc = getJsonEncodingConst(&returning->format); + FuncExpr *fexpr = makeFuncExpr(F_PG_CONVERT_TO, BYTEAOID, + list_make2(texpr, enc), + InvalidOid, InvalidOid, + COERCE_EXPLICIT_CALL); + fexpr->location = location; + + return (Node *) fexpr; + } + + /* try to coerce expression to the output type */ + res = coerce_to_target_type(pstate, expr, exprtype, + returning->typid, returning->typmod, + /* XXX throwing errors when casting to char(N) */ + COERCION_EXPLICIT, + COERCE_EXPLICIT_CAST, + location); + + if (!res && report_error) + ereport(ERROR, + (errcode(ERRCODE_CANNOT_COERCE), + errmsg("cannot cast type %s to %s", + format_type_be(exprtype), + format_type_be(returning->typid)), + parser_coercion_errposition(pstate, location, expr))); + + return res; +} + +/* + * Transform JSON_OBJECT() constructor. + * + * JSON_OBJECT() is transformed into json[b]_build_object[_ext]() call + * depending on the output JSON format. The first two arguments of + * json[b]_build_object_ext() are absent_on_null and check_key_uniqueness. + * + * Then function call result is coerced to the target type. + */ +static Node * +transformJsonObjectCtor(ParseState *pstate, JsonObjectCtor *ctor) +{ + FuncExpr *fexpr; + List *args = NIL; + Oid funcid; + Oid funcrettype; + + /* transform key-value pairs, if any */ + if (ctor->exprs) + { + ListCell *lc; + + /* append the first two arguments */ + args = lappend(args, makeBoolConst(ctor->absent_on_null, false)); + args = lappend(args, makeBoolConst(ctor->unique, false)); + + /* transform and append key-value arguments */ + foreach(lc, ctor->exprs) + { + JsonKeyValue *kv = castNode(JsonKeyValue, lfirst(lc)); + Node *key = transformExprRecurse(pstate, (Node *) kv->key); + Node *val = transformJsonValueExprDefault(pstate, kv->value); + + args = lappend(args, key); + args = lappend(args, val); + } + } + + transformJsonOutput(pstate, &ctor->output, true); + + if (ctor->output->returning.format.type == JS_FORMAT_JSONB) + { + funcid = args ? F_JSONB_BUILD_OBJECT_EXT : F_JSONB_BUILD_OBJECT_NOARGS; + funcrettype = JSONBOID; + } + else + { + funcid = args ? F_JSON_BUILD_OBJECT_EXT : F_JSON_BUILD_OBJECT_NOARGS; + funcrettype = JSONOID; + } + + fexpr = makeFuncExpr(funcid, funcrettype, args, + InvalidOid, InvalidOid, COERCE_EXPLICIT_CALL); + fexpr->location = ctor->location; + + return coerceJsonFuncExpr(pstate, (Node *) fexpr, &ctor->output->returning, + true); +} + +/* + * Transform JSON_ARRAY(query [FORMAT] [RETURNING] [ON NULL]) into + * (SELECT JSON_ARRAYAGG(a [FORMAT] [RETURNING] [ON NULL]) FROM (query) q(a)) + */ +static Node * +transformJsonArrayQueryCtor(ParseState *pstate, JsonArrayQueryCtor *ctor) +{ + SubLink *sublink = makeNode(SubLink); + SelectStmt *select = makeNode(SelectStmt); + RangeSubselect *range = makeNode(RangeSubselect); + Alias *alias = makeNode(Alias); + ResTarget *target = makeNode(ResTarget); + JsonArrayAgg *agg = makeNode(JsonArrayAgg); + JsonValueExpr *jsexpr = makeNode(JsonValueExpr); + ColumnRef *colref = makeNode(ColumnRef); + Query *query; + ParseState *qpstate; + + /* Transform query only for counting target list entries. 
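+ * (e.g. an illustrative JSON_ARRAY(SELECT a, b FROM t) is rejected below
+ * because the subquery returns more than one column)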
*/ + qpstate = make_parsestate(pstate); + + query = transformStmt(qpstate, ctor->query); + + if (count_nonjunk_tlist_entries(query->targetList) != 1) + ereport(ERROR, + (errcode(ERRCODE_SYNTAX_ERROR), + errmsg("subquery must return only one column"), + parser_errposition(pstate, ctor->location))); + + free_parsestate(qpstate); + + colref->fields = list_make2(makeString(pstrdup("q")), + makeString(pstrdup("a"))); + colref->location = ctor->location; + + jsexpr->expr = (Expr *) colref; + jsexpr->format = ctor->format; + + agg->arg = jsexpr; + agg->ctor.agg_order = NIL; + agg->ctor.output = ctor->output; + agg->absent_on_null = ctor->absent_on_null; + agg->ctor.location = ctor->location; + + target->name = NULL; + target->indirection = NIL; + target->val = (Node *) agg; + target->location = ctor->location; + + alias->aliasname = pstrdup("q"); + alias->colnames = list_make1(makeString(pstrdup("a"))); + + range->lateral = false; + range->subquery = ctor->query; + range->alias = alias; + + select->targetList = list_make1(target); + select->fromClause = list_make1(range); + + sublink->subLinkType = EXPR_SUBLINK; + sublink->subLinkId = 0; + sublink->testexpr = NULL; + sublink->operName = NIL; + sublink->subselect = (Node *) select; + sublink->location = ctor->location; + + return transformExprRecurse(pstate, (Node *) sublink); +} + +/* + * Common code for JSON_OBJECTAGG and JSON_ARRAYAGG transformation. + */ +static Node * +transformJsonAggCtor(ParseState *pstate, JsonAggCtor *agg_ctor, List *args, + Oid aggfnoid, Oid aggtype) +{ + Node *node; + Expr *aggfilter = agg_ctor->agg_filter ? (Expr *) + transformWhereClause(pstate, agg_ctor->agg_filter, + EXPR_KIND_FILTER, "FILTER") : NULL; + + if (agg_ctor->over) + { + /* window function */ + WindowFunc *wfunc = makeNode(WindowFunc); + + wfunc->winfnoid = aggfnoid; + wfunc->wintype = aggtype; + /* wincollid and inputcollid will be set by parse_collate.c */ + wfunc->args = args; + /* winref will be set by transformWindowFuncCall */ + wfunc->winstar = false; + wfunc->winagg = true; + wfunc->aggfilter = aggfilter; + wfunc->location = agg_ctor->location; + + /* + * ordered aggs not allowed in windows yet + */ + if (agg_ctor->agg_order != NIL) + ereport(ERROR, + (errcode(ERRCODE_FEATURE_NOT_SUPPORTED), + errmsg("aggregate ORDER BY is not implemented for window functions"), + parser_errposition(pstate, agg_ctor->location))); + + /* parse_agg.c does additional window-func-specific processing */ + transformWindowFuncCall(pstate, wfunc, agg_ctor->over); + + node = (Node *) wfunc; + } + else + { + Aggref *aggref = makeNode(Aggref); + + aggref->aggfnoid = aggfnoid; + aggref->aggtype = aggtype; + + /* aggcollid and inputcollid will be set by parse_collate.c */ + aggref->aggtranstype = InvalidOid; /* will be set by planner */ + /* aggargtypes will be set by transformAggregateCall */ + /* aggdirectargs and args will be set by transformAggregateCall */ + /* aggorder and aggdistinct will be set by transformAggregateCall */ + aggref->aggfilter = aggfilter; + aggref->aggstar = false; + aggref->aggvariadic = false; + aggref->aggkind = AGGKIND_NORMAL; + /* agglevelsup will be set by transformAggregateCall */ + aggref->aggsplit = AGGSPLIT_SIMPLE; /* planner might change this */ + aggref->location = agg_ctor->location; + + transformAggregateCall(pstate, aggref, args, agg_ctor->agg_order, false); + + node = (Node *) aggref; + } + + return coerceJsonFuncExpr(pstate, node, &agg_ctor->output->returning, true); +} + +/* + * Transform JSON_OBJECTAGG() aggregate function. 
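+ *
+ * For example (illustrative), JSON_OBJECTAGG(k VALUE v ABSENT ON NULL)
+ * with no RETURNING clause becomes json_objectagg(k, v, true, false).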
+ * + * JSON_OBJECTAGG() is transformed into + * json[b]_objectagg(key, value, absent_on_null, check_unique) call depending on + * the output JSON format. Then the function call result is coerced to the + * target output type. + */ +static Node * +transformJsonObjectAgg(ParseState *pstate, JsonObjectAgg *agg) +{ + Node *key; + Node *val; + List *args; + Oid aggfnoid; + Oid aggtype; + + transformJsonOutput(pstate, &agg->ctor.output, true); + + key = transformExprRecurse(pstate, (Node *) agg->arg->key); + val = transformJsonValueExprDefault(pstate, agg->arg->value); + args = list_make4(key, + val, + makeBoolConst(agg->absent_on_null, false), + makeBoolConst(agg->unique, false)); + + if (agg->ctor.output->returning.format.type == JS_FORMAT_JSONB) + { + aggfnoid = F_JSONB_OBJECTAGG; + aggtype = JSONBOID; + } + else + { + aggfnoid = F_JSON_OBJECTAGG; + aggtype = JSONOID; + } + + return transformJsonAggCtor(pstate, &agg->ctor, args, aggfnoid, aggtype); +} + +/* + * Transform JSON_ARRAYAGG() aggregate function. + * + * JSON_ARRAYAGG() is transformed into json[b]_agg[_strict]() call depending + * on the output JSON format and absent_on_null. Then the function call result + * is coerced to the target output type. + */ +static Node * +transformJsonArrayAgg(ParseState *pstate, JsonArrayAgg *agg) +{ + Node *arg; + Oid aggfnoid; + Oid aggtype; + + transformJsonOutput(pstate, &agg->ctor.output, true); + + arg = transformJsonValueExprDefault(pstate, agg->arg); + + if (agg->ctor.output->returning.format.type == JS_FORMAT_JSONB) + { + aggfnoid = agg->absent_on_null ? F_JSONB_AGG_STRICT : F_JSONB_AGG; + aggtype = JSONBOID; + } + else + { + aggfnoid = agg->absent_on_null ? F_JSON_AGG_STRICT : F_JSON_AGG; + aggtype = JSONOID; + } + + return transformJsonAggCtor(pstate, &agg->ctor, list_make1(arg), + aggfnoid, aggtype); +} + +/* + * Transform JSON_ARRAY() constructor. + * + * JSON_ARRAY() is transformed into json[b]_build_array[_ext]() call + * depending on the output JSON format. The first argument of + * json[b]_build_array_ext() is absent_on_null. + * + * Then function call result is coerced to the target type. + */ +static Node * +transformJsonArrayCtor(ParseState *pstate, JsonArrayCtor *ctor) +{ + FuncExpr *fexpr; + List *args = NIL; + Oid funcid; + Oid funcrettype; + + /* transform element expressions, if any */ + if (ctor->exprs) + { + ListCell *lc; + + /* append the first absent_on_null argument */ + args = lappend(args, makeBoolConst(ctor->absent_on_null, false)); + + /* transform and append element arguments */ + foreach(lc, ctor->exprs) + { + JsonValueExpr *jsval = castNode(JsonValueExpr, lfirst(lc)); + Node *val = transformJsonValueExprDefault(pstate, jsval); + + args = lappend(args, val); + } + } + + transformJsonOutput(pstate, &ctor->output, true); + + if (ctor->output->returning.format.type == JS_FORMAT_JSONB) + { + funcid = args ? F_JSONB_BUILD_ARRAY_EXT : F_JSONB_BUILD_ARRAY_NOARGS; + funcrettype = JSONBOID; + } + else + { + funcid = args ? 
F_JSON_BUILD_ARRAY_EXT : F_JSON_BUILD_ARRAY_NOARGS; + funcrettype = JSONOID; + } + + fexpr = makeFuncExpr(funcid, funcrettype, args, + InvalidOid, InvalidOid, COERCE_EXPLICIT_CALL); + fexpr->location = ctor->location; + + return coerceJsonFuncExpr(pstate, (Node *) fexpr, &ctor->output->returning, + true); +} + +static const char * +JsonValueTypeStrings[] = +{ + "any", + "object", + "array", + "scalar", +}; + +static Const * +makeJsonValueTypeConst(JsonValueType type) +{ + return makeConst(TEXTOID, -1, InvalidOid, -1, + PointerGetDatum(cstring_to_text( + JsonValueTypeStrings[(int) type])), + false, false); +} + +/* + * Transform IS JSON predicate into + * json[b]_is_valid(json, value_type [, check_key_uniqueness]) call. + */ +static Node * +transformJsonIsPredicate(ParseState *pstate, JsonIsPredicate *pred) +{ + Node *expr = transformExprRecurse(pstate, pred->expr); + Oid exprtype = exprType(expr); + + /* prepare input document */ + if (exprtype == BYTEAOID) + { + expr = makeJsonByteaToTextConversion(expr, &pred->format, + exprLocation(expr)); + exprtype = TEXTOID; + } + else + { + char typcategory; + bool typispreferred; + + get_type_category_preferred(exprtype, &typcategory, &typispreferred); + + if (exprtype == UNKNOWNOID || typcategory == TYPCATEGORY_STRING) + { + expr = coerce_to_target_type(pstate, (Node *) expr, exprtype, + TEXTOID, -1, + COERCION_IMPLICIT, + COERCE_IMPLICIT_CAST, -1); + exprtype = TEXTOID; + } + + if (pred->format.encoding != JS_ENC_DEFAULT) + ereport(ERROR, + (errcode(ERRCODE_FEATURE_NOT_SUPPORTED), + parser_errposition(pstate, pred->format.location), + errmsg("cannot use JSON FORMAT ENCODING clause for non-bytea input types"))); + } + + /* make resulting expression */ + if (exprtype == TEXTOID || exprtype == JSONOID) + { + FuncExpr *fexpr = makeFuncExpr(F_JSON_IS_VALID, BOOLOID, + list_make3(expr, + makeJsonValueTypeConst(pred->vtype), + makeBoolConst(pred->unique_keys, + false)), + InvalidOid, InvalidOid, + COERCE_EXPLICIT_CALL); + + fexpr->location = pred->location; + return (Node *) fexpr; + } + else if (exprtype == JSONBOID) + { + /* XXX the following expressions also can be used here: + * jsonb_type(jsonb) = 'type' (for object and array checks) + * CASE jsonb_type(jsonb) WHEN ... END (for scalars checks) + */ + FuncExpr *fexpr = makeFuncExpr(F_JSONB_IS_VALID, BOOLOID, + list_make2(expr, + makeJsonValueTypeConst(pred->vtype)), + InvalidOid, InvalidOid, + COERCE_EXPLICIT_CALL); + + fexpr->location = pred->location; + return (Node *) fexpr; + } + else + { + ereport(ERROR, + (errcode(ERRCODE_DATATYPE_MISMATCH), + errmsg("cannot use type %s in IS JSON predicate", + format_type_be(exprtype)))); + return NULL; + } +} + +/* + * Transform a JSON PASSING clause. + */ +static void +transformJsonPassingArgs(ParseState *pstate, JsonFormatType format, List *args, + JsonPassing *passing) +{ + ListCell *lc; + + passing->values = NIL; + passing->names = NIL; + + foreach(lc, args) + { + JsonArgument *arg = castNode(JsonArgument, lfirst(lc)); + Node *expr = transformJsonValueExprExt(pstate, arg->val, + format, true, NULL); + + passing->values = lappend(passing->values, expr); + passing->names = lappend(passing->names, makeString(arg->name)); + } +} + +/* + * Transform a JSON BEHAVIOR clause. + */ +static JsonBehavior +transformJsonBehavior(ParseState *pstate, JsonBehavior *behavior, + JsonBehaviorType default_behavior) +{ + JsonBehavior b; + + b.btype = behavior ? behavior->btype : default_behavior; + b.default_expr = b.btype != JSON_BEHAVIOR_DEFAULT ? 
NULL : + transformExprRecurse(pstate, behavior->default_expr); + + return b; +} + +/* + * Common code for JSON_VALUE, JSON_QUERY, JSON_EXISTS transformation + * into a JsonExpr node. + */ +static JsonExpr * +transformJsonExprCommon(ParseState *pstate, JsonFuncExpr *func) +{ + JsonExpr *jsexpr = makeNode(JsonExpr); + Datum jsonpath; + JsonFormatType format; + + if (func->common->pathname && func->op != IS_JSON_TABLE) + ereport(ERROR, + (errcode(ERRCODE_SYNTAX_ERROR), + errmsg("JSON_TABLE path name is not allowed here"), + parser_errposition(pstate, func->location))); + + jsexpr->location = func->location; + jsexpr->op = func->op; + jsexpr->formatted_expr = transformJsonValueExprExt(pstate, + func->common->expr, + JS_FORMAT_JSON, + false, + &jsexpr->raw_expr); + + /* format is determined by context item type */ + format = exprType(jsexpr->formatted_expr) == JSONBOID ? + JS_FORMAT_JSONB : JS_FORMAT_JSON; + + if (jsexpr->formatted_expr == jsexpr->raw_expr) + jsexpr->formatted_expr = NULL; + + jsexpr->result_expr = NULL; + jsexpr->coerce_via_io = false; + jsexpr->omit_quotes = false; + + jsexpr->format = func->common->expr->format; + + /* parse JSON path string */ + jsonpath = DirectFunctionCall1(jsonpath_in, + CStringGetDatum(func->common->pathspec)); + + jsexpr->path_spec = makeConst(JSONPATHOID, -1, InvalidOid, -1, + jsonpath, false, false); + + /* transform and coerce to json[b] passing arguments */ + transformJsonPassingArgs(pstate, format, func->common->passing, + &jsexpr->passing); + + if (func->op != IS_JSON_EXISTS && func->op != IS_JSON_TABLE) + jsexpr->on_empty = transformJsonBehavior(pstate, func->on_empty, + JSON_BEHAVIOR_NULL); + + jsexpr->on_error = transformJsonBehavior(pstate, func->on_error, + func->op == IS_JSON_EXISTS ? JSON_BEHAVIOR_FALSE : + func->op == IS_JSON_TABLE ? JSON_BEHAVIOR_EMPTY : JSON_BEHAVIOR_NULL); + + return jsexpr; +} + +/* + * Assign default JSON returning type from the specified format or from + * the context item type. + */ +static void +assignDefaultJsonReturningType(Node *context_item, JsonFormat *context_format, + JsonReturning *ret) +{ + bool is_jsonb; + + ret->format = *context_format; + + if (ret->format.type == JS_FORMAT_DEFAULT) + is_jsonb = exprType(context_item) == JSONBOID; + else + is_jsonb = ret->format.type == JS_FORMAT_JSONB; + + ret->typid = is_jsonb ? JSONBOID : JSONOID; + ret->typmod = -1; +} + +/* + * Try to coerce expression to the output type or + * use json_populate_type() for composite, array and domain types or + * use coercion via I/O. + */ +Node * +coerceJsonExpr(ParseState *pstate, Node *expr, JsonReturning *returning, + bool *coerce_via_io, bool *coerce_via_populate) +{ + Node *res = coerceJsonFuncExpr(pstate, expr, returning, false); + char typtype; + + if (res) + return res == expr ? NULL : res; + + typtype = get_typtype(returning->typid); + + if (returning->typid == RECORDOID || + typtype == TYPTYPE_COMPOSITE || + typtype == TYPTYPE_DOMAIN || + type_is_array(returning->typid)) + *coerce_via_populate = true; + else + *coerce_via_io = true; + + return NULL; +} + +/* + * Transform a JSON output clause of JSON_VALUE, JSON_QUERY, JSON_EXISTS. + */ +static void +transformJsonFuncExprOutput(ParseState *pstate, JsonFuncExpr *func, + JsonExpr *jsexpr) +{ + Node *expr = jsexpr->formatted_expr ? 
+ jsexpr->formatted_expr : jsexpr->raw_expr; + + transformJsonOutput(pstate, &func->output, false); + + jsexpr->returning = func->output->returning; + + /* JSON_VALUE returns text by default */ + if (func->op == IS_JSON_VALUE && !OidIsValid(jsexpr->returning.typid)) + { + jsexpr->returning.typid = TEXTOID; + jsexpr->returning.typmod = -1; + } + + if (OidIsValid(jsexpr->returning.typid)) + { + JsonReturning ret; + + if (func->op == IS_JSON_VALUE && + jsexpr->returning.typid != JSONOID && + jsexpr->returning.typid != JSONBOID) + { + /* Forced coercion via I/O for JSON_VALUE for non-JSON types */ + jsexpr->result_expr = NULL; + jsexpr->coerce_via_io = true; + return; + } + + assignDefaultJsonReturningType(jsexpr->raw_expr, &jsexpr->format, &ret); + + if (ret.typid != jsexpr->returning.typid || + ret.typmod != jsexpr->returning.typmod) + { + Node *placeholder = makeCaseTestExpr(expr); + + Assert(((CaseTestExpr *) placeholder)->typeId == ret.typid); + Assert(((CaseTestExpr *) placeholder)->typeMod == ret.typmod); + + jsexpr->result_expr = coerceJsonExpr(pstate, placeholder, + &jsexpr->returning, + &jsexpr->coerce_via_io, + &jsexpr->coerce_via_populate); + } + } + else + assignDefaultJsonReturningType(jsexpr->raw_expr, &jsexpr->format, + &jsexpr->returning); +} + +/* + * Coerce a expression in JSON DEFAULT behavior to the target output type. + */ +static Node * +coerceDefaultJsonExpr(ParseState *pstate, JsonExpr *jsexpr, Node *defexpr) +{ + int location; + Oid exprtype; + + if (!defexpr) + return NULL; + + exprtype = exprType(defexpr); + location = exprLocation(defexpr); + + if (location < 0) + location = jsexpr->location; + + defexpr = coerce_to_target_type(pstate, + defexpr, + exprtype, + jsexpr->returning.typid, + jsexpr->returning.typmod, + COERCION_EXPLICIT, + COERCE_IMPLICIT_CAST, + location); + + if (!defexpr) + ereport(ERROR, + (errcode(ERRCODE_CANNOT_COERCE), + errmsg("cannot cast DEFAULT expression type %s to %s", + format_type_be(exprtype), + format_type_be(jsexpr->returning.typid)), + parser_errposition(pstate, location))); + + return defexpr; +} + +/* + * Transform JSON_VALUE, JSON_QUERY, JSON_EXISTS functions into a JsonExpr node. + */ +static Node * +transformJsonFuncExpr(ParseState *pstate, JsonFuncExpr *func) +{ + JsonExpr *jsexpr = transformJsonExprCommon(pstate, func); + + switch (func->op) + { + case IS_JSON_VALUE: + transformJsonFuncExprOutput(pstate, func, jsexpr); + + jsexpr->returning.format.type = JS_FORMAT_DEFAULT; + jsexpr->returning.format.encoding = JS_ENC_DEFAULT; + + jsexpr->on_empty.default_expr = + coerceDefaultJsonExpr(pstate, jsexpr, + jsexpr->on_empty.default_expr); + + jsexpr->on_error.default_expr = + coerceDefaultJsonExpr(pstate, jsexpr, + jsexpr->on_error.default_expr); + break; + + case IS_JSON_QUERY: + transformJsonFuncExprOutput(pstate, func, jsexpr); + + jsexpr->wrapper = func->wrapper; + jsexpr->omit_quotes = func->omit_quotes; + + break; + + case IS_JSON_EXISTS: + jsexpr->returning.format.type = JS_FORMAT_DEFAULT; + jsexpr->returning.format.encoding = JS_ENC_DEFAULT; + jsexpr->returning.format.location = -1; + jsexpr->returning.typid = BOOLOID; + jsexpr->returning.typmod = -1; + + break; + + case IS_JSON_TABLE: + { + Node *expr = jsexpr->formatted_expr ? 
+ jsexpr->formatted_expr : jsexpr->raw_expr; + + jsexpr->returning.format.type = JS_FORMAT_DEFAULT; + jsexpr->returning.format.encoding = JS_ENC_DEFAULT; + jsexpr->returning.format.location = -1; + jsexpr->returning.typid = exprType(expr); + jsexpr->returning.typmod = -1; + + break; + } + } + + return (Node *) jsexpr; +} diff --git a/src/backend/parser/parse_relation.c b/src/backend/parser/parse_relation.c index 6acc21dfe6..186c5ef1aa 100644 --- a/src/backend/parser/parse_relation.c +++ b/src/backend/parser/parse_relation.c @@ -1662,7 +1662,8 @@ addRangeTableEntryForTableFunc(ParseState *pstate, bool inFromCl) { RangeTblEntry *rte = makeNode(RangeTblEntry); - char *refname = alias ? alias->aliasname : pstrdup("xmltable"); + char *refname = alias ? alias->aliasname : + pstrdup(tf->functype == TFT_XMLTABLE ? "xmltable" : "json_table"); Alias *eref; int numaliases; diff --git a/src/backend/parser/parse_target.c b/src/backend/parser/parse_target.c index 01fd726a3d..8410465a71 100644 --- a/src/backend/parser/parse_target.c +++ b/src/backend/parser/parse_target.c @@ -1917,6 +1917,24 @@ FigureColnameInternal(Node *node, char **name) case T_XmlSerialize: *name = "xmlserialize"; return 2; + case T_JsonExpr: + /* make SQL/JSON functions act like a regular function */ + switch (((JsonExpr *) node)->op) + { + case IS_JSON_QUERY: + *name = "json_query"; + return 2; + case IS_JSON_VALUE: + *name = "json_value"; + return 2; + case IS_JSON_EXISTS: + *name = "json_exists"; + return 2; + case IS_JSON_TABLE: + *name = "json_table"; + return 2; + } + break; default: break; } diff --git a/src/backend/parser/parser.c b/src/backend/parser/parser.c index 245b4cda3b..d3ce7c840d 100644 --- a/src/backend/parser/parser.c +++ b/src/backend/parser/parser.c @@ -24,7 +24,6 @@ #include "parser/gramparse.h" #include "parser/parser.h" - /* * raw_parser * Given a query in string form, do lexical and grammatical analysis. 
@@ -117,6 +116,9 @@ base_yylex(YYSTYPE *lvalp, YYLTYPE *llocp, core_yyscan_t yyscanner) case WITH: cur_token_length = 4; break; + case WITHOUT: + cur_token_length = 7; + break; default: return cur_token; } @@ -188,8 +190,22 @@ base_yylex(YYSTYPE *lvalp, YYLTYPE *llocp, core_yyscan_t yyscanner) case ORDINALITY: cur_token = WITH_LA; break; + case UNIQUE: + cur_token = WITH_LA_UNIQUE; + break; } break; + + case WITHOUT: + /* Replace WITHOUT by WITHOUT_LA if it's followed by TIME */ + switch (next_token) + { + case TIME: + cur_token = WITHOUT_LA; + break; + } + break; + } return cur_token; diff --git a/src/backend/utils/adt/Makefile b/src/backend/utils/adt/Makefile index 1fb018416e..b263d88510 100644 --- a/src/backend/utils/adt/Makefile +++ b/src/backend/utils/adt/Makefile @@ -16,7 +16,8 @@ OBJS = acl.o amutils.o arrayfuncs.o array_expanded.o array_selfuncs.o \ float.o format_type.o formatting.o genfile.o \ geo_ops.o geo_selfuncs.o geo_spgist.o inet_cidr_ntop.o inet_net_pton.o \ int.o int8.o json.o jsonb.o jsonb_gin.o jsonb_op.o jsonb_util.o \ - jsonfuncs.o like.o lockfuncs.o mac.o mac8.o misc.o nabstime.o name.o \ + jsonfuncs.o jsonpath_gram.o jsonpath_scan.o jsonpath.o jsonpath_exec.o jsonpath_json.o \ + like.o lockfuncs.o mac.o mac8.o misc.o nabstime.o name.o \ network.o network_gist.o network_selfuncs.o network_spgist.o \ numeric.o numutils.o oid.o oracle_compat.o \ orderedsetaggs.o pg_locale.o pg_lsn.o pg_upgrade_support.o \ @@ -31,6 +32,28 @@ OBJS = acl.o amutils.o arrayfuncs.o array_expanded.o array_selfuncs.o \ txid.o uuid.o varbit.o varchar.o varlena.o version.o \ windowfuncs.o xid.o xml.o +# Latest flex causes warnings in this file. +ifeq ($(GCC),yes) +scan.o: CFLAGS += -Wno-error +endif + +jsonpath_gram.c: BISONFLAGS += -d + +jsonpath_scan.c: FLEXFLAGS = -CF -p -p + +jsonpath_gram.h: jsonpath_gram.c ; + +# Force these dependencies to be known even without dependency info built: +jsonpath_gram.o jsonpath_scan.o jsonpath_parser.o: jsonpath_gram.h + +jsonpath_json.o: jsonpath_exec.c + +# jsonpath_gram.c, jsonpath_gram.h, and jsonpath_scan.c are in the distribution +# tarball, so they are not cleaned here. +clean distclean maintainer-clean: + rm -f lex.backup + + like.o: like.c like_match.c varlena.o: varlena.c levenshtein.c diff --git a/src/backend/utils/adt/date.c b/src/backend/utils/adt/date.c index 04e737d080..6b1ded92fe 100644 --- a/src/backend/utils/adt/date.c +++ b/src/backend/utils/adt/date.c @@ -43,9 +43,6 @@ static int time2tm(TimeADT time, struct pg_tm *tm, fsec_t *fsec); static int timetz2tm(TimeTzADT *time, struct pg_tm *tm, fsec_t *fsec, int *tzp); -static int tm2time(struct pg_tm *tm, fsec_t fsec, TimeADT *result); -static int tm2timetz(struct pg_tm *tm, fsec_t fsec, int tz, TimeTzADT *result); -static void AdjustTimeForTypmod(TimeADT *time, int32 typmod); /* common code for timetypmodin and timetztypmodin */ @@ -1234,7 +1231,7 @@ time_in(PG_FUNCTION_ARGS) /* tm2time() * Convert a tm structure to a time data type. */ -static int +int tm2time(struct pg_tm *tm, fsec_t fsec, TimeADT *result) { *result = ((((tm->tm_hour * MINS_PER_HOUR + tm->tm_min) * SECS_PER_MINUTE) + tm->tm_sec) @@ -1400,7 +1397,7 @@ time_scale(PG_FUNCTION_ARGS) * have a fundamental tie together but rather a coincidence of * implementation. - thomas */ -static void +void AdjustTimeForTypmod(TimeADT *time, int32 typmod) { static const int64 TimeScales[MAX_TIME_PRECISION + 1] = { @@ -1939,7 +1936,7 @@ time_part(PG_FUNCTION_ARGS) /* tm2timetz() * Convert a tm structure to a time data type. 
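+ * No longer static: it is now also needed by to_datetime() in formatting.c.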
*/ -static int +int tm2timetz(struct pg_tm *tm, fsec_t fsec, int tz, TimeTzADT *result) { result->time = ((((tm->tm_hour * MINS_PER_HOUR + tm->tm_min) * SECS_PER_MINUTE) + tm->tm_sec) * diff --git a/src/backend/utils/adt/formatting.c b/src/backend/utils/adt/formatting.c index 7877af2d6b..80f1053187 100644 --- a/src/backend/utils/adt/formatting.c +++ b/src/backend/utils/adt/formatting.c @@ -87,6 +87,7 @@ #endif #include "catalog/pg_collation.h" +#include "catalog/pg_type.h" #include "mb/pg_wchar.h" #include "utils/builtins.h" #include "utils/date.h" @@ -426,7 +427,10 @@ typedef struct j, us, yysz, /* is it YY or YYYY ? */ - clock; /* 12 or 24 hour clock? */ + clock, /* 12 or 24 hour clock? */ + tzsign, + tzh, + tzm; } TmFromChar; #define ZERO_tmfc(_X) memset(_X, 0, sizeof(TmFromChar)) @@ -472,6 +476,7 @@ do { \ (_X)->tm_sec = (_X)->tm_year = (_X)->tm_min = (_X)->tm_wday = \ (_X)->tm_hour = (_X)->tm_yday = (_X)->tm_isdst = 0; \ (_X)->tm_mday = (_X)->tm_mon = 1; \ + (_X)->tm_zone = NULL; \ } while(0) #define ZERO_tmtc(_X) \ @@ -611,6 +616,8 @@ typedef enum DCH_RM, DCH_SSSS, DCH_SS, + DCH_TZH, + DCH_TZM, DCH_TZ, DCH_US, DCH_WW, @@ -758,7 +765,9 @@ static const KeyWord DCH_keywords[] = { {"RM", 2, DCH_RM, FALSE, FROM_CHAR_DATE_GREGORIAN}, /* R */ {"SSSS", 4, DCH_SSSS, TRUE, FROM_CHAR_DATE_NONE}, /* S */ {"SS", 2, DCH_SS, TRUE, FROM_CHAR_DATE_NONE}, - {"TZ", 2, DCH_TZ, FALSE, FROM_CHAR_DATE_NONE}, /* T */ + {"TZH", 3, DCH_TZH, FALSE, FROM_CHAR_DATE_NONE}, /* T */ + {"TZM", 3, DCH_TZM, TRUE, FROM_CHAR_DATE_NONE}, + {"TZ", 2, DCH_TZ, FALSE, FROM_CHAR_DATE_NONE}, {"US", 2, DCH_US, TRUE, FROM_CHAR_DATE_NONE}, /* U */ {"WW", 2, DCH_WW, TRUE, FROM_CHAR_DATE_GREGORIAN}, /* W */ {"W", 1, DCH_W, TRUE, FROM_CHAR_DATE_GREGORIAN}, @@ -881,7 +890,7 @@ static const int DCH_index[KeyWord_INDEX_SIZE] = { -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, DCH_A_D, DCH_B_C, DCH_CC, DCH_DAY, -1, DCH_FX, -1, DCH_HH24, DCH_IDDD, DCH_J, -1, -1, DCH_MI, -1, DCH_OF, - DCH_P_M, DCH_Q, DCH_RM, DCH_SSSS, DCH_TZ, DCH_US, -1, DCH_WW, -1, DCH_Y_YYY, + DCH_P_M, DCH_Q, DCH_RM, DCH_SSSS, DCH_TZH, DCH_US, -1, DCH_WW, -1, DCH_Y_YYY, -1, -1, -1, -1, -1, -1, -1, DCH_a_d, DCH_b_c, DCH_cc, DCH_day, -1, DCH_fx, -1, DCH_hh24, DCH_iddd, DCH_j, -1, -1, DCH_mi, -1, -1, DCH_p_m, DCH_q, DCH_rm, DCH_ssss, DCH_tz, DCH_us, -1, DCH_ww, @@ -947,6 +956,10 @@ typedef struct NUMProc *L_currency_symbol; } NUMProc; +/* Return flags for DCH_from_char() */ +#define DCH_DATED 0x01 +#define DCH_TIMED 0x02 +#define DCH_ZONED 0x04 /* ---------- * Functions @@ -961,7 +974,8 @@ static void parse_format(FormatNode *node, const char *str, const KeyWord *kw, static void DCH_to_char(FormatNode *node, bool is_interval, TmToChar *in, char *out, Oid collid); -static void DCH_from_char(FormatNode *node, char *in, TmFromChar *out); +static void DCH_from_char(FormatNode *node, char *in, TmFromChar *out, + bool strict); #ifdef DEBUG_TO_FROM_CHAR static void dump_index(const KeyWord *k, const int *index); @@ -978,8 +992,8 @@ static int from_char_parse_int_len(int *dest, char **src, const int len, FormatN static int from_char_parse_int(int *dest, char **src, FormatNode *node); static int seq_search(char *name, const char *const *array, int type, int max, int *len); static int from_char_seq_search(int *dest, char **src, const char *const *array, int type, int max, FormatNode *node); -static void do_to_timestamp(text *date_txt, text *fmt, - struct pg_tm *tm, fsec_t *fsec); +static void do_to_timestamp(text *date_txt, const char *fmt, int fmt_len, + bool strict, 
struct pg_tm *tm, fsec_t *fsec, int *flags); static char *fill_str(char *str, int c, int max); static FormatNode *NUM_cache(int len, NUMDesc *Num, text *pars_str, bool *shouldFree); static char *int_to_roman(int number); @@ -2529,6 +2543,13 @@ DCH_to_char(FormatNode *node, bool is_interval, TmToChar *in, char *out, Oid col s += strlen(s); } break; + case DCH_TZH: + case DCH_TZM: + ereport(ERROR, + (errcode(ERRCODE_FEATURE_NOT_SUPPORTED), + errmsg("formatting field \"%s\" is only supported in " + "to_timestamp", n->key->name))); + break; case DCH_OF: INVALID_FOR_INTERVAL; sprintf(s, "%c%0*d", @@ -2975,7 +2996,7 @@ DCH_to_char(FormatNode *node, bool is_interval, TmToChar *in, char *out, Oid col * ---------- */ static void -DCH_from_char(FormatNode *node, char *in, TmFromChar *out) +DCH_from_char(FormatNode *node, char *in, TmFromChar *out, bool strict) { FormatNode *n; char *s; @@ -3080,6 +3101,19 @@ DCH_from_char(FormatNode *node, char *in, TmFromChar *out) errmsg("formatting field \"%s\" is only supported in to_char", n->key->name))); break; + case DCH_TZH: + out->tzsign = *s == '-' ? -1 : +1; + + if (*s == '+' || *s == '-' || *s == ' ') + s++; + + from_char_parse_int_len(&out->tzh, &s, 2, n); + break; + case DCH_TZM: + if (!out->tzsign) + out->tzsign = +1; + from_char_parse_int_len(&out->tzm, &s, 2, n); + break; case DCH_A_D: case DCH_B_C: case DCH_a_d: @@ -3243,6 +3277,118 @@ DCH_from_char(FormatNode *node, char *in, TmFromChar *out) break; } } + + if (strict) + { + if (n->type != NODE_TYPE_END) + ereport(ERROR, + (errcode(ERRCODE_INVALID_DATETIME_FORMAT), + errmsg("input string is too short for datetime format"))); + + while (*s == ' ') + s++; + + if (*s != '\0') + ereport(ERROR, + (errcode(ERRCODE_INVALID_DATETIME_FORMAT), + errmsg("trailing characters remain in input string after " + "date time format"))); + } +} + +static int +DCH_datetime_type(FormatNode *node) +{ + FormatNode *n; + int flags = 0; + + for (n = node; n->type != NODE_TYPE_END; n++) + { + if (n->type != NODE_TYPE_ACTION) + continue; + + switch (n->key->id) + { + case DCH_FX: + break; + case DCH_A_M: + case DCH_P_M: + case DCH_a_m: + case DCH_p_m: + case DCH_AM: + case DCH_PM: + case DCH_am: + case DCH_pm: + case DCH_HH: + case DCH_HH12: + case DCH_HH24: + case DCH_MI: + case DCH_SS: + case DCH_MS: /* millisecond */ + case DCH_US: /* microsecond */ + case DCH_SSSS: + flags |= DCH_TIMED; + break; + case DCH_tz: + case DCH_TZ: + case DCH_OF: + ereport(ERROR, + (errcode(ERRCODE_FEATURE_NOT_SUPPORTED), + errmsg("formatting field \"%s\" is only supported in to_char", + n->key->name))); + flags |= DCH_ZONED; + break; + case DCH_TZH: + case DCH_TZM: + flags |= DCH_ZONED; + break; + case DCH_A_D: + case DCH_B_C: + case DCH_a_d: + case DCH_b_c: + case DCH_AD: + case DCH_BC: + case DCH_ad: + case DCH_bc: + case DCH_MONTH: + case DCH_Month: + case DCH_month: + case DCH_MON: + case DCH_Mon: + case DCH_mon: + case DCH_MM: + case DCH_DAY: + case DCH_Day: + case DCH_day: + case DCH_DY: + case DCH_Dy: + case DCH_dy: + case DCH_DDD: + case DCH_IDDD: + case DCH_DD: + case DCH_D: + case DCH_ID: + case DCH_WW: + case DCH_Q: + case DCH_CC: + case DCH_Y_YYY: + case DCH_YYYY: + case DCH_IYYY: + case DCH_YYY: + case DCH_IYY: + case DCH_YY: + case DCH_IY: + case DCH_Y: + case DCH_I: + case DCH_RM: + case DCH_rm: + case DCH_W: + case DCH_J: + flags |= DCH_DATED; + } + } + + return flags; } /* select a DCHCacheEntry to hold the given format picture */ @@ -3544,9 +3690,18 @@ to_timestamp(PG_FUNCTION_ARGS) struct pg_tm tm; fsec_t fsec; - 
do_to_timestamp(date_txt, fmt, &tm, &fsec); + do_to_timestamp(date_txt, VARDATA(fmt), VARSIZE_ANY_EXHDR(fmt), false, + &tm, &fsec, NULL); + + if (tm.tm_zone) + { + int dterr = DecodeTimezone((char *) tm.tm_zone, &tz); - tz = DetermineTimeZoneOffset(&tm, session_timezone); + if (dterr) + DateTimeParseError(dterr, text_to_cstring(date_txt), "timestamptz"); + } + else + tz = DetermineTimeZoneOffset(&tm, session_timezone); if (tm2timestamp(&tm, fsec, &tz, &result) != 0) ereport(ERROR, @@ -3570,7 +3725,8 @@ to_date(PG_FUNCTION_ARGS) struct pg_tm tm; fsec_t fsec; - do_to_timestamp(date_txt, fmt, &tm, &fsec); + do_to_timestamp(date_txt, VARDATA(fmt), VARSIZE_ANY_EXHDR(fmt), false, + &tm, &fsec, NULL); /* Prevent overflow in Julian-day routines */ if (!IS_VALID_JULIAN(tm.tm_year, tm.tm_mon, tm.tm_mday)) @@ -3591,6 +3747,150 @@ to_date(PG_FUNCTION_ARGS) PG_RETURN_DATEADT(result); } +Datum +to_datetime(text *date_txt, const char *fmt, int fmt_len, bool strict, + Oid *typid, int32 *typmod) +{ + struct pg_tm tm; + fsec_t fsec; + int flags; + + do_to_timestamp(date_txt, fmt, fmt_len, strict, &tm, &fsec, &flags); + + *typmod = -1; /* TODO implement FF1, ..., FF9 */ + + if (flags & DCH_DATED) + { + if (flags & DCH_TIMED) + { + if (flags & DCH_ZONED) + { + TimestampTz result; + int tz; + + if (tm.tm_zone) + { + int dterr = DecodeTimezone((char *) tm.tm_zone, &tz); + + if (dterr) + DateTimeParseError(dterr, text_to_cstring(date_txt), + "timestamptz"); + } + else + tz = DetermineTimeZoneOffset(&tm, session_timezone); + + if (tm2timestamp(&tm, fsec, &tz, &result) != 0) + ereport(ERROR, + (errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE), + errmsg("timestamptz out of range"))); + + AdjustTimestampForTypmod(&result, *typmod); + + *typid = TIMESTAMPTZOID; + return TimestampTzGetDatum(result); + } + else + { + Timestamp result; + + if (tm2timestamp(&tm, fsec, NULL, &result) != 0) + ereport(ERROR, + (errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE), + errmsg("timestamp out of range"))); + + AdjustTimestampForTypmod(&result, *typmod); + + *typid = TIMESTAMPOID; + return TimestampGetDatum(result); + } + } + else + { + if (flags & DCH_ZONED) + { + ereport(ERROR, + (errcode(ERRCODE_INVALID_DATETIME_FORMAT), + errmsg("datetime format is zoned but not timed"))); + } + else + { + DateADT result; + + /* Prevent overflow in Julian-day routines */ + if (!IS_VALID_JULIAN(tm.tm_year, tm.tm_mon, tm.tm_mday)) + ereport(ERROR, + (errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE), + errmsg("date out of range: \"%s\"", + text_to_cstring(date_txt)))); + + result = date2j(tm.tm_year, tm.tm_mon, tm.tm_mday) - + POSTGRES_EPOCH_JDATE; + + /* Now check for just-out-of-range dates */ + if (!IS_VALID_DATE(result)) + ereport(ERROR, + (errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE), + errmsg("date out of range: \"%s\"", + text_to_cstring(date_txt)))); + + *typid = DATEOID; + return DateADTGetDatum(result); + } + } + } + else if (flags & DCH_TIMED) + { + if (flags & DCH_ZONED) + { + TimeTzADT *result = palloc(sizeof(TimeTzADT)); + int tz; + + if (tm.tm_zone) + { + int dterr = DecodeTimezone((char *) tm.tm_zone, &tz); + + if (dterr) + DateTimeParseError(dterr, text_to_cstring(date_txt), + "timetz"); + } + else + tz = DetermineTimeZoneOffset(&tm, session_timezone); + + if (tm2timetz(&tm, fsec, tz, result) != 0) + ereport(ERROR, + (errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE), + errmsg("timetz out of range"))); + + AdjustTimeForTypmod(&result->time, *typmod); + + *typid = TIMETZOID; + return TimeTzADTPGetDatum(result); + } + else + { + TimeADT result; + + if 
(tm2time(&tm, fsec, &result) != 0) + ereport(ERROR, + (errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE), + errmsg("time out of range"))); + + AdjustTimeForTypmod(&result, *typmod); + + *typid = TIMEOID; + return TimeADTGetDatum(result); + } + } + else + { + ereport(ERROR, + (errcode(ERRCODE_INVALID_DATETIME_FORMAT), + errmsg("datetime format is not dated and not timed"))); + } + + return (Datum) 0; +} + /* * do_to_timestamp: shared code for to_timestamp and to_date * @@ -3605,12 +3905,12 @@ to_date(PG_FUNCTION_ARGS) * struct 'tm' and 'fsec'. */ static void -do_to_timestamp(text *date_txt, text *fmt, - struct pg_tm *tm, fsec_t *fsec) +do_to_timestamp(text *date_txt, const char *fmt_str, int fmt_len, bool strict, + struct pg_tm *tm, fsec_t *fsec, int *flags) { FormatNode *format; TmFromChar tmfc; - int fmt_len; + char *fmt_tmp = NULL; char *date_str; int fmask; @@ -3621,15 +3921,15 @@ do_to_timestamp(text *date_txt, text *fmt, *fsec = 0; fmask = 0; /* bit mask for ValidateDate() */ - fmt_len = VARSIZE_ANY_EXHDR(fmt); + if (fmt_len < 0) /* zero-terminated */ + fmt_len = strlen(fmt_str); + else if (fmt_len > 0) /* not zero-terminated */ + fmt_str = fmt_tmp = pnstrdup(fmt_str, fmt_len); if (fmt_len) { - char *fmt_str; bool incache; - fmt_str = text_to_cstring(fmt); - if (fmt_len > DCH_CACHE_SIZE) { /* @@ -3659,13 +3959,18 @@ do_to_timestamp(text *date_txt, text *fmt, /* dump_index(DCH_keywords, DCH_index); */ #endif - DCH_from_char(format, date_str, &tmfc); + DCH_from_char(format, date_str, &tmfc, strict); + + if (flags) + *flags = DCH_datetime_type(format); - pfree(fmt_str); if (!incache) pfree(format); } + if (fmt_tmp) + pfree(fmt_tmp); + DEBUG_TMFC(&tmfc); /* @@ -3868,6 +4173,22 @@ do_to_timestamp(text *date_txt, text *fmt, *fsec < INT64CONST(0) || *fsec >= USECS_PER_SEC) DateTimeParseError(DTERR_FIELD_OVERFLOW, date_str, "timestamp"); + if (tmfc.tzsign) + { + char *tz; + + if (tmfc.tzh < 0 || tmfc.tzh > MAX_TZDISP_HOUR || + tmfc.tzm < 0 || tmfc.tzm >= MINS_PER_HOUR) + DateTimeParseError(DTERR_TZDISP_OVERFLOW, date_str, "timestamp"); + + tz = palloc(7); + + snprintf(tz, 7, "%c%02d:%02d", + tmfc.tzsign > 0 ? 
'+' : '-', tmfc.tzh, tmfc.tzm); + + tm->tm_zone = tz; + } + DEBUG_TM(tm); pfree(date_str); diff --git a/src/backend/utils/adt/json.c b/src/backend/utils/adt/json.c index baf1178995..edc3348e81 100644 --- a/src/backend/utils/adt/json.c +++ b/src/backend/utils/adt/json.c @@ -13,6 +13,7 @@ */ #include "postgres.h" +#include "access/hash.h" #include "access/htup_details.h" #include "access/transam.h" #include "catalog/pg_type.h" @@ -66,6 +67,23 @@ typedef enum /* type categories for datum_to_json */ JSONTYPE_OTHER /* all else */ } JsonTypeCategory; +/* Context for key uniqueness check */ +typedef struct JsonUniqueCheckContext +{ + struct JsonKeyInfo + { + int offset; /* key offset: + * in result if positive, + * in skipped_keys if negative */ + int length; /* key length */ + } *keys; /* key info array */ + int nkeys; /* number of processed keys */ + int nallocated; /* number of allocated keys in array */ + StringInfo result; /* resulting json */ + StringInfoData skipped_keys; /* skipped keys with NULL values */ + MemoryContext mcxt; /* context for saving skipped keys */ +} JsonUniqueCheckContext; + typedef struct JsonAggState { StringInfo str; @@ -73,8 +91,23 @@ typedef struct JsonAggState Oid key_output_func; JsonTypeCategory val_category; Oid val_output_func; + JsonUniqueCheckContext unique_check; } JsonAggState; +/* Element of object stack for key uniqueness check */ +typedef struct JsonObjectFields +{ + struct JsonObjectFields *parent; + HTAB *fields; +} JsonObjectFields; + +/* State for key uniqueness check */ +typedef struct JsonUniqueState +{ + JsonLexContext *lex; + JsonObjectFields *stack; +} JsonUniqueState; + static inline void json_lex(JsonLexContext *lex); static inline void json_lex_string(JsonLexContext *lex); static inline void json_lex_number(JsonLexContext *lex, char *s, @@ -106,6 +139,9 @@ static void add_json(Datum val, bool is_null, StringInfo result, Oid val_type, bool key_scalar); static text *catenate_stringinfo_string(StringInfo buffer, const char *addon); +static JsonIterator *JsonIteratorInitFromLex(JsonContainer *jc, + JsonLexContext *lex, JsonIterator *parent); + /* the null action object used for pure validation */ static JsonSemAction nullSemAction = { @@ -126,6 +162,22 @@ lex_peek(JsonLexContext *lex) return lex->token_type; } +static inline char * +lex_peek_value(JsonLexContext *lex) +{ + if (lex->token_type == JSON_TOKEN_STRING) + return lex->strval ? pstrdup(lex->strval->data) : NULL; + else + { + int len = (lex->token_terminator - lex->token_start); + char *tokstr = palloc(len + 1); + + memcpy(tokstr, lex->token_start, len); + tokstr[len] = '\0'; + return tokstr; + } +} + /* * lex_accept * @@ -141,22 +193,8 @@ lex_accept(JsonLexContext *lex, JsonTokenType token, char **lexeme) if (lex->token_type == token) { if (lexeme != NULL) - { - if (lex->token_type == JSON_TOKEN_STRING) - { - if (lex->strval != NULL) - *lexeme = pstrdup(lex->strval->data); - } - else - { - int len = (lex->token_terminator - lex->token_start); - char *tokstr = palloc(len + 1); + *lexeme = lex_peek_value(lex); - memcpy(tokstr, lex->token_start, len); - tokstr[len] = '\0'; - *lexeme = tokstr; - } - } json_lex(lex); return true; } @@ -1874,8 +1912,8 @@ to_json(PG_FUNCTION_ARGS) * * aggregate input column as a json array value. 
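+ *
+ * The worker below also backs json_agg_strict, which differs only in skipping
+ * NULL input values (absent_on_null = true); e.g. aggregating 1, NULL, 2
+ * yields [1, 2] rather than [1, null, 2].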
*/ -Datum -json_agg_transfn(PG_FUNCTION_ARGS) +static Datum +json_agg_transfn_worker(FunctionCallInfo fcinfo, bool absent_on_null) { MemoryContext aggcontext, oldcontext; @@ -1915,9 +1953,14 @@ json_agg_transfn(PG_FUNCTION_ARGS) else { state = (JsonAggState *) PG_GETARG_POINTER(0); - appendStringInfoString(state->str, ", "); } + if (absent_on_null && PG_ARGISNULL(1)) + PG_RETURN_POINTER(state); + + if (state->str->len > 1) + appendStringInfoString(state->str, ", "); + /* fast path for NULLs */ if (PG_ARGISNULL(1)) { @@ -1929,7 +1972,7 @@ json_agg_transfn(PG_FUNCTION_ARGS) val = PG_GETARG_DATUM(1); /* add some whitespace if structured type and not first item */ - if (!PG_ARGISNULL(0) && + if (!PG_ARGISNULL(0) && state->str->len > 1 && (state->val_category == JSONTYPE_ARRAY || state->val_category == JSONTYPE_COMPOSITE)) { @@ -1947,6 +1990,25 @@ json_agg_transfn(PG_FUNCTION_ARGS) PG_RETURN_POINTER(state); } + +/* + * json_agg aggregate function + */ +Datum +json_agg_transfn(PG_FUNCTION_ARGS) +{ + return json_agg_transfn_worker(fcinfo, false); +} + +/* + * json_agg_strict aggregate function + */ +Datum +json_agg_strict_transfn(PG_FUNCTION_ARGS) +{ + return json_agg_transfn_worker(fcinfo, true); +} + /* * json_agg final function */ @@ -1970,18 +2032,115 @@ json_agg_finalfn(PG_FUNCTION_ARGS) PG_RETURN_TEXT_P(catenate_stringinfo_string(state->str, "]")); } +static inline void +json_unique_check_init(JsonUniqueCheckContext *cxt, + StringInfo result, int nkeys) +{ + cxt->mcxt = CurrentMemoryContext; + cxt->nkeys = 0; + cxt->nallocated = nkeys ? nkeys : 16; + cxt->keys = palloc(sizeof(*cxt->keys) * cxt->nallocated); + cxt->result = result; + cxt->skipped_keys.data = NULL; +} + +static inline void +json_unique_check_free(JsonUniqueCheckContext *cxt) +{ + if (cxt->keys) + pfree(cxt->keys); + + if (cxt->skipped_keys.data) + pfree(cxt->skipped_keys.data); +} + +/* On-demand initialization of skipped_keys StringInfo structure */ +static inline StringInfo +json_unique_check_get_skipped_keys(JsonUniqueCheckContext *cxt) +{ + StringInfo out = &cxt->skipped_keys; + + if (!out->data) + { + MemoryContext oldcxt = MemoryContextSwitchTo(cxt->mcxt); + initStringInfo(out); + MemoryContextSwitchTo(oldcxt); + } + + return out; +} + +/* + * Save current key offset (key is not yet appended) to the key list, key + * length is saved later in json_unique_check_key() when the key is appended. + */ +static inline void +json_unique_check_save_key_offset(JsonUniqueCheckContext *cxt, StringInfo out) +{ + if (cxt->nkeys >= cxt->nallocated) + { + cxt->nallocated *= 2; + cxt->keys = repalloc(cxt->keys, sizeof(*cxt->keys) * cxt->nallocated); + } + + cxt->keys[cxt->nkeys++].offset = out->len; +} + +/* + * Check uniqueness of key already appended to 'out' StringInfo. + */ +static inline void +json_unique_check_key(JsonUniqueCheckContext *cxt, StringInfo out) +{ + struct JsonKeyInfo *keys = cxt->keys; + int curr = cxt->nkeys - 1; + int offset = keys[curr].offset; + int length = out->len - offset; + char *curr_key = &out->data[offset]; + int i; + + keys[curr].length = length; /* save current key length */ + + if (out == &cxt->skipped_keys) + /* invert offset for skipped keys for their recognition */ + keys[curr].offset = -keys[curr].offset; + + /* check collisions with previous keys */ + for (i = 0; i < curr; i++) + { + char *prev_key; + + if (cxt->keys[i].length != length) + continue; + + offset = cxt->keys[i].offset; + + prev_key = offset > 0 + ? 
&cxt->result->data[offset] + : &cxt->skipped_keys.data[-offset]; + + if (!memcmp(curr_key, prev_key, length)) + ereport(ERROR, + (errcode(ERRCODE_DUPLICATE_JSON_OBJECT_KEY_VALUE), + errmsg("duplicate JSON key %s", curr_key))); + } +} + /* * json_object_agg transition function. * * aggregate two input columns as a single json object value. */ -Datum -json_object_agg_transfn(PG_FUNCTION_ARGS) +static Datum +json_object_agg_transfn_worker(FunctionCallInfo fcinfo, + bool absent_on_null, bool unique_keys) { MemoryContext aggcontext, oldcontext; JsonAggState *state; + StringInfo out; Datum arg; + bool skip; if (!AggCheckCallContext(fcinfo, &aggcontext)) { @@ -2002,6 +2161,10 @@ json_object_agg_transfn(PG_FUNCTION_ARGS) oldcontext = MemoryContextSwitchTo(aggcontext); state = (JsonAggState *) palloc(sizeof(JsonAggState)); state->str = makeStringInfo(); + if (unique_keys) + json_unique_check_init(&state->unique_check, state->str, 0); + else + memset(&state->unique_check, 0, sizeof(state->unique_check)); MemoryContextSwitchTo(oldcontext); arg_type = get_fn_expr_argtype(fcinfo->flinfo, 1); @@ -2029,7 +2192,6 @@ json_object_agg_transfn(PG_FUNCTION_ARGS) else { state = (JsonAggState *) PG_GETARG_POINTER(0); - appendStringInfoString(state->str, ", "); } /* @@ -2045,11 +2207,41 @@ json_object_agg_transfn(PG_FUNCTION_ARGS) (errcode(ERRCODE_INVALID_PARAMETER_VALUE), errmsg("field name must not be null"))); + /* Skip null values if absent_on_null */ + skip = absent_on_null && PG_ARGISNULL(2); + + if (skip) + { + /* If key uniqueness check is needed we must save skipped keys */ + if (!unique_keys) + PG_RETURN_POINTER(state); + + out = json_unique_check_get_skipped_keys(&state->unique_check); + } + else + { + out = state->str; + + if (out->len > 2) + appendStringInfoString(out, ", "); + } + arg = PG_GETARG_DATUM(1); - datum_to_json(arg, false, state->str, state->key_category, + if (unique_keys) + json_unique_check_save_key_offset(&state->unique_check, out); + + datum_to_json(arg, false, out, state->key_category, state->key_output_func, true); + if (unique_keys) + { + json_unique_check_key(&state->unique_check, out); + + if (skip) + PG_RETURN_POINTER(state); + } + appendStringInfoString(state->str, " : "); if (PG_ARGISNULL(2)) @@ -2063,6 +2255,26 @@ json_object_agg_transfn(PG_FUNCTION_ARGS) PG_RETURN_POINTER(state); } +/* + * json_object_agg aggregate function + */ +Datum +json_object_agg_transfn(PG_FUNCTION_ARGS) +{ + return json_object_agg_transfn_worker(fcinfo, false, false); +} + +/* + * json_objectagg aggregate function + */ +Datum +json_objectagg_transfn(PG_FUNCTION_ARGS) +{ + return json_object_agg_transfn_worker(fcinfo, + PG_GETARG_BOOL(3), + PG_GETARG_BOOL(4)); +} + /* * json_object_agg final function. 
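+ *
+ * It now also releases the key-uniqueness check state, if any, accumulated by
+ * the transition function (see the json_unique_check_free() call below).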
*/ @@ -2080,6 +2292,8 @@ json_object_agg_finalfn(PG_FUNCTION_ARGS) if (state == NULL) PG_RETURN_NULL(); + json_unique_check_free(&state->unique_check); + /* Else return state with appropriate object terminator added */ PG_RETURN_TEXT_P(catenate_stringinfo_string(state->str, " }")); } @@ -2104,11 +2318,9 @@ catenate_stringinfo_string(StringInfo buffer, const char *addon) return result; } -/* - * SQL function json_build_object(variadic "any") - */ -Datum -json_build_object(PG_FUNCTION_ARGS) +static Datum +json_build_object_worker(FunctionCallInfo fcinfo, int first_vararg, + bool absent_on_null, bool unique_keys) { int nargs = PG_NARGS(); int i; @@ -2117,9 +2329,11 @@ json_build_object(PG_FUNCTION_ARGS) Datum *args; bool *nulls; Oid *types; + JsonUniqueCheckContext unique_check; /* fetch argument values to build the object */ - nargs = extract_variadic_args(fcinfo, 0, false, &args, &types, &nulls); + nargs = extract_variadic_args(fcinfo, first_vararg, false, + &args, &types, &nulls); if (nargs < 0) PG_RETURN_NULL(); @@ -2134,19 +2348,53 @@ json_build_object(PG_FUNCTION_ARGS) appendStringInfoChar(result, '{'); + if (unique_keys) + json_unique_check_init(&unique_check, result, nargs / 2); + for (i = 0; i < nargs; i += 2) { - appendStringInfoString(result, sep); - sep = ", "; + StringInfo out; + bool skip; + + /* Skip null values if absent_on_null */ + skip = absent_on_null && nulls[i + 1]; + + if (skip) + { + /* If key uniqueness check is needed we must save skipped keys */ + if (!unique_keys) + continue; + + out = json_unique_check_get_skipped_keys(&unique_check); + } + else + { + appendStringInfoString(result, sep); + sep = ", "; + out = result; + } /* process key */ if (nulls[i]) ereport(ERROR, (errcode(ERRCODE_INVALID_PARAMETER_VALUE), - errmsg("argument %d cannot be null", i + 1), + errmsg("argument %d cannot be null", first_vararg + i + 1), errhint("Object keys should be text."))); - add_json(args[i], false, result, types[i], true); + if (unique_keys) + /* save key offset before key appending */ + json_unique_check_save_key_offset(&unique_check, out); + + add_json(args[i], false, out, types[i], true); + + if (unique_keys) + { + /* check key uniqueness after key appending */ + json_unique_check_key(&unique_check, out); + + if (skip) + continue; + } appendStringInfoString(result, " : "); @@ -2156,23 +2404,43 @@ json_build_object(PG_FUNCTION_ARGS) appendStringInfoChar(result, '}'); + if (unique_keys) + json_unique_check_free(&unique_check); + PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len)); } /* - * degenerate case of json_build_object where it gets 0 arguments. + * SQL function json_build_object(variadic "any") */ Datum -json_build_object_noargs(PG_FUNCTION_ARGS) +json_build_object(PG_FUNCTION_ARGS) { - PG_RETURN_TEXT_P(cstring_to_text_with_len("{}", 2)); + return json_build_object_worker(fcinfo, 0, false, false); } /* - * SQL function json_build_array(variadic "any") + * SQL function json_build_object_ext(absent_on_null bool, unique bool, variadic "any") */ Datum -json_build_array(PG_FUNCTION_ARGS) +json_build_object_ext(PG_FUNCTION_ARGS) +{ + return json_build_object_worker(fcinfo, 2, + PG_GETARG_BOOL(0), PG_GETARG_BOOL(1)); +} + +/* + * degenerate case of json_build_object where it gets 0 arguments. 
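+ * (A JSON_OBJECT() constructor with no key-value pairs is transformed to this
+ * function and simply yields the text "{}".)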
+ */ +Datum +json_build_object_noargs(PG_FUNCTION_ARGS) +{ + PG_RETURN_TEXT_P(cstring_to_text_with_len("{}", 2)); +} + +static Datum +json_build_array_worker(FunctionCallInfo fcinfo, int first_vararg, + bool absent_on_null) { int nargs; int i; @@ -2183,7 +2451,8 @@ json_build_array(PG_FUNCTION_ARGS) Oid *types; /* fetch argument values to build the array */ - nargs = extract_variadic_args(fcinfo, 0, false, &args, &types, &nulls); + nargs = extract_variadic_args(fcinfo, first_vararg, false, + &args, &types, &nulls); if (nargs < 0) PG_RETURN_NULL(); @@ -2194,6 +2463,9 @@ json_build_array(PG_FUNCTION_ARGS) for (i = 0; i < nargs; i++) { + if (absent_on_null && nulls[i]) + continue; + appendStringInfoString(result, sep); sep = ", "; add_json(args[i], nulls[i], result, types[i], false); @@ -2204,6 +2476,24 @@ json_build_array(PG_FUNCTION_ARGS) PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len)); } +/* + * SQL function json_build_array(variadic "any") + */ +Datum +json_build_array(PG_FUNCTION_ARGS) +{ + return json_build_array_worker(fcinfo, 0, false); +} + +/* + * SQL function json_build_array_ext(absent_on_null bool, variadic "any") + */ +Datum +json_build_array_ext(PG_FUNCTION_ARGS) +{ + return json_build_array_worker(fcinfo, 1, PG_GETARG_BOOL(0)); +} + /* * degenerate case of json_build_array where it gets 0 arguments. */ @@ -2435,6 +2725,178 @@ escape_json(StringInfo buf, const char *str) appendStringInfoCharMacro(buf, '"'); } +/* Functions implementing hash table for key uniqueness check */ +static int +json_unique_hash_match(const void *key1, const void *key2, Size keysize) +{ + return strcmp(*(const char **) key1, *(const char **) key2); +} + +static void * +json_unique_hash_keycopy(void *dest, const void *src, Size keysize) +{ + *(const char **) dest = pstrdup(*(const char **) src); + + return dest; +} + +static uint32 +json_unique_hash(const void *key, Size keysize) +{ + const char *s = *(const char **) key; + + return DatumGetUInt32(hash_any((const unsigned char *) s, (int) strlen(s))); +} + +/* Semantic actions for key uniqueness check */ +static void +json_unique_object_start(void *_state) +{ + JsonUniqueState *state = _state; + JsonObjectFields *obj = palloc(sizeof(*obj)); + HASHCTL ctl; + + memset(&ctl, 0, sizeof(ctl)); + ctl.keysize = sizeof(char *); + ctl.entrysize = sizeof(char *); + ctl.hcxt = CurrentMemoryContext; + ctl.hash = json_unique_hash; + ctl.keycopy = json_unique_hash_keycopy; + ctl.match = json_unique_hash_match; + obj->fields = hash_create("json object hashtable", + 32, + &ctl, + HASH_ELEM | HASH_CONTEXT | + HASH_FUNCTION | HASH_COMPARE | HASH_KEYCOPY); + obj->parent = state->stack; /* push object to stack */ + + state->stack = obj; +} + +static void +json_unique_object_end(void *_state) +{ + JsonUniqueState *state = _state; + + hash_destroy(state->stack->fields); + + state->stack = state->stack->parent; /* pop object from stack */ +} + +static void +json_unique_object_field_start(void *_state, char *field, bool isnull) +{ + JsonUniqueState *state = _state; + bool found; + + /* find key collision in the current object */ + (void) hash_search(state->stack->fields, &field, HASH_ENTER, &found); + + if (found) + ereport(ERROR, + (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION), + errmsg("duplicate JSON key \"%s\"", field), + report_json_context(state->lex))); +} + +/* + * json_is_valid -- check json text validity, its value type and key uniqueness + */ +Datum +json_is_valid(PG_FUNCTION_ARGS) +{ + text *json = PG_GETARG_TEXT_P(0); + text *type = 
PG_GETARG_TEXT_P(1); + bool unique = PG_GETARG_BOOL(2); + MemoryContext mcxt = CurrentMemoryContext; + + if (PG_ARGISNULL(0)) + PG_RETURN_NULL(); + + if (!PG_ARGISNULL(1) && + strncmp("any", VARDATA(type), VARSIZE_ANY_EXHDR(type))) + { + JsonLexContext *lex; + JsonTokenType tok; + + lex = makeJsonLexContext(json, false); + + /* Lex exactly one token from the input and check its type. */ + PG_TRY(); + { + json_lex(lex); + } + PG_CATCH(); + { + if (ERRCODE_TO_CATEGORY(geterrcode()) == ERRCODE_DATA_EXCEPTION) + { + FlushErrorState(); + MemoryContextSwitchTo(mcxt); + PG_RETURN_BOOL(false); /* invalid json */ + } + PG_RE_THROW(); + } + PG_END_TRY(); + + tok = lex_peek(lex); + + if (!strncmp("object", VARDATA(type), VARSIZE_ANY_EXHDR(type))) + { + if (tok != JSON_TOKEN_OBJECT_START) + PG_RETURN_BOOL(false); /* json is not a object */ + } + else if (!strncmp("array", VARDATA(type), VARSIZE_ANY_EXHDR(type))) + { + if (tok != JSON_TOKEN_ARRAY_START) + PG_RETURN_BOOL(false); /* json is not an array */ + } + else + { + if (tok == JSON_TOKEN_OBJECT_START || + tok == JSON_TOKEN_ARRAY_START) + PG_RETURN_BOOL(false); /* json is not a scalar */ + } + } + + /* do full parsing pass only for uniqueness check or JSON text validation */ + if (unique || + get_fn_expr_argtype(fcinfo->flinfo, 0) != JSONOID) + { + JsonLexContext *lex = makeJsonLexContext(json, unique); + JsonSemAction uniqueSemAction = {0}; + JsonUniqueState state; + + if (unique) + { + state.lex = lex; + state.stack = NULL; + + uniqueSemAction.semstate = &state; + uniqueSemAction.object_start = json_unique_object_start; + uniqueSemAction.object_field_start = json_unique_object_field_start; + uniqueSemAction.object_end = json_unique_object_end; + } + + PG_TRY(); + { + pg_parse_json(lex, unique ? &uniqueSemAction : &nullSemAction); + } + PG_CATCH(); + { + if (ERRCODE_TO_CATEGORY(geterrcode()) == ERRCODE_DATA_EXCEPTION) + { + FlushErrorState(); + MemoryContextSwitchTo(mcxt); + PG_RETURN_BOOL(false); /* invalid json or key collision found */ + } + PG_RE_THROW(); + } + PG_END_TRY(); + } + + PG_RETURN_BOOL(true); /* ok */ +} + /* * SQL function json_typeof(json) -> text * @@ -2489,3 +2951,825 @@ json_typeof(PG_FUNCTION_ARGS) PG_RETURN_TEXT_P(cstring_to_text(type)); } + +static void +jsonInitContainer(JsonContainerData *jc, char *json, int len, int type, + int size) +{ + if (size < 0 || size > JB_CMASK) + size = JB_CMASK; /* unknown size */ + + jc->data = json; + jc->len = len; + jc->header = type | size; +} + +/* + * Initialize a JsonContainer from a text datum. + */ +static void +jsonInit(JsonContainerData *jc, Datum value) +{ + text *json = DatumGetTextP(value); + JsonLexContext *lex = makeJsonLexContext(json, false); + JsonTokenType tok; + int type; + int size = -1; + + /* Lex exactly one token from the input and check its type. 
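+	 * The first token is enough to classify the document: '{' starts an
+	 * object, '[' an array, and any scalar token is treated as a
+	 * single-element raw-scalar pseudo-array, matching the jsonb convention.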
*/ + json_lex(lex); + tok = lex_peek(lex); + + switch (tok) + { + case JSON_TOKEN_OBJECT_START: + type = JB_FOBJECT; + lex_accept(lex, tok, NULL); + if (lex_peek(lex) == JSON_TOKEN_OBJECT_END) + size = 0; + break; + case JSON_TOKEN_ARRAY_START: + type = JB_FARRAY; + lex_accept(lex, tok, NULL); + if (lex_peek(lex) == JSON_TOKEN_ARRAY_END) + size = 0; + break; + case JSON_TOKEN_STRING: + case JSON_TOKEN_NUMBER: + case JSON_TOKEN_TRUE: + case JSON_TOKEN_FALSE: + case JSON_TOKEN_NULL: + type = JB_FARRAY | JB_FSCALAR; + size = 1; + break; + default: + elog(ERROR, "unexpected json token: %d", tok); + type = jbvNull; + break; + } + + pfree(lex); + + jsonInitContainer(jc, VARDATA(json), VARSIZE(json) - VARHDRSZ, type, size); +} + +/* + * Wrap JSON text into a palloc()'d Json structure. + */ +Json * +JsonCreate(text *json) +{ + Json *res = palloc0(sizeof(*res)); + + jsonInit((JsonContainerData *) &res->root, PointerGetDatum(json)); + + return res; +} + +static bool +jsonFillValue(JsonIterator **pit, JsonbValue *res, bool skipNested, + JsontIterState nextState) +{ + JsonIterator *it = *pit; + JsonLexContext *lex = it->lex; + JsonTokenType tok = lex_peek(lex); + + switch (tok) + { + case JSON_TOKEN_NULL: + res->type = jbvNull; + break; + + case JSON_TOKEN_TRUE: + res->type = jbvBool; + res->val.boolean = true; + break; + + case JSON_TOKEN_FALSE: + res->type = jbvBool; + res->val.boolean = false; + break; + + case JSON_TOKEN_STRING: + { + char *token = lex_peek_value(lex); + res->type = jbvString; + res->val.string.val = token; + res->val.string.len = strlen(token); + break; + } + + case JSON_TOKEN_NUMBER: + { + char *token = lex_peek_value(lex); + res->type = jbvNumeric; + res->val.numeric = DatumGetNumeric(DirectFunctionCall3( + numeric_in, CStringGetDatum(token), 0, -1)); + break; + } + + case JSON_TOKEN_OBJECT_START: + case JSON_TOKEN_ARRAY_START: + { + JsonContainerData *cont = palloc(sizeof(*cont)); + char *token_start = lex->token_start; + int len; + + if (skipNested) + { + /* find the end of a container for its length calculation */ + if (tok == JSON_TOKEN_OBJECT_START) + parse_object(lex, &nullSemAction); + else + parse_array(lex, &nullSemAction); + + len = lex->token_start - token_start; + } + else + len = lex->input_length - (lex->token_start - lex->input); + + jsonInitContainer(cont, + token_start, len, + tok == JSON_TOKEN_OBJECT_START ? + JB_FOBJECT : JB_FARRAY, + -1); + + res->type = jbvBinary; + res->val.binary.data = (JsonbContainer *) cont; + res->val.binary.len = len; + + if (skipNested) + return false; + + /* recurse into container */ + it->state = nextState; + *pit = JsonIteratorInitFromLex(cont, lex, *pit); + return true; + } + + default: + report_parse_error(JSON_PARSE_VALUE, lex); + } + + lex_accept(lex, tok, NULL); + + return false; +} + +static inline JsonIterator * +JsonIteratorFreeAndGetParent(JsonIterator *it) +{ + JsonIterator *parent = it->parent; + + pfree(it); + + return parent; +} + +/* + * Free a whole stack of JsonIterator iterators. + */ +void +JsonIteratorFree(JsonIterator *it) +{ + while (it) + it = JsonIteratorFreeAndGetParent(it); +} + +/* + * Get next JsonbValue while iterating through JsonContainer. + * + * For more details, see JsonbIteratorNext(). 
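+ *
+ * Unlike the jsonb case, the source here is plain JSON text, so values are
+ * produced lazily by recursive-descent parsing: entering a nested container
+ * pushes a child iterator (see JsonIteratorInitFromLex()), and reaching the
+ * container's end pops back to the parent.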
+ */ +JsonbIteratorToken +JsonIteratorNext(JsonIterator **pit, JsonbValue *val, bool skipNested) +{ + JsonIterator *it; + + if (*pit == NULL) + return WJB_DONE; + +recurse: + it = *pit; + + /* parse by recursive descent */ + switch (it->state) + { + case JTI_ARRAY_START: + val->type = jbvArray; + val->val.array.nElems = it->isScalar ? 1 : -1; + val->val.array.rawScalar = it->isScalar; + val->val.array.elems = NULL; + it->state = it->isScalar ? JTI_ARRAY_ELEM_SCALAR : JTI_ARRAY_ELEM; + return WJB_BEGIN_ARRAY; + + case JTI_ARRAY_ELEM_SCALAR: + { + (void) jsonFillValue(pit, val, skipNested, JTI_ARRAY_END); + it->state = JTI_ARRAY_END; + return WJB_ELEM; + } + + case JTI_ARRAY_END: + if (!it->parent && lex_peek(it->lex) != JSON_TOKEN_END) + report_parse_error(JSON_PARSE_END, it->lex); + *pit = JsonIteratorFreeAndGetParent(*pit); + return WJB_END_ARRAY; + + case JTI_ARRAY_ELEM: + if (lex_accept(it->lex, JSON_TOKEN_ARRAY_END, NULL)) + { + it->state = JTI_ARRAY_END; + goto recurse; + } + + if (jsonFillValue(pit, val, skipNested, JTI_ARRAY_ELEM_AFTER)) + goto recurse; + + /* fall through */ + + case JTI_ARRAY_ELEM_AFTER: + if (!lex_accept(it->lex, JSON_TOKEN_COMMA, NULL)) + { + if (lex_peek(it->lex) != JSON_TOKEN_ARRAY_END) + report_parse_error(JSON_PARSE_ARRAY_NEXT, it->lex); + } + + if (it->state == JTI_ARRAY_ELEM_AFTER) + { + it->state = JTI_ARRAY_ELEM; + goto recurse; + } + + return WJB_ELEM; + + case JTI_OBJECT_START: + val->type = jbvObject; + val->val.object.nPairs = -1; + val->val.object.pairs = NULL; + val->val.object.uniquified = false; + it->state = JTI_OBJECT_KEY; + return WJB_BEGIN_OBJECT; + + case JTI_OBJECT_KEY: + if (lex_accept(it->lex, JSON_TOKEN_OBJECT_END, NULL)) + { + if (!it->parent && lex_peek(it->lex) != JSON_TOKEN_END) + report_parse_error(JSON_PARSE_END, it->lex); + *pit = JsonIteratorFreeAndGetParent(*pit); + return WJB_END_OBJECT; + } + + if (lex_peek(it->lex) != JSON_TOKEN_STRING) + report_parse_error(JSON_PARSE_OBJECT_START, it->lex); + + (void) jsonFillValue(pit, val, true, JTI_OBJECT_VALUE); + + if (!lex_accept(it->lex, JSON_TOKEN_COLON, NULL)) + report_parse_error(JSON_PARSE_OBJECT_LABEL, it->lex); + + it->state = JTI_OBJECT_VALUE; + return WJB_KEY; + + case JTI_OBJECT_VALUE: + if (jsonFillValue(pit, val, skipNested, JTI_OBJECT_VALUE_AFTER)) + goto recurse; + + /* fall through */ + + case JTI_OBJECT_VALUE_AFTER: + if (!lex_accept(it->lex, JSON_TOKEN_COMMA, NULL)) + { + if (lex_peek(it->lex) != JSON_TOKEN_OBJECT_END) + report_parse_error(JSON_PARSE_OBJECT_NEXT, it->lex); + } + + if (it->state == JTI_OBJECT_VALUE_AFTER) + { + it->state = JTI_OBJECT_KEY; + goto recurse; + } + + it->state = JTI_OBJECT_KEY; + return WJB_VALUE; + + default: + break; + } + + return WJB_DONE; +} + +static JsonIterator * +JsonIteratorInitFromLex(JsonContainer *jc, JsonLexContext *lex, + JsonIterator *parent) +{ + JsonIterator *it = palloc(sizeof(JsonIterator)); + JsonTokenType tok; + + it->container = jc; + it->parent = parent; + it->lex = lex; + + tok = lex_peek(it->lex); + + switch (tok) + { + case JSON_TOKEN_OBJECT_START: + it->isScalar = false; + it->state = JTI_OBJECT_START; + lex_accept(it->lex, tok, NULL); + break; + case JSON_TOKEN_ARRAY_START: + it->isScalar = false; + it->state = JTI_ARRAY_START; + lex_accept(it->lex, tok, NULL); + break; + case JSON_TOKEN_STRING: + case JSON_TOKEN_NUMBER: + case JSON_TOKEN_TRUE: + case JSON_TOKEN_FALSE: + case JSON_TOKEN_NULL: + it->isScalar = true; + it->state = JTI_ARRAY_START; + break; + default: + report_parse_error(JSON_PARSE_VALUE, it->lex); 
+ } + + return it; +} + +/* + * Given a JsonContainer, expand to JsonIterator to iterate over items + * fully expanded to in-memory representation for manipulation. + * + * See JsonbIteratorNext() for notes on memory management. + */ +JsonIterator * +JsonIteratorInit(JsonContainer *jc) +{ + JsonLexContext *lex = makeJsonLexContextCstringLen(jc->data, jc->len, true); + json_lex(lex); + return JsonIteratorInitFromLex(jc, lex, NULL); +} + +/* + * Serialize a single JsonbValue into text buffer. + */ +static void +JsonEncodeJsonbValue(StringInfo buf, JsonbValue *jbv) +{ + check_stack_depth(); + + switch (jbv->type) + { + case jbvNull: + appendBinaryStringInfo(buf, "null", 4); + break; + + case jbvBool: + if (jbv->val.boolean) + appendBinaryStringInfo(buf, "true", 4); + else + appendBinaryStringInfo(buf, "false", 5); + break; + + case jbvNumeric: + appendStringInfoString(buf, DatumGetCString(DirectFunctionCall1( + numeric_out, NumericGetDatum(jbv->val.numeric)))); + break; + + case jbvString: + { + char *str = jbv->val.string.len < 0 ? jbv->val.string.val : + pnstrdup(jbv->val.string.val, jbv->val.string.len); + + escape_json(buf, str); + + if (jbv->val.string.len >= 0) + pfree(str); + + break; + } + + case jbvDatetime: + { + char *str; + PGFunction typoutput; + + switch (jbv->val.datetime.typid) + { + case DATEOID: + typoutput = date_out; + break; + case TIMEOID: + typoutput = time_out; + break; + case TIMETZOID: + typoutput = timetz_out; + break; + case TIMESTAMPOID: + typoutput = timestamp_out; + break; + case TIMESTAMPTZOID: + typoutput = timestamptz_out; + break; + default: + elog(ERROR, "unknown jsonb value datetime type oid %d", + jbv->val.datetime.typid); + } + + str = DatumGetCString( + DirectFunctionCall1(typoutput, jbv->val.datetime.value)); + + escape_json(buf, str); + pfree(str); + break; + } + + case jbvArray: + { + int i; + + if (!jbv->val.array.rawScalar) + appendStringInfoChar(buf, '['); + + for (i = 0; i < jbv->val.array.nElems; i++) + { + if (i > 0) + appendBinaryStringInfo(buf, ", ", 2); + + JsonEncodeJsonbValue(buf, &jbv->val.array.elems[i]); + } + + if (!jbv->val.array.rawScalar) + appendStringInfoChar(buf, ']'); + + break; + } + + case jbvObject: + { + int i; + + appendStringInfoChar(buf, '{'); + + for (i = 0; i < jbv->val.object.nPairs; i++) + { + if (i > 0) + appendBinaryStringInfo(buf, ", ", 2); + + JsonEncodeJsonbValue(buf, &jbv->val.object.pairs[i].key); + appendBinaryStringInfo(buf, ": ", 2); + JsonEncodeJsonbValue(buf, &jbv->val.object.pairs[i].value); + } + + appendStringInfoChar(buf, '}'); + break; + } + + case jbvBinary: + { + JsonContainer *json = (JsonContainer *) jbv->val.binary.data; + + appendBinaryStringInfo(buf, json->data, json->len); + break; + } + + default: + elog(ERROR, "unknown jsonb value type: %d", jbv->type); + break; + } +} + +/* + * Turn an in-memory JsonbValue into a json for on-disk storage. 
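+ *
+ * Unlike its jsonb counterpart JsonbValueToJsonb(), this simply re-serializes
+ * the value into JSON text with JsonEncodeJsonbValue() and wraps the result
+ * in a JsonContainer header; jbvBinary values only have their container
+ * copied.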
+ */ +Json * +JsonbValueToJson(JsonbValue *jbv) +{ + StringInfoData buf; + Json *json = palloc0(sizeof(*json)); + int type; + int size; + + if (jbv->type == jbvBinary) + { + /* simply copy the whole container and its data */ + JsonContainer *src = (JsonContainer *) jbv->val.binary.data; + JsonContainerData *dst = (JsonContainerData *) &json->root; + + *dst = *src; + dst->data = memcpy(palloc(src->len), src->data, src->len); + + return json; + } + + initStringInfo(&buf); + + JsonEncodeJsonbValue(&buf, jbv); + + switch (jbv->type) + { + case jbvArray: + type = JB_FARRAY; + size = jbv->val.array.nElems; + break; + + case jbvObject: + type = JB_FOBJECT; + size = jbv->val.object.nPairs; + break; + + default: /* scalar */ + type = JB_FARRAY | JB_FSCALAR; + size = 1; + break; + } + + jsonInitContainer((JsonContainerData *) &json->root, + buf.data, buf.len, type, size); + + return json; +} + +/* Context and semantic actions for JsonGetArraySize() */ +typedef struct JsonGetArraySizeState +{ + int level; + uint32 size; +} JsonGetArraySizeState; + +static void +JsonGetArraySize_array_start(void *state) +{ + ((JsonGetArraySizeState *) state)->level++; +} + +static void +JsonGetArraySize_array_end(void *state) +{ + ((JsonGetArraySizeState *) state)->level--; +} + +static void +JsonGetArraySize_array_element_start(void *state, bool isnull) +{ + JsonGetArraySizeState *s = state; + if (s->level == 1) + s->size++; +} + +/* + * Calculate the size of a json array by iterating through its elements. + */ +uint32 +JsonGetArraySize(JsonContainer *jc) +{ + JsonLexContext *lex = makeJsonLexContextCstringLen(jc->data, jc->len, false); + JsonSemAction sem; + JsonGetArraySizeState state; + + state.level = 0; + state.size = 0; + + memset(&sem, 0, sizeof(sem)); + sem.semstate = &state; + sem.array_start = JsonGetArraySize_array_start; + sem.array_end = JsonGetArraySize_array_end; + sem.array_element_end = JsonGetArraySize_array_element_start; + + json_lex(lex); + parse_array(lex, &sem); + + return state.size; +} + +/* + * Find last key in a json object by name. Returns palloc()'d copy of the + * corresponding value, or NULL if is not found. + */ +static inline JsonbValue * +jsonFindLastKeyInObject(JsonContainer *obj, const JsonbValue *key) +{ + JsonbValue *res = NULL; + JsonbValue jbv; + JsonIterator *it; + JsonbIteratorToken tok; + + Assert(JsonContainerIsObject(obj)); + Assert(key->type == jbvString); + + it = JsonIteratorInit(obj); + + while ((tok = JsonIteratorNext(&it, &jbv, true)) != WJB_DONE) + { + if (tok == WJB_KEY && !lengthCompareJsonbStringValue(key, &jbv)) + { + if (!res) + res = palloc(sizeof(*res)); + + tok = JsonIteratorNext(&it, res, true); + Assert(tok == WJB_VALUE); + } + } + + return res; +} + +/* + * Find scalar element in a array. Returns palloc()'d copy of value or NULL. + */ +static JsonbValue * +jsonFindValueInArray(JsonContainer *array, const JsonbValue *elem) +{ + JsonbValue *val = palloc(sizeof(*val)); + JsonIterator *it; + JsonbIteratorToken tok; + + Assert(JsonContainerIsArray(array)); + Assert(IsAJsonbScalar(elem)); + + it = JsonIteratorInit(array); + + while ((tok = JsonIteratorNext(&it, val, true)) != WJB_DONE) + { + if (tok == WJB_ELEM && val->type == elem->type && + equalsJsonbScalarValue(val, (JsonbValue *) elem)) + { + JsonIteratorFree(it); + return val; + } + } + + pfree(val); + return NULL; +} + +/* + * Find value in object (i.e. the "value" part of some key/value pair in an + * object), or find a matching element if we're looking through an array. 
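+ *
+ * Plain json containers are not binary-searchable the way jsonb containers
+ * are, so the helpers called here perform a linear scan with a JsonIterator.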
+ * The "flags" argument allows the caller to specify which container types are + * of interest. If we cannot find the value, return NULL. Otherwise, return + * palloc()'d copy of value. + * + * For more details, see findJsonbValueFromContainer(). + */ +JsonbValue * +findJsonValueFromContainer(JsonContainer *jc, uint32 flags, JsonbValue *key) +{ + Assert((flags & ~(JB_FARRAY | JB_FOBJECT)) == 0); + + if (!JsonContainerSize(jc)) + return NULL; + + if ((flags & JB_FARRAY) && JsonContainerIsArray(jc)) + return jsonFindValueInArray(jc, key); + + if ((flags & JB_FOBJECT) && JsonContainerIsObject(jc)) + return jsonFindLastKeyInObject(jc, key); + + /* Not found */ + return NULL; +} + +/* + * Get i-th element of a json array. + * + * Returns palloc()'d copy of the value, or NULL if it does not exist. + */ +JsonbValue * +getIthJsonValueFromContainer(JsonContainer *array, uint32 index) +{ + JsonbValue *val = palloc(sizeof(JsonbValue)); + JsonIterator *it; + JsonbIteratorToken tok; + + Assert(JsonContainerIsArray(array)); + + it = JsonIteratorInit(array); + + while ((tok = JsonIteratorNext(&it, val, true)) != WJB_DONE) + { + if (tok == WJB_ELEM) + { + if (index-- == 0) + { + JsonIteratorFree(it); + return val; + } + } + } + + pfree(val); + + return NULL; +} + +/* + * Push json JsonbValue into JsonbParseState. + * + * Used for converting an in-memory JsonbValue to a json. For more details, + * see pushJsonbValue(). This function differs from pushJsonbValue() only by + * resetting "uniquified" flag in objects. + */ +JsonbValue * +pushJsonValue(JsonbParseState **pstate, JsonbIteratorToken seq, + JsonbValue *jbval) +{ + JsonIterator *it; + JsonbValue *res = NULL; + JsonbValue v; + JsonbIteratorToken tok; + + if (!jbval || (seq != WJB_ELEM && seq != WJB_VALUE) || + jbval->type != jbvBinary) + { + /* drop through */ + res = pushJsonbValueScalar(pstate, seq, jbval); + + /* reset "uniquified" flag of objects */ + if (seq == WJB_BEGIN_OBJECT) + (*pstate)->contVal.val.object.uniquified = false; + + return res; + } + + /* unpack the binary and add each piece to the pstate */ + it = JsonIteratorInit((JsonContainer *) jbval->val.binary.data); + while ((tok = JsonIteratorNext(&it, &v, false)) != WJB_DONE) + { + res = pushJsonbValueScalar(pstate, tok, + tok < WJB_BEGIN_ARRAY ? &v : NULL); + + /* reset "uniquified" flag of objects */ + if (tok == WJB_BEGIN_OBJECT) + (*pstate)->contVal.val.object.uniquified = false; + } + + return res; +} + +JsonbValue * +JsonExtractScalar(JsonContainer *jbc, JsonbValue *res) +{ + JsonIterator *it = JsonIteratorInit(jbc); + JsonbIteratorToken tok PG_USED_FOR_ASSERTS_ONLY; + JsonbValue tmp; + + tok = JsonIteratorNext(&it, &tmp, true); + Assert(tok == WJB_BEGIN_ARRAY); + Assert(tmp.val.array.nElems == 1 && tmp.val.array.rawScalar); + + tok = JsonIteratorNext(&it, res, true); + Assert(tok == WJB_ELEM); + Assert(IsAJsonbScalar(res)); + + tok = JsonIteratorNext(&it, &tmp, true); + Assert(tok == WJB_END_ARRAY); + + return res; +} + +/* + * Turn a Json into its C-string representation with stripping quotes from + * scalar strings. + */ +char * +JsonUnquote(Json *jb) +{ + if (JsonContainerIsScalar(&jb->root)) + { + JsonbValue v; + + JsonExtractScalar(&jb->root, &v); + + if (v.type == jbvString) + return pnstrdup(v.val.string.val, v.val.string.len); + } + + return pnstrdup(jb->root.data, jb->root.len); +} + +/* + * Turn a JsonContainer into its C-string representation. 
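+ *
+ * For json the container already holds the original text, so this is just a
+ * copy; "estimated_len" is accepted for symmetry with JsonbToCString() and is
+ * currently unused.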
+ */
+char *
+JsonToCString(StringInfo out, JsonContainer *jc, int estimated_len)
+{
+ if (out)
+ {
+ appendBinaryStringInfo(out, jc->data, jc->len);
+ return out->data;
+ }
+ else
+ {
+ char *str = palloc(jc->len + 1);
+
+ memcpy(str, jc->data, jc->len);
+ str[jc->len] = 0;
+
+ return str;
+ }
+}
diff --git a/src/backend/utils/adt/jsonb.c b/src/backend/utils/adt/jsonb.c
index 4b2a541128..74c880931a 100644
--- a/src/backend/utils/adt/jsonb.c
+++ b/src/backend/utils/adt/jsonb.c
@@ -52,6 +52,16 @@ typedef enum /* type categories for datum_to_jsonb */
 JSONBTYPE_OTHER /* all else */
 } JsonbTypeCategory;
+/* Context for key uniqueness check */
+typedef struct JsonbUniqueCheckContext
+{
+ JsonbValue *obj; /* object containing skipped keys also */
+ int *skipped_keys; /* array of skipped key-value pair indices */
+ int skipped_keys_allocated;
+ int skipped_keys_count;
+ MemoryContext mcxt; /* context for saving skipped keys */
+} JsonbUniqueCheckContext;
+
 typedef struct JsonbAggState
 {
 JsonbInState *res;
@@ -59,6 +69,7 @@ typedef struct JsonbAggState
 Oid key_output_func;
 JsonbTypeCategory val_category;
 Oid val_output_func;
+ JsonbUniqueCheckContext unique_check;
 } JsonbAggState;
 static inline Datum jsonb_from_cstring(char *json, int len);
@@ -1166,11 +1177,121 @@ to_jsonb(PG_FUNCTION_ARGS)
 PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
 }
+static inline void
+jsonb_unique_check_init(JsonbUniqueCheckContext *cxt, JsonbValue *obj,
+ MemoryContext mcxt)
+{
+ cxt->mcxt = mcxt;
+ cxt->obj = obj;
+ cxt->skipped_keys = NULL;
+ cxt->skipped_keys_count = 0;
+ cxt->skipped_keys_allocated = 0;
+}
+
 /*
- * SQL function jsonb_build_object(variadic "any")
+ * Save the index of the skipped key-value pair that has just been appended
+ * to the object.
 */
-Datum
-jsonb_build_object(PG_FUNCTION_ARGS)
+static inline void
+jsonb_unique_check_add_skipped(JsonbUniqueCheckContext *cxt)
+{
+ /*
+ * Make room for the skipped index plus one additional index
+ * (see jsonb_unique_check_remove_skipped_keys()).
+ */
+ if (cxt->skipped_keys_count + 1 >= cxt->skipped_keys_allocated)
+ {
+ if (cxt->skipped_keys_allocated)
+ {
+ cxt->skipped_keys_allocated *= 2;
+ cxt->skipped_keys = repalloc(cxt->skipped_keys,
+ sizeof(*cxt->skipped_keys) *
+ cxt->skipped_keys_allocated);
+ }
+ else
+ {
+ cxt->skipped_keys_allocated = 16;
+ cxt->skipped_keys = MemoryContextAlloc(cxt->mcxt,
+ sizeof(*cxt->skipped_keys) *
+ cxt->skipped_keys_allocated);
+ }
+ }
+
+ cxt->skipped_keys[cxt->skipped_keys_count++] = cxt->obj->val.object.nPairs;
+}
+
+/*
+ * Check uniqueness of the key that has just been appended to the object.
+ */
+static inline void
+jsonb_unique_check_key(JsonbUniqueCheckContext *cxt, bool skip)
+{
+ JsonbPair *pair = cxt->obj->val.object.pairs;
+ /* nPairs is incremented only after the value is appended */
+ JsonbPair *last = &pair[cxt->obj->val.object.nPairs];
+
+ for (; pair < last; pair++)
+ if (pair->key.val.string.len ==
+ last->key.val.string.len &&
+ !memcmp(pair->key.val.string.val,
+ last->key.val.string.val,
+ last->key.val.string.len))
+ ereport(ERROR,
+ (errcode(ERRCODE_DUPLICATE_JSON_OBJECT_KEY_VALUE),
+ errmsg("duplicate JSON key \"%.*s\"",
+ last->key.val.string.len,
+ last->key.val.string.val)));
+
+ if (skip)
+ {
+ /* save skipped key index */
+ jsonb_unique_check_add_skipped(cxt);
+
+ /* add dummy null value for the skipped key */
+ last->value.type = jbvNull;
+ cxt->obj->val.object.nPairs++;
+ }
+}
+
+/*
+ * Remove skipped key-value pairs from the resulting object.
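+ *
+ * The skipped pairs were appended with dummy null values only so that
+ * jsonb_unique_check_key() could still see their keys; here they are squeezed
+ * out of the pairs array before the object is finalized. For example, a call
+ * like jsonb_build_object_ext(true, true, 'a', NULL, 'a', 1) is still
+ * expected to report the duplicate key "a" even though the null-valued pair
+ * is dropped from the result.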
+ */ +static void +jsonb_unique_check_remove_skipped_keys(JsonbUniqueCheckContext *cxt) +{ + int *skipped_keys = cxt->skipped_keys; + int skipped_keys_count = cxt->skipped_keys_count; + + if (!skipped_keys_count) + return; + + if (cxt->obj->val.object.nPairs > skipped_keys_count) + { + /* remove skipped key-value pairs */ + JsonbPair *pairs = cxt->obj->val.object.pairs; + int i; + + /* save total pair count into the last element of skipped_keys */ + Assert(cxt->skipped_keys_count < cxt->skipped_keys_allocated); + cxt->skipped_keys[cxt->skipped_keys_count] = cxt->obj->val.object.nPairs; + + for (i = 0; i < skipped_keys_count; i++) + { + int skipped_key = skipped_keys[i]; + int nkeys = skipped_keys[i + 1] - skipped_key - 1; + + memmove(&pairs[skipped_key - i], + &pairs[skipped_key + 1], + sizeof(JsonbPair) * nkeys); + } + } + + cxt->obj->val.object.nPairs -= skipped_keys_count; +} + +static Datum +jsonb_build_object_worker(FunctionCallInfo fcinfo, int first_vararg, + bool absent_on_null, bool unique_keys) { int nargs; int i; @@ -1178,9 +1299,11 @@ jsonb_build_object(PG_FUNCTION_ARGS) Datum *args; bool *nulls; Oid *types; + JsonbUniqueCheckContext unique_check; /* build argument values to build the object */ - nargs = extract_variadic_args(fcinfo, 0, true, &args, &types, &nulls); + nargs = extract_variadic_args(fcinfo, first_vararg, true, + &args, &types, &nulls); if (nargs < 0) PG_RETURN_NULL(); @@ -1195,25 +1318,68 @@ jsonb_build_object(PG_FUNCTION_ARGS) result.res = pushJsonbValue(&result.parseState, WJB_BEGIN_OBJECT, NULL); + /* if (unique_keys) */ + jsonb_unique_check_init(&unique_check, result.res, CurrentMemoryContext); + for (i = 0; i < nargs; i += 2) { /* process key */ + bool skip; + if (nulls[i]) ereport(ERROR, (errcode(ERRCODE_INVALID_PARAMETER_VALUE), - errmsg("argument %d: key must not be null", i + 1))); + errmsg("argument %d: key must not be null", + first_vararg + i + 1))); + + /* skip null values if absent_on_null */ + skip = absent_on_null && nulls[i + 1]; + + /* we need to save skipped keys for the key uniqueness check */ + if (skip && !unique_keys) + continue; add_jsonb(args[i], false, &result, types[i], true); + if (unique_keys) + { + jsonb_unique_check_key(&unique_check, skip); + + if (skip) + continue; /* do not process the value if the key is skipped */ + } + /* process value */ add_jsonb(args[i + 1], nulls[i + 1], &result, types[i + 1], false); } + if (unique_keys && absent_on_null) + jsonb_unique_check_remove_skipped_keys(&unique_check); + result.res = pushJsonbValue(&result.parseState, WJB_END_OBJECT, NULL); PG_RETURN_POINTER(JsonbValueToJsonb(result.res)); } +/* + * SQL function jsonb_build_object(variadic "any") + */ +Datum +jsonb_build_object(PG_FUNCTION_ARGS) +{ + return jsonb_build_object_worker(fcinfo, 0, false, false); +} + +/* + * SQL function jsonb_build_object_ext(absent_on_null bool, unique bool, variadic "any") + */ +Datum +jsonb_build_object_ext(PG_FUNCTION_ARGS) +{ + return jsonb_build_object_worker(fcinfo, 2, + PG_GETARG_BOOL(0), PG_GETARG_BOOL(1)); +} + /* * degenerate case of jsonb_build_object where it gets 0 arguments. 
*/ @@ -1230,11 +1396,9 @@ jsonb_build_object_noargs(PG_FUNCTION_ARGS) PG_RETURN_POINTER(JsonbValueToJsonb(result.res)); } -/* - * SQL function jsonb_build_array(variadic "any") - */ -Datum -jsonb_build_array(PG_FUNCTION_ARGS) +static Datum +jsonb_build_array_worker(FunctionCallInfo fcinfo, int first_vararg, + bool absent_on_null) { int nargs; int i; @@ -1244,7 +1408,8 @@ jsonb_build_array(PG_FUNCTION_ARGS) Oid *types; /* build argument values to build the array */ - nargs = extract_variadic_args(fcinfo, 0, true, &args, &types, &nulls); + nargs = extract_variadic_args(fcinfo, first_vararg, true, + &args, &types, &nulls); if (nargs < 0) PG_RETURN_NULL(); @@ -1254,13 +1419,36 @@ jsonb_build_array(PG_FUNCTION_ARGS) result.res = pushJsonbValue(&result.parseState, WJB_BEGIN_ARRAY, NULL); for (i = 0; i < nargs; i++) + { + if (absent_on_null && nulls[i]) + continue; + add_jsonb(args[i], nulls[i], &result, types[i], false); + } result.res = pushJsonbValue(&result.parseState, WJB_END_ARRAY, NULL); PG_RETURN_POINTER(JsonbValueToJsonb(result.res)); } +/* + * SQL function jsonb_build_array(variadic "any") + */ +Datum +jsonb_build_array(PG_FUNCTION_ARGS) +{ + return jsonb_build_array_worker(fcinfo, 0, false); +} + +/* + * SQL function jsonb_build_array_ext(absent_on_null bool, variadic "any") + */ +Datum +jsonb_build_array_ext(PG_FUNCTION_ARGS) +{ + return jsonb_build_array_worker(fcinfo, 1, PG_GETARG_BOOL(0)); +} + /* * degenerate case of jsonb_build_array where it gets 0 arguments. */ @@ -1512,12 +1700,8 @@ clone_parse_state(JsonbParseState *state) return result; } - -/* - * jsonb_agg aggregate function - */ -Datum -jsonb_agg_transfn(PG_FUNCTION_ARGS) +static Datum +jsonb_agg_transfn_worker(FunctionCallInfo fcinfo, bool absent_on_null) { MemoryContext oldcontext, aggcontext; @@ -1565,6 +1749,9 @@ jsonb_agg_transfn(PG_FUNCTION_ARGS) result = state->res; } + if (absent_on_null && PG_ARGISNULL(1)) + PG_RETURN_POINTER(state); + /* turn the argument into jsonb in the normal function context */ val = PG_ARGISNULL(1) ? 
(Datum) 0 : PG_GETARG_DATUM(1); @@ -1634,6 +1821,24 @@ jsonb_agg_transfn(PG_FUNCTION_ARGS) PG_RETURN_POINTER(state); } +/* + * jsonb_agg aggregate function + */ +Datum +jsonb_agg_transfn(PG_FUNCTION_ARGS) +{ + return jsonb_agg_transfn_worker(fcinfo, false); +} + +/* + * jsonb_agg_strict aggregate function + */ +Datum +jsonb_agg_strict_transfn(PG_FUNCTION_ARGS) +{ + return jsonb_agg_transfn_worker(fcinfo, true); +} + Datum jsonb_agg_finalfn(PG_FUNCTION_ARGS) { @@ -1666,11 +1871,9 @@ jsonb_agg_finalfn(PG_FUNCTION_ARGS) PG_RETURN_POINTER(out); } -/* - * jsonb_object_agg aggregate function - */ -Datum -jsonb_object_agg_transfn(PG_FUNCTION_ARGS) +static Datum +jsonb_object_agg_transfn_worker(FunctionCallInfo fcinfo, + bool absent_on_null, bool unique_keys) { MemoryContext oldcontext, aggcontext; @@ -1684,6 +1887,7 @@ jsonb_object_agg_transfn(PG_FUNCTION_ARGS) *jbval; JsonbValue v; JsonbIteratorToken type; + bool skip; if (!AggCheckCallContext(fcinfo, &aggcontext)) { @@ -1703,6 +1907,11 @@ jsonb_object_agg_transfn(PG_FUNCTION_ARGS) state->res = result; result->res = pushJsonbValue(&result->parseState, WJB_BEGIN_OBJECT, NULL); + if (unique_keys) + jsonb_unique_check_init(&state->unique_check, result->res, + aggcontext); + else + memset(&state->unique_check, 0, sizeof(state->unique_check)); MemoryContextSwitchTo(oldcontext); arg_type = get_fn_expr_argtype(fcinfo->flinfo, 1); @@ -1738,6 +1947,15 @@ jsonb_object_agg_transfn(PG_FUNCTION_ARGS) (errcode(ERRCODE_INVALID_PARAMETER_VALUE), errmsg("field name must not be null"))); + /* + * Skip null values if absent_on_null unless key uniqueness check is + * needed (because we must save keys in this case). + */ + skip = absent_on_null && PG_ARGISNULL(2); + + if (skip && !unique_keys) + PG_RETURN_POINTER(state); + val = PG_GETARG_DATUM(1); memset(&elem, 0, sizeof(JsonbInState)); @@ -1793,6 +2011,18 @@ jsonb_object_agg_transfn(PG_FUNCTION_ARGS) } result->res = pushJsonbValue(&result->parseState, WJB_KEY, &v); + + if (unique_keys) + { + jsonb_unique_check_key(&state->unique_check, skip); + + if (skip) + { + MemoryContextSwitchTo(oldcontext); + PG_RETURN_POINTER(state); + } + } + break; case WJB_END_ARRAY: break; @@ -1865,6 +2095,26 @@ jsonb_object_agg_transfn(PG_FUNCTION_ARGS) PG_RETURN_POINTER(state); } +/* + * jsonb_object_agg aggregate function + */ +Datum +jsonb_object_agg_transfn(PG_FUNCTION_ARGS) +{ + return jsonb_object_agg_transfn_worker(fcinfo, false, false); +} + +/* + * jsonb_objectagg aggregate function + */ +Datum +jsonb_objectagg_transfn(PG_FUNCTION_ARGS) +{ + return jsonb_object_agg_transfn_worker(fcinfo, + PG_GETARG_BOOL(3), + PG_GETARG_BOOL(4)); +} + Datum jsonb_object_agg_finalfn(PG_FUNCTION_ARGS) { @@ -1897,3 +2147,115 @@ jsonb_object_agg_finalfn(PG_FUNCTION_ARGS) PG_RETURN_POINTER(out); } + +/* + * jsonb_is_valid -- check jsonb value type + */ +Datum +jsonb_is_valid(PG_FUNCTION_ARGS) +{ + Jsonb *jb = PG_GETARG_JSONB_P(0); + text *type = PG_GETARG_TEXT_P(1); + + if (PG_ARGISNULL(0)) + PG_RETURN_NULL(); + + if (!PG_ARGISNULL(1) && + strncmp("any", VARDATA(type), VARSIZE_ANY_EXHDR(type))) + { + if (!strncmp("object", VARDATA(type), VARSIZE_ANY_EXHDR(type))) + { + if (!JB_ROOT_IS_OBJECT(jb)) + PG_RETURN_BOOL(false); + } + else if (!strncmp("array", VARDATA(type), VARSIZE_ANY_EXHDR(type))) + { + if (!JB_ROOT_IS_ARRAY(jb) || JB_ROOT_IS_SCALAR(jb)) + PG_RETURN_BOOL(false); + } + else + { + if (!JB_ROOT_IS_ARRAY(jb) || !JB_ROOT_IS_SCALAR(jb)) + PG_RETURN_BOOL(false); + } + } + + PG_RETURN_BOOL(true); +} + +JsonbValue * 
+JsonbExtractScalar(JsonbContainer *jbc, JsonbValue *res) +{ + JsonbIterator *it = JsonbIteratorInit(jbc); + JsonbIteratorToken tok PG_USED_FOR_ASSERTS_ONLY; + JsonbValue tmp; + + tok = JsonbIteratorNext(&it, &tmp, true); + Assert(tok == WJB_BEGIN_ARRAY); + Assert(tmp.val.array.nElems == 1 && tmp.val.array.rawScalar); + + tok = JsonbIteratorNext(&it, res, true); + Assert (tok == WJB_ELEM); + Assert(IsAJsonbScalar(res)); + + tok = JsonbIteratorNext(&it, &tmp, true); + Assert (tok == WJB_END_ARRAY); + + return res; +} + +Jsonb * +JsonbMakeEmptyArray(void) +{ + JsonbValue jbv; + + jbv.type = jbvArray; + jbv.val.array.elems = NULL; + jbv.val.array.nElems = 0; + jbv.val.array.rawScalar = false; + + return JsonbValueToJsonb(&jbv); +} + +Jsonb * +JsonbMakeEmptyObject(void) +{ + JsonbValue jbv; + + jbv.type = jbvObject; + jbv.val.object.pairs = NULL; + jbv.val.object.nPairs = 0; + + return JsonbValueToJsonb(&jbv); +} + +/* + * Convert jsonb to a C-string stripping quotes from scalar strings. + */ +char * +JsonbUnquote(Jsonb *jb) +{ + if (JB_ROOT_IS_SCALAR(jb)) + { + JsonbValue v; + + JsonbExtractScalar(&jb->root, &v); + + if (v.type == jbvString) + return pnstrdup(v.val.string.val, v.val.string.len); + else if (v.type == jbvBool) + return pstrdup(v.val.boolean ? "true" : "false"); + else if (v.type == jbvNumeric) + return DatumGetCString(DirectFunctionCall1(numeric_out, + PointerGetDatum(v.val.numeric))); + else if (v.type == jbvNull) + return pstrdup("null"); + else + { + elog(ERROR, "unrecognized jsonb value type %d", v.type); + return NULL; + } + } + else + return JsonbToCString(NULL, &jb->root, VARSIZE(jb)); +} diff --git a/src/backend/utils/adt/jsonb_gin.c b/src/backend/utils/adt/jsonb_gin.c index 4e1ba10e9c..5484451bbe 100644 --- a/src/backend/utils/adt/jsonb_gin.c +++ b/src/backend/utils/adt/jsonb_gin.c @@ -13,6 +13,7 @@ */ #include "postgres.h" +#include "miscadmin.h" #include "access/gin.h" #include "access/hash.h" #include "access/stratnum.h" @@ -20,6 +21,7 @@ #include "catalog/pg_type.h" #include "utils/builtins.h" #include "utils/jsonb.h" +#include "utils/jsonpath.h" #include "utils/varlena.h" typedef struct PathHashStack @@ -28,9 +30,42 @@ typedef struct PathHashStack struct PathHashStack *parent; } PathHashStack; +typedef enum { eOr, eAnd, eEntry } JsonPathNodeType; + +typedef struct JsonPathNode +{ + JsonPathNodeType type; + union + { + int nargs; + int entry; + } val; + struct JsonPathNode *args[FLEXIBLE_ARRAY_MEMBER]; +} JsonPathNode; + +typedef struct JsonPathExtractionContext +{ + Datum *entries; + int32 nentries; + int32 nallocated; + void *(*addKey)(void *path, char *key, int len); + bool pathOps; + bool lax; +} JsonPathExtractionContext; + +typedef struct JsonPathContext +{ + void *path; + JsonPathItemType last; +} JsonPathContext; + static Datum make_text_key(char flag, const char *str, int len); static Datum make_scalar_key(const JsonbValue *scalarVal, bool is_key); +static JsonPathNode *gin_extract_jsonpath_expr_recursive( + JsonPathExtractionContext *cxt, JsonPathItem *jsp, bool not, + JsonPathContext path); + /* * * jsonb_ops GIN opclass support functions @@ -119,6 +154,413 @@ gin_extract_jsonb(PG_FUNCTION_ARGS) PG_RETURN_POINTER(entries); } +static bool +gin_extract_jsonpath_path(JsonPathExtractionContext *cxt, JsonPathItem *jsp, + JsonPathContext *pathcxt, List **filters) +{ + JsonPathItem next; + + for (;;) + { + if (jsp->type != jpiFilter && jsp->type != jpiCurrent) + pathcxt->last = jsp->type; + + switch (jsp->type) + { + case jpiRoot: + pathcxt->path = NULL; + 
break;
+
+ case jpiCurrent:
+ break;
+
+ case jpiKey:
+ pathcxt->path = cxt->addKey(pathcxt->path,
+ jsp->content.value.data,
+ jsp->content.value.datalen);
+ break;
+
+ case jpiIndexArray:
+ case jpiAnyArray:
+ break;
+
+ case jpiAny:
+ case jpiAnyKey:
+ if (cxt->pathOps)
+ /* jsonb_path_ops doesn't support wildcard paths */
+ return false;
+ break;
+
+ case jpiFilter:
+ {
+ JsonPathItem arg;
+ JsonPathNode *filter;
+
+ jspGetArg(jsp, &arg);
+
+ filter = gin_extract_jsonpath_expr_recursive(cxt, &arg, false, *pathcxt);
+
+ if (filter)
+ *filters = lappend(*filters, filter);
+
+ break;
+ }
+
+ default:
+ return false;
+ }
+
+ if (!jspGetNext(jsp, &next))
+ break;
+
+ jsp = &next;
+ }
+
+ return true;
+}
+
+static inline JsonPathNode *
+gin_jsonpath_make_entry_node(JsonPathExtractionContext *cxt, Datum entry)
+{
+ JsonPathNode *node = palloc(offsetof(JsonPathNode, args));
+
+ if (cxt->nentries >= cxt->nallocated)
+ {
+ if (cxt->entries)
+ {
+ cxt->nallocated *= 2;
+ cxt->entries = repalloc(cxt->entries,
+ sizeof(cxt->entries[0]) * cxt->nallocated);
+ }
+ else
+ {
+ cxt->nallocated = 8;
+ cxt->entries = palloc(sizeof(cxt->entries[0]) * cxt->nallocated);
+ }
+ }
+
+ node->type = eEntry;
+ node->val.entry = cxt->nentries;
+
+ cxt->entries[cxt->nentries++] = entry;
+
+ return node;
+}
+
+static inline JsonPathNode *
+gin_jsonpath_make_expr_node(JsonPathNodeType type, int nargs)
+{
+ JsonPathNode *node = palloc(offsetof(JsonPathNode, args) +
+ sizeof(node->args[0]) * nargs);
+
+ node->type = type;
+ node->val.nargs = nargs;
+
+ return node;
+}
+
+static inline JsonPathNode *
+gin_jsonpath_make_expr_node_from_list(JsonPathNodeType type, List *args)
+{
+ JsonPathNode *node = gin_jsonpath_make_expr_node(type, list_length(args));
+ ListCell *lc;
+ int i = 0;
+
+ foreach(lc, args)
+ node->args[i++] = lfirst(lc);
+
+ return node;
+}
+
+static JsonPathNode *
+gin_extract_jsonpath_node(JsonPathExtractionContext *cxt, JsonPathItem *jsp,
+ JsonPathContext pathcxt, JsonbValue *scalar)
+{
+ JsonPathNode *node;
+ List *filters = NIL;
+ ListCell *lc;
+
+ if (!gin_extract_jsonpath_path(cxt, jsp, &pathcxt, &filters))
+ return NULL;
+
+ if (cxt->pathOps)
+ {
+ if (scalar)
+ {
+ uint32 hash = (uint32)(uintptr_t) pathcxt.path;
+
+ JsonbHashScalarValue(scalar, &hash);
+ node = gin_jsonpath_make_entry_node(cxt, UInt32GetDatum(hash));
+ }
+ else
+ node = NULL; /* jsonb_path_ops doesn't support EXISTS queries */
+ }
+ else
+ {
+ List *entries = pathcxt.path;
+ List *nodes = NIL;
+
+ if (scalar)
+ {
+ GinTernaryValue lastIsArrayAccessor =
+ pathcxt.last == jpiIndexArray ||
+ pathcxt.last == jpiAnyArray ? GIN_TRUE :
+ pathcxt.last == jpiAny ?
GIN_MAYBE : GIN_FALSE; + + if (lastIsArrayAccessor == GIN_MAYBE || + (lastIsArrayAccessor == GIN_TRUE && cxt->lax)) + { + node = gin_jsonpath_make_expr_node(eOr, 2); + node->args[0] = gin_jsonpath_make_entry_node(cxt, + make_scalar_key(scalar, true)); + node->args[1] = gin_jsonpath_make_entry_node(cxt, + make_scalar_key(scalar, false)); + } + else + { + Datum entry = make_scalar_key(scalar, lastIsArrayAccessor); + + node = gin_jsonpath_make_entry_node(cxt, entry); + } + + nodes = lappend(nodes, node); + } + + foreach(lc, entries) + nodes = lappend(nodes, gin_jsonpath_make_entry_node(cxt, + PointerGetDatum(lfirst(lc)))); + + if (list_length(nodes) > 0) + node = gin_jsonpath_make_expr_node_from_list(eAnd, nodes); + else + node = NULL; /* need full scan for EXISTS($) queries */ + } + + if (list_length(filters) <= 0) + return node; + + if (node) + filters = lcons(node, filters); + + return gin_jsonpath_make_expr_node_from_list(eAnd, filters); +} + +static JsonPathNode * +gin_extract_jsonpath_expr_recursive(JsonPathExtractionContext *cxt, + JsonPathItem *jsp, bool not, + JsonPathContext path) +{ + check_stack_depth(); + + switch (jsp->type) + { + case jpiAnd: + case jpiOr: + { + JsonPathItem arg; + JsonPathNode *larg; + JsonPathNode *rarg; + JsonPathNode *node; + JsonPathNodeType type; + + jspGetLeftArg(jsp, &arg); + larg = gin_extract_jsonpath_expr_recursive(cxt, &arg, not, path); + + jspGetRightArg(jsp, &arg); + rarg = gin_extract_jsonpath_expr_recursive(cxt, &arg, not, path); + + if (!larg || !rarg) + { + if (jsp->type == jpiOr) + return NULL; + return larg ? larg : rarg; + } + + type = not ^ (jsp->type == jpiAnd) ? eAnd : eOr; + node = gin_jsonpath_make_expr_node(type, 2); + node->args[0] = larg; + node->args[1] = rarg; + + return node; + } + + case jpiNot: + { + JsonPathItem arg; + + jspGetArg(jsp, &arg); + + return gin_extract_jsonpath_expr_recursive(cxt, &arg, !not, path); + } + + case jpiExists: + { + JsonPathItem arg; + + if (not) + return false; + + jspGetArg(jsp, &arg); + + return gin_extract_jsonpath_node(cxt, &arg, path, NULL); + } + + case jpiEqual: + { + JsonPathItem leftItem; + JsonPathItem rightItem; + JsonPathItem *pathItem; + JsonPathItem *scalarItem; + JsonbValue scalar; + + if (not) + return NULL; + + jspGetLeftArg(jsp, &leftItem); + jspGetRightArg(jsp, &rightItem); + + if (jspIsScalar(leftItem.type)) + { + scalarItem = &leftItem; + pathItem = &rightItem; + } + else if (jspIsScalar(rightItem.type)) + { + scalarItem = &rightItem; + pathItem = &leftItem; + } + else + return NULL; + + switch (scalarItem->type) + { + case jpiNull: + scalar.type = jbvNull; + break; + case jpiBool: + scalar.type = jbvBool; + scalar.val.boolean = !!*scalarItem->content.value.data; + break; + case jpiNumeric: + scalar.type = jbvNumeric; + scalar.val.numeric = + (Numeric) scalarItem->content.value.data; + break; + case jpiString: + scalar.type = jbvString; + scalar.val.string.val = scalarItem->content.value.data; + scalar.val.string.len = scalarItem->content.value.datalen; + break; + default: + elog(ERROR, "invalid scalar jsonpath item type: %d", + scalarItem->type); + return NULL; + } + + return gin_extract_jsonpath_node(cxt, pathItem, path, &scalar); + } + + default: + return NULL; + } +} + +static void * +gin_jsonb_ops_add_key(void *path, char *key, int len) +{ + return lappend((List *) path, DatumGetPointer( + make_text_key(JGINFLAG_KEY, key, len))); +} + +static void * +gin_jsonb_path_ops_add_key(void *path, char *key, int len) +{ + JsonbValue jbv; + uint32 hash = (uint32)(uintptr_t) path; + + 
jbv.type = jbvString; + jbv.val.string.val = key; + jbv.val.string.len = len; + + JsonbHashScalarValue(&jbv, &hash); + + return (void *)(uintptr_t) hash; +} + +static Datum * +gin_extract_jsonpath_query(JsonPath *jp, StrategyNumber strat, bool pathOps, + int32 *nentries, Pointer **extra_data) +{ + JsonPathExtractionContext cxt = { 0 }; + JsonPathItem root; + JsonPathNode *node; + JsonPathContext path = { NULL, GIN_FALSE }; + + cxt.addKey = pathOps ? gin_jsonb_path_ops_add_key : gin_jsonb_ops_add_key; + cxt.pathOps = pathOps; + cxt.lax = (jp->header & JSONPATH_LAX) != 0; + + jspInit(&root, jp); + + node = strat == JsonbJsonpathExistsStrategyNumber + ? gin_extract_jsonpath_node(&cxt, &root, path, NULL) + : gin_extract_jsonpath_expr_recursive(&cxt, &root, false, path); + + if (!node) + { + *nentries = 0; + return NULL; + } + + *nentries = cxt.nentries; + *extra_data = palloc(sizeof(**extra_data) * cxt.nentries); + **extra_data = (Pointer) node; + + return cxt.entries; +} + +static GinTernaryValue +gin_execute_jsonpath(JsonPathNode *node, GinTernaryValue *check) +{ + GinTernaryValue res; + GinTernaryValue v; + int i; + + switch (node->type) + { + case eAnd: + res = GIN_TRUE; + for (i = 0; i < node->val.nargs; i++) + { + v = gin_execute_jsonpath(node->args[i], check); + if (v == GIN_FALSE) + return GIN_FALSE; + else if (v == GIN_MAYBE) + res = GIN_MAYBE; + } + return res; + + case eOr: + res = GIN_FALSE; + for (i = 0; i < node->val.nargs; i++) + { + v = gin_execute_jsonpath(node->args[i], check); + if (v == GIN_TRUE) + return GIN_TRUE; + else if (v == GIN_MAYBE) + res = GIN_MAYBE; + } + return res; + + case eEntry: + return check[node->val.entry] ? GIN_MAYBE : GIN_FALSE; + + default: + elog(ERROR, "invalid jsonpath gin node type: %d", node->type); + return GIN_FALSE; + } +} + Datum gin_extract_jsonb_query(PG_FUNCTION_ARGS) { @@ -181,6 +623,18 @@ gin_extract_jsonb_query(PG_FUNCTION_ARGS) if (j == 0 && strategy == JsonbExistsAllStrategyNumber) *searchMode = GIN_SEARCH_MODE_ALL; } + else if (strategy == JsonbJsonpathPredicateStrategyNumber || + strategy == JsonbJsonpathExistsStrategyNumber) + { + JsonPath *jp = PG_GETARG_JSONPATH_P(0); + Pointer **extra_data = (Pointer **) PG_GETARG_POINTER(4); + + entries = gin_extract_jsonpath_query(jp, strategy, false, nentries, + extra_data); + + if (!entries) + *searchMode = GIN_SEARCH_MODE_ALL; + } else { elog(ERROR, "unrecognized strategy number: %d", strategy); @@ -199,7 +653,7 @@ gin_consistent_jsonb(PG_FUNCTION_ARGS) /* Jsonb *query = PG_GETARG_JSONB_P(2); */ int32 nkeys = PG_GETARG_INT32(3); - /* Pointer *extra_data = (Pointer *) PG_GETARG_POINTER(4); */ + Pointer *extra_data = (Pointer *) PG_GETARG_POINTER(4); bool *recheck = (bool *) PG_GETARG_POINTER(5); bool res = true; int32 i; @@ -256,6 +710,13 @@ gin_consistent_jsonb(PG_FUNCTION_ARGS) } } } + else if (strategy == JsonbJsonpathPredicateStrategyNumber || + strategy == JsonbJsonpathExistsStrategyNumber) + { + *recheck = true; + res = nkeys <= 0 || + gin_execute_jsonpath((JsonPathNode *) extra_data[0], check) != GIN_FALSE; + } else elog(ERROR, "unrecognized strategy number: %d", strategy); @@ -270,8 +731,7 @@ gin_triconsistent_jsonb(PG_FUNCTION_ARGS) /* Jsonb *query = PG_GETARG_JSONB_P(2); */ int32 nkeys = PG_GETARG_INT32(3); - - /* Pointer *extra_data = (Pointer *) PG_GETARG_POINTER(4); */ + Pointer *extra_data = (Pointer *) PG_GETARG_POINTER(4); GinTernaryValue res = GIN_MAYBE; int32 i; @@ -308,6 +768,12 @@ gin_triconsistent_jsonb(PG_FUNCTION_ARGS) } } } + else if (strategy == 
JsonbJsonpathPredicateStrategyNumber || + strategy == JsonbJsonpathExistsStrategyNumber) + { + res = nkeys <= 0 ? GIN_MAYBE : + gin_execute_jsonpath((JsonPathNode *) extra_data[0], check); + } else elog(ERROR, "unrecognized strategy number: %d", strategy); @@ -432,18 +898,35 @@ gin_extract_jsonb_query_path(PG_FUNCTION_ARGS) int32 *searchMode = (int32 *) PG_GETARG_POINTER(6); Datum *entries; - if (strategy != JsonbContainsStrategyNumber) - elog(ERROR, "unrecognized strategy number: %d", strategy); + if (strategy == JsonbContainsStrategyNumber) + { + /* Query is a jsonb, so just apply gin_extract_jsonb_path ... */ + entries = (Datum *) + DatumGetPointer(DirectFunctionCall2(gin_extract_jsonb_path, + PG_GETARG_DATUM(0), + PointerGetDatum(nentries))); - /* Query is a jsonb, so just apply gin_extract_jsonb_path ... */ - entries = (Datum *) - DatumGetPointer(DirectFunctionCall2(gin_extract_jsonb_path, - PG_GETARG_DATUM(0), - PointerGetDatum(nentries))); + /* ... although "contains {}" requires a full index scan */ + if (*nentries == 0) + *searchMode = GIN_SEARCH_MODE_ALL; + } + else if (strategy == JsonbJsonpathPredicateStrategyNumber || + strategy == JsonbJsonpathExistsStrategyNumber) + { + JsonPath *jp = PG_GETARG_JSONPATH_P(0); + Pointer **extra_data = (Pointer **) PG_GETARG_POINTER(4); - /* ... although "contains {}" requires a full index scan */ - if (*nentries == 0) - *searchMode = GIN_SEARCH_MODE_ALL; + entries = gin_extract_jsonpath_query(jp, strategy, true, nentries, + extra_data); + + if (!entries) + *searchMode = GIN_SEARCH_MODE_ALL; + } + else + { + elog(ERROR, "unrecognized strategy number: %d", strategy); + entries = NULL; + } PG_RETURN_POINTER(entries); } @@ -456,32 +939,40 @@ gin_consistent_jsonb_path(PG_FUNCTION_ARGS) /* Jsonb *query = PG_GETARG_JSONB_P(2); */ int32 nkeys = PG_GETARG_INT32(3); - - /* Pointer *extra_data = (Pointer *) PG_GETARG_POINTER(4); */ + Pointer *extra_data = (Pointer *) PG_GETARG_POINTER(4); bool *recheck = (bool *) PG_GETARG_POINTER(5); bool res = true; int32 i; - if (strategy != JsonbContainsStrategyNumber) - elog(ERROR, "unrecognized strategy number: %d", strategy); - - /* - * jsonb_path_ops is necessarily lossy, not only because of hash - * collisions but also because it doesn't preserve complete information - * about the structure of the JSON object. Besides, there are some - * special rules around the containment of raw scalars in arrays that are - * not handled here. So we must always recheck a match. However, if not - * all of the keys are present, the tuple certainly doesn't match. - */ - *recheck = true; - for (i = 0; i < nkeys; i++) + if (strategy == JsonbContainsStrategyNumber) { - if (!check[i]) + /* + * jsonb_path_ops is necessarily lossy, not only because of hash + * collisions but also because it doesn't preserve complete information + * about the structure of the JSON object. Besides, there are some + * special rules around the containment of raw scalars in arrays that are + * not handled here. So we must always recheck a match. However, if not + * all of the keys are present, the tuple certainly doesn't match. 
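+ *
+ * The jsonpath strategies handled below are lossy for the same reasons
+ * (hashed path entries, no structural information), so that branch also
+ * forces a recheck.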
+ */ + *recheck = true; + for (i = 0; i < nkeys; i++) { - res = false; - break; + if (!check[i]) + { + res = false; + break; + } } } + else if (strategy == JsonbJsonpathPredicateStrategyNumber || + strategy == JsonbJsonpathExistsStrategyNumber) + { + *recheck = true; + res = nkeys <= 0 || + gin_execute_jsonpath((JsonPathNode *) extra_data[0], check); + } + else + elog(ERROR, "unrecognized strategy number: %d", strategy); PG_RETURN_BOOL(res); } @@ -494,27 +985,34 @@ gin_triconsistent_jsonb_path(PG_FUNCTION_ARGS) /* Jsonb *query = PG_GETARG_JSONB_P(2); */ int32 nkeys = PG_GETARG_INT32(3); - - /* Pointer *extra_data = (Pointer *) PG_GETARG_POINTER(4); */ + Pointer *extra_data = (Pointer *) PG_GETARG_POINTER(4); GinTernaryValue res = GIN_MAYBE; int32 i; - if (strategy != JsonbContainsStrategyNumber) - elog(ERROR, "unrecognized strategy number: %d", strategy); - - /* - * Note that we never return GIN_TRUE, only GIN_MAYBE or GIN_FALSE; this - * corresponds to always forcing recheck in the regular consistent - * function, for the reasons listed there. - */ - for (i = 0; i < nkeys; i++) + if (strategy == JsonbContainsStrategyNumber) { - if (check[i] == GIN_FALSE) + /* + * Note that we never return GIN_TRUE, only GIN_MAYBE or GIN_FALSE; this + * corresponds to always forcing recheck in the regular consistent + * function, for the reasons listed there. + */ + for (i = 0; i < nkeys; i++) { - res = GIN_FALSE; - break; + if (check[i] == GIN_FALSE) + { + res = GIN_FALSE; + break; + } } } + else if (strategy == JsonbJsonpathPredicateStrategyNumber || + strategy == JsonbJsonpathExistsStrategyNumber) + { + res = nkeys <= 0 ? GIN_MAYBE : + gin_execute_jsonpath((JsonPathNode *) extra_data[0], check); + } + else + elog(ERROR, "unrecognized strategy number: %d", strategy); PG_RETURN_GIN_TERNARY_VALUE(res); } diff --git a/src/backend/utils/adt/jsonb_util.c b/src/backend/utils/adt/jsonb_util.c index d425f32403..c120f2122d 100644 --- a/src/backend/utils/adt/jsonb_util.c +++ b/src/backend/utils/adt/jsonb_util.c @@ -15,6 +15,7 @@ #include "access/hash.h" #include "catalog/pg_collation.h" +#include "catalog/pg_type.h" #include "miscadmin.h" #include "utils/builtins.h" #include "utils/jsonb.h" @@ -36,7 +37,6 @@ static void fillJsonbValue(JsonbContainer *container, int index, char *base_addr, uint32 offset, JsonbValue *result); -static bool equalsJsonbScalarValue(JsonbValue *a, JsonbValue *b); static int compareJsonbScalarValue(JsonbValue *a, JsonbValue *b); static Jsonb *convertToJsonb(JsonbValue *val); static void convertJsonbValue(StringInfo buffer, JEntry *header, JsonbValue *val, int level); @@ -55,12 +55,8 @@ static JsonbParseState *pushState(JsonbParseState **pstate); static void appendKey(JsonbParseState *pstate, JsonbValue *scalarVal); static void appendValue(JsonbParseState *pstate, JsonbValue *scalarVal); static void appendElement(JsonbParseState *pstate, JsonbValue *scalarVal); -static int lengthCompareJsonbStringValue(const void *a, const void *b); static int lengthCompareJsonbPair(const void *a, const void *b, void *arg); static void uniqueifyJsonbObject(JsonbValue *object); -static JsonbValue *pushJsonbValueScalar(JsonbParseState **pstate, - JsonbIteratorToken seq, - JsonbValue *scalarVal); /* * Turn an in-memory JsonbValue into a Jsonb for on-disk storage. @@ -241,6 +237,7 @@ compareJsonbContainers(JsonbContainer *a, JsonbContainer *b) res = (va.val.object.nPairs > vb.val.object.nPairs) ? 
1 : -1; break; case jbvBinary: + case jbvDatetime: elog(ERROR, "unexpected jbvBinary value"); } } @@ -542,7 +539,7 @@ pushJsonbValue(JsonbParseState **pstate, JsonbIteratorToken seq, * Do the actual pushing, with only scalar or pseudo-scalar-array values * accepted. */ -static JsonbValue * +JsonbValue * pushJsonbValueScalar(JsonbParseState **pstate, JsonbIteratorToken seq, JsonbValue *scalarVal) { @@ -580,6 +577,7 @@ pushJsonbValueScalar(JsonbParseState **pstate, JsonbIteratorToken seq, (*pstate)->size = 4; (*pstate)->contVal.val.object.pairs = palloc(sizeof(JsonbPair) * (*pstate)->size); + (*pstate)->contVal.val.object.uniquified = true; break; case WJB_KEY: Assert(scalarVal->type == jbvString); @@ -822,6 +820,7 @@ JsonbIteratorNext(JsonbIterator **it, JsonbValue *val, bool skipNested) /* Set v to object on first object call */ val->type = jbvObject; val->val.object.nPairs = (*it)->nElems; + val->val.object.uniquified = true; /* * v->val.object.pairs is not actually set, because we aren't @@ -1295,7 +1294,7 @@ JsonbHashScalarValueExtended(const JsonbValue *scalarVal, uint64 *hash, /* * Are two scalar JsonbValues of the same type a and b equal? */ -static bool +bool equalsJsonbScalarValue(JsonbValue *aScalar, JsonbValue *bScalar) { if (aScalar->type == bScalar->type) @@ -1741,6 +1740,44 @@ convertJsonbScalar(StringInfo buffer, JEntry *jentry, JsonbValue *scalarVal) JENTRY_ISBOOL_TRUE : JENTRY_ISBOOL_FALSE; break; + case jbvDatetime: + { + char *str; + PGFunction typoutput; + int len; + + switch (scalarVal->val.datetime.typid) + { + case DATEOID: + typoutput = date_out; + break; + case TIMEOID: + typoutput = time_out; + break; + case TIMETZOID: + typoutput = timetz_out; + break; + case TIMESTAMPOID: + typoutput = timestamp_out; + break; + case TIMESTAMPTZOID: + typoutput = timestamptz_out; + break; + default: + elog(ERROR, "unknown jsonb value datetime type oid %d", + scalarVal->val.datetime.typid); + } + + str = DatumGetCString(DirectFunctionCall1(typoutput, + scalarVal->val.datetime.value)); + + len = strlen(str); + + appendToBuffer(buffer, str, len); + *jentry = JENTRY_ISSTRING | len; + } + break; + default: elog(ERROR, "invalid jsonb scalar type"); } @@ -1758,7 +1795,7 @@ convertJsonbScalar(StringInfo buffer, JEntry *jentry, JsonbValue *scalarVal) * a and b are first sorted based on their length. If a tie-breaker is * required, only then do we consider string binary equality. 
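+ *
+ * No longer static: the plain-json code in json.c (e.g.
+ * jsonFindLastKeyInObject()) now uses it for key comparisons as well.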
*/ -static int +int lengthCompareJsonbStringValue(const void *a, const void *b) { const JsonbValue *va = (const JsonbValue *) a; @@ -1822,6 +1859,9 @@ uniqueifyJsonbObject(JsonbValue *object) Assert(object->type == jbvObject); + if (!object->val.object.uniquified) + return; + if (object->val.object.nPairs > 1) qsort_arg(object->val.object.pairs, object->val.object.nPairs, sizeof(JsonbPair), lengthCompareJsonbPair, &hasNonUniq); diff --git a/src/backend/utils/adt/jsonfuncs.c b/src/backend/utils/adt/jsonfuncs.c index 242d8fe743..daccdf8242 100644 --- a/src/backend/utils/adt/jsonfuncs.c +++ b/src/backend/utils/adt/jsonfuncs.c @@ -3047,6 +3047,50 @@ populate_record_field(ColumnIOData *col, } } +/* recursively populate specified type from a json/jsonb value */ +Datum +json_populate_type(Datum json_val, Oid json_type, Oid typid, int32 typmod, + void **cache, MemoryContext mcxt, bool *isnull) +{ + JsValue jsv = { 0 }; + JsonbValue jbv; + + jsv.is_json = json_type == JSONOID; + + if (*isnull) + { + if (jsv.is_json) + jsv.val.json.str = NULL; + else + jsv.val.jsonb = NULL; + } + else if (jsv.is_json) + { + text *json = DatumGetTextPP(json_val); + + jsv.val.json.str = VARDATA_ANY(json); + jsv.val.json.len = VARSIZE_ANY_EXHDR(json); + jsv.val.json.type = JSON_TOKEN_INVALID; /* not used in populate_composite() */ + } + else + { + Jsonb *jsonb = DatumGetJsonbP(json_val); + + jsv.val.jsonb = &jbv; + + /* fill binary jsonb value pointing to jb */ + jbv.type = jbvBinary; + jbv.val.binary.data = &jsonb->root; + jbv.val.binary.len = VARSIZE(jsonb) - VARHDRSZ; + } + + if (!*cache) + *cache = MemoryContextAllocZero(mcxt, sizeof(ColumnIOData)); + + return populate_record_field(*cache , typid, typmod, NULL, mcxt, + PointerGetDatum(NULL), &jsv, isnull); +} + static RecordIOData * allocate_record_info(MemoryContext mcxt, int ncolumns) { diff --git a/src/backend/utils/adt/jsonpath.c b/src/backend/utils/adt/jsonpath.c new file mode 100644 index 0000000000..cfdb64975b --- /dev/null +++ b/src/backend/utils/adt/jsonpath.c @@ -0,0 +1,1067 @@ +/*------------------------------------------------------------------------- + * + * jsonpath.c + * + * Copyright (c) 2017, PostgreSQL Global Development Group + * + * IDENTIFICATION + * src/backend/utils/adt/jsonpath.c + * + *------------------------------------------------------------------------- + */ + +#include "postgres.h" +#include "funcapi.h" +#include "miscadmin.h" +#include "lib/stringinfo.h" +#include "utils/builtins.h" +#include "utils/json.h" +#include "utils/jsonpath.h" + +/*****************************INPUT/OUTPUT************************************/ + +/* + * Convert AST to flat jsonpath type representation + */ +static int +flattenJsonPathParseItem(StringInfo buf, JsonPathParseItem *item, + bool allowCurrent, bool insideArraySubscript) +{ + /* position from begining of jsonpath data */ + int32 pos = buf->len - JSONPATH_HDRSZ; + int32 chld, next; + bool allowCurrentInArg = false; + + check_stack_depth(); + + appendStringInfoChar(buf, (char)(item->type)); + alignStringInfoInt(buf); + + next = (item->next) ? 
buf->len : 0; + + /* + * actual value will be recorded later, after next and + * children processing + */ + appendBinaryStringInfo(buf, (char*)&next /* fake value */, sizeof(next)); + + switch(item->type) + { + case jpiString: + case jpiVariable: + case jpiKey: + appendBinaryStringInfo(buf, (char*)&item->value.string.len, + sizeof(item->value.string.len)); + appendBinaryStringInfo(buf, item->value.string.val, item->value.string.len); + appendStringInfoChar(buf, '\0'); + break; + case jpiNumeric: + appendBinaryStringInfo(buf, (char*)item->value.numeric, + VARSIZE(item->value.numeric)); + break; + case jpiBool: + appendBinaryStringInfo(buf, (char*)&item->value.boolean, + sizeof(item->value.boolean)); + break; + case jpiFold: + case jpiFoldl: + case jpiFoldr: + allowCurrentInArg = true; + /* fall through */ + case jpiAnd: + case jpiOr: + case jpiEqual: + case jpiNotEqual: + case jpiLess: + case jpiGreater: + case jpiLessOrEqual: + case jpiGreaterOrEqual: + case jpiAdd: + case jpiSub: + case jpiMul: + case jpiDiv: + case jpiMod: + case jpiStartsWith: + { + int32 left, right; + + left = buf->len; + + /* + * first, reserve place for left/right arg's positions, then + * record both args and sets actual position in reserved places + */ + appendBinaryStringInfo(buf, (char*)&left /* fake value */, sizeof(left)); + right = buf->len; + appendBinaryStringInfo(buf, (char*)&right /* fake value */, sizeof(right)); + + chld = flattenJsonPathParseItem(buf, item->value.args.left, + allowCurrent || + allowCurrentInArg, + insideArraySubscript); + *(int32*)(buf->data + left) = chld; + chld = flattenJsonPathParseItem(buf, item->value.args.right, + allowCurrent || + allowCurrentInArg, + insideArraySubscript); + *(int32*)(buf->data + right) = chld; + } + break; + case jpiLikeRegex: + { + int32 offs; + + appendBinaryStringInfo(buf, + (char *) &item->value.like_regex.flags, + sizeof(item->value.like_regex.flags)); + offs = buf->len; + appendBinaryStringInfo(buf, (char *) &offs /* fake value */, sizeof(offs)); + + appendBinaryStringInfo(buf, + (char *) &item->value.like_regex.patternlen, + sizeof(item->value.like_regex.patternlen)); + appendBinaryStringInfo(buf, item->value.like_regex.pattern, + item->value.like_regex.patternlen); + appendStringInfoChar(buf, '\0'); + + chld = flattenJsonPathParseItem(buf, item->value.like_regex.expr, + allowCurrent, + insideArraySubscript); + *(int32 *)(buf->data + offs) = chld; + } + break; + case jpiFilter: + case jpiMap: + case jpiReduce: + allowCurrentInArg = true; + /* fall through */ + case jpiIsUnknown: + case jpiNot: + case jpiPlus: + case jpiMinus: + case jpiExists: + case jpiDatetime: + case jpiArray: + { + int32 arg = item->value.arg ? 
buf->len : 0; + + appendBinaryStringInfo(buf, (char*)&arg /* fake value */, sizeof(arg)); + + if (!item->value.arg) + break; + + chld = flattenJsonPathParseItem(buf, item->value.arg, + allowCurrent || + allowCurrentInArg, + insideArraySubscript); + *(int32*)(buf->data + arg) = chld; + } + break; + case jpiNull: + break; + case jpiRoot: + break; + case jpiAnyArray: + case jpiAnyKey: + break; + case jpiCurrent: + if (!allowCurrent) + ereport(ERROR, + (errcode(ERRCODE_SYNTAX_ERROR), + errmsg("@ is not allowed in root expressions"))); + break; + case jpiLast: + if (!insideArraySubscript) + ereport(ERROR, + (errcode(ERRCODE_SYNTAX_ERROR), + errmsg("LAST is allowed only in array subscripts"))); + break; + case jpiIndexArray: + { + int32 nelems = item->value.array.nelems; + int offset; + int i; + + appendBinaryStringInfo(buf, (char *) &nelems, sizeof(nelems)); + + offset = buf->len; + + appendStringInfoSpaces(buf, sizeof(int32) * 2 * nelems); + + for (i = 0; i < nelems; i++) + { + int32 *ppos; + int32 topos; + int32 frompos = + flattenJsonPathParseItem(buf, + item->value.array.elems[i].from, + true, true); + + if (item->value.array.elems[i].to) + topos = flattenJsonPathParseItem(buf, + item->value.array.elems[i].to, + true, true); + else + topos = 0; + + ppos = (int32 *) &buf->data[offset + i * 2 * sizeof(int32)]; + + ppos[0] = frompos; + ppos[1] = topos; + } + } + break; + case jpiAny: + appendBinaryStringInfo(buf, + (char*)&item->value.anybounds.first, + sizeof(item->value.anybounds.first)); + appendBinaryStringInfo(buf, + (char*)&item->value.anybounds.last, + sizeof(item->value.anybounds.last)); + break; + case jpiType: + case jpiSize: + case jpiAbs: + case jpiFloor: + case jpiCeiling: + case jpiDouble: + case jpiKeyValue: + case jpiMin: + case jpiMax: + break; + case jpiSequence: + { + int32 nelems = list_length(item->value.sequence.elems); + ListCell *lc; + int offset; + + appendBinaryStringInfo(buf, (char *) &nelems, sizeof(nelems)); + + offset = buf->len; + + appendStringInfoSpaces(buf, sizeof(int32) * nelems); + + foreach(lc, item->value.sequence.elems) + { + int32 pos = + flattenJsonPathParseItem(buf, lfirst(lc), + allowCurrent, insideArraySubscript); + + *(int32 *) &buf->data[offset] = pos; + offset += sizeof(int32); + } + } + break; + case jpiObject: + { + int32 nfields = list_length(item->value.object.fields); + ListCell *lc; + int offset; + + appendBinaryStringInfo(buf, (char *) &nfields, sizeof(nfields)); + + offset = buf->len; + + appendStringInfoSpaces(buf, sizeof(int32) * 2 * nfields); + + foreach(lc, item->value.object.fields) + { + JsonPathParseItem *field = lfirst(lc); + int32 keypos = + flattenJsonPathParseItem(buf, field->value.args.left, + allowCurrent, + insideArraySubscript); + int32 valpos = + flattenJsonPathParseItem(buf, field->value.args.right, + allowCurrent, + insideArraySubscript); + int32 *ppos = (int32 *) &buf->data[offset]; + + ppos[0] = keypos; + ppos[1] = valpos; + + offset += 2 * sizeof(int32); + } + } + break; + default: + elog(ERROR, "Unknown jsonpath item type: %d", item->type); + } + + if (item->next) + *(int32*)(buf->data + next) = + flattenJsonPathParseItem(buf, item->next, allowCurrent, + insideArraySubscript); + + return pos; +} + +Datum +jsonpath_in(PG_FUNCTION_ARGS) +{ + char *in = PG_GETARG_CSTRING(0); + int32 len = strlen(in); + JsonPathParseResult *jsonpath = parsejsonpath(in, len); + JsonPath *res; + StringInfoData buf; + + initStringInfo(&buf); + enlargeStringInfo(&buf, 4 * len /* estimation */); + + appendStringInfoSpaces(&buf, 
JSONPATH_HDRSZ); + + if (!jsonpath) + ereport(ERROR, + (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION), + errmsg("invalid input syntax for jsonpath: \"%s\"", in))); + + flattenJsonPathParseItem(&buf, jsonpath->expr, false, false); + + res = (JsonPath*)buf.data; + SET_VARSIZE(res, buf.len); + res->header = JSONPATH_VERSION; + if (jsonpath->lax) + res->header |= JSONPATH_LAX; + + PG_RETURN_JSONPATH_P(res); +} + +static void +printOperation(StringInfo buf, JsonPathItemType type) +{ + switch(type) + { + case jpiAnd: + appendBinaryStringInfo(buf, " && ", 4); break; + case jpiOr: + appendBinaryStringInfo(buf, " || ", 4); break; + case jpiEqual: + appendBinaryStringInfo(buf, " == ", 4); break; + case jpiNotEqual: + appendBinaryStringInfo(buf, " != ", 4); break; + case jpiLess: + appendBinaryStringInfo(buf, " < ", 3); break; + case jpiGreater: + appendBinaryStringInfo(buf, " > ", 3); break; + case jpiLessOrEqual: + appendBinaryStringInfo(buf, " <= ", 4); break; + case jpiGreaterOrEqual: + appendBinaryStringInfo(buf, " >= ", 4); break; + case jpiAdd: + appendBinaryStringInfo(buf, " + ", 3); break; + case jpiSub: + appendBinaryStringInfo(buf, " - ", 3); break; + case jpiMul: + appendBinaryStringInfo(buf, " * ", 3); break; + case jpiDiv: + appendBinaryStringInfo(buf, " / ", 3); break; + case jpiMod: + appendBinaryStringInfo(buf, " % ", 3); break; + case jpiStartsWith: + appendBinaryStringInfo(buf, " starts with ", 13); break; + default: + elog(ERROR, "Unknown jsonpath item type: %d", type); + } +} + +static int +operationPriority(JsonPathItemType op) +{ + switch (op) + { + case jpiSequence: + return -1; + case jpiOr: + return 0; + case jpiAnd: + return 1; + case jpiEqual: + case jpiNotEqual: + case jpiLess: + case jpiGreater: + case jpiLessOrEqual: + case jpiGreaterOrEqual: + case jpiStartsWith: + return 2; + case jpiAdd: + case jpiSub: + return 3; + case jpiMul: + case jpiDiv: + case jpiMod: + return 4; + case jpiPlus: + case jpiMinus: + return 5; + default: + return 6; + } +} + +static void +printJsonPathItem(StringInfo buf, JsonPathItem *v, bool inKey, bool printBracketes) +{ + JsonPathItem elem; + int i; + + check_stack_depth(); + + switch(v->type) + { + case jpiNull: + appendStringInfoString(buf, "null"); + break; + case jpiKey: + if (inKey) + appendStringInfoChar(buf, '.'); + escape_json(buf, jspGetString(v, NULL)); + break; + case jpiString: + escape_json(buf, jspGetString(v, NULL)); + break; + case jpiVariable: + appendStringInfoChar(buf, '$'); + escape_json(buf, jspGetString(v, NULL)); + break; + case jpiNumeric: + appendStringInfoString(buf, + DatumGetCString(DirectFunctionCall1(numeric_out, + PointerGetDatum(jspGetNumeric(v))))); + break; + case jpiBool: + if (jspGetBool(v)) + appendBinaryStringInfo(buf, "true", 4); + else + appendBinaryStringInfo(buf, "false", 5); + break; + case jpiAnd: + case jpiOr: + case jpiEqual: + case jpiNotEqual: + case jpiLess: + case jpiGreater: + case jpiLessOrEqual: + case jpiGreaterOrEqual: + case jpiAdd: + case jpiSub: + case jpiMul: + case jpiDiv: + case jpiMod: + case jpiStartsWith: + if (printBracketes) + appendStringInfoChar(buf, '('); + jspGetLeftArg(v, &elem); + printJsonPathItem(buf, &elem, false, + operationPriority(elem.type) <= + operationPriority(v->type)); + printOperation(buf, v->type); + jspGetRightArg(v, &elem); + printJsonPathItem(buf, &elem, false, + operationPriority(elem.type) <= + operationPriority(v->type)); + if (printBracketes) + appendStringInfoChar(buf, ')'); + break; + case jpiLikeRegex: + if (printBracketes) + 
appendStringInfoChar(buf, '('); + + jspInitByBuffer(&elem, v->base, v->content.like_regex.expr); + printJsonPathItem(buf, &elem, false, + operationPriority(elem.type) <= + operationPriority(v->type)); + + appendBinaryStringInfo(buf, " like_regex ", 12); + + escape_json(buf, v->content.like_regex.pattern); + + if (v->content.like_regex.flags) + { + appendBinaryStringInfo(buf, " flag \"", 7); + + if (v->content.like_regex.flags & JSP_REGEX_ICASE) + appendStringInfoChar(buf, 'i'); + if (v->content.like_regex.flags & JSP_REGEX_SLINE) + appendStringInfoChar(buf, 's'); + if (v->content.like_regex.flags & JSP_REGEX_MLINE) + appendStringInfoChar(buf, 'm'); + if (v->content.like_regex.flags & JSP_REGEX_WSPACE) + appendStringInfoChar(buf, 'x'); + + appendStringInfoChar(buf, '"'); + } + + if (printBracketes) + appendStringInfoChar(buf, ')'); + break; + case jpiPlus: + case jpiMinus: + if (printBracketes) + appendStringInfoChar(buf, '('); + appendStringInfoChar(buf, v->type == jpiPlus ? '+' : '-'); + jspGetArg(v, &elem); + printJsonPathItem(buf, &elem, false, + operationPriority(elem.type) <= + operationPriority(v->type)); + if (printBracketes) + appendStringInfoChar(buf, ')'); + break; + case jpiFilter: + appendBinaryStringInfo(buf, "?(", 2); + jspGetArg(v, &elem); + printJsonPathItem(buf, &elem, false, false); + appendStringInfoChar(buf, ')'); + break; + case jpiNot: + appendBinaryStringInfo(buf, "!(", 2); + jspGetArg(v, &elem); + printJsonPathItem(buf, &elem, false, false); + appendStringInfoChar(buf, ')'); + break; + case jpiIsUnknown: + appendStringInfoChar(buf, '('); + jspGetArg(v, &elem); + printJsonPathItem(buf, &elem, false, false); + appendBinaryStringInfo(buf, ") is unknown", 12); + break; + case jpiExists: + appendBinaryStringInfo(buf,"exists (", 8); + jspGetArg(v, &elem); + printJsonPathItem(buf, &elem, false, false); + appendStringInfoChar(buf, ')'); + break; + case jpiCurrent: + Assert(!inKey); + appendStringInfoChar(buf, '@'); + break; + case jpiRoot: + Assert(!inKey); + appendStringInfoChar(buf, '$'); + break; + case jpiLast: + appendBinaryStringInfo(buf, "last", 4); + break; + case jpiAnyArray: + appendBinaryStringInfo(buf, "[*]", 3); + break; + case jpiAnyKey: + if (inKey) + appendStringInfoChar(buf, '.'); + appendStringInfoChar(buf, '*'); + break; + case jpiIndexArray: + appendStringInfoChar(buf, '['); + for (i = 0; i < v->content.array.nelems; i++) + { + JsonPathItem from; + JsonPathItem to; + bool range = jspGetArraySubscript(v, &from, &to, i); + + if (i) + appendStringInfoChar(buf, ','); + + printJsonPathItem(buf, &from, false, from.type == jpiSequence); + + if (range) + { + appendBinaryStringInfo(buf, " to ", 4); + printJsonPathItem(buf, &to, false, to.type == jpiSequence); + } + } + appendStringInfoChar(buf, ']'); + break; + case jpiAny: + if (inKey) + appendStringInfoChar(buf, '.'); + + if (v->content.anybounds.first == 0 && + v->content.anybounds.last == PG_UINT32_MAX) + appendBinaryStringInfo(buf, "**", 2); + else if (v->content.anybounds.first == 0) + appendStringInfo(buf, "**{,%u}", v->content.anybounds.last); + else if (v->content.anybounds.last == PG_UINT32_MAX) + appendStringInfo(buf, "**{%u,}", v->content.anybounds.first); + else if (v->content.anybounds.first == v->content.anybounds.last) + appendStringInfo(buf, "**{%u}", v->content.anybounds.first); + else + appendStringInfo(buf, "**{%u,%u}", v->content.anybounds.first, + v->content.anybounds.last); + break; + case jpiType: + appendBinaryStringInfo(buf, ".type()", 7); + break; + case jpiSize: + 
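/*
 * Editorial sketch of the parenthesization rule used by printJsonPathItem():
 * an operand is wrapped in parentheses whenever it binds no tighter than its
 * parent, so the text form of '(1 + 2) * 3' keeps its parentheses while
 * '1 + 2 * 3' stays bare.
 */
static bool
needsParens(JsonPathItemType parent, JsonPathItemType child)
{
	return operationPriority(child) <= operationPriority(parent);
}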
appendBinaryStringInfo(buf, ".size()", 7); + break; + case jpiAbs: + appendBinaryStringInfo(buf, ".abs()", 6); + break; + case jpiFloor: + appendBinaryStringInfo(buf, ".floor()", 8); + break; + case jpiCeiling: + appendBinaryStringInfo(buf, ".ceiling()", 10); + break; + case jpiDouble: + appendBinaryStringInfo(buf, ".double()", 9); + break; + case jpiDatetime: + appendBinaryStringInfo(buf, ".datetime(", 10); + if (v->content.arg) + { + jspGetArg(v, &elem); + printJsonPathItem(buf, &elem, false, false); + } + appendStringInfoChar(buf, ')'); + break; + case jpiKeyValue: + appendBinaryStringInfo(buf, ".keyvalue()", 11); + break; + case jpiMap: + appendBinaryStringInfo(buf, ".map(", 5); + jspGetArg(v, &elem); + printJsonPathItem(buf, &elem, false, false); + appendStringInfoChar(buf, ')'); + break; + case jpiSequence: + if (printBracketes || jspHasNext(v)) + appendStringInfoChar(buf, '('); + + for (i = 0; i < v->content.sequence.nelems; i++) + { + JsonPathItem elem; + + if (i) + appendBinaryStringInfo(buf, ", ", 2); + + jspGetSequenceElement(v, i, &elem); + + printJsonPathItem(buf, &elem, false, elem.type == jpiSequence); + } + + if (printBracketes || jspHasNext(v)) + appendStringInfoChar(buf, ')'); + break; + case jpiArray: + appendStringInfoChar(buf, '['); + if (v->content.arg) + { + jspGetArg(v, &elem); + printJsonPathItem(buf, &elem, false, false); + } + appendStringInfoChar(buf, ']'); + break; + case jpiObject: + appendStringInfoChar(buf, '{'); + + for (i = 0; i < v->content.object.nfields; i++) + { + JsonPathItem key; + JsonPathItem val; + + jspGetObjectField(v, i, &key, &val); + + if (i) + appendBinaryStringInfo(buf, ", ", 2); + + printJsonPathItem(buf, &key, false, false); + appendBinaryStringInfo(buf, ": ", 2); + printJsonPathItem(buf, &val, false, val.type == jpiSequence); + } + + appendStringInfoChar(buf, '}'); + break; + case jpiReduce: + appendBinaryStringInfo(buf, ".reduce(", 8); + jspGetArg(v, &elem); + printJsonPathItem(buf, &elem, false, false); + appendStringInfoChar(buf, ')'); + break; + case jpiFold: + case jpiFoldl: + case jpiFoldr: + if (v->type == jpiFold) + appendBinaryStringInfo(buf, ".fold(", 6); + else if (v->type == jpiFoldl) + appendBinaryStringInfo(buf, ".foldl(", 7); + else + appendBinaryStringInfo(buf, ".foldr(", 7); + jspGetLeftArg(v, &elem); + printJsonPathItem(buf, &elem, false, false); + appendBinaryStringInfo(buf, ", ", 2); + jspGetRightArg(v, &elem); + printJsonPathItem(buf, &elem, false, false); + appendStringInfoChar(buf, ')'); + break; + case jpiMin: + appendBinaryStringInfo(buf, ".min()", 6); + break; + case jpiMax: + appendBinaryStringInfo(buf, ".max()", 6); + break; + default: + elog(ERROR, "Unknown jsonpath item type: %d", v->type); + } + + if (jspGetNext(v, &elem)) + printJsonPathItem(buf, &elem, true, true); +} + +Datum +jsonpath_out(PG_FUNCTION_ARGS) +{ + JsonPath *in = PG_GETARG_JSONPATH_P(0); + StringInfoData buf; + JsonPathItem v; + + initStringInfo(&buf); + enlargeStringInfo(&buf, VARSIZE(in) /* estimation */); + + if (!(in->header & JSONPATH_LAX)) + appendBinaryStringInfo(&buf, "strict ", 7); + + jspInit(&v, in); + printJsonPathItem(&buf, &v, false, v.type != jpiSequence); + + PG_RETURN_CSTRING(buf.data); +} + +/********************Support functions for JsonPath****************************/ + +/* + * Support macroses to read stored values + */ + +#define read_byte(v, b, p) do { \ + (v) = *(uint8*)((b) + (p)); \ + (p) += 1; \ +} while(0) \ + +#define read_int32(v, b, p) do { \ + (v) = *(uint32*)((b) + (p)); \ + (p) += sizeof(int32); \ +} 
while(0) \ + +#define read_int32_n(v, b, p, n) do { \ + (v) = (void *)((b) + (p)); \ + (p) += sizeof(int32) * (n); \ +} while(0) \ + +/* + * Read root node and fill root node representation + */ +void +jspInit(JsonPathItem *v, JsonPath *js) +{ + Assert((js->header & ~JSONPATH_LAX) == JSONPATH_VERSION); + jspInitByBuffer(v, js->data, 0); +} + +/* + * Read node from buffer and fill its representation + */ +void +jspInitByBuffer(JsonPathItem *v, char *base, int32 pos) +{ + v->base = base; + + read_byte(v->type, base, pos); + + switch(INTALIGN(pos) - pos) + { + case 3: pos++; + case 2: pos++; + case 1: pos++; + default: break; + } + + read_int32(v->nextPos, base, pos); + + switch(v->type) + { + case jpiNull: + case jpiRoot: + case jpiCurrent: + case jpiAnyArray: + case jpiAnyKey: + case jpiType: + case jpiSize: + case jpiAbs: + case jpiFloor: + case jpiCeiling: + case jpiDouble: + case jpiKeyValue: + case jpiLast: + case jpiMin: + case jpiMax: + break; + case jpiKey: + case jpiString: + case jpiVariable: + read_int32(v->content.value.datalen, base, pos); + /* follow next */ + case jpiNumeric: + case jpiBool: + v->content.value.data = base + pos; + break; + case jpiAnd: + case jpiOr: + case jpiAdd: + case jpiSub: + case jpiMul: + case jpiDiv: + case jpiMod: + case jpiEqual: + case jpiNotEqual: + case jpiLess: + case jpiGreater: + case jpiLessOrEqual: + case jpiGreaterOrEqual: + case jpiStartsWith: + case jpiFold: + case jpiFoldl: + case jpiFoldr: + read_int32(v->content.args.left, base, pos); + read_int32(v->content.args.right, base, pos); + break; + case jpiLikeRegex: + read_int32(v->content.like_regex.flags, base, pos); + read_int32(v->content.like_regex.expr, base, pos); + read_int32(v->content.like_regex.patternlen, base, pos); + v->content.like_regex.pattern = base + pos; + break; + case jpiNot: + case jpiExists: + case jpiIsUnknown: + case jpiPlus: + case jpiMinus: + case jpiFilter: + case jpiDatetime: + case jpiMap: + case jpiArray: + case jpiReduce: + read_int32(v->content.arg, base, pos); + break; + case jpiIndexArray: + read_int32(v->content.array.nelems, base, pos); + read_int32_n(v->content.array.elems, base, pos, + v->content.array.nelems * 2); + break; + case jpiAny: + read_int32(v->content.anybounds.first, base, pos); + read_int32(v->content.anybounds.last, base, pos); + break; + case jpiSequence: + read_int32(v->content.sequence.nelems, base, pos); + read_int32_n(v->content.sequence.elems, base, pos, + v->content.sequence.nelems); + break; + case jpiObject: + read_int32(v->content.object.nfields, base, pos); + read_int32_n(v->content.object.fields, base, pos, + v->content.object.nfields * 2); + break; + default: + elog(ERROR, "Unknown jsonpath item type: %d", v->type); + } +} + +void +jspGetArg(JsonPathItem *v, JsonPathItem *a) +{ + Assert( + v->type == jpiFilter || + v->type == jpiNot || + v->type == jpiIsUnknown || + v->type == jpiExists || + v->type == jpiPlus || + v->type == jpiMinus || + v->type == jpiDatetime || + v->type == jpiMap || + v->type == jpiArray || + v->type == jpiReduce + ); + + jspInitByBuffer(a, v->base, v->content.arg); +} + +bool +jspGetNext(JsonPathItem *v, JsonPathItem *a) +{ + if (jspHasNext(v)) + { + Assert( + v->type == jpiString || + v->type == jpiNumeric || + v->type == jpiBool || + v->type == jpiNull || + v->type == jpiKey || + v->type == jpiAny || + v->type == jpiAnyArray || + v->type == jpiAnyKey || + v->type == jpiIndexArray || + v->type == jpiFilter || + v->type == jpiCurrent || + v->type == jpiExists || + v->type == jpiRoot || + v->type == 
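/*
 * Each flattened node starts with a one-byte type, padding up to the next
 * int32 boundary, an int32 offset of the next item (0 if there is none) and
 * then a type-specific payload, as read by jspInitByBuffer() above.  A small
 * illustrative walk over a path using only these accessors (the function is
 * a sketch, not part of the patch):
 */
static void
logPathItems(JsonPath *jp)
{
	JsonPathItem cur;
	JsonPathItem next;

	jspInit(&cur, jp);
	for (;;)
	{
		elog(DEBUG1, "jsonpath item of type %d", cur.type);
		if (!jspGetNext(&cur, &next))
			break;
		cur = next;
	}
}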
jpiVariable || + v->type == jpiLast || + v->type == jpiAdd || + v->type == jpiSub || + v->type == jpiMul || + v->type == jpiDiv || + v->type == jpiMod || + v->type == jpiPlus || + v->type == jpiMinus || + v->type == jpiEqual || + v->type == jpiNotEqual || + v->type == jpiGreater || + v->type == jpiGreaterOrEqual || + v->type == jpiLess || + v->type == jpiLessOrEqual || + v->type == jpiAnd || + v->type == jpiOr || + v->type == jpiNot || + v->type == jpiIsUnknown || + v->type == jpiType || + v->type == jpiSize || + v->type == jpiAbs || + v->type == jpiFloor || + v->type == jpiCeiling || + v->type == jpiDouble || + v->type == jpiDatetime || + v->type == jpiKeyValue || + v->type == jpiStartsWith || + v->type == jpiMap || + v->type == jpiSequence || + v->type == jpiArray || + v->type == jpiObject || + v->type == jpiReduce || + v->type == jpiFold || + v->type == jpiFoldl || + v->type == jpiFoldr || + v->type == jpiMin || + v->type == jpiMax + ); + + if (a) + jspInitByBuffer(a, v->base, v->nextPos); + return true; + } + + return false; +} + +void +jspGetLeftArg(JsonPathItem *v, JsonPathItem *a) +{ + Assert( + v->type == jpiAnd || + v->type == jpiOr || + v->type == jpiEqual || + v->type == jpiNotEqual || + v->type == jpiLess || + v->type == jpiGreater || + v->type == jpiLessOrEqual || + v->type == jpiGreaterOrEqual || + v->type == jpiAdd || + v->type == jpiSub || + v->type == jpiMul || + v->type == jpiDiv || + v->type == jpiMod || + v->type == jpiStartsWith || + v->type == jpiFold || + v->type == jpiFoldl || + v->type == jpiFoldr + ); + + jspInitByBuffer(a, v->base, v->content.args.left); +} + +void +jspGetRightArg(JsonPathItem *v, JsonPathItem *a) +{ + Assert( + v->type == jpiAnd || + v->type == jpiOr || + v->type == jpiEqual || + v->type == jpiNotEqual || + v->type == jpiLess || + v->type == jpiGreater || + v->type == jpiLessOrEqual || + v->type == jpiGreaterOrEqual || + v->type == jpiAdd || + v->type == jpiSub || + v->type == jpiMul || + v->type == jpiDiv || + v->type == jpiMod || + v->type == jpiStartsWith || + v->type == jpiFold || + v->type == jpiFoldl || + v->type == jpiFoldr + ); + + jspInitByBuffer(a, v->base, v->content.args.right); +} + +bool +jspGetBool(JsonPathItem *v) +{ + Assert(v->type == jpiBool); + + return (bool)*v->content.value.data; +} + +Numeric +jspGetNumeric(JsonPathItem *v) +{ + Assert(v->type == jpiNumeric); + + return (Numeric)v->content.value.data; +} + +char* +jspGetString(JsonPathItem *v, int32 *len) +{ + Assert( + v->type == jpiKey || + v->type == jpiString || + v->type == jpiVariable + ); + + if (len) + *len = v->content.value.datalen; + return v->content.value.data; +} + +bool +jspGetArraySubscript(JsonPathItem *v, JsonPathItem *from, JsonPathItem *to, + int i) +{ + Assert(v->type == jpiIndexArray); + + jspInitByBuffer(from, v->base, v->content.array.elems[i].from); + + if (!v->content.array.elems[i].to) + return false; + + jspInitByBuffer(to, v->base, v->content.array.elems[i].to); + + return true; +} + +void +jspGetSequenceElement(JsonPathItem *v, int i, JsonPathItem *elem) +{ + Assert(v->type == jpiSequence); + + jspInitByBuffer(elem, v->base, v->content.sequence.elems[i]); +} + +void +jspGetObjectField(JsonPathItem *v, int i, JsonPathItem *key, JsonPathItem *val) +{ + Assert(v->type == jpiObject); + jspInitByBuffer(key, v->base, v->content.object.fields[i].key); + jspInitByBuffer(val, v->base, v->content.object.fields[i].val); +} diff --git a/src/backend/utils/adt/jsonpath_exec.c b/src/backend/utils/adt/jsonpath_exec.c new file mode 100644 index 
0000000000..43ae827ef1 --- /dev/null +++ b/src/backend/utils/adt/jsonpath_exec.c @@ -0,0 +1,3693 @@ +/*------------------------------------------------------------------------- + * + * jsonpath_exec.c + * + * Copyright (c) 2017, PostgreSQL Global Development Group + * + * IDENTIFICATION + * src/backend/utils/adt/jsonpath_exec.c + * + *------------------------------------------------------------------------- + */ + +#include "postgres.h" +#include "funcapi.h" +#include "miscadmin.h" +#include "catalog/pg_collation.h" +#include "catalog/pg_type.h" +#include "executor/execExpr.h" +#include "lib/stringinfo.h" +#include "nodes/nodeFuncs.h" +#include "regex/regex.h" +#include "utils/builtins.h" +#include "utils/formatting.h" +#include "utils/json.h" +#include "utils/jsonpath.h" +#include "utils/lsyscache.h" +#include "utils/memutils.h" +#include "utils/varlena.h" + +#ifdef JSONPATH_JSON_C +#define JSONXOID JSONOID +#else +#define JSONXOID JSONBOID +#endif + +typedef struct JsonPathExecContext +{ + List *vars; + bool lax; + JsonbValue *root; /* for $ evaluation */ + int innermostArraySize; /* for LAST array index evaluation */ +} JsonPathExecContext; + +typedef struct JsonValueListIterator +{ + ListCell *lcell; +} JsonValueListIterator; + +#define JsonValueListIteratorEnd ((ListCell *) -1) + +typedef struct JsonTableScanState JsonTableScanState; +typedef struct JsonTableJoinState JsonTableJoinState; + +struct JsonTableScanState +{ + JsonTableScanState *parent; + JsonTableJoinState *nested; + MemoryContext mcxt; + JsonPath *path; + List *args; + JsonValueList found; + JsonValueListIterator iter; + Datum current; + int ordinal; + bool outerJoin; + bool errorOnError; + bool advanceNested; + bool reset; +}; + +struct JsonTableJoinState +{ + union + { + struct + { + JsonTableJoinState *left; + JsonTableJoinState *right; + bool cross; + bool advanceRight; + } join; + JsonTableScanState scan; + } u; + bool is_join; +}; + +/* random number to identify JsonTableContext */ +#define JSON_TABLE_CONTEXT_MAGIC 418352867 + +typedef struct JsonTableContext +{ + int magic; + struct + { + ExprState *expr; + JsonTableScanState *scan; + } *colexprs; + JsonTableScanState root; + bool empty; +} JsonTableContext; + +static inline JsonPathExecResult recursiveExecute(JsonPathExecContext *cxt, + JsonPathItem *jsp, JsonbValue *jb, + JsonValueList *found); + +static inline JsonPathExecResult recursiveExecuteBool(JsonPathExecContext *cxt, + JsonPathItem *jsp, JsonbValue *jb); + +static inline JsonPathExecResult recursiveExecuteUnwrap(JsonPathExecContext *cxt, + JsonPathItem *jsp, JsonbValue *jb, JsonValueList *found); + +static inline JsonbValue *wrapItemsInArray(const JsonValueList *items); + +static inline JsonbValue *wrapItem(JsonbValue *jbv); + +static Datum returnDATUM(void *arg, bool *isNull); + +static JsonTableJoinState *JsonTableInitPlanState(JsonTableContext *cxt, + Node *plan, JsonTableScanState *parent); + +static bool JsonTableNextRow(JsonTableScanState *scan); + + +static inline void +JsonValueListAppend(JsonValueList *jvl, JsonbValue *jbv) +{ + if (jvl->singleton) + { + jvl->list = list_make2(jvl->singleton, jbv); + jvl->singleton = NULL; + } + else if (!jvl->list) + jvl->singleton = jbv; + else + jvl->list = lappend(jvl->list, jbv); +} + +static inline void +JsonValueListConcat(JsonValueList *jvl1, JsonValueList jvl2) +{ + if (jvl1->singleton) + { + if (jvl2.singleton) + jvl1->list = list_make2(jvl1->singleton, jvl2.singleton); + else + jvl1->list = lcons(jvl1->singleton, jvl2.list); + + jvl1->singleton = 
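/*
 * The JsonValueList helpers in this file avoid building a List for the very
 * common single-result case: the first value is kept inline in 'singleton'
 * and a real List is only allocated once a second value is appended.  An
 * illustrative check of those state transitions (a sketch, not patch code):
 */
static void
checkJsonValueListStates(JsonbValue *a, JsonbValue *b)
{
	JsonValueList vals = {0};

	Assert(JsonValueListIsEmpty(&vals));

	JsonValueListAppend(&vals, a);	/* kept in vals.singleton, no List yet */
	Assert(JsonValueListLength(&vals) == 1);

	JsonValueListAppend(&vals, b);	/* converted to list_make2(a, b) */
	Assert(JsonValueListLength(&vals) == 2);
}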
NULL; + } + else if (jvl2.singleton) + { + if (jvl1->list) + jvl1->list = lappend(jvl1->list, jvl2.singleton); + else + jvl1->singleton = jvl2.singleton; + } + else if (jvl1->list) + jvl1->list = list_concat(jvl1->list, jvl2.list); + else + jvl1->list = jvl2.list; +} + +static inline int +JsonValueListLength(const JsonValueList *jvl) +{ + return jvl->singleton ? 1 : list_length(jvl->list); +} + +static inline bool +JsonValueListIsEmpty(JsonValueList *jvl) +{ + return !jvl->singleton && list_length(jvl->list) <= 0; +} + +static inline JsonbValue * +JsonValueListHead(JsonValueList *jvl) +{ + return jvl->singleton ? jvl->singleton : linitial(jvl->list); +} + +static inline void +JsonValueListClear(JsonValueList *jvl) +{ + jvl->singleton = NULL; + jvl->list = NIL; +} + +static inline List * +JsonValueListGetList(JsonValueList *jvl) +{ + if (jvl->singleton) + return list_make1(jvl->singleton); + + return jvl->list; +} + +static inline JsonbValue * +JsonValueListNext(const JsonValueList *jvl, JsonValueListIterator *it) +{ + if (it->lcell == JsonValueListIteratorEnd) + return NULL; + + if (it->lcell) + it->lcell = lnext(it->lcell); + else + { + if (jvl->singleton) + { + it->lcell = JsonValueListIteratorEnd; + return jvl->singleton; + } + + it->lcell = list_head(jvl->list); + } + + if (!it->lcell) + { + it->lcell = JsonValueListIteratorEnd; + return NULL; + } + + return lfirst(it->lcell); +} + +#ifndef JSONPATH_JSON_C +static inline JsonbValue * +JsonbInitBinary(JsonbValue *jbv, Jsonb *jb) +{ + jbv->type = jbvBinary; + jbv->val.binary.data = &jb->root; + jbv->val.binary.len = VARSIZE_ANY_EXHDR(jb); + + return jbv; +} +#endif + +static inline JsonbValue * +JsonbWrapInBinary(JsonbValue *jbv, JsonbValue *out) +{ + Jsonb *jb = JsonbValueToJsonb(jbv); + + if (!out) + out = palloc(sizeof(*out)); + + return JsonbInitBinary(out, jb); +} + +/********************Execute functions for JsonPath***************************/ + +/* + * Find value of jsonpath variable in a list of passing params + */ +static void +computeJsonPathVariable(JsonPathItem *variable, List *vars, JsonbValue *value) +{ + ListCell *cell; + JsonPathVariable *var = NULL; + bool isNull; + Datum computedValue; + char *varName; + int varNameLength; + + Assert(variable->type == jpiVariable); + varName = jspGetString(variable, &varNameLength); + + foreach(cell, vars) + { + var = (JsonPathVariable*)lfirst(cell); + + if (varNameLength == VARSIZE_ANY_EXHDR(var->varName) && + !strncmp(varName, VARDATA_ANY(var->varName), varNameLength)) + break; + + var = NULL; + } + + if (var == NULL) + ereport(ERROR, + (errcode(ERRCODE_NO_DATA_FOUND), + errmsg("could not find '%s' passed variable", + pnstrdup(varName, varNameLength)))); + + computedValue = var->cb(var->cb_arg, &isNull); + + if (isNull) + { + value->type = jbvNull; + return; + } + + switch(var->typid) + { + case BOOLOID: + value->type = jbvBool; + value->val.boolean = DatumGetBool(computedValue); + break; + case NUMERICOID: + value->type = jbvNumeric; + value->val.numeric = DatumGetNumeric(computedValue); + break; + break; + case INT2OID: + value->type = jbvNumeric; + value->val.numeric = DatumGetNumeric(DirectFunctionCall1( + int2_numeric, computedValue)); + break; + case INT4OID: + value->type = jbvNumeric; + value->val.numeric = DatumGetNumeric(DirectFunctionCall1( + int4_numeric, computedValue)); + break; + case INT8OID: + value->type = jbvNumeric; + value->val.numeric = DatumGetNumeric(DirectFunctionCall1( + int8_numeric, computedValue)); + break; + case FLOAT4OID: + value->type = jbvNumeric; + 
value->val.numeric = DatumGetNumeric(DirectFunctionCall1( + float4_numeric, computedValue)); + break; + case FLOAT8OID: + value->type = jbvNumeric; + value->val.numeric = DatumGetNumeric(DirectFunctionCall1( + float8_numeric, computedValue)); + break; + case TEXTOID: + case VARCHAROID: + value->type = jbvString; + value->val.string.val = VARDATA_ANY(computedValue); + value->val.string.len = VARSIZE_ANY_EXHDR(computedValue); + break; + case DATEOID: + case TIMEOID: + case TIMETZOID: + case TIMESTAMPOID: + case TIMESTAMPTZOID: + value->type = jbvDatetime; + value->val.datetime.typid = var->typid; + value->val.datetime.typmod = var->typmod; + value->val.datetime.value = computedValue; + break; + case JSONXOID: + { + Jsonb *jb = DatumGetJsonbP(computedValue); + + if (JB_ROOT_IS_SCALAR(jb)) + JsonbExtractScalar(&jb->root, value); + else + JsonbInitBinary(value, jb); + } + break; + case (Oid) -1: /* JsonbValue */ + *value = *(JsonbValue *) DatumGetPointer(computedValue); + break; + default: + ereport(ERROR, + (errcode(ERRCODE_WRONG_OBJECT_TYPE), + errmsg("only bool, numeric, and text types can be cast to supported jsonpath types"))); + } +} + +/* + * Convert a jsonpath scalar or variable node to an actual jsonb value + */ +static void +computeJsonPathItem(JsonPathExecContext *cxt, JsonPathItem *item, JsonbValue *value) +{ + switch(item->type) + { + case jpiNull: + value->type = jbvNull; + break; + case jpiBool: + value->type = jbvBool; + value->val.boolean = jspGetBool(item); + break; + case jpiNumeric: + value->type = jbvNumeric; + value->val.numeric = jspGetNumeric(item); + break; + case jpiString: + value->type = jbvString; + value->val.string.val = jspGetString(item, &value->val.string.len); + break; + case jpiVariable: + computeJsonPathVariable(item, cxt->vars, value); + break; + default: + elog(ERROR, "unexpected jsonpath item type: %d", item->type); + } +} + + +/* + * Returns the jbv* type of a JsonbValue. Note that it never returns + * jbvBinary as is - jbvBinary is used to mark a stored naked + * scalar value.
To improve readability it defines jbvScalar + * as alias to jbvBinary + */ +#define jbvScalar jbvBinary +static inline int +JsonbType(JsonbValue *jb) +{ + int type = jb->type; + + if (jb->type == jbvBinary) + { + JsonbContainer *jbc = (void *) jb->val.binary.data; + + if (JsonContainerIsScalar(jbc)) + type = jbvScalar; + else if (JsonContainerIsObject(jbc)) + type = jbvObject; + else if (JsonContainerIsArray(jbc)) + type = jbvArray; + else + elog(ERROR, "Unknown container type: 0x%08x", jbc->header); + } + + return type; +} + +static const char * +JsonbTypeName(JsonbValue *jb) +{ + JsonbValue jbvbuf; + + if (jb->type == jbvBinary) + { + JsonbContainer *jbc = (void *) jb->val.binary.data; + + if (JsonContainerIsScalar(jbc)) + jb = JsonbExtractScalar(jbc, &jbvbuf); + else if (JsonContainerIsArray(jbc)) + return "array"; + else if (JsonContainerIsObject(jbc)) + return "object"; + else + elog(ERROR, "Unknown container type: 0x%08x", jbc->header); + } + + switch (jb->type) + { + case jbvObject: + return "object"; + case jbvArray: + return "array"; + case jbvNumeric: + return "number"; + case jbvString: + return "string"; + case jbvBool: + return "boolean"; + case jbvNull: + return "null"; + case jbvDatetime: + switch (jb->val.datetime.typid) + { + case DATEOID: + return "date"; + case TIMEOID: + return "time without time zone"; + case TIMETZOID: + return "time with time zone"; + case TIMESTAMPOID: + return "timestamp without time zone"; + case TIMESTAMPTZOID: + return "timestamp with time zone"; + default: + elog(ERROR, "unknown jsonb value datetime type oid %d", + jb->val.datetime.typid); + } + return "unknown"; + default: + elog(ERROR, "Unknown jsonb value type: %d", jb->type); + return "unknown"; + } +} + +static int +JsonbArraySize(JsonbValue *jb) +{ + if (jb->type == jbvArray) + return jb->val.array.nElems; + + if (jb->type == jbvBinary) + { + JsonbContainer *jbc = (void *) jb->val.binary.data; + + if (JsonContainerIsArray(jbc) && !JsonContainerIsScalar(jbc)) + return JsonContainerSize(jbc); + } + + return -1; +} + +static int +compareNumeric(Numeric a, Numeric b) +{ + return DatumGetInt32( + DirectFunctionCall2( + numeric_cmp, + PointerGetDatum(a), + PointerGetDatum(b) + ) + ); +} + +static int +compareDatetime(Datum val1, Oid typid1, Datum val2, Oid typid2, bool *error) +{ + PGFunction cmpfunc = NULL; + + switch (typid1) + { + case DATEOID: + switch (typid2) + { + case DATEOID: + cmpfunc = date_cmp; + break; + case TIMESTAMPOID: + cmpfunc = date_cmp_timestamp; + break; + case TIMESTAMPTZOID: + cmpfunc = date_cmp_timestamptz; + break; + case TIMEOID: + case TIMETZOID: + *error = true; + return 0; + } + break; + + case TIMEOID: + switch (typid2) + { + case TIMEOID: + cmpfunc = time_cmp; + break; + case TIMETZOID: + val1 = DirectFunctionCall1(time_timetz, val1); + cmpfunc = timetz_cmp; + break; + case DATEOID: + case TIMESTAMPOID: + case TIMESTAMPTZOID: + *error = true; + return 0; + } + break; + + case TIMETZOID: + switch (typid2) + { + case TIMEOID: + val2 = DirectFunctionCall1(time_timetz, val2); + cmpfunc = timetz_cmp; + break; + case TIMETZOID: + cmpfunc = timetz_cmp; + break; + case DATEOID: + case TIMESTAMPOID: + case TIMESTAMPTZOID: + *error = true; + return 0; + } + break; + + case TIMESTAMPOID: + switch (typid2) + { + case DATEOID: + cmpfunc = timestamp_cmp_date; + break; + case TIMESTAMPOID: + cmpfunc = timestamp_cmp; + break; + case TIMESTAMPTZOID: + cmpfunc = timestamp_cmp_timestamptz; + break; + case TIMEOID: + case TIMETZOID: + *error = true; + return 0; + } + break; + + case 
TIMESTAMPTZOID: + switch (typid2) + { + case DATEOID: + cmpfunc = timestamptz_cmp_date; + break; + case TIMESTAMPOID: + cmpfunc = timestamptz_cmp_timestamp; + break; + case TIMESTAMPTZOID: + cmpfunc = timestamp_cmp; + break; + case TIMEOID: + case TIMETZOID: + *error = true; + return 0; + } + break; + + default: + elog(ERROR, "unknown SQL/JSON datetime type oid: %d", typid1); + } + + if (!cmpfunc) + elog(ERROR, "unknown SQL/JSON datetime type oid: %d", typid2); + + *error = false; + + return DatumGetInt32(DirectFunctionCall2(cmpfunc, val1, val2)); +} + +static inline JsonPathExecResult +checkEquality(JsonbValue *jb1, JsonbValue *jb2, bool not) +{ + bool eq = false; + + if (jb1->type != jb2->type) + { + if (jb1->type == jbvNull || jb2->type == jbvNull) + return not ? jperOk : jperNotFound; + + return jperError; + } + + switch (jb1->type) + { + case jbvNull: + eq = true; + break; + case jbvString: + eq = (jb1->val.string.len == jb2->val.string.len && + memcmp(jb2->val.string.val, jb1->val.string.val, + jb1->val.string.len) == 0); + break; + case jbvBool: + eq = (jb2->val.boolean == jb1->val.boolean); + break; + case jbvNumeric: + eq = (compareNumeric(jb1->val.numeric, jb2->val.numeric) == 0); + break; + case jbvDatetime: + { + bool error; + + eq = compareDatetime(jb1->val.datetime.value, + jb1->val.datetime.typid, + jb2->val.datetime.value, + jb2->val.datetime.typid, + &error) == 0; + + if (error) + return jperError; + + break; + } + + case jbvBinary: + case jbvObject: + case jbvArray: + return jperError; + + default: + elog(ERROR, "Unknown jsonb value type %d", jb1->type); + } + + return (not ^ eq) ? jperOk : jperNotFound; +} + +static JsonPathExecResult +makeCompare(int32 op, JsonbValue *jb1, JsonbValue *jb2) +{ + int cmp; + bool res; + + if (jb1->type != jb2->type) + { + if (jb1->type != jbvNull && jb2->type != jbvNull) + /* non-null items of different types are not order-comparable */ + return jperError; + + if (jb1->type != jbvNull || jb2->type != jbvNull) + /* comparison of nulls to non-nulls returns always false */ + return jperNotFound; + + /* both values are JSON nulls */ + } + + switch (jb1->type) + { + case jbvNull: + cmp = 0; + break; + case jbvNumeric: + cmp = compareNumeric(jb1->val.numeric, jb2->val.numeric); + break; + case jbvString: + cmp = varstr_cmp(jb1->val.string.val, jb1->val.string.len, + jb2->val.string.val, jb2->val.string.len, + DEFAULT_COLLATION_OID); + break; + case jbvDatetime: + { + bool error; + + cmp = compareDatetime(jb1->val.datetime.value, + jb1->val.datetime.typid, + jb2->val.datetime.value, + jb2->val.datetime.typid, + &error); + + if (error) + return jperError; + } + break; + default: + return jperError; + } + + switch (op) + { + case jpiEqual: + res = (cmp == 0); + break; + case jpiNotEqual: + res = (cmp != 0); + break; + case jpiLess: + res = (cmp < 0); + break; + case jpiGreater: + res = (cmp > 0); + break; + case jpiLessOrEqual: + res = (cmp <= 0); + break; + case jpiGreaterOrEqual: + res = (cmp >= 0); + break; + default: + elog(ERROR, "Unknown operation"); + return jperError; + } + + return res ? 
jperOk : jperNotFound; +} + +static JsonbValue * +copyJsonbValue(JsonbValue *src) +{ + JsonbValue *dst = palloc(sizeof(*dst)); + + *dst = *src; + + return dst; +} + +static inline JsonPathExecResult +recursiveExecuteNext(JsonPathExecContext *cxt, + JsonPathItem *cur, JsonPathItem *next, + JsonbValue *v, JsonValueList *found, bool copy) +{ + JsonPathItem elem; + bool hasNext; + + if (!cur) + hasNext = next != NULL; + else if (next) + hasNext = jspHasNext(cur); + else + { + next = &elem; + hasNext = jspGetNext(cur, next); + } + + if (hasNext) + return recursiveExecute(cxt, next, v, found); + + if (found) + JsonValueListAppend(found, copy ? copyJsonbValue(v) : v); + + return jperOk; +} + +static inline JsonPathExecResult +recursiveExecuteAndUnwrap(JsonPathExecContext *cxt, JsonPathItem *jsp, + JsonbValue *jb, JsonValueList *found) +{ + if (cxt->lax) + { + JsonValueList seq = { 0 }; + JsonValueListIterator it = { 0 }; + JsonPathExecResult res = recursiveExecute(cxt, jsp, jb, &seq); + JsonbValue *item; + + if (jperIsError(res)) + return res; + + while ((item = JsonValueListNext(&seq, &it))) + { + if (item->type == jbvArray) + { + JsonbValue *elem = item->val.array.elems; + JsonbValue *last = elem + item->val.array.nElems; + + for (; elem < last; elem++) + JsonValueListAppend(found, copyJsonbValue(elem)); + } + else if (item->type == jbvBinary && + JsonContainerIsArray(item->val.binary.data)) + { + JsonbValue elem; + JsonbIterator *it = JsonbIteratorInit(item->val.binary.data); + JsonbIteratorToken tok; + + while ((tok = JsonbIteratorNext(&it, &elem, true)) != WJB_DONE) + { + if (tok == WJB_ELEM) + JsonValueListAppend(found, copyJsonbValue(&elem)); + } + } + else + JsonValueListAppend(found, item); + } + + return jperOk; + } + + return recursiveExecute(cxt, jsp, jb, found); +} + +static JsonPathExecResult +executeExpr(JsonPathExecContext *cxt, JsonPathItem *jsp, JsonbValue *jb) +{ + JsonPathExecResult res; + JsonPathItem elem; + JsonValueList lseq = { 0 }; + JsonValueList rseq = { 0 }; + JsonValueListIterator lseqit = { 0 }; + JsonbValue *lval; + bool error = false; + bool found = false; + + jspGetLeftArg(jsp, &elem); + res = recursiveExecuteAndUnwrap(cxt, &elem, jb, &lseq); + if (jperIsError(res)) + return jperError; + + jspGetRightArg(jsp, &elem); + res = recursiveExecuteAndUnwrap(cxt, &elem, jb, &rseq); + if (jperIsError(res)) + return jperError; + + while ((lval = JsonValueListNext(&lseq, &lseqit))) + { + JsonValueListIterator rseqit = { 0 }; + JsonbValue *rval; + + while ((rval = JsonValueListNext(&rseq, &rseqit))) + { + switch (jsp->type) + { + case jpiEqual: + res = checkEquality(lval, rval, false); + break; + case jpiNotEqual: + res = checkEquality(lval, rval, true); + break; + case jpiLess: + case jpiGreater: + case jpiLessOrEqual: + case jpiGreaterOrEqual: + res = makeCompare(jsp->type, lval, rval); + break; + default: + elog(ERROR, "Unknown operation"); + } + + if (res == jperOk) + { + if (cxt->lax) + return jperOk; + + found = true; + } + else if (res == jperError) + { + if (!cxt->lax) + return jperError; + + error = true; + } + } + } + + if (found) /* possible only in strict mode */ + return jperOk; + + if (error) /* possible only in lax mode */ + return jperError; + + return jperNotFound; +} + +static JsonPathExecResult +executeBinaryArithmExpr(JsonPathExecContext *cxt, JsonPathItem *jsp, + JsonbValue *jb, JsonValueList *found) +{ + JsonPathExecResult jper; + JsonPathItem elem; + JsonValueList lseq = { 0 }; + JsonValueList rseq = { 0 }; + JsonbValue *lval; + JsonbValue *rval; + 
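/*
 * The comparison executor above is existential over both item sequences,
 * and its early exits differ by mode: lax mode returns jperOk on the first
 * matching pair (a later error no longer matters), while strict mode
 * returns jperError on the first erroneous pair (a match elsewhere does not
 * help).  A condensed editorial restatement of the final outcome:
 */
static JsonPathExecResult
combineComparisonOutcome(bool lax, bool any_match, bool any_error)
{
	if (lax)
		return any_match ? jperOk : (any_error ? jperError : jperNotFound);

	/* strict mode: an error anywhere poisons the whole predicate */
	return any_error ? jperError : (any_match ? jperOk : jperNotFound);
}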
JsonbValue lvalbuf; + JsonbValue rvalbuf; + Datum ldatum; + Datum rdatum; + Datum res; + bool hasNext; + + jspGetLeftArg(jsp, &elem); + + /* XXX by standard unwrapped only operands of multiplicative expressions */ + jper = recursiveExecuteAndUnwrap(cxt, &elem, jb, &lseq); + + if (jper == jperOk) + { + jspGetRightArg(jsp, &elem); + jper = recursiveExecuteAndUnwrap(cxt, &elem, jb, &rseq); /* XXX */ + } + + if (jper != jperOk || + JsonValueListLength(&lseq) != 1 || + JsonValueListLength(&rseq) != 1) + return jperMakeError(ERRCODE_SINGLETON_JSON_ITEM_REQUIRED); + + lval = JsonValueListHead(&lseq); + + if (JsonbType(lval) == jbvScalar) + lval = JsonbExtractScalar(lval->val.binary.data, &lvalbuf); + + if (lval->type != jbvNumeric) + return jperMakeError(ERRCODE_SINGLETON_JSON_ITEM_REQUIRED); + + rval = JsonValueListHead(&rseq); + + if (JsonbType(rval) == jbvScalar) + rval = JsonbExtractScalar(rval->val.binary.data, &rvalbuf); + + if (rval->type != jbvNumeric) + return jperMakeError(ERRCODE_SINGLETON_JSON_ITEM_REQUIRED); + + hasNext = jspGetNext(jsp, &elem); + + if (!found && !hasNext) + return jperOk; + + ldatum = NumericGetDatum(lval->val.numeric); + rdatum = NumericGetDatum(rval->val.numeric); + + switch (jsp->type) + { + case jpiAdd: + res = DirectFunctionCall2(numeric_add, ldatum, rdatum); + break; + case jpiSub: + res = DirectFunctionCall2(numeric_sub, ldatum, rdatum); + break; + case jpiMul: + res = DirectFunctionCall2(numeric_mul, ldatum, rdatum); + break; + case jpiDiv: + res = DirectFunctionCall2(numeric_div, ldatum, rdatum); + break; + case jpiMod: + res = DirectFunctionCall2(numeric_mod, ldatum, rdatum); + break; + default: + elog(ERROR, "unknown jsonpath arithmetic operation %d", jsp->type); + } + + lval = palloc(sizeof(*lval)); + lval->type = jbvNumeric; + lval->val.numeric = DatumGetNumeric(res); + + return recursiveExecuteNext(cxt, jsp, &elem, lval, found, false); +} + +static JsonPathExecResult +executeUnaryArithmExpr(JsonPathExecContext *cxt, JsonPathItem *jsp, + JsonbValue *jb, JsonValueList *found) +{ + JsonPathExecResult jper; + JsonPathExecResult jper2; + JsonPathItem elem; + JsonValueList seq = { 0 }; + JsonValueListIterator it = { 0 }; + JsonbValue *val; + bool hasNext; + + jspGetArg(jsp, &elem); + jper = recursiveExecuteAndUnwrap(cxt, &elem, jb, &seq); + + if (jperIsError(jper)) + return jperMakeError(ERRCODE_JSON_NUMBER_NOT_FOUND); + + jper = jperNotFound; + + hasNext = jspGetNext(jsp, &elem); + + while ((val = JsonValueListNext(&seq, &it))) + { + if (JsonbType(val) == jbvScalar) + JsonbExtractScalar(val->val.binary.data, val); + + if (val->type == jbvNumeric) + { + if (!found && !hasNext) + return jperOk; + } + else if (!found && !hasNext) + continue; /* skip non-numerics processing */ + + if (val->type != jbvNumeric) + return jperMakeError(ERRCODE_JSON_NUMBER_NOT_FOUND); + + switch (jsp->type) + { + case jpiPlus: + break; + case jpiMinus: + val->val.numeric = + DatumGetNumeric(DirectFunctionCall1( + numeric_uminus, NumericGetDatum(val->val.numeric))); + break; + default: + elog(ERROR, "unknown jsonpath arithmetic operation %d", jsp->type); + } + + jper2 = recursiveExecuteNext(cxt, jsp, &elem, val, found, false); + + if (jperIsError(jper2)) + return jper2; + + if (jper2 == jperOk) + { + if (!found) + return jperOk; + jper = jperOk; + } + } + + return jper; +} + +/* + * implements jpiAny node (** operator) + */ +static JsonPathExecResult +recursiveAny(JsonPathExecContext *cxt, JsonPathItem *jsp, JsonbValue *jb, + JsonValueList *found, uint32 level, uint32 first, uint32 
last) +{ + JsonPathExecResult res = jperNotFound; + JsonbIterator *it; + int32 r; + JsonbValue v; + + check_stack_depth(); + + if (level > last) + return res; + + it = JsonbIteratorInit(jb->val.binary.data); + + /* + * Recursivly iterate over jsonb objects/arrays + */ + while((r = JsonbIteratorNext(&it, &v, true)) != WJB_DONE) + { + if (r == WJB_KEY) + { + r = JsonbIteratorNext(&it, &v, true); + Assert(r == WJB_VALUE); + } + + if (r == WJB_VALUE || r == WJB_ELEM) + { + + if (level >= first) + { + /* check expression */ + res = recursiveExecuteNext(cxt, NULL, jsp, &v, found, true); + + if (jperIsError(res)) + break; + + if (res == jperOk && !found) + break; + } + + if (level < last && v.type == jbvBinary) + { + res = recursiveAny(cxt, jsp, &v, found, level + 1, first, last); + + if (jperIsError(res)) + break; + + if (res == jperOk && found == NULL) + break; + } + } + } + + return res; +} + +static JsonPathExecResult +getArrayIndex(JsonPathExecContext *cxt, JsonPathItem *jsp, JsonbValue *jb, + int32 *index) +{ + JsonbValue *jbv; + JsonValueList found = { 0 }; + JsonbValue tmp; + JsonPathExecResult res = recursiveExecute(cxt, jsp, jb, &found); + + if (jperIsError(res)) + return res; + + if (JsonValueListLength(&found) != 1) + return jperMakeError(ERRCODE_INVALID_JSON_SUBSCRIPT); + + jbv = JsonValueListHead(&found); + + if (JsonbType(jbv) == jbvScalar) + jbv = JsonbExtractScalar(jbv->val.binary.data, &tmp); + + if (jbv->type != jbvNumeric) + return jperMakeError(ERRCODE_INVALID_JSON_SUBSCRIPT); + + *index = DatumGetInt32(DirectFunctionCall1(numeric_int4, + DirectFunctionCall2(numeric_trunc, + NumericGetDatum(jbv->val.numeric), + Int32GetDatum(0)))); + + return jperOk; +} + +static JsonPathExecResult +executeStartsWithPredicate(JsonPathExecContext *cxt, JsonPathItem *jsp, + JsonbValue *jb) +{ + JsonPathExecResult res; + JsonPathItem elem; + JsonValueList lseq = { 0 }; + JsonValueList rseq = { 0 }; + JsonValueListIterator lit = { 0 }; + JsonbValue *whole; + JsonbValue *initial; + JsonbValue initialbuf; + bool error = false; + bool found = false; + + jspGetRightArg(jsp, &elem); + res = recursiveExecute(cxt, &elem, jb, &rseq); + if (jperIsError(res)) + return jperError; + + if (JsonValueListLength(&rseq) != 1) + return jperError; + + initial = JsonValueListHead(&rseq); + + if (JsonbType(initial) == jbvScalar) + initial = JsonbExtractScalar(initial->val.binary.data, &initialbuf); + + if (initial->type != jbvString) + return jperError; + + jspGetLeftArg(jsp, &elem); + res = recursiveExecuteAndUnwrap(cxt, &elem, jb, &lseq); + if (jperIsError(res)) + return jperError; + + while ((whole = JsonValueListNext(&lseq, &lit))) + { + JsonbValue wholebuf; + + if (JsonbType(whole) == jbvScalar) + whole = JsonbExtractScalar(whole->val.binary.data, &wholebuf); + + if (whole->type != jbvString) + { + if (!cxt->lax) + return jperError; + + error = true; + } + else if (whole->val.string.len >= initial->val.string.len && + !memcmp(whole->val.string.val, + initial->val.string.val, + initial->val.string.len)) + { + if (cxt->lax) + return jperOk; + + found = true; + } + } + + if (found) /* possible only in strict mode */ + return jperOk; + + if (error) /* possible only in lax mode */ + return jperError; + + return jperNotFound; +} + +static JsonPathExecResult +executeLikeRegexPredicate(JsonPathExecContext *cxt, JsonPathItem *jsp, + JsonbValue *jb) +{ + JsonPathExecResult res; + JsonPathItem elem; + JsonValueList seq = { 0 }; + JsonValueListIterator it = { 0 }; + JsonbValue *str; + text *regex; + uint32 flags = 
jsp->content.like_regex.flags; + int cflags = REG_ADVANCED; + bool error = false; + bool found = false; + + if (flags & JSP_REGEX_ICASE) + cflags |= REG_ICASE; + if (flags & JSP_REGEX_MLINE) + cflags |= REG_NEWLINE; + if (flags & JSP_REGEX_SLINE) + cflags &= ~REG_NEWLINE; + if (flags & JSP_REGEX_WSPACE) + cflags |= REG_EXPANDED; + + regex = cstring_to_text_with_len(jsp->content.like_regex.pattern, + jsp->content.like_regex.patternlen); + + jspInitByBuffer(&elem, jsp->base, jsp->content.like_regex.expr); + res = recursiveExecuteAndUnwrap(cxt, &elem, jb, &seq); + if (jperIsError(res)) + return jperError; + + while ((str = JsonValueListNext(&seq, &it))) + { + JsonbValue strbuf; + + if (JsonbType(str) == jbvScalar) + str = JsonbExtractScalar(str->val.binary.data, &strbuf); + + if (str->type != jbvString) + { + if (!cxt->lax) + return jperError; + + error = true; + } + else if (RE_compile_and_execute(regex, str->val.string.val, + str->val.string.len, cflags, + DEFAULT_COLLATION_OID, 0, NULL)) + { + if (cxt->lax) + return jperOk; + + found = true; + } + } + + if (found) /* possible only in strict mode */ + return jperOk; + + if (error) /* possible only in lax mode */ + return jperError; + + return jperNotFound; +} + +static bool +tryToParseDatetime(const char *template, text *datetime, + Datum *value, Oid *typid, int32 *typmod) +{ + MemoryContext mcxt = CurrentMemoryContext; + bool ok = false; + + PG_TRY(); + { + *value = to_datetime(datetime, template, -1, true, typid, typmod); + ok = true; + } + PG_CATCH(); + { + if (ERRCODE_TO_CATEGORY(geterrcode()) != ERRCODE_DATA_EXCEPTION) + PG_RE_THROW(); + + FlushErrorState(); + MemoryContextSwitchTo(mcxt); + } + PG_END_TRY(); + + return ok; +} + +static inline JsonPathExecResult +appendBoolResult(JsonPathExecContext *cxt, JsonPathItem *jsp, + JsonValueList *found, JsonPathExecResult res, bool needBool) +{ + JsonPathItem next; + JsonbValue jbv; + bool hasNext = jspGetNext(jsp, &next); + + if (needBool) + { + Assert(!hasNext); + return res; + } + + if (!found && !hasNext) + return jperOk; /* found singleton boolean value */ + + if (jperIsError(res)) + jbv.type = jbvNull; + else + { + jbv.type = jbvBool; + jbv.val.boolean = res == jperOk; + } + + return recursiveExecuteNext(cxt, jsp, &next, &jbv, found, true); +} + +/* + * Main executor function: walks over the jsonpath structure and tries to + * find the corresponding parts of the jsonb. Note that the jsonb and + * jsonpath values must stay available and untoasted while this runs, + * because JsonPathItem, JsonbValue and 'found' may hold pointers into the + * input values. If the caller only wants to check whether the json matches + * the jsonpath, it does not provide a 'found' argument; in that case the + * executor stops at the first positive result and skips the rest where + * possible. Otherwise it tries to find all satisfying results. + */ +static JsonPathExecResult +recursiveExecuteNoUnwrap(JsonPathExecContext *cxt, JsonPathItem *jsp, + JsonbValue *jb, JsonValueList *found, bool needBool) +{ + JsonPathItem elem; + JsonPathExecResult res = jperNotFound; + bool hasNext; + + check_stack_depth(); + + switch(jsp->type) { + case jpiAnd: + jspGetLeftArg(jsp, &elem); + res = recursiveExecuteBool(cxt, &elem, jb); + if (res != jperNotFound) + { + JsonPathExecResult res2; + + /* + * SQL/JSON says that we should check the second arg + * in case of jperError + */ + + jspGetRightArg(jsp, &elem); + res2 = recursiveExecuteBool(cxt, &elem, jb); + + res = (res2 == jperOk) ?
res : res2; + } + res = appendBoolResult(cxt, jsp, found, res, needBool); + break; + case jpiOr: + jspGetLeftArg(jsp, &elem); + res = recursiveExecuteBool(cxt, &elem, jb); + if (res != jperOk) + { + JsonPathExecResult res2; + + jspGetRightArg(jsp, &elem); + res2 = recursiveExecuteBool(cxt, &elem, jb); + + res = (res2 == jperNotFound) ? res : res2; + } + res = appendBoolResult(cxt, jsp, found, res, needBool); + break; + case jpiNot: + jspGetArg(jsp, &elem); + switch ((res = recursiveExecuteBool(cxt, &elem, jb))) + { + case jperOk: + res = jperNotFound; + break; + case jperNotFound: + res = jperOk; + break; + default: + break; + } + res = appendBoolResult(cxt, jsp, found, res, needBool); + break; + case jpiIsUnknown: + jspGetArg(jsp, &elem); + res = recursiveExecuteBool(cxt, &elem, jb); + res = jperIsError(res) ? jperOk : jperNotFound; + res = appendBoolResult(cxt, jsp, found, res, needBool); + break; + case jpiKey: + if (JsonbType(jb) == jbvObject) + { + JsonbValue *v, key; + JsonbValue obj; + + if (jb->type == jbvObject) + jb = JsonbWrapInBinary(jb, &obj); + + key.type = jbvString; + key.val.string.val = jspGetString(jsp, &key.val.string.len); + + v = findJsonbValueFromContainer(jb->val.binary.data, JB_FOBJECT, &key); + + if (v != NULL) + { + res = recursiveExecuteNext(cxt, jsp, NULL, v, found, false); + + if (jspHasNext(jsp) || !found) + pfree(v); /* free value if it was not added to found list */ + } + else if (!cxt->lax) + { + Assert(found); + res = jperMakeError(ERRCODE_JSON_MEMBER_NOT_FOUND); + } + } + else if (!cxt->lax) + { + Assert(found); + res = jperMakeError(ERRCODE_JSON_MEMBER_NOT_FOUND); + } + break; + case jpiRoot: + jb = cxt->root; + /* fall through */ + case jpiCurrent: + { + JsonbValue *v; + JsonbValue vbuf; + bool copy = true; + + if (JsonbType(jb) == jbvScalar) + { + if (jspHasNext(jsp)) + v = &vbuf; + else + { + v = palloc(sizeof(*v)); + copy = false; + } + + JsonbExtractScalar(jb->val.binary.data, v); + } + else + v = jb; + + res = recursiveExecuteNext(cxt, jsp, NULL, v, found, copy); + break; + } + case jpiAnyArray: + if (JsonbType(jb) == jbvArray) + { + hasNext = jspGetNext(jsp, &elem); + + if (jb->type == jbvArray) + { + JsonbValue *el = jb->val.array.elems; + JsonbValue *last_el = el + jb->val.array.nElems; + + for (; el < last_el; el++) + { + res = recursiveExecuteNext(cxt, jsp, &elem, el, found, true); + + if (jperIsError(res)) + break; + + if (res == jperOk && !found) + break; + } + } + else + { + JsonbValue v; + JsonbIterator *it; + JsonbIteratorToken r; + + it = JsonbIteratorInit(jb->val.binary.data); + + while((r = JsonbIteratorNext(&it, &v, true)) != WJB_DONE) + { + if (r == WJB_ELEM) + { + res = recursiveExecuteNext(cxt, jsp, &elem, &v, found, true); + + if (jperIsError(res)) + break; + + if (res == jperOk && !found) + break; + } + } + } + } + else + res = jperMakeError(ERRCODE_JSON_ARRAY_NOT_FOUND); + break; + + case jpiIndexArray: + if (JsonbType(jb) == jbvArray) + { + int innermostArraySize = cxt->innermostArraySize; + int i; + int size = JsonbArraySize(jb); + bool binary = jb->type == jbvBinary; + + cxt->innermostArraySize = size; /* for LAST evaluation */ + + hasNext = jspGetNext(jsp, &elem); + + for (i = 0; i < jsp->content.array.nelems; i++) + { + JsonPathItem from; + JsonPathItem to; + int32 index; + int32 index_from; + int32 index_to; + bool range = jspGetArraySubscript(jsp, &from, &to, i); + + res = getArrayIndex(cxt, &from, jb, &index_from); + + if (jperIsError(res)) + break; + + if (range) + { + res = getArrayIndex(cxt, &to, jb, &index_to); + + 
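/*
 * Subscript evaluation in the jpiIndexArray case: getArrayIndex() reduces
 * each subscript expression to a single numeric item (LAST is evaluated
 * against cxt->innermostArraySize), after which strict mode rejects
 * out-of-range or inverted ranges with ERRCODE_INVALID_JSON_SUBSCRIPT while
 * lax mode clamps them to the real array bounds, roughly as in this sketch:
 */
static void
clampSubscriptsLax(int32 *index_from, int32 *index_to, int array_size)
{
	/* lax-mode behaviour only; strict mode raises an error instead */
	if (*index_from < 0)
		*index_from = 0;
	if (*index_to >= array_size)
		*index_to = array_size - 1;
}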
if (jperIsError(res)) + break; + } + else + index_to = index_from; + + if (!cxt->lax && + (index_from < 0 || + index_from > index_to || + index_to >= size)) + { + res = jperMakeError(ERRCODE_INVALID_JSON_SUBSCRIPT); + break; + } + + if (index_from < 0) + index_from = 0; + + if (index_to >= size) + index_to = size - 1; + + res = jperNotFound; + + for (index = index_from; index <= index_to; index++) + { + JsonbValue *v = binary ? + getIthJsonbValueFromContainer(jb->val.binary.data, + (uint32) index) : + &jb->val.array.elems[index]; + + if (v == NULL) + continue; + + res = recursiveExecuteNext(cxt, jsp, &elem, v, found, + !binary); + + if (jperIsError(res)) + break; + + if (res == jperOk && !found) + break; + } + + if (jperIsError(res)) + break; + + if (res == jperOk && !found) + break; + } + + cxt->innermostArraySize = innermostArraySize; + } + else if (JsonbType(jb) == jbvObject) + { + int innermostArraySize = cxt->innermostArraySize; + int i; + JsonbValue bin; + JsonbValue *wrapped = NULL; + + if (jb->type != jbvBinary) + jb = JsonbWrapInBinary(jb, &bin); + + cxt->innermostArraySize = 1; + + for (i = 0; i < jsp->content.array.nelems; i++) + { + JsonPathItem from; + JsonPathItem to; + JsonbValue *key; + JsonbValue tmp; + JsonValueList keys = { 0 }; + bool range = jspGetArraySubscript(jsp, &from, &to, i); + + if (range) + { + int index_from; + int index_to; + + if (!cxt->lax) + return jperMakeError(ERRCODE_INVALID_JSON_SUBSCRIPT); + + if (!wrapped) + wrapped = wrapItem(jb); + + res = getArrayIndex(cxt, &from, wrapped, &index_from); + if (jperIsError(res)) + return res; + + res = getArrayIndex(cxt, &to, wrapped, &index_to); + if (jperIsError(res)) + return res; + + res = jperNotFound; + + if (index_from <= 0 && index_to >= 0) + { + res = recursiveExecuteNext(cxt, jsp, NULL, jb, + found, true); + if (jperIsError(res)) + return res; + + } + + if (res == jperOk && !found) + break; + + continue; + } + + res = recursiveExecute(cxt, &from, jb, &keys); + + if (jperIsError(res)) + return res; + + if (JsonValueListLength(&keys) != 1) + return jperMakeError(ERRCODE_INVALID_JSON_SUBSCRIPT); + + key = JsonValueListHead(&keys); + + if (JsonbType(key) == jbvScalar) + key = JsonbExtractScalar(key->val.binary.data, &tmp); + + res = jperNotFound; + + if (key->type == jbvNumeric && cxt->lax) + { + int index = DatumGetInt32( + DirectFunctionCall1(numeric_int4, + DirectFunctionCall2(numeric_trunc, + NumericGetDatum(key->val.numeric), + Int32GetDatum(0)))); + + if (!index) + { + res = recursiveExecuteNext(cxt, jsp, NULL, jb, + found, true); + if (jperIsError(res)) + return res; + } + } + else if (key->type == jbvString) + { + key = findJsonbValueFromContainer(jb->val.binary.data, + JB_FOBJECT, key); + + if (key) + { + res = recursiveExecuteNext(cxt, jsp, NULL, key, + found, false); + if (jperIsError(res)) + return res; + } + else if (!cxt->lax) + return jperMakeError(ERRCODE_JSON_MEMBER_NOT_FOUND); + } + else + return jperMakeError(ERRCODE_INVALID_JSON_SUBSCRIPT); + + if (res == jperOk && !found) + break; + } + + cxt->innermostArraySize = innermostArraySize; + } + else + { + if (cxt->lax) + res = recursiveExecuteNoUnwrap(cxt, jsp, wrapItem(jb), + found, false); + else + res = jperMakeError(ERRCODE_JSON_ARRAY_NOT_FOUND); + } + break; + + case jpiLast: + { + JsonbValue tmpjbv; + JsonbValue *lastjbv; + int last; + bool hasNext; + + if (cxt->innermostArraySize < 0) + elog(ERROR, + "evaluating jsonpath LAST outside of array subscript"); + + hasNext = jspGetNext(jsp, &elem); + + if (!hasNext && !found) + { + res = 
jperOk; + break; + } + + last = cxt->innermostArraySize - 1; + + lastjbv = hasNext ? &tmpjbv : palloc(sizeof(*lastjbv)); + + lastjbv->type = jbvNumeric; + lastjbv->val.numeric = DatumGetNumeric(DirectFunctionCall1( + int4_numeric, Int32GetDatum(last))); + + res = recursiveExecuteNext(cxt, jsp, &elem, lastjbv, found, hasNext); + } + break; + case jpiAnyKey: + if (JsonbType(jb) == jbvObject) + { + JsonbIterator *it; + int32 r; + JsonbValue v; + JsonbValue bin; + + if (jb->type == jbvObject) + jb = JsonbWrapInBinary(jb, &bin); + + hasNext = jspGetNext(jsp, &elem); + it = JsonbIteratorInit(jb->val.binary.data); + + while((r = JsonbIteratorNext(&it, &v, true)) != WJB_DONE) + { + if (r == WJB_VALUE) + { + res = recursiveExecuteNext(cxt, jsp, &elem, &v, found, true); + + if (jperIsError(res)) + break; + + if (res == jperOk && !found) + break; + } + } + } + else if (!cxt->lax) + { + Assert(found); + res = jperMakeError(ERRCODE_JSON_OBJECT_NOT_FOUND); + } + break; + case jpiEqual: + case jpiNotEqual: + case jpiLess: + case jpiGreater: + case jpiLessOrEqual: + case jpiGreaterOrEqual: + res = executeExpr(cxt, jsp, jb); + res = appendBoolResult(cxt, jsp, found, res, needBool); + break; + case jpiAdd: + case jpiSub: + case jpiMul: + case jpiDiv: + case jpiMod: + res = executeBinaryArithmExpr(cxt, jsp, jb, found); + break; + case jpiPlus: + case jpiMinus: + res = executeUnaryArithmExpr(cxt, jsp, jb, found); + break; + case jpiFilter: + jspGetArg(jsp, &elem); + res = recursiveExecuteBool(cxt, &elem, jb); + if (res != jperOk) + res = jperNotFound; + else + res = recursiveExecuteNext(cxt, jsp, NULL, jb, found, true); + break; + case jpiAny: + { + JsonbValue jbvbuf; + + hasNext = jspGetNext(jsp, &elem); + + /* first try without any intermediate steps */ + if (jsp->content.anybounds.first == 0) + { + res = recursiveExecuteNext(cxt, jsp, &elem, jb, found, true); + + if (res == jperOk && !found) + break; + } + + if (jb->type == jbvArray || jb->type == jbvObject) + jb = JsonbWrapInBinary(jb, &jbvbuf); + + if (jb->type == jbvBinary) + res = recursiveAny(cxt, hasNext ? &elem : NULL, jb, found, + 1, + jsp->content.anybounds.first, + jsp->content.anybounds.last); + break; + } + case jpiExists: + jspGetArg(jsp, &elem); + + if (cxt->lax) + res = recursiveExecute(cxt, &elem, jb, NULL); + else + { + JsonValueList vals = { 0 }; + + /* + * In strict mode we must get a complete list of values + * to check that there are no errors at all. + */ + res = recursiveExecute(cxt, &elem, jb, &vals); + + if (!jperIsError(res)) + res = JsonValueListIsEmpty(&vals) ? jperNotFound : jperOk; + } + + res = appendBoolResult(cxt, jsp, found, res, needBool); + break; + case jpiNull: + case jpiBool: + case jpiNumeric: + case jpiString: + case jpiVariable: + { + JsonbValue vbuf; + JsonbValue *v; + bool hasNext = jspGetNext(jsp, &elem); + + if (!hasNext && !found) + { + res = jperOk; /* skip evaluation */ + break; + } + + v = hasNext ? 
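/*
 * Editorial note on the jpiAny ('**') case above, inferred from the
 * anybounds handling here and in the printer earlier in the patch: level 0
 * is the current item itself, which is why the remainder of the path is
 * tried directly when anybounds.first == 0 before recursing into
 * recursiveAny().  The textual forms map onto the bounds as
 *
 *   **        first = 0, last = PG_UINT32_MAX
 *   **{2}     first = 2, last = 2
 *   **{1,3}   first = 1, last = 3
 *   **{,5}    first = 0, last = 5
 *   **{3,}    first = 3, last = PG_UINT32_MAX
 */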
&vbuf : palloc(sizeof(*v)); + + computeJsonPathItem(cxt, jsp, v); + + res = recursiveExecuteNext(cxt, jsp, &elem, v, found, hasNext); + } + break; + case jpiType: + { + JsonbValue *jbv = palloc(sizeof(*jbv)); + + jbv->type = jbvString; + jbv->val.string.val = pstrdup(JsonbTypeName(jb)); + jbv->val.string.len = strlen(jbv->val.string.val); + + res = recursiveExecuteNext(cxt, jsp, NULL, jbv, found, false); + } + break; + case jpiSize: + { + int size = JsonbArraySize(jb); + + if (size < 0) + { + if (!cxt->lax) + { + res = jperMakeError(ERRCODE_JSON_ARRAY_NOT_FOUND); + break; + } + + size = 1; + } + + jb = palloc(sizeof(*jb)); + + jb->type = jbvNumeric; + jb->val.numeric = + DatumGetNumeric(DirectFunctionCall1(int4_numeric, + Int32GetDatum(size))); + + res = recursiveExecuteNext(cxt, jsp, NULL, jb, found, false); + } + break; + case jpiAbs: + case jpiFloor: + case jpiCeiling: + { + JsonbValue jbvbuf; + + if (JsonbType(jb) == jbvScalar) + jb = JsonbExtractScalar(jb->val.binary.data, &jbvbuf); + + if (jb->type == jbvNumeric) + { + Datum datum = NumericGetDatum(jb->val.numeric); + + switch (jsp->type) + { + case jpiAbs: + datum = DirectFunctionCall1(numeric_abs, datum); + break; + case jpiFloor: + datum = DirectFunctionCall1(numeric_floor, datum); + break; + case jpiCeiling: + datum = DirectFunctionCall1(numeric_ceil, datum); + break; + default: + break; + } + + jb = palloc(sizeof(*jb)); + + jb->type = jbvNumeric; + jb->val.numeric = DatumGetNumeric(datum); + + res = recursiveExecuteNext(cxt, jsp, NULL, jb, found, false); + } + else + res = jperMakeError(ERRCODE_NON_NUMERIC_JSON_ITEM); + } + break; + case jpiDouble: + { + JsonbValue jbv; + MemoryContext mcxt = CurrentMemoryContext; + + if (JsonbType(jb) == jbvScalar) + jb = JsonbExtractScalar(jb->val.binary.data, &jbv); + + PG_TRY(); + { + if (jb->type == jbvNumeric) + { + /* only check success of numeric to double cast */ + DirectFunctionCall1(numeric_float8, + NumericGetDatum(jb->val.numeric)); + res = jperOk; + } + else if (jb->type == jbvString) + { + /* cast string as double */ + char *str = pnstrdup(jb->val.string.val, + jb->val.string.len); + Datum val = DirectFunctionCall1( + float8in, CStringGetDatum(str)); + pfree(str); + + jb = &jbv; + jb->type = jbvNumeric; + jb->val.numeric = DatumGetNumeric(DirectFunctionCall1( + float8_numeric, val)); + res = jperOk; + + } + else + res = jperMakeError(ERRCODE_NON_NUMERIC_JSON_ITEM); + } + PG_CATCH(); + { + if (ERRCODE_TO_CATEGORY(geterrcode()) != + ERRCODE_DATA_EXCEPTION) + PG_RE_THROW(); + + FlushErrorState(); + MemoryContextSwitchTo(mcxt); + res = jperMakeError(ERRCODE_NON_NUMERIC_JSON_ITEM); + } + PG_END_TRY(); + + if (res == jperOk) + res = recursiveExecuteNext(cxt, jsp, NULL, jb, found, true); + } + break; + case jpiDatetime: + { + JsonbValue jbvbuf; + Datum value; + Oid typid; + int32 typmod = -1; + bool hasNext; + + if (JsonbType(jb) == jbvScalar) + jb = JsonbExtractScalar(jb->val.binary.data, &jbvbuf); + + if (jb->type == jbvNumeric && !jsp->content.arg) + { + /* Standard extension: unix epoch to timestamptz */ + MemoryContext mcxt = CurrentMemoryContext; + + PG_TRY(); + { + Datum unix_epoch = + DirectFunctionCall1(numeric_float8, + NumericGetDatum(jb->val.numeric)); + + value = DirectFunctionCall1(float8_timestamptz, + unix_epoch); + typid = TIMESTAMPTZOID; + res = jperOk; + } + PG_CATCH(); + { + if (ERRCODE_TO_CATEGORY(geterrcode()) != + ERRCODE_DATA_EXCEPTION) + PG_RE_THROW(); + + FlushErrorState(); + MemoryContextSwitchTo(mcxt); + + res = 
jperMakeError(ERRCODE_INVALID_ARGUMENT_FOR_JSON_DATETIME_FUNCTION); + } + PG_END_TRY(); + } + else if (jb->type == jbvString) + { + + text *datetime_txt = + cstring_to_text_with_len(jb->val.string.val, + jb->val.string.len); + + res = jperOk; + + if (jsp->content.arg) + { + text *template_txt; + char *template_str; + int template_len; + MemoryContext mcxt = CurrentMemoryContext; + + jspGetArg(jsp, &elem); + + if (elem.type != jpiString) + elog(ERROR, "invalid jsonpath item type for .datetime() argument"); + + template_str = jspGetString(&elem, &template_len); + template_txt = cstring_to_text_with_len(template_str, + template_len); + + PG_TRY(); + { + value = to_datetime(datetime_txt, + template_str, template_len, + false, + &typid, &typmod); + } + PG_CATCH(); + { + if (ERRCODE_TO_CATEGORY(geterrcode()) != + ERRCODE_DATA_EXCEPTION) + PG_RE_THROW(); + + FlushErrorState(); + MemoryContextSwitchTo(mcxt); + + res = jperMakeError(ERRCODE_INVALID_ARGUMENT_FOR_JSON_DATETIME_FUNCTION); + } + PG_END_TRY(); + + pfree(template_txt); + } + else + { + if (!tryToParseDatetime("yyyy-mm-dd HH24:MI:SS TZH:TZM", + datetime_txt, &value, &typid, &typmod) && + !tryToParseDatetime("yyyy-mm-dd HH24:MI:SS TZH", + datetime_txt, &value, &typid, &typmod) && + !tryToParseDatetime("yyyy-mm-dd HH24:MI:SS", + datetime_txt, &value, &typid, &typmod) && + !tryToParseDatetime("yyyy-mm-dd", + datetime_txt, &value, &typid, &typmod) && + !tryToParseDatetime("HH24:MI:SS TZH:TZM", + datetime_txt, &value, &typid, &typmod) && + !tryToParseDatetime("HH24:MI:SS TZH", + datetime_txt, &value, &typid, &typmod) && + !tryToParseDatetime("HH24:MI:SS", + datetime_txt, &value, &typid, &typmod)) + res = jperMakeError(ERRCODE_INVALID_ARGUMENT_FOR_JSON_DATETIME_FUNCTION); + } + + pfree(datetime_txt); + } + else + { + res = jperMakeError(ERRCODE_INVALID_ARGUMENT_FOR_JSON_DATETIME_FUNCTION); + break; + } + + if (jperIsError(res)) + break; + + hasNext = jspGetNext(jsp, &elem); + + if (!hasNext && !found) + break; + + jb = hasNext ? &jbvbuf : palloc(sizeof(*jb)); + + jb->type = jbvDatetime; + jb->val.datetime.value = value; + jb->val.datetime.typid = typid; + jb->val.datetime.typmod = typmod; + + res = recursiveExecuteNext(cxt, jsp, &elem, jb, found, hasNext); + } + break; + case jpiKeyValue: + if (JsonbType(jb) != jbvObject) + res = jperMakeError(ERRCODE_JSON_OBJECT_NOT_FOUND); + else + { + int32 r; + JsonbValue bin; + JsonbValue key; + JsonbValue val; + JsonbValue obj; + JsonbValue keystr; + JsonbValue valstr; + JsonbIterator *it; + JsonbParseState *ps = NULL; + + hasNext = jspGetNext(jsp, &elem); + + if (jb->type == jbvBinary + ? 
!JsonContainerSize(jb->val.binary.data) + : !jb->val.object.nPairs) + { + res = jperNotFound; + break; + } + + /* make template object */ + obj.type = jbvBinary; + + keystr.type = jbvString; + keystr.val.string.val = "key"; + keystr.val.string.len = 3; + + valstr.type = jbvString; + valstr.val.string.val = "value"; + valstr.val.string.len = 5; + + if (jb->type == jbvObject) + jb = JsonbWrapInBinary(jb, &bin); + + it = JsonbIteratorInit(jb->val.binary.data); + + while ((r = JsonbIteratorNext(&it, &key, true)) != WJB_DONE) + { + if (r == WJB_KEY) + { + Jsonb *jsonb; + JsonbValue *keyval; + + res = jperOk; + + if (!hasNext && !found) + break; + + r = JsonbIteratorNext(&it, &val, true); + Assert(r == WJB_VALUE); + + pushJsonbValue(&ps, WJB_BEGIN_OBJECT, NULL); + + pushJsonbValue(&ps, WJB_KEY, &keystr); + pushJsonbValue(&ps, WJB_VALUE, &key); + + + pushJsonbValue(&ps, WJB_KEY, &valstr); + pushJsonbValue(&ps, WJB_VALUE, &val); + + keyval = pushJsonbValue(&ps, WJB_END_OBJECT, NULL); + + jsonb = JsonbValueToJsonb(keyval); + + JsonbInitBinary(&obj, jsonb); + + res = recursiveExecuteNext(cxt, jsp, &elem, &obj, found, true); + + if (jperIsError(res)) + break; + + if (res == jperOk && !found) + break; + } + } + } + break; + case jpiStartsWith: + res = executeStartsWithPredicate(cxt, jsp, jb); + res = appendBoolResult(cxt, jsp, found, res, needBool); + break; + case jpiLikeRegex: + res = executeLikeRegexPredicate(cxt, jsp, jb); + res = appendBoolResult(cxt, jsp, found, res, needBool); + break; + case jpiMap: + if (JsonbType(jb) != jbvArray) + { + if (cxt->lax) + { + JsonValueList reslist = { 0 }; + + jspGetArg(jsp, &elem); + res = recursiveExecute(cxt, &elem, jb, &reslist); + + if (jperIsError(res)) + return res; + + if (JsonValueListLength(&reslist) != 1) + return jperMakeError(ERRCODE_SINGLETON_JSON_ITEM_REQUIRED); + + res = recursiveExecuteNext(cxt, jsp, NULL, + JsonValueListHead(&reslist), + found, true); + } + else + res = jperMakeError(ERRCODE_JSON_ARRAY_NOT_FOUND); + } + else + { + JsonbValue element_buf; + JsonbValue *element; + JsonbIterator *it = NULL; + JsonbIteratorToken tok; + JsonValueList result = { 0 }; + int size = JsonbArraySize(jb); + int i; + + jspGetArg(jsp, &elem); + + if (jb->type == jbvBinary && size > 0) + { + element = &element_buf; + it = JsonbIteratorInit(jb->val.binary.data); + tok = JsonbIteratorNext(&it, &element_buf, false); + if (tok != WJB_BEGIN_ARRAY) + elog(ERROR, "unexpected jsonb token at the array start"); + } + + for (i = 0; i < size; i++) + { + JsonValueList reslist = { 0 }; + + if (it) + { + tok = JsonbIteratorNext(&it, element, true); + if (tok != WJB_ELEM) + break; + } + else + element = &jb->val.array.elems[i]; + + res = recursiveExecute(cxt, &elem, element, &reslist); + + if (jperIsError(res)) + break; + + if (JsonValueListLength(&reslist) != 1) + { + res = jperMakeError(ERRCODE_SINGLETON_JSON_ITEM_REQUIRED); + break; + } + + JsonValueListConcat(&result, reslist); + } + + if (jperIsError(res)) + break; + + res = recursiveExecuteNext(cxt, jsp, NULL, + wrapItemsInArray(&result), + found, false); + } + break; + case jpiSequence: + { + JsonPathItem next; + bool hasNext = jspGetNext(jsp, &next); + JsonValueList list; + JsonValueList *plist = hasNext ? 
&list : found; + JsonValueListIterator it; + int i; + + for (i = 0; i < jsp->content.sequence.nelems; i++) + { + JsonbValue *v; + + if (hasNext) + memset(&list, 0, sizeof(list)); + + jspGetSequenceElement(jsp, i, &elem); + res = recursiveExecute(cxt, &elem, jb, plist); + + if (jperIsError(res)) + break; + + if (!hasNext) + { + if (!found && res == jperOk) + break; + continue; + } + + memset(&it, 0, sizeof(it)); + + while ((v = JsonValueListNext(&list, &it))) + { + res = recursiveExecute(cxt, &next, v, found); + + if (jperIsError(res) || (!found && res == jperOk)) + { + i = jsp->content.sequence.nelems; + break; + } + } + } + + break; + } + case jpiArray: + { + JsonValueList list = { 0 }; + + if (jsp->content.arg) + { + jspGetArg(jsp, &elem); + res = recursiveExecute(cxt, &elem, jb, &list); + + if (jperIsError(res)) + break; + } + + res = recursiveExecuteNext(cxt, jsp, NULL, + wrapItemsInArray(&list), + found, false); + } + break; + case jpiObject: + { + JsonbParseState *ps = NULL; + JsonbValue *obj; + int i; + + pushJsonbValue(&ps, WJB_BEGIN_OBJECT, NULL); + + for (i = 0; i < jsp->content.object.nfields; i++) + { + JsonbValue *jbv; + JsonbValue jbvtmp; + JsonPathItem key; + JsonPathItem val; + JsonValueList key_list = { 0 }; + JsonValueList val_list = { 0 }; + + jspGetObjectField(jsp, i, &key, &val); + + recursiveExecute(cxt, &key, jb, &key_list); + + if (JsonValueListLength(&key_list) != 1) + { + res = jperMakeError(ERRCODE_SINGLETON_JSON_ITEM_REQUIRED); + break; + } + + jbv = JsonValueListHead(&key_list); + + if (JsonbType(jbv) == jbvScalar) + jbv = JsonbExtractScalar(jbv->val.binary.data, &jbvtmp); + + if (jbv->type != jbvString) + { + res = jperMakeError(ERRCODE_JSON_SCALAR_REQUIRED); /* XXX */ + break; + } + + pushJsonbValue(&ps, WJB_KEY, jbv); + + recursiveExecute(cxt, &val, jb, &val_list); + + if (JsonValueListLength(&val_list) != 1) + { + res = jperMakeError(ERRCODE_SINGLETON_JSON_ITEM_REQUIRED); + break; + } + + jbv = JsonValueListHead(&val_list); + + if (jbv->type == jbvObject || jbv->type == jbvArray) + jbv = JsonbWrapInBinary(jbv, &jbvtmp); + + pushJsonbValue(&ps, WJB_VALUE, jbv); + } + + if (jperIsError(res)) + break; + + obj = pushJsonbValue(&ps, WJB_END_OBJECT, NULL); + + res = recursiveExecuteNext(cxt, jsp, NULL, obj, found, false); + } + break; + case jpiReduce: + case jpiFold: + case jpiFoldl: + case jpiFoldr: + if (JsonbType(jb) != jbvArray) + { + if (cxt->lax) + { + if (jsp->type == jpiReduce) + res = recursiveExecuteNext(cxt, jsp, NULL, jb, found, true); + else + res = recursiveExecute(cxt, jsp, wrapItem(jb), found); + } + else + res = jperMakeError(ERRCODE_JSON_ARRAY_NOT_FOUND); + } + else + { + JsonbValue jbv; + JsonbValue *result = NULL; + int size = JsonbArraySize(jb); + int i; + bool foldr = jsp->type == jpiFoldr; + + if (jsp->type == jpiReduce) + jspGetArg(jsp, &elem); + else + { + JsonValueList reslist = { 0 }; + + jspGetRightArg(jsp, &elem); + res = recursiveExecute(cxt, &elem, jb, &reslist); + + if (jperIsError(res)) + return res; + + if (JsonValueListLength(&reslist) != 1) + return jperMakeError(ERRCODE_SINGLETON_JSON_ITEM_REQUIRED); + + result = JsonValueListHead(&reslist); + + jspGetLeftArg(jsp, &elem); + } + + if (jsp->type == jpiReduce && size == 1) + { + if (jb->type == jbvBinary) + { + result = getIthJsonbValueFromContainer(jb->val.binary.data, 0); + if (!result) + { + res = jperNotFound; + break; + } + } + else + { + Assert(jb->type == jbvArray); + result = &jb->val.array.elems[0]; + } + } + else if (size) + { + JsonPathVariable *v1 = 
palloc(sizeof(*v1)); + JsonPathVariable *v2 = palloc(sizeof(*v2)); + JsonbIterator *it = NULL; + JsonbIteratorToken tok; + JsonbValue *element; + + if (jb->type == jbvBinary) + { + if (foldr) + { + /* unpack array for reverse iteration */ + JsonbParseState *ps = NULL; + + jb = pushJsonbValue(&ps, WJB_ELEM, jb); + } + else + { + element = &jbv; + it = JsonbIteratorInit(jb->val.binary.data); + tok = JsonbIteratorNext(&it, &jbv, false); + if (tok != WJB_BEGIN_ARRAY) + elog(ERROR, "unexpected jsonb token at the array start"); + } + } + + v1->cb = returnDATUM; + v2->cb = returnDATUM; + v1->varName = cstring_to_text_with_len("1", 1); + v2->varName = cstring_to_text_with_len("2", 1); + v1->typid = (Oid) -1; /* raw JsonbValue */ + v2->typid = (Oid) -1; + v1->typmod = -1; + v2->typmod = -1; + + cxt->vars = lcons(v1, lcons(v2, cxt->vars)); + + if (foldr) + { + /* swap $1 and $2 for foldr() */ + JsonPathVariable *tmp = v1; + + v1 = v2; + v2 = tmp; + } + + for (i = 0; i < size; i++) + { + JsonValueList reslist = { 0 }; + + if (it) + { + tok = JsonbIteratorNext(&it, element, true); + if (tok != WJB_ELEM) + break; + } + else if (foldr) + element = &jb->val.array.elems[size - i - 1]; + else + element = &jb->val.array.elems[i]; + + if (!i && jsp->type == jpiReduce) + { + result = copyJsonbValue(element); + continue; + } + + v1->cb_arg = result; + v2->cb_arg = element; + + res = recursiveExecute(cxt, &elem, jb, &reslist); + + if (jperIsError(res)) + return res; + + if (JsonValueListLength(&reslist) != 1) + return jperMakeError(ERRCODE_SINGLETON_JSON_ITEM_REQUIRED); + + result = JsonValueListHead(&reslist); + } + + cxt->vars = list_delete_first(list_delete_first(cxt->vars)); + } + else if (jsp->type == jpiReduce) + { + res = jperNotFound; + break; + } + + res = recursiveExecuteNext(cxt, jsp, NULL, result, found, false); + } + break; + case jpiMin: + case jpiMax: + if (JsonbType(jb) != jbvArray) + { + if (cxt->lax) + res = recursiveExecuteNext(cxt, jsp, NULL, jb, found, true); + else + res = jperMakeError(ERRCODE_JSON_ARRAY_NOT_FOUND); + } + else + { + JsonbValue jbvElementBuf; + JsonbValue *jbvElement; + JsonbValue *jbvResult = NULL; + JsonbIterator *it = NULL; + JsonbIteratorToken tok; + int size = JsonbArraySize(jb); + int i; + JsonPathItemType cmpop = + jsp->type == jpiMax ? jpiGreater : jpiLess; + + if (jb->type == jbvBinary) + { + jbvElement = &jbvElementBuf; + it = JsonbIteratorInit(jb->val.binary.data); + tok = JsonbIteratorNext(&it, &jbvElementBuf, false); + if (tok != WJB_BEGIN_ARRAY) + elog(ERROR, "unexpected jsonb token at the array start"); + } + + for (i = 0; i < size; i++) + { + if (it) + { + tok = JsonbIteratorNext(&it, jbvElement, true); + if (tok != WJB_ELEM) + break; + } + else + jbvElement = &jb->val.array.elems[i]; + + if (!i) + { + jbvResult = it ? copyJsonbValue(jbvElement) : jbvElement; + } + else + { + res = makeCompare(cmpop, jbvElement, jbvResult); + + if (jperIsError(res)) + return jperMakeError(ERRCODE_JSON_SCALAR_REQUIRED); + + if (res == jperOk) + jbvResult = it ? 
                        copyJsonbValue(jbvElement) : jbvElement;
+                    }
+                }
+
+                if (!jbvResult)
+                {
+                    res = jperNotFound;
+                    break;
+                }
+
+                res = recursiveExecuteNext(cxt, jsp, NULL, jbvResult, found,
+                                           false);
+            }
+            break;
+        default:
+            elog(ERROR, "unrecognized jsonpath item type: %d", jsp->type);
+    }
+
+    return res;
+}
+
+static JsonPathExecResult
+recursiveExecuteUnwrapArray(JsonPathExecContext *cxt, JsonPathItem *jsp,
+                            JsonbValue *jb, JsonValueList *found)
+{
+    JsonPathExecResult res = jperNotFound;
+
+    if (jb->type == jbvArray)
+    {
+        JsonbValue *elem = jb->val.array.elems;
+        JsonbValue *last = elem + jb->val.array.nElems;
+
+        for (; elem < last; elem++)
+        {
+            res = recursiveExecuteNoUnwrap(cxt, jsp, elem, found, false);
+
+            if (jperIsError(res))
+                break;
+            if (res == jperOk && !found)
+                break;
+        }
+    }
+    else
+    {
+        JsonbValue v;
+        JsonbIterator *it;
+        JsonbIteratorToken tok;
+
+        it = JsonbIteratorInit(jb->val.binary.data);
+
+        while ((tok = JsonbIteratorNext(&it, &v, true)) != WJB_DONE)
+        {
+            if (tok == WJB_ELEM)
+            {
+                res = recursiveExecuteNoUnwrap(cxt, jsp, &v, found, false);
+                if (jperIsError(res))
+                    break;
+                if (res == jperOk && !found)
+                    break;
+            }
+        }
+    }
+
+    return res;
+}
+
+static inline JsonPathExecResult
+recursiveExecuteUnwrap(JsonPathExecContext *cxt, JsonPathItem *jsp,
+                       JsonbValue *jb, JsonValueList *found)
+{
+    if (cxt->lax && JsonbType(jb) == jbvArray)
+        return recursiveExecuteUnwrapArray(cxt, jsp, jb, found);
+
+    return recursiveExecuteNoUnwrap(cxt, jsp, jb, found, false);
+}
+
+static inline JsonbValue *
+wrapItem(JsonbValue *jbv)
+{
+    JsonbParseState *ps = NULL;
+    JsonbValue jbvbuf;
+    int type = JsonbType(jbv);
+
+    if (type == jbvArray)
+        return jbv;
+
+    if (type == jbvScalar)
+        jbv = JsonbExtractScalar(jbv->val.binary.data, &jbvbuf);
+
+    pushJsonbValue(&ps, WJB_BEGIN_ARRAY, NULL);
+    pushJsonbValue(&ps, WJB_ELEM, jbv);
+    jbv = pushJsonbValue(&ps, WJB_END_ARRAY, NULL);
+
+    return JsonbWrapInBinary(jbv, NULL);
+}
+
+static inline JsonPathExecResult
+recursiveExecute(JsonPathExecContext *cxt, JsonPathItem *jsp, JsonbValue *jb,
+                 JsonValueList *found)
+{
+    if (cxt->lax)
+    {
+        switch (jsp->type)
+        {
+            case jpiKey:
+            case jpiAnyKey:
+            /* case jpiAny: */
+            case jpiFilter:
+            /* all methods excluding type() and size() */
+            case jpiAbs:
+            case jpiFloor:
+            case jpiCeiling:
+            case jpiDouble:
+            case jpiDatetime:
+            case jpiKeyValue:
+                return recursiveExecuteUnwrap(cxt, jsp, jb, found);
+
+            case jpiAnyArray:
+            case jpiIndexArray:
+                jb = wrapItem(jb);
+                break;
+
+            default:
+                break;
+        }
+    }
+
+    return recursiveExecuteNoUnwrap(cxt, jsp, jb, found, false);
+}
+
+static inline JsonPathExecResult
+recursiveExecuteBool(JsonPathExecContext *cxt, JsonPathItem *jsp,
+                     JsonbValue *jb)
+{
+    if (jspHasNext(jsp))
+        elog(ERROR, "boolean jsonpath item cannot have next item");
+
+    switch (jsp->type)
+    {
+        case jpiAnd:
+        case jpiOr:
+        case jpiNot:
+        case jpiIsUnknown:
+        case jpiEqual:
+        case jpiNotEqual:
+        case jpiGreater:
+        case jpiGreaterOrEqual:
+        case jpiLess:
+        case jpiLessOrEqual:
+        case jpiExists:
+        case jpiStartsWith:
+        case jpiLikeRegex:
+            break;
+
+        default:
+            elog(ERROR, "invalid boolean jsonpath item type: %d", jsp->type);
+            break;
+    }
+
+    return recursiveExecuteNoUnwrap(cxt, jsp, jb, NULL, true);
+}
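/*
 * Illustrative sketch of a hypothetical caller of the executor below (the
 * variable names jp, jb and item are placeholders, not code from this
 * file): executeJsonPath() fills a JsonValueList with the resulting items,
 * jperIsError() distinguishes deferred errors from the ordinary
 * found/not-found outcomes, and throwJsonPathError() converts a deferred
 * error into an ereport(ERROR).
 *
 *		JsonValueList found = { 0 };
 *		JsonPathExecResult r = executeJsonPath(jp, NIL, jb, &found);
 *
 *		if (jperIsError(r))
 *			throwJsonPathError(r);
 *		else if (r == jperNotFound)
 *			... no items matched ...
 *		else
 *			item = JsonValueListHead(&found);
 */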
+
+/*
+ * Public interface to jsonpath executor
+ */
+JsonPathExecResult
+executeJsonPath(JsonPath *path, List *vars, Jsonb *json, JsonValueList *foundJson)
+{
+    JsonPathExecContext cxt;
+    JsonPathItem jsp;
+    JsonbValue jbv;
+
+    jspInit(&jsp, path);
+
+    cxt.vars = vars;
+    cxt.lax = (path->header & JSONPATH_LAX) != 0;
+    cxt.root = JsonbInitBinary(&jbv, json);
+    cxt.innermostArraySize = -1;
+
+    if (!cxt.lax && !foundJson)
+    {
+        /*
+         * In strict mode we must get a complete list of values to check
+         * that there are no errors at all.
+         */
+        JsonValueList vals = { 0 };
+        JsonPathExecResult res = recursiveExecute(&cxt, &jsp, &jbv, &vals);
+
+        if (jperIsError(res))
+            return res;
+
+        return JsonValueListIsEmpty(&vals) ? jperNotFound : jperOk;
+    }
+
+    return recursiveExecute(&cxt, &jsp, &jbv, foundJson);
+}
+
+static Datum
+returnDATUM(void *arg, bool *isNull)
+{
+    *isNull = false;
+    return PointerGetDatum(arg);
+}
+
+static Datum
+returnNULL(void *arg, bool *isNull)
+{
+    *isNull = true;
+    return Int32GetDatum(0);
+}
+
+/*
+ * Convert jsonb object into list of vars for executor
+ */
+static List*
+makePassingVars(Jsonb *jb)
+{
+    JsonbValue v;
+    JsonbIterator *it;
+    int32 r;
+    List *vars = NIL;
+
+    it = JsonbIteratorInit(&jb->root);
+
+    r = JsonbIteratorNext(&it, &v, true);
+
+    if (r != WJB_BEGIN_OBJECT)
+        ereport(ERROR,
+                (errcode(ERRCODE_WRONG_OBJECT_TYPE),
+                 errmsg("passing variable json is not an object")));
+
+    while((r = JsonbIteratorNext(&it, &v, true)) != WJB_DONE)
+    {
+        if (r == WJB_KEY)
+        {
+            JsonPathVariable *jpv = palloc0(sizeof(*jpv));
+
+            jpv->varName = cstring_to_text_with_len(v.val.string.val,
+                                                    v.val.string.len);
+
+            JsonbIteratorNext(&it, &v, true);
+
+            jpv->cb = returnDATUM;
+
+            switch(v.type)
+            {
+                case jbvBool:
+                    jpv->typid = BOOLOID;
+                    jpv->cb_arg = DatumGetPointer(BoolGetDatum(v.val.boolean));
+                    break;
+                case jbvNull:
+                    jpv->cb = returnNULL;
+                    break;
+                case jbvString:
+                    jpv->typid = TEXTOID;
+                    jpv->cb_arg = cstring_to_text_with_len(v.val.string.val,
+                                                           v.val.string.len);
+                    break;
+                case jbvNumeric:
+                    jpv->typid = NUMERICOID;
+                    jpv->cb_arg = v.val.numeric;
+                    break;
+                case jbvBinary:
+                    jpv->typid = JSONXOID;
+                    jpv->cb_arg = DatumGetPointer(JsonbPGetDatum(JsonbValueToJsonb(&v)));
+                    break;
+                default:
+                    elog(ERROR, "unsupported type in passing variable json");
+            }
+
+            vars = lappend(vars, jpv);
+        }
+    }
+
+    return vars;
+}
+
+static void
+throwJsonPathError(JsonPathExecResult res)
+{
+    if (!jperIsError(res))
+        return;
+
+    switch (jperGetError(res))
+    {
+        case ERRCODE_JSON_ARRAY_NOT_FOUND:
+            ereport(ERROR,
+                    (errcode(jperGetError(res)),
+                     errmsg("SQL/JSON array not found")));
+            break;
+        case ERRCODE_JSON_OBJECT_NOT_FOUND:
+            ereport(ERROR,
+                    (errcode(jperGetError(res)),
+                     errmsg("SQL/JSON object not found")));
+            break;
+        case ERRCODE_JSON_MEMBER_NOT_FOUND:
+            ereport(ERROR,
+                    (errcode(jperGetError(res)),
+                     errmsg("SQL/JSON member not found")));
+            break;
+        case ERRCODE_JSON_NUMBER_NOT_FOUND:
+            ereport(ERROR,
+                    (errcode(jperGetError(res)),
+                     errmsg("SQL/JSON number not found")));
+            break;
+        case ERRCODE_JSON_SCALAR_REQUIRED:
+            ereport(ERROR,
+                    (errcode(jperGetError(res)),
+                     errmsg("SQL/JSON scalar required")));
+            break;
+        case ERRCODE_SINGLETON_JSON_ITEM_REQUIRED:
+            ereport(ERROR,
+                    (errcode(jperGetError(res)),
+                     errmsg("Singleton SQL/JSON item required")));
+            break;
+        case ERRCODE_NON_NUMERIC_JSON_ITEM:
+            ereport(ERROR,
+                    (errcode(jperGetError(res)),
+                     errmsg("Non-numeric SQL/JSON item")));
+            break;
+        case ERRCODE_INVALID_JSON_SUBSCRIPT:
+            ereport(ERROR,
+                    (errcode(jperGetError(res)),
+                     errmsg("Invalid SQL/JSON subscript")));
+            break;
+        case ERRCODE_INVALID_ARGUMENT_FOR_JSON_DATETIME_FUNCTION:
+            ereport(ERROR,
+                    (errcode(jperGetError(res)),
+                     errmsg("Invalid argument for SQL/JSON datetime function")));
+            break;
+        default:
+            ereport(ERROR,
+                    (errcode(jperGetError(res)),
+                     errmsg("Unknown SQL/JSON error")));
break; + } +} + +static Datum +jsonb_jsonpath_exists(PG_FUNCTION_ARGS) +{ + Jsonb *jb = PG_GETARG_JSONB_P(0); + JsonPath *jp = PG_GETARG_JSONPATH_P(1); + JsonPathExecResult res; + List *vars = NIL; + + if (PG_NARGS() == 3) + vars = makePassingVars(PG_GETARG_JSONB_P(2)); + + res = executeJsonPath(jp, vars, jb, NULL); + + PG_FREE_IF_COPY(jb, 0); + PG_FREE_IF_COPY(jp, 1); + + throwJsonPathError(res); + + PG_RETURN_BOOL(res == jperOk); +} + +Datum +jsonb_jsonpath_exists2(PG_FUNCTION_ARGS) +{ + return jsonb_jsonpath_exists(fcinfo); +} + +Datum +jsonb_jsonpath_exists3(PG_FUNCTION_ARGS) +{ + return jsonb_jsonpath_exists(fcinfo); +} + +static inline Datum +jsonb_jsonpath_predicate(FunctionCallInfo fcinfo, List *vars) +{ + Jsonb *jb = PG_GETARG_JSONB_P(0); + JsonPath *jp = PG_GETARG_JSONPATH_P(1); + JsonbValue *jbv; + JsonValueList found = { 0 }; + JsonPathExecResult res; + + res = executeJsonPath(jp, vars, jb, &found); + + throwJsonPathError(res); + + if (JsonValueListLength(&found) != 1) + throwJsonPathError(jperMakeError(ERRCODE_SINGLETON_JSON_ITEM_REQUIRED)); + + jbv = JsonValueListHead(&found); + + if (JsonbType(jbv) == jbvScalar) + JsonbExtractScalar(jbv->val.binary.data, jbv); + + PG_FREE_IF_COPY(jb, 0); + PG_FREE_IF_COPY(jp, 1); + + if (jbv->type == jbvNull) + PG_RETURN_NULL(); + + if (jbv->type != jbvBool) + PG_RETURN_NULL(); /* XXX */ + + PG_RETURN_BOOL(jbv->val.boolean); +} + +Datum +jsonb_jsonpath_predicate2(PG_FUNCTION_ARGS) +{ + return jsonb_jsonpath_predicate(fcinfo, NIL); +} + +Datum +jsonb_jsonpath_predicate3(PG_FUNCTION_ARGS) +{ + return jsonb_jsonpath_predicate(fcinfo, + makePassingVars(PG_GETARG_JSONB_P(2))); +} + +static Datum +jsonb_jsonpath_query(FunctionCallInfo fcinfo, bool safe) +{ + FuncCallContext *funcctx; + List *found; + JsonbValue *v; + ListCell *c; + + if (SRF_IS_FIRSTCALL()) + { + JsonPath *jp = PG_GETARG_JSONPATH_P(1); + Jsonb *jb; + JsonPathExecResult res; + MemoryContext oldcontext; + List *vars = NIL; + JsonValueList found = { 0 }; + + funcctx = SRF_FIRSTCALL_INIT(); + oldcontext = MemoryContextSwitchTo(funcctx->multi_call_memory_ctx); + + jb = PG_GETARG_JSONB_P_COPY(0); + if (PG_NARGS() == 3) + vars = makePassingVars(PG_GETARG_JSONB_P(2)); + + res = executeJsonPath(jp, vars, jb, &found); + + if (jperIsError(res)) + { + if (safe) + JsonValueListClear(&found); + else + throwJsonPathError(res); + } + + PG_FREE_IF_COPY(jp, 1); + + funcctx->user_fctx = JsonValueListGetList(&found); + + MemoryContextSwitchTo(oldcontext); + } + + funcctx = SRF_PERCALL_SETUP(); + found = funcctx->user_fctx; + + c = list_head(found); + + if (c == NULL) + SRF_RETURN_DONE(funcctx); + + v = lfirst(c); + funcctx->user_fctx = list_delete_first(found); + + SRF_RETURN_NEXT(funcctx, JsonbPGetDatum(JsonbValueToJsonb(v))); +} + +Datum +jsonb_jsonpath_query2(PG_FUNCTION_ARGS) +{ + return jsonb_jsonpath_query(fcinfo, false); +} + +Datum +jsonb_jsonpath_query3(PG_FUNCTION_ARGS) +{ + return jsonb_jsonpath_query(fcinfo, false); +} + +Datum +jsonb_jsonpath_query_safe2(PG_FUNCTION_ARGS) +{ + return jsonb_jsonpath_query(fcinfo, true); +} + +Datum +jsonb_jsonpath_query_safe3(PG_FUNCTION_ARGS) +{ + return jsonb_jsonpath_query(fcinfo, true); +} + +static inline JsonbValue * +wrapItemsInArray(const JsonValueList *items) +{ + JsonbParseState *ps = NULL; + JsonValueListIterator it = { 0 }; + JsonbValue *jbv; + + pushJsonbValue(&ps, WJB_BEGIN_ARRAY, NULL); + + while ((jbv = JsonValueListNext(items, &it))) + { + JsonbValue bin; + + if (jbv->type == jbvBinary && + JsonContainerIsScalar(jbv->val.binary.data)) + 
JsonbExtractScalar(jbv->val.binary.data, jbv); + + if (jbv->type == jbvObject || jbv->type == jbvArray) + jbv = JsonbWrapInBinary(jbv, &bin); + + pushJsonbValue(&ps, WJB_ELEM, jbv); + } + + return pushJsonbValue(&ps, WJB_END_ARRAY, NULL); +} + +/********************Interface to pgsql's executor***************************/ +bool +JsonbPathExists(Datum jb, JsonPath *jp, List *vars) +{ + JsonPathExecResult res = executeJsonPath(jp, vars, DatumGetJsonbP(jb), + NULL); + + throwJsonPathError(res); + + return res == jperOk; +} + +Datum +JsonbPathQuery(Datum jb, JsonPath *jp, JsonWrapper wrapper, + bool *empty, List *vars) +{ + JsonbValue *first; + bool wrap; + JsonValueList found = { 0 }; + JsonPathExecResult jper = executeJsonPath(jp, vars, DatumGetJsonbP(jb), + &found); + int count; + + throwJsonPathError(jper); + + count = JsonValueListLength(&found); + + first = count ? JsonValueListHead(&found) : NULL; + + if (!first) + wrap = false; + else if (wrapper == JSW_NONE) + wrap = false; + else if (wrapper == JSW_UNCONDITIONAL) + wrap = true; + else if (wrapper == JSW_CONDITIONAL) + wrap = count > 1 || + IsAJsonbScalar(first) || + (first->type == jbvBinary && + JsonContainerIsScalar(first->val.binary.data)); + else + { + elog(ERROR, "unrecognized json wrapper %d", wrapper); + wrap = false; + } + + if (wrap) + return JsonbPGetDatum(JsonbValueToJsonb(wrapItemsInArray(&found))); + + if (count > 1) + ereport(ERROR, + (errcode(ERRCODE_MORE_THAN_ONE_JSON_ITEM), + errmsg("more than one SQL/JSON item"))); + + if (first) + return JsonbPGetDatum(JsonbValueToJsonb(first)); + + *empty = true; + return PointerGetDatum(NULL); +} + +JsonbValue * +JsonbPathValue(Datum jb, JsonPath *jp, bool *empty, List *vars) +{ + JsonbValue *res; + JsonValueList found = { 0 }; + JsonPathExecResult jper = executeJsonPath(jp, vars, DatumGetJsonbP(jb), + &found); + int count; + + throwJsonPathError(jper); + + count = JsonValueListLength(&found); + + *empty = !count; + + if (*empty) + return NULL; + + if (count > 1) + ereport(ERROR, + (errcode(ERRCODE_MORE_THAN_ONE_JSON_ITEM), + errmsg("more than one SQL/JSON item"))); + + res = JsonValueListHead(&found); + + if (res->type == jbvBinary && + JsonContainerIsScalar(res->val.binary.data)) + JsonbExtractScalar(res->val.binary.data, res); + + if (!IsAJsonbScalar(res)) + ereport(ERROR, + (errcode(ERRCODE_JSON_SCALAR_REQUIRED), + errmsg("SQL/JSON scalar required"))); + + if (res->type == jbvNull) + return NULL; + + return res; +} + +/* + * Returns private data from executor state. Ensure validity by check with + * MAGIC number. + */ +static inline JsonTableContext * +GetJsonTableContext(TableFuncScanState *state, const char *fname) +{ + JsonTableContext *result; + + if (!IsA(state, TableFuncScanState)) + elog(ERROR, "%s called with invalid TableFuncScanState", fname); + result = (JsonTableContext *) state->opaque; + if (result->magic != JSON_TABLE_CONTEXT_MAGIC) + elog(ERROR, "%s called with invalid TableFuncScanState", fname); + + return result; +} + +/* Recursively initialize JSON_TABLE scan state */ +static void +JsonTableInitScanState(JsonTableContext *cxt, JsonTableScanState *scan, + JsonTableParentNode *node, JsonTableScanState *parent, + List *args, MemoryContext mcxt) +{ + int i; + + scan->parent = parent; + scan->outerJoin = node->outerJoin; + scan->errorOnError = node->errorOnError; + scan->path = DatumGetJsonPathP(node->path->constvalue); + scan->args = args; + scan->mcxt = AllocSetContextCreate(mcxt, "JsonTableContext", + ALLOCSET_DEFAULT_SIZES); + scan->nested = node->child ? 
+ JsonTableInitPlanState(cxt, node->child, scan) : NULL; + + for (i = node->colMin; i <= node->colMax; i++) + cxt->colexprs[i].scan = scan; +} + +/* Recursively initialize JSON_TABLE scan state */ +static JsonTableJoinState * +JsonTableInitPlanState(JsonTableContext *cxt, Node *plan, + JsonTableScanState *parent) +{ + JsonTableJoinState *state = palloc0(sizeof(*state)); + + if (IsA(plan, JsonTableSiblingNode)) + { + JsonTableSiblingNode *join = castNode(JsonTableSiblingNode, plan); + + state->is_join = true; + state->u.join.cross = join->cross; + state->u.join.left = JsonTableInitPlanState(cxt, join->larg, parent); + state->u.join.right = JsonTableInitPlanState(cxt, join->rarg, parent); + } + else + { + JsonTableParentNode *node = castNode(JsonTableParentNode, plan); + + state->is_join = false; + + JsonTableInitScanState(cxt, &state->u.scan, node, parent, + parent->args, parent->mcxt); + } + + return state; +} + +/* + * JsonTableInitOpaque + * Fill in TableFuncScanState->opaque for JsonTable processor + */ +static void +JsonTableInitOpaque(TableFuncScanState *state, int natts) +{ + JsonTableContext *cxt; + PlanState *ps = &state->ss.ps; + TableFuncScan *tfs = castNode(TableFuncScan, ps->plan); + TableFunc *tf = tfs->tablefunc; + JsonExpr *ci = castNode(JsonExpr, tf->docexpr); + JsonTableParentNode *root = castNode(JsonTableParentNode, tf->plan); + List *args = NIL; + ListCell *lc; + int i; + + cxt = palloc0(sizeof(JsonTableContext)); + cxt->magic = JSON_TABLE_CONTEXT_MAGIC; + + if (list_length(ci->passing.values) > 0) + { + ListCell *exprlc; + ListCell *namelc; + + forboth(exprlc, ci->passing.values, + namelc, ci->passing.names) + { + Expr *expr = (Expr *) lfirst(exprlc); + Value *name = (Value *) lfirst(namelc); + JsonPathVariableEvalContext *var = palloc(sizeof(*var)); + + var->var.varName = cstring_to_text(name->val.str); + var->var.typid = exprType((Node *) expr); + var->var.typmod = exprTypmod((Node *) expr); + var->var.cb = EvalJsonPathVar; + var->var.cb_arg = var; + var->estate = ExecInitExpr(expr, ps); + var->econtext = ps->ps_ExprContext; + var->mcxt = CurrentMemoryContext; + var->evaluated = false; + var->value = (Datum) 0; + var->isnull = true; + + args = lappend(args, var); + } + } + + cxt->colexprs = palloc(sizeof(*cxt->colexprs) * + list_length(tf->colvalexprs)); + + i = 0; + + foreach(lc, tf->colvalexprs) + { + Expr *expr = lfirst(lc); + + cxt->colexprs[i++].expr = ExecInitExpr(expr, ps); + } + + JsonTableInitScanState(cxt, &cxt->root, root, NULL, args, + CurrentMemoryContext); + + state->opaque = cxt; +} + +/* Reset scan iterator to the beginning of the item list */ +static void +JsonTableRescan(JsonTableScanState *scan) +{ + memset(&scan->iter, 0, sizeof(scan->iter)); + scan->current = PointerGetDatum(NULL); + scan->advanceNested = false; + scan->ordinal = 0; +} + +/* Reset context item of a scan, execute JSON path and reset a scan */ +static void +JsonTableResetContextItem(JsonTableScanState *scan, Datum item) +{ + MemoryContext oldcxt; + JsonPathExecResult res; + + JsonValueListClear(&scan->found); + + MemoryContextResetOnly(scan->mcxt); + + oldcxt = MemoryContextSwitchTo(scan->mcxt); + + res = executeJsonPath(scan->path, scan->args, DatumGetJsonbP(item), + &scan->found); + + MemoryContextSwitchTo(oldcxt); + + if (jperIsError(res)) + { + if (scan->errorOnError) + throwJsonPathError(res); /* does not return */ + else + JsonValueListClear(&scan->found); /* EMPTY ON ERROR case */ + } + + JsonTableRescan(scan); +} + +/* + * JsonTableSetDocument + * Install the input document 
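 *
 * For illustration only, a sketch of how nodeTableFuncscan.c drives the
 * TableFuncRoutine callbacks implemented in this file (not a literal copy
 * of that code; doc, natts, colnum, typid, typmod, isnull and value are
 * placeholders):
 *
 *		routine->InitOpaque(state, natts);
 *		routine->SetDocument(state, doc);
 *		while (routine->FetchRow(state))
 *			value = routine->GetValue(state, colnum, typid, typmod, &isnull);
 *		routine->DestroyOpaque(state);
 *
 * where "routine" is the JsonbTableRoutine defined at the end of this file.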
+ */
+static void
+JsonTableSetDocument(TableFuncScanState *state, Datum value)
+{
+    JsonTableContext *cxt = GetJsonTableContext(state, "JsonTableSetDocument");
+
+    JsonTableResetContextItem(&cxt->root, value);
+}
+
+/* Recursively reset scan and its child nodes */
+static void
+JsonTableRescanRecursive(JsonTableJoinState *state)
+{
+    if (state->is_join)
+    {
+        JsonTableRescanRecursive(state->u.join.left);
+        JsonTableRescanRecursive(state->u.join.right);
+        state->u.join.advanceRight = false;
+    }
+    else
+    {
+        JsonTableRescan(&state->u.scan);
+        if (state->u.scan.nested)
+            JsonTableRescanRecursive(state->u.scan.nested);
+    }
+}
+
+/*
+ * Fetch next row from a cross/union joined scan.
+ *
+ * Returns false at the end of a scan, true otherwise.
+ */
+static bool
+JsonTableNextJoinRow(JsonTableJoinState *state)
+{
+    if (!state->is_join)
+        return JsonTableNextRow(&state->u.scan);
+
+    if (state->u.join.advanceRight)
+    {
+        /* fetch next inner row */
+        if (JsonTableNextJoinRow(state->u.join.right))
+            return true;
+
+        /* inner rows are exhausted */
+        if (state->u.join.cross)
+            state->u.join.advanceRight = false; /* next outer row */
+        else
+            return false; /* end of scan */
+    }
+
+    while (!state->u.join.advanceRight)
+    {
+        /* fetch next outer row */
+        bool left = JsonTableNextJoinRow(state->u.join.left);
+
+        if (state->u.join.cross)
+        {
+            if (!left)
+                return false; /* end of scan */
+
+            JsonTableRescanRecursive(state->u.join.right);
+
+            if (!JsonTableNextJoinRow(state->u.join.right))
+                continue; /* next outer row */
+
+            state->u.join.advanceRight = true; /* next inner row */
+        }
+        else if (!left)
+        {
+            if (!JsonTableNextJoinRow(state->u.join.right))
+                return false; /* end of scan */
+
+            state->u.join.advanceRight = true; /* next inner row */
+        }
+
+        break;
+    }
+
+    return true;
+}
+
+/* Recursively set 'reset' flag of scan and its child nodes */
+static void
+JsonTableJoinReset(JsonTableJoinState *state)
+{
+    if (state->is_join)
+    {
+        JsonTableJoinReset(state->u.join.left);
+        JsonTableJoinReset(state->u.join.right);
+        state->u.join.advanceRight = false;
+    }
+    else
+    {
+        state->u.scan.reset = true;
+        state->u.scan.advanceNested = false;
+
+        if (state->u.scan.nested)
+            JsonTableJoinReset(state->u.scan.nested);
+    }
+}
+
+/*
+ * Fetch next row from a simple scan with outer/inner joined nested subscans.
+ *
+ * Returns false at the end of a scan, true otherwise.
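 *
 * Worked example (assumed data, matching the outerJoin handling below): if
 * this scan yields parent rows A and B, and the nested path yields no rows
 * for A but one row B1 for B, an outer-joined scan returns (A, NULL nested
 * columns) followed by (B, B1), while an inner-joined scan skips A and
 * returns only (B, B1).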
+ */ +static bool +JsonTableNextRow(JsonTableScanState *scan) +{ + /* reset context item if requested */ + if (scan->reset) + { + JsonTableResetContextItem(scan, scan->parent->current); + scan->reset = false; + } + + if (scan->advanceNested) + { + /* fetch next nested row */ + scan->advanceNested = JsonTableNextJoinRow(scan->nested); + + if (scan->advanceNested) + return true; + } + + for (;;) + { + /* fetch next row */ + JsonbValue *jbv = JsonValueListNext(&scan->found, &scan->iter); + MemoryContext oldcxt; + + if (!jbv) + { + scan->current = PointerGetDatum(NULL); + return false; /* end of scan */ + } + + /* set current row item */ + oldcxt = MemoryContextSwitchTo(scan->mcxt); + scan->current = JsonbPGetDatum(JsonbValueToJsonb(jbv)); + MemoryContextSwitchTo(oldcxt); + + scan->ordinal++; + + if (!scan->nested) + break; + + JsonTableJoinReset(scan->nested); + + scan->advanceNested = JsonTableNextJoinRow(scan->nested); + + if (scan->advanceNested || scan->outerJoin) + break; + + /* state->ordinal--; */ /* skip current outer row, reset counter */ + } + + return true; +} + +/* + * JsonTableFetchRow + * Prepare the next "current" tuple for upcoming GetValue calls. + * Returns FALSE if the row-filter expression returned no more rows. + */ +static bool +JsonTableFetchRow(TableFuncScanState *state) +{ + JsonTableContext *cxt = GetJsonTableContext(state, "JsonTableFetchRow"); + + if (cxt->empty) + return false; + + return JsonTableNextRow(&cxt->root); +} + +/* + * JsonTableGetValue + * Return the value for column number 'colnum' for the current row. + * + * This leaks memory, so be sure to reset often the context in which it's + * called. + */ +static Datum +JsonTableGetValue(TableFuncScanState *state, int colnum, + Oid typid, int32 typmod, bool *isnull) +{ + JsonTableContext *cxt = GetJsonTableContext(state, "JsonTableGetValue"); + ExprContext *econtext = state->ss.ps.ps_ExprContext; + ExprState *estate = cxt->colexprs[colnum].expr; + JsonTableScanState *scan = cxt->colexprs[colnum].scan; + Datum result; + + if (!DatumGetPointer(scan->current)) /* NULL from outer/union join */ + { + result = (Datum) 0; + *isnull = true; + } + else if (estate) /* regular column */ + result = ExecEvalExprPassingCaseValue(estate, econtext, isnull, + scan->current, false); + else + { + result = Int32GetDatum(scan->ordinal); /* ordinality column */ + *isnull = false; + } + + return result; +} + +/* + * JsonTableDestroyOpaque + */ +static void +JsonTableDestroyOpaque(TableFuncScanState *state) +{ + JsonTableContext *cxt = GetJsonTableContext(state, "JsonTableDestroyOpaque"); + + /* not valid anymore */ + cxt->magic = 0; + + state->opaque = NULL; +} + +const TableFuncRoutine JsonbTableRoutine = +{ + JsonTableInitOpaque, + JsonTableSetDocument, + NULL, + NULL, + NULL, + JsonTableFetchRow, + JsonTableGetValue, + JsonTableDestroyOpaque +}; diff --git a/src/backend/utils/adt/jsonpath_gram.y b/src/backend/utils/adt/jsonpath_gram.y new file mode 100644 index 0000000000..2d5d4b30cb --- /dev/null +++ b/src/backend/utils/adt/jsonpath_gram.y @@ -0,0 +1,548 @@ +/*------------------------------------------------------------------------- + * + * jsonpath_gram.y + * Grammar definitions for jsonpath datatype + * + * Copyright (c) 2017, PostgreSQL Global Development Group + * + * IDENTIFICATION + * src/backend/utils/adt/jsonpath_gram.y + * + *------------------------------------------------------------------------- + */ + +%{ +#include "postgres.h" + +#include "fmgr.h" +#include "nodes/pg_list.h" +#include "utils/builtins.h" +#include 
"utils/jsonpath.h" + +#include "utils/jsonpath_scanner.h" + +/* + * Bison doesn't allocate anything that needs to live across parser calls, + * so we can easily have it use palloc instead of malloc. This prevents + * memory leaks if we error out during parsing. Note this only works with + * bison >= 2.0. However, in bison 1.875 the default is to use alloca() + * if possible, so there's not really much problem anyhow, at least if + * you're building with gcc. + */ +#define YYMALLOC palloc +#define YYFREE pfree + +static JsonPathParseItem* +makeItemType(int type) +{ + JsonPathParseItem* v = palloc(sizeof(*v)); + + v->type = type; + v->next = NULL; + + return v; +} + +static JsonPathParseItem* +makeItemString(string *s) +{ + JsonPathParseItem *v; + + if (s == NULL) + { + v = makeItemType(jpiNull); + } + else + { + v = makeItemType(jpiString); + v->value.string.val = s->val; + v->value.string.len = s->len; + } + + return v; +} + +static JsonPathParseItem* +makeItemVariable(string *s) +{ + JsonPathParseItem *v; + + v = makeItemType(jpiVariable); + v->value.string.val = s->val; + v->value.string.len = s->len; + + return v; +} + +static JsonPathParseItem* +makeItemKey(string *s) +{ + JsonPathParseItem *v; + + v = makeItemString(s); + v->type = jpiKey; + + return v; +} + +static JsonPathParseItem* +makeItemNumeric(string *s) +{ + JsonPathParseItem *v; + + v = makeItemType(jpiNumeric); + v->value.numeric = + DatumGetNumeric(DirectFunctionCall3(numeric_in, CStringGetDatum(s->val), 0, -1)); + + return v; +} + +static JsonPathParseItem* +makeItemBool(bool val) { + JsonPathParseItem *v = makeItemType(jpiBool); + + v->value.boolean = val; + + return v; +} + +static JsonPathParseItem* +makeItemBinary(int type, JsonPathParseItem* la, JsonPathParseItem *ra) +{ + JsonPathParseItem *v = makeItemType(type); + + v->value.args.left = la; + v->value.args.right = ra; + + return v; +} + +static JsonPathParseItem* +makeItemUnary(int type, JsonPathParseItem* a) +{ + JsonPathParseItem *v; + + if (type == jpiPlus && a->type == jpiNumeric && !a->next) + return a; + + if (type == jpiMinus && a->type == jpiNumeric && !a->next) + { + v = makeItemType(jpiNumeric); + v->value.numeric = + DatumGetNumeric(DirectFunctionCall1(numeric_uminus, + NumericGetDatum(a->value.numeric))); + return v; + } + + v = makeItemType(type); + + v->value.arg = a; + + return v; +} + +static JsonPathParseItem* +makeItemList(List *list) +{ + JsonPathParseItem *head, *end; + ListCell *cell = list_head(list); + + head = end = (JsonPathParseItem *) lfirst(cell); + + if (!lnext(cell)) + return head; + + /* append items to the end of already existing list */ + while (end->next) + end = end->next; + + for_each_cell(cell, lnext(cell)) + { + JsonPathParseItem *c = (JsonPathParseItem *) lfirst(cell); + + end->next = c; + end = c; + } + + return head; +} + +static JsonPathParseItem* +makeIndexArray(List *list) +{ + JsonPathParseItem *v = makeItemType(jpiIndexArray); + ListCell *cell; + int i = 0; + + Assert(list_length(list) > 0); + v->value.array.nelems = list_length(list); + + v->value.array.elems = palloc(sizeof(v->value.array.elems[0]) * v->value.array.nelems); + + foreach(cell, list) + { + JsonPathParseItem *jpi = lfirst(cell); + + Assert(jpi->type == jpiSubscript); + + v->value.array.elems[i].from = jpi->value.args.left; + v->value.array.elems[i++].to = jpi->value.args.right; + } + + return v; +} + +static JsonPathParseItem* +makeAny(int first, int last) +{ + JsonPathParseItem *v = makeItemType(jpiAny); + + v->value.anybounds.first = (first > 0) ? 
first : 0; + v->value.anybounds.last = (last >= 0) ? last : PG_UINT32_MAX; + + return v; +} + +static JsonPathParseItem * +makeItemLikeRegex(JsonPathParseItem *expr, string *pattern, string *flags) +{ + JsonPathParseItem *v = makeItemType(jpiLikeRegex); + int i; + + v->value.like_regex.expr = expr; + v->value.like_regex.pattern = pattern->val; + v->value.like_regex.patternlen = pattern->len; + v->value.like_regex.flags = 0; + + for (i = 0; flags && i < flags->len; i++) + { + switch (flags->val[i]) + { + case 'i': + v->value.like_regex.flags |= JSP_REGEX_ICASE; + break; + case 's': + v->value.like_regex.flags &= ~JSP_REGEX_MLINE; + v->value.like_regex.flags |= JSP_REGEX_SLINE; + break; + case 'm': + v->value.like_regex.flags &= ~JSP_REGEX_SLINE; + v->value.like_regex.flags |= JSP_REGEX_MLINE; + break; + case 'x': + v->value.like_regex.flags |= JSP_REGEX_WSPACE; + break; + default: + yyerror(NULL, "unrecognized flag of LIKE_REGEX predicate"); + break; + } + } + + return v; +} + +static JsonPathParseItem * +makeItemSequence(List *elems) +{ + JsonPathParseItem *v = makeItemType(jpiSequence); + + v->value.sequence.elems = elems; + + return v; +} + +static JsonPathParseItem * +makeItemObject(List *fields) +{ + JsonPathParseItem *v = makeItemType(jpiObject); + + v->value.object.fields = fields; + + return v; +} + +%} + +/* BISON Declarations */ +%pure-parser +%expect 0 +%name-prefix="jsonpath_yy" +%error-verbose +%parse-param {JsonPathParseResult **result} + +%union { + string str; + List *elems; /* list of JsonPathParseItem */ + List *indexs; /* list of integers */ + JsonPathParseItem *value; + JsonPathParseResult *result; + JsonPathItemType optype; + bool boolean; +} + +%token TO_P NULL_P TRUE_P FALSE_P IS_P UNKNOWN_P EXISTS_P +%token STRING_P NUMERIC_P INT_P VARIABLE_P +%token OR_P AND_P NOT_P +%token LESS_P LESSEQUAL_P EQUAL_P NOTEQUAL_P GREATEREQUAL_P GREATER_P +%token ANY_P STRICT_P LAX_P LAST_P STARTS_P WITH_P LIKE_REGEX_P FLAG_P +%token ABS_P SIZE_P TYPE_P FLOOR_P DOUBLE_P CEILING_P DATETIME_P +%token KEYVALUE_P MAP_P REDUCE_P FOLD_P FOLDL_P FOLDR_P +%token MIN_P MAX_P + +%type result + +%type scalar_value path_primary expr pexpr array_accessor + any_path accessor_op key predicate delimited_predicate + index_elem starts_with_initial opt_datetime_template + expr_or_predicate expr_or_seq expr_seq object_field + +%type accessor_expr expr_list object_field_list + +%type index_list + +%type comp_op method fold + +%type mode + +%type key_name + + +%left OR_P +%left AND_P +%right NOT_P +%left '+' '-' +%left '*' '/' '%' +%left UMINUS +%nonassoc '(' ')' + +/* Grammar follows */ +%% + +result: + mode expr_or_seq { + *result = palloc(sizeof(JsonPathParseResult)); + (*result)->expr = $2; + (*result)->lax = $1; + } + | /* EMPTY */ { *result = NULL; } + ; + +expr_or_predicate: + expr { $$ = $1; } + | predicate { $$ = $1; } + ; + +expr_or_seq: + expr_or_predicate { $$ = $1; } + | expr_seq { $$ = $1; } + ; + +expr_seq: + expr_list { $$ = makeItemSequence($1); } + ; + +expr_list: + expr_or_predicate ',' expr_or_predicate { $$ = list_make2($1, $3); } + | expr_list ',' expr_or_predicate { $$ = lappend($1, $3); } + ; + +mode: + STRICT_P { $$ = false; } + | LAX_P { $$ = true; } + | /* EMPTY */ { $$ = true; } + ; + +scalar_value: + STRING_P { $$ = makeItemString(&$1); } + | NULL_P { $$ = makeItemString(NULL); } + | TRUE_P { $$ = makeItemBool(true); } + | FALSE_P { $$ = makeItemBool(false); } + | NUMERIC_P { $$ = makeItemNumeric(&$1); } + | INT_P { $$ = makeItemNumeric(&$1); } + | VARIABLE_P { $$ = 
makeItemVariable(&$1); } + ; + +comp_op: + EQUAL_P { $$ = jpiEqual; } + | NOTEQUAL_P { $$ = jpiNotEqual; } + | LESS_P { $$ = jpiLess; } + | GREATER_P { $$ = jpiGreater; } + | LESSEQUAL_P { $$ = jpiLessOrEqual; } + | GREATEREQUAL_P { $$ = jpiGreaterOrEqual; } + ; + +delimited_predicate: + '(' predicate ')' { $$ = $2; } + | EXISTS_P '(' expr ')' { $$ = makeItemUnary(jpiExists, $3); } + ; + +predicate: + delimited_predicate { $$ = $1; } + | pexpr comp_op pexpr { $$ = makeItemBinary($2, $1, $3); } + | predicate AND_P predicate { $$ = makeItemBinary(jpiAnd, $1, $3); } + | predicate OR_P predicate { $$ = makeItemBinary(jpiOr, $1, $3); } + | NOT_P delimited_predicate { $$ = makeItemUnary(jpiNot, $2); } + | '(' predicate ')' IS_P UNKNOWN_P { $$ = makeItemUnary(jpiIsUnknown, $2); } + | pexpr STARTS_P WITH_P starts_with_initial + { $$ = makeItemBinary(jpiStartsWith, $1, $4); } + | pexpr LIKE_REGEX_P STRING_P { $$ = makeItemLikeRegex($1, &$3, NULL); }; + | pexpr LIKE_REGEX_P STRING_P FLAG_P STRING_P + { $$ = makeItemLikeRegex($1, &$3, &$5); }; + ; + +starts_with_initial: + STRING_P { $$ = makeItemString(&$1); } + | VARIABLE_P { $$ = makeItemVariable(&$1); } + ; + +path_primary: + scalar_value { $$ = $1; } + | '$' { $$ = makeItemType(jpiRoot); } + | '@' { $$ = makeItemType(jpiCurrent); } + | LAST_P { $$ = makeItemType(jpiLast); } + | '(' expr_seq ')' { $$ = $2; } + | '[' ']' { $$ = makeItemUnary(jpiArray, NULL); } + | '[' expr_or_seq ']' { $$ = makeItemUnary(jpiArray, $2); } + | '{' object_field_list '}' { $$ = makeItemObject($2); } + ; + +object_field_list: + /* EMPTY */ { $$ = NIL; } + | object_field { $$ = list_make1($1); } + | object_field_list ',' object_field { $$ = lappend($1, $3); } + ; + +object_field: + key_name ':' expr_or_predicate + { $$ = makeItemBinary(jpiObjectField, makeItemString(&$1), $3); } + ; + +accessor_expr: + path_primary { $$ = list_make1($1); } + | '.' key { $$ = list_make2(makeItemType(jpiCurrent), $2); } + | '(' expr ')' accessor_op { $$ = list_make2($2, $4); } + | '(' predicate ')' accessor_op { $$ = list_make2($2, $4); } + | accessor_expr accessor_op { $$ = lappend($1, $2); } + ; + +pexpr: + expr { $$ = $1; } + | '(' expr ')' { $$ = $2; } + ; + +expr: + accessor_expr { $$ = makeItemList($1); } + | '+' pexpr %prec UMINUS { $$ = makeItemUnary(jpiPlus, $2); } + | '-' pexpr %prec UMINUS { $$ = makeItemUnary(jpiMinus, $2); } + | pexpr '+' pexpr { $$ = makeItemBinary(jpiAdd, $1, $3); } + | pexpr '-' pexpr { $$ = makeItemBinary(jpiSub, $1, $3); } + | pexpr '*' pexpr { $$ = makeItemBinary(jpiMul, $1, $3); } + | pexpr '/' pexpr { $$ = makeItemBinary(jpiDiv, $1, $3); } + | pexpr '%' pexpr { $$ = makeItemBinary(jpiMod, $1, $3); } + ; + +index_elem: + pexpr { $$ = makeItemBinary(jpiSubscript, $1, NULL); } + | pexpr TO_P pexpr { $$ = makeItemBinary(jpiSubscript, $1, $3); } + ; + +index_list: + index_elem { $$ = list_make1($1); } + | index_list ',' index_elem { $$ = lappend($1, $3); } + ; + +array_accessor: + '[' '*' ']' { $$ = makeItemType(jpiAnyArray); } + | '[' index_list ']' { $$ = makeIndexArray($2); } + ; + +any_path: + ANY_P { $$ = makeAny(-1, -1); } + | ANY_P '{' INT_P '}' { $$ = makeAny(pg_atoi($3.val, 4, 0), + pg_atoi($3.val, 4, 0)); } + | ANY_P '{' ',' INT_P '}' { $$ = makeAny(-1, pg_atoi($4.val, 4, 0)); } + | ANY_P '{' INT_P ',' '}' { $$ = makeAny(pg_atoi($3.val, 4, 0), -1); } + | ANY_P '{' INT_P ',' INT_P '}' { $$ = makeAny(pg_atoi($3.val, 4, 0), + pg_atoi($5.val, 4, 0)); } + ; + +accessor_op: + '.' key { $$ = $2; } + | '.' 
'*' { $$ = makeItemType(jpiAnyKey); } + | array_accessor { $$ = $1; } + | '.' array_accessor { $$ = $2; } + | '.' any_path { $$ = $2; } + | '.' method '(' ')' { $$ = makeItemType($2); } + | '.' DATETIME_P '(' opt_datetime_template ')' + { $$ = makeItemUnary(jpiDatetime, $4); } + | '.' MAP_P '(' expr_or_predicate ')' + { $$ = makeItemUnary(jpiMap, $4); } + | '.' REDUCE_P '(' expr_or_predicate ')' + { $$ = makeItemUnary(jpiReduce, $4); } + | '.' fold '(' expr_or_predicate ',' expr_or_predicate ')' + { $$ = makeItemBinary($2, $4, $6); } + | '?' '(' predicate ')' { $$ = makeItemUnary(jpiFilter, $3); } + ; + +fold: + FOLD_P { $$ = jpiFold; } + | FOLDL_P { $$ = jpiFoldl; } + | FOLDR_P { $$ = jpiFoldr; } + ; + +opt_datetime_template: + STRING_P { $$ = makeItemString(&$1); } + | /* EMPTY */ { $$ = NULL; } + ; + +key: + key_name { $$ = makeItemKey(&$1); } + ; + +key_name: + STRING_P + | TO_P + | NULL_P + | TRUE_P + | FALSE_P + | INT_P + | IS_P + | UNKNOWN_P + | EXISTS_P + | STRICT_P + | LAX_P + | ABS_P + | SIZE_P + | TYPE_P + | FLOOR_P + | DOUBLE_P + | CEILING_P + | DATETIME_P + | KEYVALUE_P + | LAST_P + | STARTS_P + | WITH_P + | LIKE_REGEX_P + | FLAG_P + | MAP_P + | REDUCE_P + | FOLD_P + | FOLDL_P + | FOLDR_P + | MIN_P + | MAX_P + ; + +method: + ABS_P { $$ = jpiAbs; } + | SIZE_P { $$ = jpiSize; } + | TYPE_P { $$ = jpiType; } + | FLOOR_P { $$ = jpiFloor; } + | DOUBLE_P { $$ = jpiDouble; } + | CEILING_P { $$ = jpiCeiling; } + | KEYVALUE_P { $$ = jpiKeyValue; } + | MIN_P { $$ = jpiMin; } + | MAX_P { $$ = jpiMax; } + ; +%% + diff --git a/src/backend/utils/adt/jsonpath_json.c b/src/backend/utils/adt/jsonpath_json.c new file mode 100644 index 0000000000..6746d2397c --- /dev/null +++ b/src/backend/utils/adt/jsonpath_json.c @@ -0,0 +1,105 @@ +#define JSONPATH_JSON_C + +#include "postgres.h" + +#include "catalog/pg_type.h" +#include "utils/json.h" +#include "utils/jsonapi.h" +#include "utils/jsonb.h" +#include "utils/builtins.h" + +/* redefine jsonb structures */ +#define Jsonb Json +#define JsonbContainer JsonContainer +#define JsonbIterator JsonIterator + +/* redefine jsonb functions */ +#define findJsonbValueFromContainer(jc, flags, jbv) \ + findJsonValueFromContainer((JsonContainer *)(jc), flags, jbv) +#define getIthJsonbValueFromContainer(jc, i) \ + getIthJsonValueFromContainer((JsonContainer *)(jc), i) +#define pushJsonbValue pushJsonValue +#define JsonbIteratorInit(jc) JsonIteratorInit((JsonContainer *)(jc)) +#define JsonbIteratorNext JsonIteratorNext +#define JsonbValueToJsonb JsonbValueToJson +#define JsonbToCString JsonToCString +#define JsonbUnquote JsonUnquote +#define JsonbExtractScalar(jc, jbv) JsonExtractScalar((JsonContainer *)(jc), jbv) + +/* redefine jsonb macros */ +#undef JsonContainerSize +#define JsonContainerSize(jc) \ + ((((JsonContainer *)(jc))->header & JB_CMASK) == JB_CMASK && \ + JsonContainerIsArray(jc) \ + ? 
JsonGetArraySize((JsonContainer *)(jc)) \ + : ((JsonContainer *)(jc))->header & JB_CMASK) + + +#undef DatumGetJsonbP +#define DatumGetJsonbP(d) DatumGetJsonP(d) + +#undef DatumGetJsonbPCopy +#define DatumGetJsonbPCopy(d) DatumGetJsonPCopy(d) + +#undef JsonbPGetDatum +#define JsonbPGetDatum(json) JsonPGetDatum(json) + +#undef PG_GETARG_JSONB_P +#define PG_GETARG_JSONB_P(n) DatumGetJsonP(PG_GETARG_DATUM(n)) + +#undef PG_GETARG_JSONB_P_COPY +#define PG_GETARG_JSONB_P_COPY(n) DatumGetJsonPCopy(PG_GETARG_DATUM(n)) + + +#ifdef DatumGetJsonb +#undef DatumGetJsonb +#define DatumGetJsonb(d) DatumGetJsonbP(d) +#endif + +#ifdef DatumGetJsonbCopy +#undef DatumGetJsonbCopy +#define DatumGetJsonbCopy(d) DatumGetJsonbPCopy(d) +#endif + +#ifdef JsonbGetDatum +#undef JsonbGetDatum +#define JsonbGetDatum(json) JsonbPGetDatum(json) +#endif + +#ifdef PG_GETARG_JSONB +#undef PG_GETARG_JSONB +#define PG_GETARG_JSONB(n) PG_GETARG_JSONB_P(n) +#endif + +#ifdef PG_GETARG_JSONB_COPY +#undef PG_GETARG_JSONB_COPY +#define PG_GETARG_JSONB_COPY(n) PG_GETARG_JSONB_P_COPY(n) +#endif + +/* redefine global jsonpath functions */ +#define executeJsonPath executeJsonPathJson +#define JsonbPathExists JsonPathExists +#define JsonbPathQuery JsonPathQuery +#define JsonbPathValue JsonPathValue +#define JsonbTableRoutine JsonTableRoutine + +#define jsonb_jsonpath_exists2 json_jsonpath_exists2 +#define jsonb_jsonpath_exists3 json_jsonpath_exists3 +#define jsonb_jsonpath_predicate2 json_jsonpath_predicate2 +#define jsonb_jsonpath_predicate3 json_jsonpath_predicate3 +#define jsonb_jsonpath_query2 json_jsonpath_query2 +#define jsonb_jsonpath_query3 json_jsonpath_query3 +#define jsonb_jsonpath_query_safe2 json_jsonpath_query_safe2 +#define jsonb_jsonpath_query_safe3 json_jsonpath_query_safe3 + +static inline JsonbValue * +JsonbInitBinary(JsonbValue *jbv, Json *jb) +{ + jbv->type = jbvBinary; + jbv->val.binary.data = (void *) &jb->root; + jbv->val.binary.len = jb->root.len; + + return jbv; +} + +#include "jsonpath_exec.c" diff --git a/src/backend/utils/adt/jsonpath_scan.l b/src/backend/utils/adt/jsonpath_scan.l new file mode 100644 index 0000000000..b060643e31 --- /dev/null +++ b/src/backend/utils/adt/jsonpath_scan.l @@ -0,0 +1,564 @@ +/*------------------------------------------------------------------------- + * + * jsonpath_scan.l + * Lexical parser for jsonpath datatype + * + * Copyright (c) 2017, PostgreSQL Global Development Group + * + * IDENTIFICATION + * src/backend/utils/adt/jsonpath_scan.l + * + *------------------------------------------------------------------------- + */ + +%{ +#include "postgres.h" +#include "mb/pg_wchar.h" +#include "nodes/pg_list.h" +#include "utils/jsonpath_scanner.h" + +static string scanstring; + +/* No reason to constrain amount of data slurped */ +/* #define YY_READ_BUF_SIZE 16777216 */ + +/* Handles to the buffer that the lexer uses internally */ +static YY_BUFFER_STATE scanbufhandle; +static char *scanbuf; +static int scanbuflen; + +static void addstring(bool init, char *s, int l); +static void addchar(bool init, char s); +static int checkSpecialVal(void); /* examine scanstring for the special value */ + +static void parseUnicode(char *s, int l); + +/* Avoid exit() on fatal scanner errors (a bit ugly -- see yy_fatal_error) */ +#undef fprintf +#define fprintf(file, fmt, msg) fprintf_to_ereport(fmt, msg) + +static void +fprintf_to_ereport(const char *fmt, const char *msg) +{ + ereport(ERROR, (errmsg_internal("%s", msg))); +} + +#define yyerror jsonpath_yyerror +%} + +%option 8bit +%option 
never-interactive +%option nodefault +%option noinput +%option nounput +%option noyywrap +%option warn +%option prefix="jsonpath_yy" +%option bison-bridge +%option noyyalloc +%option noyyrealloc +%option noyyfree + +%x xQUOTED +%x xNONQUOTED +%x xVARQUOTED +%x xCOMMENT + +special [\?\%\$\.\[\]\{\}\(\)\|\&\!\=\<\>\@\#\,\*:\-\+\/] +any [^\?\%\$\.\[\]\{\}\(\)\|\&\!\=\<\>\@\#\,\*:\-\+\/\\\" \t\n\r\f] +blank [ \t\n\r\f] +unicode \\u[0-9A-Fa-f]{4} + +%% + +\&\& { return AND_P; } + +\|\| { return OR_P; } + +\! { return NOT_P; } + +\*\* { return ANY_P; } + +\< { return LESS_P; } + +\<\= { return LESSEQUAL_P; } + +\=\= { return EQUAL_P; } + +\<\> { return NOTEQUAL_P; } + +\!\= { return NOTEQUAL_P; } + +\>\= { return GREATEREQUAL_P; } + +\> { return GREATER_P; } + +\${any}+ { + addstring(true, yytext + 1, yyleng - 1); + addchar(false, '\0'); + yylval->str = scanstring; + return VARIABLE_P; + } + +\$\" { + addchar(true, '\0'); + BEGIN xVARQUOTED; + } + +{special} { return *yytext; } + +{blank}+ { /* ignore */ } + +\/\* { + addchar(true, '\0'); + BEGIN xCOMMENT; + } + +[0-9]+(\.[0-9]+)?[eE][+-]?[0-9]+ /* float */ { + addstring(true, yytext, yyleng); + addchar(false, '\0'); + yylval->str = scanstring; + return NUMERIC_P; + } + +\.[0-9]+[eE][+-]?[0-9]+ /* float */ { + addstring(true, yytext, yyleng); + addchar(false, '\0'); + yylval->str = scanstring; + return NUMERIC_P; + } + +([0-9]+)?\.[0-9]+ { + addstring(true, yytext, yyleng); + addchar(false, '\0'); + yylval->str = scanstring; + return NUMERIC_P; + } + +[0-9]+ { + addstring(true, yytext, yyleng); + addchar(false, '\0'); + yylval->str = scanstring; + return INT_P; + } + +{any}+ { + addstring(true, yytext, yyleng); + BEGIN xNONQUOTED; + } + +\" { + addchar(true, '\0'); + BEGIN xQUOTED; + } + +\\ { + yyless(0); + addchar(true, '\0'); + BEGIN xNONQUOTED; + } + +{any}+ { + addstring(false, yytext, yyleng); + } + +{blank}+ { + yylval->str = scanstring; + BEGIN INITIAL; + return checkSpecialVal(); + } + + +\/\* { + yylval->str = scanstring; + BEGIN xCOMMENT; + } + +({special}|\") { + yylval->str = scanstring; + yyless(0); + BEGIN INITIAL; + return checkSpecialVal(); + } + +<> { + yylval->str = scanstring; + BEGIN INITIAL; + return checkSpecialVal(); + } + +\\[\"\\] { addchar(false, yytext[1]); } + +\\b { addchar(false, '\b'); } + +\\f { addchar(false, '\f'); } + +\\n { addchar(false, '\n'); } + +\\r { addchar(false, '\r'); } + +\\t { addchar(false, '\t'); } + +{unicode}+ { parseUnicode(yytext, yyleng); } + +\\u { yyerror(NULL, "Unicode sequence is invalid"); } + +\\. 
{ yyerror(NULL, "Escape sequence is invalid"); } + +\\ { yyerror(NULL, "Unexpected end after backslash"); } + +<> { yyerror(NULL, "Unexpected end of quoted string"); } + +\" { + yylval->str = scanstring; + BEGIN INITIAL; + return STRING_P; + } +\" { + yylval->str = scanstring; + BEGIN INITIAL; + return VARIABLE_P; + } + +[^\\\"]+ { addstring(false, yytext, yyleng); } + +<> { yyterminate(); } + +\*\/ { BEGIN INITIAL; } + +[^\*]+ { } + +\* { } + +<> { yyerror(NULL, "Unexpected end of comment"); } + +%% + +void +jsonpath_yyerror(JsonPathParseResult **result, const char *message) +{ + if (*yytext == YY_END_OF_BUFFER_CHAR) + { + ereport(ERROR, + (errcode(ERRCODE_SYNTAX_ERROR), + errmsg("bad jsonpath representation"), + /* translator: %s is typically "syntax error" */ + errdetail("%s at end of input", message))); + } + else + { + ereport(ERROR, + (errcode(ERRCODE_SYNTAX_ERROR), + errmsg("bad jsonpath representation"), + /* translator: first %s is typically "syntax error" */ + errdetail("%s at or near \"%s\"", message, yytext))); + } +} + +typedef struct keyword +{ + int16 len; + bool lowercase; + int val; + char *keyword; +} keyword; + +/* + * Array of key words should be sorted by length and then + * alphabetical order + */ + +static keyword keywords[] = { + { 2, false, IS_P, "is"}, + { 2, false, TO_P, "to"}, + { 3, false, ABS_P, "abs"}, + { 3, false, LAX_P, "lax"}, + { 3, false, MAP_P, "map"}, + { 3, false, MAX_P, "max"}, + { 3, false, MIN_P, "min"}, + { 4, false, FLAG_P, "flag"}, + { 4, false, FOLD_P, "fold"}, + { 4, false, LAST_P, "last"}, + { 4, true, NULL_P, "null"}, + { 4, false, SIZE_P, "size"}, + { 4, true, TRUE_P, "true"}, + { 4, false, TYPE_P, "type"}, + { 4, false, WITH_P, "with"}, + { 5, true, FALSE_P, "false"}, + { 5, false, FLOOR_P, "floor"}, + { 5, false, FOLDL_P, "foldl"}, + { 5, false, FOLDR_P, "foldr"}, + { 6, false, DOUBLE_P, "double"}, + { 6, false, EXISTS_P, "exists"}, + { 6, false, REDUCE_P, "reduce"}, + { 6, false, STARTS_P, "starts"}, + { 6, false, STRICT_P, "strict"}, + { 7, false, CEILING_P, "ceiling"}, + { 7, false, UNKNOWN_P, "unknown"}, + { 8, false, DATETIME_P, "datetime"}, + { 8, false, KEYVALUE_P, "keyvalue"}, + { 10,false, LIKE_REGEX_P, "like_regex"}, +}; + +static int +checkSpecialVal() +{ + int res = STRING_P; + int diff; + keyword *StopLow = keywords, + *StopHigh = keywords + lengthof(keywords), + *StopMiddle; + + if (scanstring.len > keywords[lengthof(keywords) - 1].len) + return res; + + while(StopLow < StopHigh) + { + StopMiddle = StopLow + ((StopHigh - StopLow) >> 1); + + if (StopMiddle->len == scanstring.len) + diff = pg_strncasecmp(StopMiddle->keyword, scanstring.val, scanstring.len); + else + diff = StopMiddle->len - scanstring.len; + + if (diff < 0) + StopLow = StopMiddle + 1; + else if (diff > 0) + StopHigh = StopMiddle; + else + { + if (StopMiddle->lowercase) + diff = strncmp(StopMiddle->keyword, scanstring.val, scanstring.len); + + if (diff == 0) + res = StopMiddle->val; + + break; + } + } + + return res; +} + +/* + * Called before any actual parsing is done + */ +static void +jsonpath_scanner_init(const char *str, int slen) +{ + if (slen <= 0) + slen = strlen(str); + + /* + * Might be left over after ereport() + */ + yy_init_globals(); + + /* + * Make a scan buffer with special termination needed by flex. 
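 *
 * (flex's yy_scan_buffer() requires the last two bytes of the buffer to be
 * YY_END_OF_BUFFER_CHAR, hence the slen + 2 allocation below; for example,
 * the 3-byte input "$.a" is stored as '$' '.' 'a' '\0' '\0'.)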
+ */ + + scanbuflen = slen; + scanbuf = palloc(slen + 2); + memcpy(scanbuf, str, slen); + scanbuf[slen] = scanbuf[slen + 1] = YY_END_OF_BUFFER_CHAR; + scanbufhandle = yy_scan_buffer(scanbuf, slen + 2); + + BEGIN(INITIAL); +} + + +/* + * Called after parsing is done to clean up after jsonpath_scanner_init() + */ +static void +jsonpath_scanner_finish(void) +{ + yy_delete_buffer(scanbufhandle); + pfree(scanbuf); +} + +static void +addstring(bool init, char *s, int l) { + if (init) { + scanstring.total = 32; + scanstring.val = palloc(scanstring.total); + scanstring.len = 0; + } + + if (s && l) { + while (scanstring.len + l + 1 >= scanstring.total) { + scanstring.total *= 2; + scanstring.val = repalloc(scanstring.val, scanstring.total); + } + + memcpy(scanstring.val + scanstring.len, s, l); + scanstring.len += l; + } +} + +static void +addchar(bool init, char s) { + if (init) + { + scanstring.total = 32; + scanstring.val = palloc(scanstring.total); + scanstring.len = 0; + } + else if (scanstring.len + 1 >= scanstring.total) + { + scanstring.total *= 2; + scanstring.val = repalloc(scanstring.val, scanstring.total); + } + + scanstring.val[scanstring.len] = s; + if (s != '\0') + scanstring.len++; +} + +JsonPathParseResult * +parsejsonpath(const char *str, int len) { + JsonPathParseResult *parseresult; + + jsonpath_scanner_init(str, len); + + if (jsonpath_yyparse((void *) &parseresult) != 0) + jsonpath_yyerror(NULL, "bogus input"); + + jsonpath_scanner_finish(); + + return parseresult; +} + +static int +hexval(char c) +{ + if (c >= '0' && c <= '9') + return c - '0'; + if (c >= 'a' && c <= 'f') + return c - 'a' + 0xA; + if (c >= 'A' && c <= 'F') + return c - 'A' + 0xA; + elog(ERROR, "invalid hexadecimal digit"); + return 0; /* not reached */ +} + +/* + * parseUnicode was adapted from json_lex_string() in + * src/backend/utils/adt/json.c + */ +static void +parseUnicode(char *s, int l) +{ + int i, j; + int ch = 0; + int hi_surrogate = -1; + + Assert(l % 6 /* \uXXXX */ == 0); + + for (i = 0; i < l / 6; i++) + { + ch = 0; + + for (j = 0; j < 4; j++) + ch = (ch << 4) | hexval(s[i * 6 + 2 + j]); + + if (ch >= 0xd800 && ch <= 0xdbff) + { + if (hi_surrogate != -1) + ereport(ERROR, + (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION), + errmsg("invalid input syntax for type jsonpath"), + errdetail("Unicode high surrogate must not follow a high surrogate."))); + hi_surrogate = (ch & 0x3ff) << 10; + continue; + } + else if (ch >= 0xdc00 && ch <= 0xdfff) + { + if (hi_surrogate == -1) + ereport(ERROR, + (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION), + errmsg("invalid input syntax for type jsonpath"), + errdetail("Unicode low surrogate must follow a high surrogate."))); + ch = 0x10000 + hi_surrogate + (ch & 0x3ff); + hi_surrogate = -1; + } + + if (hi_surrogate != -1) + ereport(ERROR, + (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION), + errmsg("invalid input syntax for type jsonpath"), + errdetail("Unicode low surrogate must follow a high surrogate."))); + + /* + * For UTF8, replace the escape sequence by the actual + * utf8 character in scanstring. Do this also for other + * encodings if the escape designates an ASCII character, + * otherwise raise an error.
+ */ + + if (ch == 0) + { + /* We can't allow this, since our TEXT type doesn't */ + ereport(ERROR, + (errcode(ERRCODE_UNTRANSLATABLE_CHARACTER), + errmsg("unsupported Unicode escape sequence"), + errdetail("\\u0000 cannot be converted to text."))); + } + else if (GetDatabaseEncoding() == PG_UTF8) + { + char utf8str[5]; + int utf8len; + + unicode_to_utf8(ch, (unsigned char *) utf8str); + utf8len = pg_utf_mblen((unsigned char *) utf8str); + addstring(false, utf8str, utf8len); + } + else if (ch <= 0x007f) + { + /* + * This is the only way to designate things like a + * form feed character in JSON, so it's useful in all + * encodings. + */ + addchar(false, (char) ch); + } + else + { + ereport(ERROR, + (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION), + errmsg("invalid input syntax for type jsonpath"), + errdetail("Unicode escape values cannot be used for code point values above 007F when the server encoding is not UTF8."))); + } + + hi_surrogate = -1; + } +} + +/* + * Interface functions to make flex use palloc() instead of malloc(). + * It'd be better to make these static, but flex insists otherwise. + */ + +void * +jsonpath_yyalloc(yy_size_t bytes) +{ + return palloc(bytes); +} + +void * +jsonpath_yyrealloc(void *ptr, yy_size_t bytes) +{ + if (ptr) + return repalloc(ptr, bytes); + else + return palloc(bytes); +} + +void +jsonpath_yyfree(void *ptr) +{ + if (ptr) + pfree(ptr); +} + diff --git a/src/backend/utils/adt/regexp.c b/src/backend/utils/adt/regexp.c index 139bb583b1..119d936bc2 100644 --- a/src/backend/utils/adt/regexp.c +++ b/src/backend/utils/adt/regexp.c @@ -335,7 +335,7 @@ RE_execute(regex_t *re, char *dat, int dat_len, * Both pattern and data are given in the database encoding. We internally * convert to array of pg_wchar which is what Spencer's regex package wants. */ -static bool +bool RE_compile_and_execute(text *text_re, char *dat, int dat_len, int cflags, Oid collation, int nmatch, regmatch_t *pmatch) diff --git a/src/backend/utils/adt/ruleutils.c b/src/backend/utils/adt/ruleutils.c index cc6cec7877..d62d160a9d 100644 --- a/src/backend/utils/adt/ruleutils.c +++ b/src/backend/utils/adt/ruleutils.c @@ -463,6 +463,8 @@ static char *generate_operator_name(Oid operid, Oid arg1, Oid arg2); static char *generate_qualified_type_name(Oid typid); static text *string_to_text(char *str); static char *flatten_reloptions(Oid relid); +static void get_json_table_columns(TableFunc *tf, JsonTableParentNode *node, + deparse_context *context, bool showimplicit); #define only_marker(rte) ((rte)->inh ? 
"" : "ONLY ") @@ -7585,6 +7587,53 @@ get_rule_expr_paren(Node *node, deparse_context *context, appendStringInfoChar(context->buf, ')'); } +static void +get_json_behavior(JsonBehavior *behavior, deparse_context *context, + const char *on) +{ + switch (behavior->btype) + { + case JSON_BEHAVIOR_DEFAULT: + appendStringInfoString(context->buf, " DEFAULT "); + get_rule_expr(behavior->default_expr, context, false); + break; + + case JSON_BEHAVIOR_EMPTY: + appendStringInfoString(context->buf, " EMPTY"); + break; + + case JSON_BEHAVIOR_EMPTY_ARRAY: + appendStringInfoString(context->buf, " EMPTY ARRAY"); + break; + + case JSON_BEHAVIOR_EMPTY_OBJECT: + appendStringInfoString(context->buf, " EMPTY OBJECT"); + break; + + case JSON_BEHAVIOR_ERROR: + appendStringInfoString(context->buf, " ERROR"); + break; + + case JSON_BEHAVIOR_FALSE: + appendStringInfoString(context->buf, " FALSE"); + break; + + case JSON_BEHAVIOR_NULL: + appendStringInfoString(context->buf, " NULL"); + break; + + case JSON_BEHAVIOR_TRUE: + appendStringInfoString(context->buf, " TRUE"); + break; + + case JSON_BEHAVIOR_UNKNOWN: + appendStringInfoString(context->buf, " UNKNOWN"); + break; + } + + appendStringInfo(context->buf, " ON %s", on); +} + /* ---------- * get_rule_expr - Parse back an expression @@ -8747,6 +8796,92 @@ get_rule_expr(Node *node, deparse_context *context, } break; + case T_JsonExpr: + { + JsonExpr *jexpr = (JsonExpr *) node; + + switch (jexpr->op) + { + case IS_JSON_QUERY: + appendStringInfoString(buf, "JSON_QUERY("); + break; + case IS_JSON_VALUE: + appendStringInfoString(buf, "JSON_VALUE("); + break; + case IS_JSON_EXISTS: + appendStringInfoString(buf, "JSON_EXISTS("); + break; + default: + elog(ERROR, "unexpected JsonExpr type: %d", jexpr->op); + break; + } + + get_rule_expr(jexpr->raw_expr, context, false); + + if (jexpr->format.type != JS_FORMAT_DEFAULT) + { + appendStringInfoString(buf, + jexpr->format.type == JS_FORMAT_JSONB ? + " FORMAT JSONB" : " FORMAT JSON"); + + if (jexpr->format.encoding != JS_ENC_DEFAULT) + { + const char *encoding = + jexpr->format.encoding == JS_ENC_UTF16 ? "UTF16" : + jexpr->format.encoding == JS_ENC_UTF32 ? 
"UTF32" : + "UTF8"; + + appendStringInfo(buf, " ENCODING %s", encoding); + } + } + + appendStringInfoString(buf, ", "); + + get_const_expr(jexpr->path_spec, context, -1); + + if (jexpr->passing.values) + { + ListCell *lc1, *lc2; + bool needcomma = false; + + appendStringInfoString(buf, " PASSING "); + + forboth(lc1, jexpr->passing.names, + lc2, jexpr->passing.values) + { + if (needcomma) + appendStringInfoString(buf, ", "); + needcomma = true; + + get_rule_expr((Node *) lfirst(lc2), context, false); + appendStringInfo(buf, " AS %s", + ((Value *) lfirst(lc1))->val.str); + } + } + + if (jexpr->op != IS_JSON_EXISTS) + appendStringInfo(buf, " RETURNING %s", + format_type_with_typemod(jexpr->returning.typid, + jexpr->returning.typmod)); + + if (jexpr->wrapper == JSW_CONDITIONAL) + appendStringInfo(buf, " WITH CONDITIONAL WRAPPER"); + + if (jexpr->wrapper == JSW_UNCONDITIONAL) + appendStringInfo(buf, " WITH UNCONDITIONAL WRAPPER"); + + if (jexpr->omit_quotes) + appendStringInfo(buf, " OMIT QUOTES"); + + if (jexpr->op != IS_JSON_EXISTS) + get_json_behavior(&jexpr->on_empty, context, "EMPTY"); + + get_json_behavior(&jexpr->on_error, context, "ERROR"); + + appendStringInfoString(buf, ")"); + } + break; + case T_List: { char *sep; @@ -9558,6 +9693,176 @@ get_sublink_expr(SubLink *sublink, deparse_context *context) appendStringInfoChar(buf, ')'); } +/* + * get_json_nested_columns - Parse back nested JSON_TABLE columns + */ +static void +get_json_table_nested_columns(TableFunc *tf, Node *node, + deparse_context *context, bool showimplicit, + bool needcomma) +{ + if (IsA(node, JsonTableSiblingNode)) + { + JsonTableSiblingNode *n = (JsonTableSiblingNode *) node; + + get_json_table_nested_columns(tf, n->larg, context, showimplicit, + needcomma); + get_json_table_nested_columns(tf, n->rarg, context, showimplicit, true); + } + else + { + JsonTableParentNode *n = castNode(JsonTableParentNode, node); + + if (needcomma) + appendStringInfoChar(context->buf, ','); + + appendStringInfoChar(context->buf, ' '); + appendContextKeyword(context, "NESTED PATH ", 0, 0, 0); + get_const_expr(n->path, context, -1); + appendStringInfo(context->buf, " AS %s", quote_identifier(n->name)); + get_json_table_columns(tf, n, context, showimplicit); + } +} + +/* + * get_json_table_plan - Parse back a JSON_TABLE plan + */ +static void +get_json_table_plan(TableFunc *tf, Node *node, deparse_context *context, + bool parenthesize) +{ + if (parenthesize) + appendStringInfoChar(context->buf, '('); + + if (IsA(node, JsonTableSiblingNode)) + { + JsonTableSiblingNode *n = (JsonTableSiblingNode *) node; + + get_json_table_plan(tf, n->larg, context, + IsA(n->larg, JsonTableSiblingNode) || + castNode(JsonTableParentNode, n->larg)->child); + + appendStringInfoString(context->buf, n->cross ? " CROSS " : " UNION "); + + get_json_table_plan(tf, n->rarg, context, + IsA(n->rarg, JsonTableSiblingNode) || + castNode(JsonTableParentNode, n->rarg)->child); + } + else + { + JsonTableParentNode *n = castNode(JsonTableParentNode, node); + + appendStringInfoString(context->buf, quote_identifier(n->name)); + + if (n->child) + { + appendStringInfoString(context->buf, + n->outerJoin ? 
" OUTER " : " INNER "); + get_json_table_plan(tf, n->child, context, + IsA(n->child, JsonTableSiblingNode)); + } + } + + if (parenthesize) + appendStringInfoChar(context->buf, ')'); +} + +/* + * get_json_table_columns - Parse back JSON_TABLE columns + */ +static void +get_json_table_columns(TableFunc *tf, JsonTableParentNode *node, + deparse_context *context, bool showimplicit) +{ + StringInfo buf = context->buf; + ListCell *l1; + ListCell *l2; + ListCell *l3; + ListCell *l4; + int colnum = 0; + + l2 = list_head(tf->coltypes); + l3 = list_head(tf->coltypmods); + l4 = list_head(tf->colvalexprs); + + appendStringInfoChar(buf, ' '); + appendContextKeyword(context, "COLUMNS (", 0, 0, 0); + + if (PRETTY_INDENT(context)) + context->indentLevel += PRETTYINDENT_VAR; + + foreach(l1, tf->colnames) + { + char *colname = strVal(lfirst(l1)); + JsonExpr *colexpr; + Oid typid; + int32 typmod; + bool ordinality; + + typid = lfirst_oid(l2); + l2 = lnext(l2); + typmod = lfirst_int(l3); + l3 = lnext(l3); + colexpr = castNode(JsonExpr, lfirst(l4)); + l4 = lnext(l4); + + if (colnum < node->colMin) + { + colnum++; + continue; + } + + if (colnum > node->colMax) + break; + + if (colnum > node->colMin) + appendStringInfoString(buf, ", "); + + colnum++; + + ordinality = !colexpr; + + appendContextKeyword(context, "", 0, 0, 0); + + appendStringInfo(buf, "%s %s", quote_identifier(colname), + ordinality ? "FOR ORDINALITY" : + format_type_with_typemod(typid, typmod)); + if (ordinality) + continue; + + if (colexpr->op == IS_JSON_QUERY) + appendStringInfoString(buf, + colexpr->format.type == JS_FORMAT_JSONB ? + " FORMAT JSONB" : " FORMAT JSON"); + + appendStringInfoString(buf, " PATH "); + get_const_expr(colexpr->path_spec, context, -1); + + if (colexpr->wrapper == JSW_CONDITIONAL) + appendStringInfo(buf, " WITH CONDITIONAL WRAPPER"); + + if (colexpr->wrapper == JSW_UNCONDITIONAL) + appendStringInfo(buf, " WITH UNCONDITIONAL WRAPPER"); + + if (colexpr->omit_quotes) + appendStringInfo(buf, " OMIT QUOTES"); + + if (colexpr->on_empty.btype != JSON_BEHAVIOR_NULL) + get_json_behavior(&colexpr->on_empty, context, "EMPTY"); + + if (colexpr->on_error.btype != JSON_BEHAVIOR_NULL) + get_json_behavior(&colexpr->on_error, context, "ERROR"); + } + + if (node->child) + get_json_table_nested_columns(tf, node->child, context, showimplicit, + node->colMax >= node->colMin); + + if (PRETTY_INDENT(context)) + context->indentLevel -= PRETTYINDENT_VAR; + + appendContextKeyword(context, ")", 0, 0, 0); +} /* ---------- * get_tablefunc - Parse back a table function @@ -9568,109 +9873,192 @@ get_tablefunc(TableFunc *tf, deparse_context *context, bool showimplicit) { StringInfo buf = context->buf; - /* XMLTABLE is the only existing implementation. */ - - appendStringInfoString(buf, "XMLTABLE("); + /* XMLTABLE and JSON_TABLE are the only existing implementations. 
*/ - if (tf->ns_uris != NIL) + if (tf->functype == TFT_XMLTABLE) { - ListCell *lc1, - *lc2; - bool first = true; + appendStringInfoString(buf, "XMLTABLE("); - appendStringInfoString(buf, "XMLNAMESPACES ("); - forboth(lc1, tf->ns_uris, lc2, tf->ns_names) + if (tf->ns_uris != NIL) { - Node *expr = (Node *) lfirst(lc1); - char *name = strVal(lfirst(lc2)); - - if (!first) - appendStringInfoString(buf, ", "); - else - first = false; + ListCell *lc1, + *lc2; + bool first = true; - if (name != NULL) + appendStringInfoString(buf, "XMLNAMESPACES ("); + forboth(lc1, tf->ns_uris, lc2, tf->ns_names) { - get_rule_expr(expr, context, showimplicit); - appendStringInfo(buf, " AS %s", name); + Node *expr = (Node *) lfirst(lc1); + char *name = strVal(lfirst(lc2)); + + if (!first) + appendStringInfoString(buf, ", "); + else + first = false; + + if (name != NULL) + { + get_rule_expr(expr, context, showimplicit); + appendStringInfo(buf, " AS %s", name); + } + else + { + appendStringInfoString(buf, "DEFAULT "); + get_rule_expr(expr, context, showimplicit); + } } - else + appendStringInfoString(buf, "), "); + } + + appendStringInfoChar(buf, '('); + get_rule_expr((Node *) tf->rowexpr, context, showimplicit); + appendStringInfoString(buf, ") PASSING ("); + get_rule_expr((Node *) tf->docexpr, context, showimplicit); + appendStringInfoChar(buf, ')'); + + if (tf->colexprs != NIL) + { + ListCell *l1; + ListCell *l2; + ListCell *l3; + ListCell *l4; + ListCell *l5; + int colnum = 0; + + l2 = list_head(tf->coltypes); + l3 = list_head(tf->coltypmods); + l4 = list_head(tf->colexprs); + l5 = list_head(tf->coldefexprs); + + appendStringInfoString(buf, " COLUMNS "); + foreach(l1, tf->colnames) { - appendStringInfoString(buf, "DEFAULT "); - get_rule_expr(expr, context, showimplicit); + char *colname = strVal(lfirst(l1)); + Oid typid; + int32 typmod; + Node *colexpr; + Node *coldefexpr; + bool ordinality = tf->ordinalitycol == colnum; + bool notnull = bms_is_member(colnum, tf->notnulls); + + typid = lfirst_oid(l2); + l2 = lnext(l2); + typmod = lfirst_int(l3); + l3 = lnext(l3); + colexpr = (Node *) lfirst(l4); + l4 = lnext(l4); + coldefexpr = (Node *) lfirst(l5); + l5 = lnext(l5); + + if (colnum > 0) + appendStringInfoString(buf, ", "); + colnum++; + + appendStringInfo(buf, "%s %s", quote_identifier(colname), + ordinality ? 
"FOR ORDINALITY" : + format_type_with_typemod(typid, typmod)); + if (ordinality) + continue; + + if (coldefexpr != NULL) + { + appendStringInfoString(buf, " DEFAULT ("); + get_rule_expr((Node *) coldefexpr, context, showimplicit); + appendStringInfoChar(buf, ')'); + } + if (colexpr != NULL) + { + appendStringInfoString(buf, " PATH ("); + get_rule_expr((Node *) colexpr, context, showimplicit); + appendStringInfoChar(buf, ')'); + } + if (notnull) + appendStringInfoString(buf, " NOT NULL"); } } - appendStringInfoString(buf, "), "); + + appendStringInfoChar(buf, ')'); } + else if (tf->functype == TFT_JSON_TABLE) + { + JsonExpr *jexpr = castNode(JsonExpr, tf->docexpr); + JsonTableParentNode *root = castNode(JsonTableParentNode, tf->plan); - appendStringInfoChar(buf, '('); - get_rule_expr((Node *) tf->rowexpr, context, showimplicit); - appendStringInfoString(buf, ") PASSING ("); - get_rule_expr((Node *) tf->docexpr, context, showimplicit); - appendStringInfoChar(buf, ')'); + appendStringInfoString(buf, "JSON_TABLE("); - if (tf->colexprs != NIL) - { - ListCell *l1; - ListCell *l2; - ListCell *l3; - ListCell *l4; - ListCell *l5; - int colnum = 0; - - l2 = list_head(tf->coltypes); - l3 = list_head(tf->coltypmods); - l4 = list_head(tf->colexprs); - l5 = list_head(tf->coldefexprs); - - appendStringInfoString(buf, " COLUMNS "); - foreach(l1, tf->colnames) - { - char *colname = strVal(lfirst(l1)); - Oid typid; - int32 typmod; - Node *colexpr; - Node *coldefexpr; - bool ordinality = tf->ordinalitycol == colnum; - bool notnull = bms_is_member(colnum, tf->notnulls); - - typid = lfirst_oid(l2); - l2 = lnext(l2); - typmod = lfirst_int(l3); - l3 = lnext(l3); - colexpr = (Node *) lfirst(l4); - l4 = lnext(l4); - coldefexpr = (Node *) lfirst(l5); - l5 = lnext(l5); - - if (colnum > 0) - appendStringInfoString(buf, ", "); - colnum++; + if (PRETTY_INDENT(context)) + context->indentLevel += PRETTYINDENT_VAR; - appendStringInfo(buf, "%s %s", quote_identifier(colname), - ordinality ? "FOR ORDINALITY" : - format_type_with_typemod(typid, typmod)); - if (ordinality) - continue; + appendContextKeyword(context, "", 0, 0, 0); + + get_rule_expr(jexpr->raw_expr, context, showimplicit); + + if (jexpr->format.type != JS_FORMAT_DEFAULT) + { + appendStringInfoString(buf, + jexpr->format.type == JS_FORMAT_JSONB ? + " FORMAT JSONB" : " FORMAT JSON"); - if (coldefexpr != NULL) + if (jexpr->format.encoding != JS_ENC_DEFAULT) { - appendStringInfoString(buf, " DEFAULT ("); - get_rule_expr((Node *) coldefexpr, context, showimplicit); - appendStringInfoChar(buf, ')'); + const char *encoding = + jexpr->format.encoding == JS_ENC_UTF16 ? "UTF16" : + jexpr->format.encoding == JS_ENC_UTF32 ? 
"UTF32" : + "UTF8"; + + appendStringInfo(buf, " ENCODING %s", encoding); } - if (colexpr != NULL) + } + + appendStringInfoString(buf, ", "); + + get_const_expr(root->path, context, -1); + + appendStringInfo(buf, " AS %s", quote_identifier(root->name)); + + if (jexpr->passing.values) + { + ListCell *lc1, *lc2; + bool needcomma = false; + + appendStringInfoChar(buf, ' '); + appendContextKeyword(context, "PASSING ", 0, 0, 0); + + if (PRETTY_INDENT(context)) + context->indentLevel += PRETTYINDENT_VAR; + + forboth(lc1, jexpr->passing.names, + lc2, jexpr->passing.values) { - appendStringInfoString(buf, " PATH ("); - get_rule_expr((Node *) colexpr, context, showimplicit); - appendStringInfoChar(buf, ')'); + if (needcomma) + appendStringInfoString(buf, ", "); + needcomma = true; + + appendContextKeyword(context, "", 0, 0, 0); + + get_rule_expr((Node *) lfirst(lc2), context, false); + appendStringInfo(buf, " AS %s", + quote_identifier(((Value *) lfirst(lc1))->val.str)); } - if (notnull) - appendStringInfoString(buf, " NOT NULL"); + + if (PRETTY_INDENT(context)) + context->indentLevel -= PRETTYINDENT_VAR; } - } - appendStringInfoChar(buf, ')'); + get_json_table_columns(tf, root, context, showimplicit); + + appendStringInfoChar(buf, ' '); + appendContextKeyword(context, "PLAN ", 0, 0, 0); + get_json_table_plan(tf, (Node *) root, context, true); + + if (jexpr->on_error.btype != JSON_BEHAVIOR_EMPTY) + get_json_behavior(&jexpr->on_error, context, "ERROR"); + + if (PRETTY_INDENT(context)) + context->indentLevel -= PRETTYINDENT_VAR; + + appendContextKeyword(context, ")", 0, 0, 0); + } } /* ---------- diff --git a/src/backend/utils/adt/timestamp.c b/src/backend/utils/adt/timestamp.c index 5797aaad34..d4d55834d3 100644 --- a/src/backend/utils/adt/timestamp.c +++ b/src/backend/utils/adt/timestamp.c @@ -70,7 +70,6 @@ typedef struct static TimeOffset time2t(const int hour, const int min, const int sec, const fsec_t fsec); static Timestamp dt2local(Timestamp dt, int timezone); -static void AdjustTimestampForTypmod(Timestamp *time, int32 typmod); static void AdjustIntervalForTypmod(Interval *interval, int32 typmod); static TimestampTz timestamp2timestamptz(Timestamp timestamp); static Timestamp timestamptz2timestamp(TimestampTz timestamp); @@ -330,7 +329,7 @@ timestamp_scale(PG_FUNCTION_ARGS) * AdjustTimestampForTypmod --- round off a timestamp to suit given typmod * Works for either timestamp or timestamptz. 
*/ -static void +void AdjustTimestampForTypmod(Timestamp *time, int32 typmod) { static const int64 TimestampScales[MAX_TIMESTAMP_PRECISION + 1] = { diff --git a/src/backend/utils/errcodes.txt b/src/backend/utils/errcodes.txt index 76fe79eac0..4fe2c9010f 100644 --- a/src/backend/utils/errcodes.txt +++ b/src/backend/utils/errcodes.txt @@ -205,6 +205,22 @@ Section: Class 22 - Data Exception 2200N E ERRCODE_INVALID_XML_CONTENT invalid_xml_content 2200S E ERRCODE_INVALID_XML_COMMENT invalid_xml_comment 2200T E ERRCODE_INVALID_XML_PROCESSING_INSTRUCTION invalid_xml_processing_instruction +22030 E ERRCODE_DUPLICATE_JSON_OBJECT_KEY_VALUE duplicate_json_object_key_value +22031 E ERRCODE_INVALID_ARGUMENT_FOR_JSON_DATETIME_FUNCTION invalid_argument_for_json_datetime_function +22032 E ERRCODE_INVALID_JSON_TEXT invalid_json_text +22033 E ERRCODE_INVALID_JSON_SUBSCRIPT invalid_json_subscript +22034 E ERRCODE_MORE_THAN_ONE_JSON_ITEM more_than_one_json_item +22035 E ERRCODE_NO_JSON_ITEM no_json_item +22036 E ERRCODE_NON_NUMERIC_JSON_ITEM non_numeric_json_item +22037 E ERRCODE_NON_UNIQUE_KEYS_IN_JSON_OBJECT non_unique_keys_in_json_object +22038 E ERRCODE_SINGLETON_JSON_ITEM_REQUIRED singleton_json_item_required +22039 E ERRCODE_JSON_ARRAY_NOT_FOUND json_array_not_found +2203A E ERRCODE_JSON_MEMBER_NOT_FOUND json_member_not_found +2203B E ERRCODE_JSON_NUMBER_NOT_FOUND json_number_not_found +2203C E ERRCODE_JSON_OBJECT_NOT_FOUND json_object_not_found +2203D E ERRCODE_TOO_MANY_JSON_ARRAY_ELEMENTS too_many_json_array_elements +2203E E ERRCODE_TOO_MANY_JSON_OBJECT_MEMBERS too_many_json_object_members +2203F E ERRCODE_JSON_SCALAR_REQUIRED json_scalar_required Section: Class 23 - Integrity Constraint Violation diff --git a/src/include/catalog/pg_aggregate.h b/src/include/catalog/pg_aggregate.h index 13f1bce5af..504a1afc0f 100644 --- a/src/include/catalog/pg_aggregate.h +++ b/src/include/catalog/pg_aggregate.h @@ -311,11 +311,15 @@ DATA(insert ( 3545 n 0 bytea_string_agg_transfn bytea_string_agg_finalfn - - - - /* json */ DATA(insert ( 3175 n 0 json_agg_transfn json_agg_finalfn - - - - - - f f r r 0 2281 0 0 0 _null_ _null_ )); +DATA(insert ( 3450 n 0 json_agg_strict_transfn json_agg_finalfn - - - - - - f f r r 0 2281 0 0 0 _null_ _null_ )); DATA(insert ( 3197 n 0 json_object_agg_transfn json_object_agg_finalfn - - - - - - f f r r 0 2281 0 0 0 _null_ _null_ )); +DATA(insert ( 3451 n 0 json_objectagg_transfn json_object_agg_finalfn - - - - - - f f r r 0 2281 0 0 0 _null_ _null_ )); /* jsonb */ DATA(insert ( 3267 n 0 jsonb_agg_transfn jsonb_agg_finalfn - - - - - - f f r r 0 2281 0 0 0 _null_ _null_ )); +DATA(insert ( 6063 n 0 jsonb_agg_strict_transfn jsonb_agg_finalfn - - - - - - f f r r 0 2281 0 0 0 _null_ _null_ )); DATA(insert ( 3270 n 0 jsonb_object_agg_transfn jsonb_object_agg_finalfn - - - - - - f f r r 0 2281 0 0 0 _null_ _null_ )); +DATA(insert ( 6064 n 0 jsonb_objectagg_transfn jsonb_object_agg_finalfn - - - - - - f f r r 0 2281 0 0 0 _null_ _null_ )); /* ordered-set and hypothetical-set aggregates */ DATA(insert ( 3972 o 1 ordered_set_transition percentile_disc_final - - - - - - t f s s 0 2281 0 0 0 _null_ _null_ )); diff --git a/src/include/catalog/pg_amop.h b/src/include/catalog/pg_amop.h index f850be490a..12d1b3b796 100644 --- a/src/include/catalog/pg_amop.h +++ b/src/include/catalog/pg_amop.h @@ -821,11 +821,15 @@ DATA(insert ( 4036 3802 3802 7 s 3246 2742 0 )); DATA(insert ( 4036 3802 25 9 s 3247 2742 0 )); DATA(insert ( 4036 3802 1009 10 s 3248 2742 0 )); DATA(insert ( 4036 3802 1009 11 s 3249 2742 0 ));
+DATA(insert ( 4036 3802 6050 15 s 6076 2742 0 )); +DATA(insert ( 4036 3802 6050 16 s 6107 2742 0 )); /* * GIN jsonb_path_ops */ DATA(insert ( 4037 3802 3802 7 s 3246 2742 0 )); +DATA(insert ( 4037 3802 6050 15 s 6076 2742 0 )); +DATA(insert ( 4037 3802 6050 16 s 6107 2742 0 )); /* * SP-GiST range_ops diff --git a/src/include/catalog/pg_operator.h b/src/include/catalog/pg_operator.h index ff9b47077b..83d6af24bd 100644 --- a/src/include/catalog/pg_operator.h +++ b/src/include/catalog/pg_operator.h @@ -1853,5 +1853,17 @@ DATA(insert OID = 3286 ( "-" PGNSP PGUID b f f 3802 23 3802 0 0 3303 - - )); DESCR("delete array element"); DATA(insert OID = 3287 ( "#-" PGNSP PGUID b f f 3802 1009 3802 0 0 jsonb_delete_path - - )); DESCR("delete path"); +DATA(insert OID = 6075 ( "@*" PGNSP PGUID b f f 3802 6050 3802 0 0 6055 - - )); +DESCR("jsonpath items"); +DATA(insert OID = 6076 ( "@?" PGNSP PGUID b f f 3802 6050 16 0 0 6054 contsel contjoinsel )); +DESCR("jsonpath exists"); +DATA(insert OID = 6107 ( "@~" PGNSP PGUID b f f 3802 6050 16 0 0 6073 contsel contjoinsel )); +DESCR("jsonpath predicate"); +DATA(insert OID = 6070 ( "@*" PGNSP PGUID b f f 114 6050 114 0 0 6044 - - )); +DESCR("jsonpath items"); +DATA(insert OID = 6071 ( "@?" PGNSP PGUID b f f 114 6050 16 0 0 6043 contsel contjoinsel )); +DESCR("jsonpath exists"); +DATA(insert OID = 6108 ( "@~" PGNSP PGUID b f f 114 6050 16 0 0 6049 contsel contjoinsel )); +DESCR("jsonpath predicate"); #endif /* PG_OPERATOR_H */ diff --git a/src/include/catalog/pg_proc.h b/src/include/catalog/pg_proc.h index 93c031aad7..b9aa363c21 100644 --- a/src/include/catalog/pg_proc.h +++ b/src/include/catalog/pg_proc.h @@ -4509,24 +4509,39 @@ DATA(insert OID = 3156 ( row_to_json PGNSP PGUID 12 1 0 0 0 f f f f t f s s DESCR("map row to json with optional pretty printing"); DATA(insert OID = 3173 ( json_agg_transfn PGNSP PGUID 12 1 0 0 0 f f f f f f s s 2 0 2281 "2281 2283" _null_ _null_ _null_ _null_ _null_ json_agg_transfn _null_ _null_ _null_ )); DESCR("json aggregate transition function"); +DATA(insert OID = 3452 ( json_agg_strict_transfn PGNSP PGUID 12 1 0 0 0 f f f f f f s s 2 0 2281 "2281 2283" _null_ _null_ _null_ _null_ _null_ json_agg_strict_transfn _null_ _null_ _null_ )); +DESCR("json aggregate transition function"); DATA(insert OID = 3174 ( json_agg_finalfn PGNSP PGUID 12 1 0 0 0 f f f f f f i s 1 0 114 "2281" _null_ _null_ _null_ _null_ _null_ json_agg_finalfn _null_ _null_ _null_ )); DESCR("json aggregate final function"); DATA(insert OID = 3175 ( json_agg PGNSP PGUID 12 1 0 0 0 t f f f f f s s 1 0 114 "2283" _null_ _null_ _null_ _null_ _null_ aggregate_dummy _null_ _null_ _null_ )); DESCR("aggregate input into json"); +#define F_JSON_AGG 3175 +DATA(insert OID = 3450 ( json_agg_strict PGNSP PGUID 12 1 0 0 0 t f f f f f s s 1 0 114 "2283" _null_ _null_ _null_ _null_ _null_ aggregate_dummy _null_ _null_ _null_ )); +DESCR("aggregate input into json"); +#define F_JSON_AGG_STRICT 3450 DATA(insert OID = 3180 ( json_object_agg_transfn PGNSP PGUID 12 1 0 0 0 f f f f f f s s 3 0 2281 "2281 2276 2276" _null_ _null_ _null_ _null_ _null_ json_object_agg_transfn _null_ _null_ _null_ )); DESCR("json object aggregate transition function"); +DATA(insert OID = 3453 ( json_objectagg_transfn PGNSP PGUID 12 1 0 0 0 f f f f f f s s 5 0 2281 "2281 2276 2276 16 16" _null_ _null_ _null_ _null_ _null_ json_objectagg_transfn _null_ _null_ _null_ )); +DESCR("json object aggregate transition function"); DATA(insert OID = 3196 ( json_object_agg_finalfn PGNSP PGUID 12 1 0 0 0 f f f f f f i s 
1 0 114 "2281" _null_ _null_ _null_ _null_ _null_ json_object_agg_finalfn _null_ _null_ _null_ )); DESCR("json object aggregate final function"); DATA(insert OID = 3197 ( json_object_agg PGNSP PGUID 12 1 0 0 0 t f f f f f s s 2 0 114 "2276 2276" _null_ _null_ _null_ _null_ _null_ aggregate_dummy _null_ _null_ _null_ )); DESCR("aggregate input into a json object"); +DATA(insert OID = 3451 ( json_objectagg PGNSP PGUID 12 1 0 0 0 t f f f f f s s 4 0 114 "2276 2276 16 16" _null_ _null_ _null_ _null_ _null_ aggregate_dummy _null_ _null_ _null_ )); +DESCR("aggregate input into a json object"); +#define F_JSON_OBJECTAGG 3451 DATA(insert OID = 3198 ( json_build_array PGNSP PGUID 12 1 0 2276 0 f f f f f f s s 1 0 114 "2276" "{2276}" "{v}" _null_ _null_ _null_ json_build_array _null_ _null_ _null_ )); DESCR("build a json array from any inputs"); DATA(insert OID = 3199 ( json_build_array PGNSP PGUID 12 1 0 0 0 f f f f f f s s 0 0 114 "" _null_ _null_ _null_ _null_ _null_ json_build_array_noargs _null_ _null_ _null_ )); DESCR("build an empty json array"); +DATA(insert OID = 3998 ( json_build_array_ext PGNSP PGUID 12 1 0 2276 0 f f f f f f s s 2 0 114 "16 2276" "{16,2276}" "{i,v}" _null_ _null_ _null_ json_build_array_ext _null_ _null_ _null_ )); +DESCR("build a json array from any inputs"); DATA(insert OID = 3200 ( json_build_object PGNSP PGUID 12 1 0 2276 0 f f f f f f s s 1 0 114 "2276" "{2276}" "{v}" _null_ _null_ _null_ json_build_object _null_ _null_ _null_ )); DESCR("build a json object from pairwise key/value inputs"); DATA(insert OID = 3201 ( json_build_object PGNSP PGUID 12 1 0 0 0 f f f f f f s s 0 0 114 "" _null_ _null_ _null_ _null_ _null_ json_build_object_noargs _null_ _null_ _null_ )); DESCR("build an empty json object"); +DATA(insert OID = 6066 ( json_build_object_ext PGNSP PGUID 12 1 0 2276 0 f f f f f f s s 3 0 114 "16 16 2276" "{16,16,2276}" "{i,i,v}" _null_ _null_ _null_ json_build_object_ext _null_ _null_ _null_ )); +DESCR("build a json object from pairwise key/value inputs"); DATA(insert OID = 3202 ( json_object PGNSP PGUID 12 1 0 0 0 f f f f t f i s 1 0 114 "1009" _null_ _null_ _null_ _null_ _null_ json_object _null_ _null_ _null_ )); DESCR("map text array of key value pairs to json object"); DATA(insert OID = 3203 ( json_object PGNSP PGUID 12 1 0 0 0 f f f f t f i s 2 0 114 "1009 1009" _null_ _null_ _null_ _null_ _null_ json_object_two_arg _null_ _null_ _null_ )); @@ -4535,6 +4550,10 @@ DATA(insert OID = 3176 ( to_json PGNSP PGUID 12 1 0 0 0 f f f f t f s s 1 0 DESCR("map input to json"); DATA(insert OID = 3261 ( json_strip_nulls PGNSP PGUID 12 1 0 0 0 f f f f t f i s 1 0 114 "114" _null_ _null_ _null_ _null_ _null_ json_strip_nulls _null_ _null_ _null_ )); DESCR("remove object fields with null values from json"); +DATA(insert OID = 6060 ( json_is_valid PGNSP PGUID 12 1 0 0 0 f f f f t f i s 3 0 16 "114 25 16" _null_ _null_ _null_ _null_ _null_ json_is_valid _null_ _null_ _null_ )); +DESCR("check json value type and key uniqueness"); +DATA(insert OID = 6061 ( json_is_valid PGNSP PGUID 12 1 0 0 0 f f f f t f i s 3 0 16 "25 25 16" _null_ _null_ _null_ _null_ _null_ json_is_valid _null_ _null_ _null_ )); +DESCR("check json text validity, value type and key uniqueness"); DATA(insert OID = 3947 ( json_object_field PGNSP PGUID 12 1 0 0 0 f f f f t f i s 2 0 114 "114 25" _null_ _null_ "{from_json, field_name}" _null_ _null_ json_object_field _null_ _null_ _null_ )); DATA(insert OID = 3948 ( json_object_field_text PGNSP PGUID 12 1 0 0 0 f f f f t f i s 2 0 25 "114 25" _null_ _null_ 
"{from_json, field_name}" _null_ _null_ json_object_field_text _null_ _null_ _null_ )); @@ -4969,26 +4988,44 @@ DATA(insert OID = 3787 ( to_jsonb PGNSP PGUID 12 1 0 0 0 f f f f t f s s 1 0 DESCR("map input to jsonb"); DATA(insert OID = 3265 ( jsonb_agg_transfn PGNSP PGUID 12 1 0 0 0 f f f f f f s s 2 0 2281 "2281 2283" _null_ _null_ _null_ _null_ _null_ jsonb_agg_transfn _null_ _null_ _null_ )); DESCR("jsonb aggregate transition function"); +DATA(insert OID = 6065 ( jsonb_agg_strict_transfn PGNSP PGUID 12 1 0 0 0 f f f f f f s s 2 0 2281 "2281 2283" _null_ _null_ _null_ _null_ _null_ jsonb_agg_strict_transfn _null_ _null_ _null_ )); +DESCR("jsonb aggregate transition function"); DATA(insert OID = 3266 ( jsonb_agg_finalfn PGNSP PGUID 12 1 0 0 0 f f f f f f s s 1 0 3802 "2281" _null_ _null_ _null_ _null_ _null_ jsonb_agg_finalfn _null_ _null_ _null_ )); DESCR("jsonb aggregate final function"); DATA(insert OID = 3267 ( jsonb_agg PGNSP PGUID 12 1 0 0 0 t f f f f f s s 1 0 3802 "2283" _null_ _null_ _null_ _null_ _null_ aggregate_dummy _null_ _null_ _null_ )); DESCR("aggregate input into jsonb"); +#define F_JSONB_AGG 3267 +DATA(insert OID = 6063 ( jsonb_agg_strict PGNSP PGUID 12 1 0 0 0 t f f f f f s s 1 0 3802 "2283" _null_ _null_ _null_ _null_ _null_ aggregate_dummy _null_ _null_ _null_ )); +DESCR("aggregate input into jsonb skipping nulls"); +#define F_JSONB_AGG_STRICT 6063 DATA(insert OID = 3268 ( jsonb_object_agg_transfn PGNSP PGUID 12 1 0 0 0 f f f f f f s s 3 0 2281 "2281 2276 2276" _null_ _null_ _null_ _null_ _null_ jsonb_object_agg_transfn _null_ _null_ _null_ )); DESCR("jsonb object aggregate transition function"); +DATA(insert OID = 3449 ( jsonb_objectagg_transfn PGNSP PGUID 12 1 0 0 0 f f f f f f s s 5 0 2281 "2281 2276 2276 16 16" _null_ _null_ _null_ _null_ _null_ jsonb_objectagg_transfn _null_ _null_ _null_ )); +DESCR("jsonb object aggregate transition function"); DATA(insert OID = 3269 ( jsonb_object_agg_finalfn PGNSP PGUID 12 1 0 0 0 f f f f f f s s 1 0 3802 "2281" _null_ _null_ _null_ _null_ _null_ jsonb_object_agg_finalfn _null_ _null_ _null_ )); DESCR("jsonb object aggregate final function"); DATA(insert OID = 3270 ( jsonb_object_agg PGNSP PGUID 12 1 0 0 0 t f f f f f i s 2 0 3802 "2276 2276" _null_ _null_ _null_ _null_ _null_ aggregate_dummy _null_ _null_ _null_ )); DESCR("aggregate inputs into jsonb object"); +DATA(insert OID = 6064 ( jsonb_objectagg PGNSP PGUID 12 1 0 0 0 t f f f f f i s 4 0 3802 "2276 2276 16 16" _null_ _null_ _null_ _null_ _null_ aggregate_dummy _null_ _null_ _null_ )); +DESCR("aggregate inputs into jsonb object"); +#define F_JSONB_OBJECTAGG 6064 DATA(insert OID = 3271 ( jsonb_build_array PGNSP PGUID 12 1 0 2276 0 f f f f f f s s 1 0 3802 "2276" "{2276}" "{v}" _null_ _null_ _null_ jsonb_build_array _null_ _null_ _null_ )); DESCR("build a jsonb array from any inputs"); DATA(insert OID = 3272 ( jsonb_build_array PGNSP PGUID 12 1 0 0 0 f f f f f f s s 0 0 3802 "" _null_ _null_ _null_ _null_ _null_ jsonb_build_array_noargs _null_ _null_ _null_ )); DESCR("build an empty jsonb array"); +DATA(insert OID = 6068 ( jsonb_build_array_ext PGNSP PGUID 12 1 0 2276 0 f f f f f f s s 2 0 3802 "16 2276" "{16,2276}" "{i,v}" _null_ _null_ _null_ jsonb_build_array_ext _null_ _null_ _null_ )); +DESCR("build a jsonb array from any inputs"); DATA(insert OID = 3273 ( jsonb_build_object PGNSP PGUID 12 1 0 2276 0 f f f f f f s s 1 0 3802 "2276" "{2276}" "{v}" _null_ _null_ _null_ jsonb_build_object _null_ _null_ _null_ )); DESCR("build a jsonb object from pairwise key/value 
inputs"); DATA(insert OID = 3274 ( jsonb_build_object PGNSP PGUID 12 1 0 0 0 f f f f f f s s 0 0 3802 "" _null_ _null_ _null_ _null_ _null_ jsonb_build_object_noargs _null_ _null_ _null_ )); DESCR("build an empty jsonb object"); +DATA(insert OID = 6067 ( jsonb_build_object_ext PGNSP PGUID 12 1 0 2276 0 f f f f f f s s 3 0 3802 "16 16 2276" "{16,16,2276}" "{i,i,v}" _null_ _null_ _null_ jsonb_build_object_ext _null_ _null_ _null_ )); +DESCR("build a jsonb object from pairwise key/value inputs"); DATA(insert OID = 3262 ( jsonb_strip_nulls PGNSP PGUID 12 1 0 0 0 f f f f t f i s 1 0 3802 "3802" _null_ _null_ _null_ _null_ _null_ jsonb_strip_nulls _null_ _null_ _null_ )); DESCR("remove object fields with null values from jsonb"); +DATA(insert OID = 6062 ( jsonb_is_valid PGNSP PGUID 12 1 0 0 0 f f f f t f i s 2 0 16 "17 25" _null_ _null_ _null_ _null_ _null_ jsonb_is_valid _null_ _null_ _null_ )); +DESCR("check jsonb value type"); + DATA(insert OID = 3478 ( jsonb_object_field PGNSP PGUID 12 1 0 0 0 f f f f t f i s 2 0 3802 "3802 25" _null_ _null_ "{from_json, field_name}" _null_ _null_ jsonb_object_field _null_ _null_ _null_ )); DATA(insert OID = 3214 ( jsonb_object_field_text PGNSP PGUID 12 1 0 0 0 f f f f t f i s 2 0 25 "3802 25" _null_ _null_ "{from_json, field_name}" _null_ _null_ jsonb_object_field_text _null_ _null_ _null_ )); @@ -5522,6 +5559,45 @@ DESCR("list files in the log directory"); DATA(insert OID = 3354 ( pg_ls_waldir PGNSP PGUID 12 10 20 0 0 f f f f t t v s 0 0 2249 "" "{25,20,1184}" "{o,o,o}" "{name,size,modification}" _null_ _null_ pg_ls_waldir _null_ _null_ _null_ )); DESCR("list of files in the WAL directory"); +/* jsonpath */ +DATA(insert OID = 6052 ( jsonpath_in PGNSP PGUID 12 1 0 0 0 f f f f t f i s 1 0 6050 "2275" _null_ _null_ _null_ _null_ _null_ jsonpath_in _null_ _null_ _null_ )); +DESCR("I/O"); +DATA(insert OID = 6053 ( jsonpath_out PGNSP PGUID 12 1 0 0 0 f f f f t f i s 1 0 2275 "6050" _null_ _null_ _null_ _null_ _null_ jsonpath_out _null_ _null_ _null_ )); +DESCR("I/O"); +DATA(insert OID = 6054 ( jsonpath_exists PGNSP PGUID 12 1 0 0 0 f f f f t f i s 2 0 16 "3802 6050" _null_ _null_ _null_ _null_ _null_ jsonb_jsonpath_exists2 _null_ _null_ _null_ )); +DESCR("implementation of @? 
operator"); +DATA(insert OID = 6055 ( jsonpath_query PGNSP PGUID 12 1 1000 0 0 f f f f t t i s 2 0 3802 "3802 6050" _null_ _null_ _null_ _null_ _null_ jsonb_jsonpath_query2 _null_ _null_ _null_ )); +DESCR("implementation of @* operator"); +DATA(insert OID = 6056 ( jsonpath_exists PGNSP PGUID 12 1 0 0 0 f f f f t f i s 3 0 16 "3802 6050 3802" _null_ _null_ _null_ _null_ _null_ jsonb_jsonpath_exists3 _null_ _null_ _null_ )); +DESCR("jsonpath exists test"); +DATA(insert OID = 6057 ( jsonpath_query PGNSP PGUID 12 1 1000 0 0 f f f f t t i s 3 0 3802 "3802 6050 3802" _null_ _null_ _null_ _null_ _null_ jsonb_jsonpath_query3 _null_ _null_ _null_ )); +DESCR("jsonpath object test"); +DATA(insert OID = 6058 ( jsonpath_query_safe PGNSP PGUID 12 1 1000 0 0 f f f f t t i s 2 0 3802 "3802 6050" _null_ _null_ _null_ _null_ _null_ jsonb_jsonpath_query_safe2 _null_ _null_ _null_ )); +DESCR("jsonpath query, empty on error"); +DATA(insert OID = 6059 ( jsonpath_query_safe PGNSP PGUID 12 1 1000 0 0 f f f f t t i s 3 0 3802 "3802 6050 3802" _null_ _null_ _null_ _null_ _null_ jsonb_jsonpath_query_safe3 _null_ _null_ _null_ )); +DESCR("jsonpath query, empty on error"); +DATA(insert OID = 6073 ( jsonpath_predicate PGNSP PGUID 12 1 0 0 0 f f f f t f i s 2 0 16 "3802 6050" _null_ _null_ _null_ _null_ _null_ jsonb_jsonpath_predicate2 _null_ _null_ _null_ )); +DESCR("implementation of @~ operator"); +DATA(insert OID = 6074 ( jsonpath_predicate PGNSP PGUID 12 1 0 0 0 f f f f t f i s 3 0 16 "3802 6050 3802" _null_ _null_ _null_ _null_ _null_ jsonb_jsonpath_predicate3 _null_ _null_ _null_ )); +DESCR("jsonpath predicate test"); + +DATA(insert OID = 6043 ( jsonpath_exists PGNSP PGUID 12 1 0 0 0 f f f f t f i s 2 0 16 "114 6050" _null_ _null_ _null_ _null_ _null_ json_jsonpath_exists2 _null_ _null_ _null_ )); +DESCR("implementation of @? 
operator"); +DATA(insert OID = 6044 ( jsonpath_query PGNSP PGUID 12 1 1000 0 0 f f f f t t i s 2 0 114 "114 6050" _null_ _null_ _null_ _null_ _null_ json_jsonpath_query2 _null_ _null_ _null_ )); +DESCR("implementation of @* operator"); +DATA(insert OID = 6045 ( jsonpath_exists PGNSP PGUID 12 1 0 0 0 f f f f t f i s 3 0 16 "114 6050 114" _null_ _null_ _null_ _null_ _null_ json_jsonpath_exists3 _null_ _null_ _null_ )); +DESCR("jsonpath exists test"); +DATA(insert OID = 6046 ( jsonpath_query PGNSP PGUID 12 1 1000 0 0 f f f f t t i s 3 0 114 "114 6050 114" _null_ _null_ _null_ _null_ _null_ json_jsonpath_query3 _null_ _null_ _null_ )); +DESCR("jsonpath query"); +DATA(insert OID = 6047 ( jsonpath_query_safe PGNSP PGUID 12 1 1000 0 0 f f f f t t i s 2 0 114 "114 6050" _null_ _null_ _null_ _null_ _null_ json_jsonpath_query_safe2 _null_ _null_ _null_ )); +DESCR("jsonpath query, empty on error"); +DATA(insert OID = 6048 ( jsonpath_query_safe PGNSP PGUID 12 1 1000 0 0 f f f f t t i s 3 0 114 "114 6050 114" _null_ _null_ _null_ _null_ _null_ json_jsonpath_query_safe3 _null_ _null_ _null_ )); +DESCR("jsonpath query, empty on error"); +DATA(insert OID = 6049 ( jsonpath_predicate PGNSP PGUID 12 1 0 0 0 f f f f t f i s 2 0 16 "114 6050" _null_ _null_ _null_ _null_ _null_ json_jsonpath_predicate2 _null_ _null_ _null_ )); +DESCR("implementation of @~ operator"); +DATA(insert OID = 6069 ( jsonpath_predicate PGNSP PGUID 12 1 0 0 0 f f f f t f i s 3 0 16 "114 6050 114" _null_ _null_ _null_ _null_ _null_ json_jsonpath_predicate3 _null_ _null_ _null_ )); +DESCR("jsonpath predicate test"); + /* * Symbolic values for provolatile column: these indicate whether the result * of a function is dependent *only* on the values of its explicit arguments, diff --git a/src/include/catalog/pg_type.h b/src/include/catalog/pg_type.h index ffdb452b02..106436a212 100644 --- a/src/include/catalog/pg_type.h +++ b/src/include/catalog/pg_type.h @@ -638,6 +638,12 @@ DESCR("Binary JSON"); #define JSONBOID 3802 DATA(insert OID = 3807 ( _jsonb PGNSP PGUID -1 f b A f t \054 0 3802 0 array_in array_out array_recv array_send - - array_typanalyze i x f 0 -1 0 0 _null_ _null_ _null_ )); +/* jsonpath */ +DATA(insert OID = 6050 ( jsonpath PGNSP PGUID -1 f b U f t \054 0 0 6051 jsonpath_in jsonpath_out - - - - - i x f 0 -1 0 0 _null_ _null_ _null_ )); +DESCR("JSON Path"); +#define JSONPATHOID 6050 +DATA(insert OID = 6051 ( _jsonpath PGNSP PGUID -1 f b A f t \054 0 6050 0 array_in array_out array_recv array_send - - array_typanalyze i x f 0 -1 0 0 _null_ _null_ _null_ )); + DATA(insert OID = 2970 ( txid_snapshot PGNSP PGUID -1 f b U f t \054 0 0 2949 txid_snapshot_in txid_snapshot_out txid_snapshot_recv txid_snapshot_send - - - d x f 0 -1 0 0 _null_ _null_ _null_ )); DESCR("txid snapshot"); DATA(insert OID = 2949 ( _txid_snapshot PGNSP PGUID -1 f b A f t \054 0 2970 0 array_in array_out array_recv array_send - - array_typanalyze d x f 0 -1 0 0 _null_ _null_ _null_ )); diff --git a/src/include/executor/execExpr.h b/src/include/executor/execExpr.h index 78d2247816..0081445adb 100644 --- a/src/include/executor/execExpr.h +++ b/src/include/executor/execExpr.h @@ -18,6 +18,7 @@ /* forward reference to avoid circularity */ struct ArrayRefState; +struct JsonbValue; /* Bits in ExprState->flags (see also execnodes.h for public flag bits): */ /* expression's interpreter has been initialized */ @@ -211,6 +212,7 @@ typedef enum ExprEvalOp EEOP_WINDOW_FUNC, EEOP_SUBPLAN, EEOP_ALTERNATIVE_SUBPLAN, + EEOP_JSONEXPR, /* non-existent operation, used e.g. 
to check array lengths */ EEOP_LAST @@ -558,6 +560,55 @@ typedef struct ExprEvalStep /* out-of-line state, created by nodeSubplan.c */ AlternativeSubPlanState *asstate; } alternative_subplan; + + /* for EEOP_JSONEXPR */ + struct + { + JsonExpr *jsexpr; /* original expression node */ + + struct + { + FmgrInfo func; /* typinput function for output type */ + Oid typioparam; + } input; /* I/O info for output type */ + + struct + { + Datum value; + bool isnull; + } *raw_expr; /* raw context item value */ + + ExprState *formatted_expr; /* formatted context item */ + ExprState *result_expr; /* coerced to output type */ + ExprState *default_on_empty; /* ON EMPTY DEFAULT expression */ + ExprState *default_on_error; /* ON ERROR DEFAULT expression */ + List *args; /* passing arguments */ + + void *cache; /* cache for json_populate_type() */ + + struct JsonScalarCoercions + { + struct JsonScalarCoercionExprState + { + Node *result_expr; /* coercion expression */ + ExprState *result_expr_state; /* coercion expression state */ + bool coerce_via_io; + bool coerce_via_populate; + bool initialized; + } string, + numeric, + boolean, + date, + time, + timetz, + timestamp, + timestamptz, + composite, + null; + } scalar; /* states for coercion from SQL/JSON item + * types directly to the output type */ + } jsonexpr; + } d; } ExprEvalStep; @@ -597,7 +648,6 @@ typedef struct ArrayRefState bool prevnull; } ArrayRefState; - extern void ExecReadyInterpretedExpr(ExprState *state); extern ExprEvalOp ExecEvalStepOp(ExprState *state, ExprEvalStep *op); @@ -646,5 +696,15 @@ extern void ExecEvalAlternativeSubPlan(ExprState *state, ExprEvalStep *op, ExprContext *econtext); extern void ExecEvalWholeRowVar(ExprState *state, ExprEvalStep *op, ExprContext *econtext); +extern void ExecEvalJson(ExprState *state, ExprEvalStep *op, + ExprContext *econtext); +extern Datum ExecPrepareJsonItemCoercion(struct JsonbValue *item, bool is_jsonb, + JsonReturning *returning, + struct JsonScalarCoercions *coercions, + MemoryContext mcxt, + struct JsonScalarCoercionExprState **pcestate); +extern Datum ExecEvalExprPassingCaseValue(ExprState *estate, + ExprContext *econtext, bool *isnull, + Datum caseval_datum, bool caseval_isnull); #endif /* EXEC_EXPR_H */ diff --git a/src/include/lib/stringinfo.h b/src/include/lib/stringinfo.h index 01b845db44..6e2619e08f 100644 --- a/src/include/lib/stringinfo.h +++ b/src/include/lib/stringinfo.h @@ -157,4 +157,10 @@ extern void appendBinaryStringInfoNT(StringInfo str, */ extern void enlargeStringInfo(StringInfo str, int needed); +/*------------------------ + * alignStringInfoInt + * Add padding zero bytes to align StringInfo + */ +extern void alignStringInfoInt(StringInfo buf); + #endif /* STRINGINFO_H */ diff --git a/src/include/nodes/makefuncs.h b/src/include/nodes/makefuncs.h index dd0d2ea07d..d693602039 100644 --- a/src/include/nodes/makefuncs.h +++ b/src/include/nodes/makefuncs.h @@ -88,4 +88,12 @@ extern GroupingSet *makeGroupingSet(GroupingSetKind kind, List *content, int loc extern VacuumRelation *makeVacuumRelation(RangeVar *relation, Oid oid, List *va_cols); +extern JsonBehavior *makeJsonBehavior(JsonBehaviorType type, Node *expr); +extern Node *makeJsonTableJoinedPlan(JsonTablePlanJoinType type, + Node *plan1, Node *plan2, int location); +extern Node *makeJsonKeyValue(Node *key, Node *value); +extern Node *makeJsonIsPredicate(Node *expr, JsonFormat format, + JsonValueType vtype, bool unique_keys); +extern JsonEncoding makeJsonEncoding(char *name); + #endif /* MAKEFUNC_H */ diff --git 
a/src/include/nodes/nodes.h b/src/include/nodes/nodes.h index ffeeb4919b..ff4df3393e 100644 --- a/src/include/nodes/nodes.h +++ b/src/include/nodes/nodes.h @@ -191,6 +191,9 @@ typedef enum NodeTag T_FromExpr, T_OnConflictExpr, T_IntoClause, + T_JsonExpr, + T_JsonTableParentNode, + T_JsonTableSiblingNode, /* * TAGS FOR EXPRESSION STATE NODES (execnodes.h) @@ -469,6 +472,23 @@ typedef enum NodeTag T_PartitionRangeDatum, T_PartitionCmd, T_VacuumRelation, + T_JsonValueExpr, + T_JsonObjectCtor, + T_JsonArrayCtor, + T_JsonArrayQueryCtor, + T_JsonObjectAgg, + T_JsonArrayAgg, + T_JsonFuncExpr, + T_JsonIsPredicate, + T_JsonExistsPredicate, + T_JsonTable, + T_JsonTableColumn, + T_JsonTablePlan, + T_JsonCommon, + T_JsonArgument, + T_JsonKeyValue, + T_JsonBehavior, + T_JsonOutput, /* * TAGS FOR REPLICATION GRAMMAR PARSE NODES (replnodes.h) diff --git a/src/include/nodes/parsenodes.h b/src/include/nodes/parsenodes.h index 06a2b81fb5..24e6ef3bcf 100644 --- a/src/include/nodes/parsenodes.h +++ b/src/include/nodes/parsenodes.h @@ -1403,6 +1403,296 @@ typedef struct TriggerTransition bool isTable; } TriggerTransition; +/* Nodes for SQL/JSON support */ + +/* + * JsonQuotes - + * representation of [KEEP|OMIT] QUOTES clause for JSON_QUERY() + */ +typedef enum JsonQuotes +{ + JS_QUOTES_UNSPEC, /* unspecified */ + JS_QUOTES_KEEP, /* KEEP QUOTES */ + JS_QUOTES_OMIT /* OMIT QUOTES */ +} JsonQuotes; + +/* + * JsonTableColumnType - + * enumeration of JSON_TABLE column types + */ +typedef enum +{ + JTC_FOR_ORDINALITY, + JTC_REGULAR, + JTC_FORMATTED, + JTC_NESTED, +} JsonTableColumnType; + +/* + * JsonPathSpec - + * representation of JSON path constant + */ +typedef char *JsonPathSpec; + +/* + * JsonOutput - + * representation of JSON output clause (RETURNING type [FORMAT format]) + */ +typedef struct JsonOutput +{ + NodeTag type; + TypeName *typename; /* RETURNING type name, if specified */ + JsonReturning returning; /* RETURNING FORMAT clause and type Oids */ +} JsonOutput; + +/* + * JsonValueExpr - + * representation of JSON value expression (expr [FORMAT json_format]) + */ +typedef struct JsonValueExpr +{ + NodeTag type; + Expr *expr; /* raw expression */ + JsonFormat format; /* FORMAT clause, if specified */ +} JsonValueExpr; + +/* + * JsonArgument - + * representation of argument from JSON PASSING clause + */ +typedef struct JsonArgument +{ + NodeTag type; + JsonValueExpr *val; /* argument value expression */ + char *name; /* argument name */ +} JsonArgument; + +/* + * JsonCommon - + * representation of common syntax of functions using JSON path + */ +typedef struct JsonCommon +{ + NodeTag type; + JsonValueExpr *expr; /* context item expression */ + JsonPathSpec pathspec; /* JSON path specification */ + char *pathname; /* path name, if any */ + List *passing; /* list of PASSING clause arguments, if any */ + int location; /* token location, or -1 if unknown */ +} JsonCommon; + +/* + * JsonFuncExpr - + * untransformed representation of JSON function expressions + */ +typedef struct JsonFuncExpr +{ + NodeTag type; + JsonExprOp op; /* expression type */ + JsonCommon *common; /* common syntax */ + JsonOutput *output; /* output clause, if specified */ + JsonBehavior *on_empty; /* ON EMPTY behavior, if specified */ + JsonBehavior *on_error; /* ON ERROR behavior, if specified */ + JsonWrapper wrapper; /* array wrapper behavior (JSON_QUERY only) */ + bool omit_quotes; /* omit or keep quotes? 
(JSON_QUERY only) */ + int location; /* token location, or -1 if unknown */ +} JsonFuncExpr; + +/* + * JsonTableColumn - + * untransformed representation of JSON_TABLE column + */ +typedef struct JsonTableColumn +{ + NodeTag type; + JsonTableColumnType coltype; /* column type */ + char *name; /* column name */ + TypeName *typename; /* column type name */ + JsonPathSpec pathspec; /* path specification, if any */ + char *pathname; /* path name, if any */ + JsonFormat format; /* JSON format clause, if specified */ + JsonWrapper wrapper; /* WRAPPER behavior for formatted columns */ + bool omit_quotes; /* omit or keep quotes on scalar strings? */ + List *columns; /* nested columns */ + JsonBehavior *on_empty; /* ON EMPTY behavior */ + JsonBehavior *on_error; /* ON ERROR behavior */ + int location; /* token location, or -1 if unknown */ +} JsonTableColumn; + +/* + * JsonTablePlanType - + * flags for JSON_TABLE plan node types representation + */ +typedef enum JsonTablePlanType +{ + JSTP_DEFAULT, + JSTP_SIMPLE, + JSTP_JOINED, +} JsonTablePlanType; + +/* + * JsonTablePlanJoinType - + * flags for JSON_TABLE join types representation + */ +typedef enum JsonTablePlanJoinType +{ + JSTP_INNER = 0x01, + JSTP_OUTER = 0x02, + JSTP_CROSS = 0x04, + JSTP_UNION = 0x08, +} JsonTablePlanJoinType; + +typedef struct JsonTablePlan JsonTablePlan; + +/* + * JsonTablePlan - + * untransformed representation of JSON_TABLE plan node + */ +struct JsonTablePlan +{ + NodeTag type; + JsonTablePlanType plan_type; /* plan type */ + JsonTablePlanJoinType join_type; /* join type (for joined plan only) */ + JsonTablePlan *plan1; /* first joined plan */ + JsonTablePlan *plan2; /* second joined plan */ + char *pathname; /* path name (for simple plan only) */ + int location; /* token location, or -1 if unknown */ +}; + +/* + * JsonTable - + * untransformed representation of JSON_TABLE + */ +typedef struct JsonTable +{ + NodeTag type; + JsonCommon *common; /* common JSON path syntax fields */ + List *columns; /* list of JsonTableColumn */ + JsonTablePlan *plan; /* join plan, if specified */ + JsonBehavior *on_error; /* ON ERROR behavior, if specified */ + Alias *alias; /* table alias in FROM clause */ + bool lateral; /* does it have LATERAL prefix? */ + int location; /* token location, or -1 if unknown */ +} JsonTable; + +/* + * JsonValueType - + * representation of JSON item type in IS JSON predicate + */ +typedef enum JsonValueType +{ + JS_TYPE_ANY, /* IS JSON [VALUE] */ + JS_TYPE_OBJECT, /* IS JSON OBJECT */ + JS_TYPE_ARRAY, /* IS JSON ARRAY*/ + JS_TYPE_SCALAR /* IS JSON SCALAR */ +} JsonValueType; + +/* + * JsonIsPredicate - + * untransformed representation of IS JSON predicate + */ +typedef struct JsonIsPredicate +{ + NodeTag type; + Node *expr; /* untransformed expression */ + JsonFormat format; /* FORMAT clause, if specified */ + JsonValueType vtype; /* JSON item type */ + bool unique_keys; /* check key uniqueness? 
*/ + int location; /* token location, or -1 if unknown */ +} JsonIsPredicate; + +/* + * JsonKeyValue - + * untransformed representation of JSON object key-value pair for + * JSON_OBJECT() and JSON_OBJECTAGG() + */ +typedef struct JsonKeyValue +{ + NodeTag type; + Expr *key; /* key expression */ + JsonValueExpr *value; /* JSON value expression */ +} JsonKeyValue; + +/* + * JsonObjectCtor - + * untransformed representation of JSON_OBJECT() constructor + */ +typedef struct JsonObjectCtor +{ + NodeTag type; + List *exprs; /* list of JsonKeyValue pairs */ + JsonOutput *output; /* RETURNING clause, if specified */ + bool absent_on_null; /* skip NULL values? */ + bool unique; /* check key uniqueness? */ + int location; /* token location, or -1 if unknown */ +} JsonObjectCtor; + +/* + * JsonArrayCtor - + * untransformed representation of JSON_ARRAY(element,...) constructor + */ +typedef struct JsonArrayCtor +{ + NodeTag type; + List *exprs; /* list of JsonValueExpr elements */ + JsonOutput *output; /* RETURNING clause, if specified */ + bool absent_on_null; /* skip NULL elements? */ + int location; /* token location, or -1 if unknown */ +} JsonArrayCtor; + +/* + * JsonArrayQueryCtor - + * untransformed representation of JSON_ARRAY(subquery) constructor + */ +typedef struct JsonArrayQueryCtor +{ + NodeTag type; + Node *query; /* subquery */ + JsonOutput *output; /* RETURNING clause, if specified */ + JsonFormat format; /* FORMAT clause for subquery, if specified */ + bool absent_on_null; /* skip NULL elements? */ + int location; /* token location, or -1 if unknown */ +} JsonArrayQueryCtor; + +/* + * JsonAggCtor - + * common fields of untransformed representation of + * JSON_ARRAYAGG() and JSON_OBJECTAGG() + */ +typedef struct JsonAggCtor +{ + NodeTag type; + JsonOutput *output; /* RETURNING clause, if any */ + Node *agg_filter; /* FILTER clause, if any */ + List *agg_order; /* ORDER BY clause, if any */ + struct WindowDef *over; /* OVER clause, if any */ + int location; /* token location, or -1 if unknown */ +} JsonAggCtor; + +/* + * JsonObjectAgg - + * untransformed representation of JSON_OBJECTAGG() + */ +typedef struct JsonObjectAgg +{ + JsonAggCtor ctor; /* common fields */ + JsonKeyValue *arg; /* object key-value pair */ + bool absent_on_null; /* skip NULL values? */ + bool unique; /* check key uniqueness? */ +} JsonObjectAgg; + +/* + * JsonArrayAgg - + * untransformed representation of JSON_ARRAYAGG() + */ +typedef struct JsonArrayAgg +{ + JsonAggCtor ctor; /* common fields */ + JsonValueExpr *arg; /* array element expression */ + bool absent_on_null; /* skip NULL elements? */ +} JsonArrayAgg; + + /***************************************************************************** * Raw Grammar Output Statements *****************************************************************************/ diff --git a/src/include/nodes/primnodes.h b/src/include/nodes/primnodes.h index c2929ac387..3a39815663 100644 --- a/src/include/nodes/primnodes.h +++ b/src/include/nodes/primnodes.h @@ -73,12 +73,19 @@ typedef struct RangeVar int location; /* token location, or -1 if unknown */ } RangeVar; +typedef enum TableFuncType +{ + TFT_XMLTABLE, + TFT_JSON_TABLE +} TableFuncType; + /* * TableFunc - node for a table function, such as XMLTABLE.
*/ typedef struct TableFunc { NodeTag type; + TableFuncType functype; /* XMLTABLE or JSON_TABLE */ List *ns_uris; /* list of namespace uri */ List *ns_names; /* list of namespace names */ Node *docexpr; /* input document expression */ @@ -89,7 +96,9 @@ typedef struct TableFunc List *colcollations; /* OID list of column collation OIDs */ List *colexprs; /* list of column filter expressions */ List *coldefexprs; /* list of column default expressions */ + List *colvalexprs; /* list of column value expressions */ Bitmapset *notnulls; /* nullability flag for each output column */ + Node *plan; /* JSON_TABLE plan */ int ordinalitycol; /* counts from 0; -1 if none specified */ int location; /* token location, or -1 if unknown */ } TableFunc; @@ -1157,6 +1166,167 @@ typedef struct XmlExpr int location; /* token location, or -1 if unknown */ } XmlExpr; +/* + * JsonExprOp - + * enumeration of JSON functions using JSON path + */ +typedef enum JsonExprOp +{ + IS_JSON_VALUE, /* JSON_VALUE() */ + IS_JSON_QUERY, /* JSON_QUERY() */ + IS_JSON_EXISTS, /* JSON_EXISTS() */ + IS_JSON_TABLE /* JSON_TABLE() */ +} JsonExprOp; + +/* + * JsonEncoding - + * representation of JSON ENCODING clause + */ +typedef enum JsonEncoding +{ + JS_ENC_DEFAULT, /* unspecified */ + JS_ENC_UTF8, + JS_ENC_UTF16, + JS_ENC_UTF32, +} JsonEncoding; + +/* + * JsonFormatType - + * enumeration of JSON formats used in JSON FORMAT clause + */ +typedef enum JsonFormatType +{ + JS_FORMAT_DEFAULT, /* unspecified */ + JS_FORMAT_JSON, /* FORMAT JSON [ENCODING ...] */ + JS_FORMAT_JSONB /* implicit internal format for RETURNING jsonb */ +} JsonFormatType; + +/* + * JsonBehaviorType - + * enumeration of behavior types used in JSON ON ... BEHAVIOR clause + */ +typedef enum +{ + JSON_BEHAVIOR_NULL, + JSON_BEHAVIOR_ERROR, + JSON_BEHAVIOR_EMPTY, + JSON_BEHAVIOR_TRUE, + JSON_BEHAVIOR_FALSE, + JSON_BEHAVIOR_UNKNOWN, + JSON_BEHAVIOR_EMPTY_ARRAY, + JSON_BEHAVIOR_EMPTY_OBJECT, + JSON_BEHAVIOR_DEFAULT, +} JsonBehaviorType; + +/* + * JsonWrapper - + * representation of WRAPPER clause for JSON_QUERY() + */ +typedef enum JsonWrapper +{ + JSW_NONE, + JSW_CONDITIONAL, + JSW_UNCONDITIONAL, +} JsonWrapper; + +/* + * JsonFormat - + * representation of JSON FORMAT clause + */ +typedef struct JsonFormat +{ + JsonFormatType type; /* format type */ + JsonEncoding encoding; /* JSON encoding */ + int location; /* token location, or -1 if unknown */ +} JsonFormat; + +/* + * JsonReturning - + * transformed representation of JSON RETURNING clause + */ +typedef struct JsonReturning +{ + JsonFormat format; /* output JSON format */ + Oid typid; /* target type Oid */ + int32 typmod; /* target type modifier */ +} JsonReturning; + +/* + * JsonBehavior - + * representation of JSON ON ... 
BEHAVIOR clause + */ +typedef struct JsonBehavior +{ + NodeTag type; + JsonBehaviorType btype; /* behavior type */ + Node *default_expr; /* default expression, if any */ +} JsonBehavior; + +/* + * JsonPassing - + * representation of JSON PASSING clause + */ +typedef struct JsonPassing +{ + List *values; /* list of PASSING argument expressions */ + List *names; /* parallel list of Value strings */ +} JsonPassing; + + +/* + * JsonExpr - + * transformed representation of JSON_VALUE(), JSON_QUERY(), JSON_EXISTS() + */ +typedef struct JsonExpr +{ + Expr xpr; + JsonExprOp op; /* json function ID */ + Node *raw_expr; /* raw context item expression */ + Node *formatted_expr; /* formatted context item expression */ + Node *result_expr; /* resulting expression (coerced to RETURNING type) */ + bool coerce_via_populate; /* coerce result using json_populate_type() */ + bool coerce_via_io; /* coerce result using type input function */ + Oid coerce_via_io_collation; /* collation for conversion through I/O */ + JsonFormat format; /* context item format (JSON/JSONB) */ + Const *path_spec; /* JSON path specification */ + JsonPassing passing; /* PASSING clause arguments */ + JsonReturning returning; /* RETURNING clause type/format info */ + JsonBehavior on_empty; /* ON EMPTY behavior */ + JsonBehavior on_error; /* ON ERROR behavior */ + JsonWrapper wrapper; /* WRAPPER for JSON_QUERY */ + bool omit_quotes; /* KEEP/OMIT QUOTES for JSON_QUERY */ + int location; /* token location, or -1 if unknown */ +} JsonExpr; + +/* + * JsonTableParentNode - + * transformed representation of parent JSON_TABLE plan node + */ +typedef struct JsonTableParentNode +{ + NodeTag type; + Const *path; /* jsonpath constant */ + char *name; /* path name */ + JsonPassing passing; /* PASSING arguments */ + Node *child; /* nested columns, if any */ + bool outerJoin; /* outer or inner join for nested columns? */ + int colMin; /* min column index in the resulting column list */ + int colMax; /* max column index in the resulting column list */ + bool errorOnError; /* ERROR/EMPTY ON ERROR behavior */ +} JsonTableParentNode; + +/* + * JsonTableSiblingNode - + * transformed representation of joined sibling JSON_TABLE plan node + */ +typedef struct JsonTableSiblingNode +{ + NodeTag type; + Node *larg; /* left join node */ + Node *rarg; /* right join node */ + bool cross; /* cross or union join? 
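JsonTableParentNode and JsonTableSiblingNode describe the transformed JSON_TABLE plan: a parent carries one jsonpath plus the range of output columns it produces, and a sibling joins two nested paths either as a union or a cross product, inner or outer with respect to the parent row. A hedged sketch of the surface syntax these nodes are built from (SQL:2016 style; all names are hypothetical):

select jt.*
from t,
     json_table(t.js, '$.orders[*]' as o
                columns (id int path '$.id',
                         nested path '$.lines[*]' as l
                           columns (qty int path '$.qty'))
                plan (o outer l)) jt;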
*/ +} JsonTableSiblingNode; + /* ---------------- * NullTest * diff --git a/src/include/parser/kwlist.h b/src/include/parser/kwlist.h index f50e45e886..acf9dfae17 100644 --- a/src/include/parser/kwlist.h +++ b/src/include/parser/kwlist.h @@ -27,6 +27,7 @@ /* name, value, category */ PG_KEYWORD("abort", ABORT_P, UNRESERVED_KEYWORD) +PG_KEYWORD("absent", ABSENT, UNRESERVED_KEYWORD) PG_KEYWORD("absolute", ABSOLUTE_P, UNRESERVED_KEYWORD) PG_KEYWORD("access", ACCESS, UNRESERVED_KEYWORD) PG_KEYWORD("action", ACTION, UNRESERVED_KEYWORD) @@ -88,6 +89,7 @@ PG_KEYWORD("comments", COMMENTS, UNRESERVED_KEYWORD) PG_KEYWORD("commit", COMMIT, UNRESERVED_KEYWORD) PG_KEYWORD("committed", COMMITTED, UNRESERVED_KEYWORD) PG_KEYWORD("concurrently", CONCURRENTLY, TYPE_FUNC_NAME_KEYWORD) +PG_KEYWORD("conditional", CONDITIONAL, UNRESERVED_KEYWORD) PG_KEYWORD("configuration", CONFIGURATION, UNRESERVED_KEYWORD) PG_KEYWORD("conflict", CONFLICT, UNRESERVED_KEYWORD) PG_KEYWORD("connection", CONNECTION, UNRESERVED_KEYWORD) @@ -141,11 +143,13 @@ PG_KEYWORD("double", DOUBLE_P, UNRESERVED_KEYWORD) PG_KEYWORD("drop", DROP, UNRESERVED_KEYWORD) PG_KEYWORD("each", EACH, UNRESERVED_KEYWORD) PG_KEYWORD("else", ELSE, RESERVED_KEYWORD) +PG_KEYWORD("empty", EMPTY_P, UNRESERVED_KEYWORD) PG_KEYWORD("enable", ENABLE_P, UNRESERVED_KEYWORD) PG_KEYWORD("encoding", ENCODING, UNRESERVED_KEYWORD) PG_KEYWORD("encrypted", ENCRYPTED, UNRESERVED_KEYWORD) PG_KEYWORD("end", END_P, RESERVED_KEYWORD) PG_KEYWORD("enum", ENUM_P, UNRESERVED_KEYWORD) +PG_KEYWORD("error", ERROR_P, UNRESERVED_KEYWORD) PG_KEYWORD("escape", ESCAPE, UNRESERVED_KEYWORD) PG_KEYWORD("event", EVENT, UNRESERVED_KEYWORD) PG_KEYWORD("except", EXCEPT, RESERVED_KEYWORD) @@ -168,6 +172,7 @@ PG_KEYWORD("following", FOLLOWING, UNRESERVED_KEYWORD) PG_KEYWORD("for", FOR, RESERVED_KEYWORD) PG_KEYWORD("force", FORCE, UNRESERVED_KEYWORD) PG_KEYWORD("foreign", FOREIGN, RESERVED_KEYWORD) +PG_KEYWORD("format", FORMAT, TYPE_FUNC_NAME_KEYWORD) PG_KEYWORD("forward", FORWARD, UNRESERVED_KEYWORD) PG_KEYWORD("freeze", FREEZE, TYPE_FUNC_NAME_KEYWORD) PG_KEYWORD("from", FROM, RESERVED_KEYWORD) @@ -218,7 +223,18 @@ PG_KEYWORD("is", IS, TYPE_FUNC_NAME_KEYWORD) PG_KEYWORD("isnull", ISNULL, TYPE_FUNC_NAME_KEYWORD) PG_KEYWORD("isolation", ISOLATION, UNRESERVED_KEYWORD) PG_KEYWORD("join", JOIN, TYPE_FUNC_NAME_KEYWORD) +PG_KEYWORD("json", JSON, UNRESERVED_KEYWORD) +PG_KEYWORD("json_array", JSON_ARRAY, COL_NAME_KEYWORD) +PG_KEYWORD("json_arrayagg", JSON_ARRAYAGG, COL_NAME_KEYWORD) +PG_KEYWORD("json_exists", JSON_EXISTS, COL_NAME_KEYWORD) +PG_KEYWORD("json_object", JSON_OBJECT, COL_NAME_KEYWORD) +PG_KEYWORD("json_objectagg", JSON_OBJECTAGG, COL_NAME_KEYWORD) +PG_KEYWORD("json_query", JSON_QUERY, COL_NAME_KEYWORD) +PG_KEYWORD("json_table", JSON_TABLE, COL_NAME_KEYWORD) +PG_KEYWORD("json_value", JSON_VALUE, COL_NAME_KEYWORD) +PG_KEYWORD("keep", KEEP, UNRESERVED_KEYWORD) PG_KEYWORD("key", KEY, UNRESERVED_KEYWORD) +PG_KEYWORD("keys", KEYS, UNRESERVED_KEYWORD) PG_KEYWORD("label", LABEL, UNRESERVED_KEYWORD) PG_KEYWORD("language", LANGUAGE, UNRESERVED_KEYWORD) PG_KEYWORD("large", LARGE_P, UNRESERVED_KEYWORD) @@ -255,6 +271,7 @@ PG_KEYWORD("names", NAMES, UNRESERVED_KEYWORD) PG_KEYWORD("national", NATIONAL, COL_NAME_KEYWORD) PG_KEYWORD("natural", NATURAL, TYPE_FUNC_NAME_KEYWORD) PG_KEYWORD("nchar", NCHAR, COL_NAME_KEYWORD) +PG_KEYWORD("nested", NESTED, UNRESERVED_KEYWORD) PG_KEYWORD("new", NEW, UNRESERVED_KEYWORD) PG_KEYWORD("next", NEXT, UNRESERVED_KEYWORD) PG_KEYWORD("no", NO, UNRESERVED_KEYWORD) @@ -274,6 +291,7 
@@ PG_KEYWORD("off", OFF, UNRESERVED_KEYWORD) PG_KEYWORD("offset", OFFSET, RESERVED_KEYWORD) PG_KEYWORD("oids", OIDS, UNRESERVED_KEYWORD) PG_KEYWORD("old", OLD, UNRESERVED_KEYWORD) +PG_KEYWORD("omit", OMIT, UNRESERVED_KEYWORD) PG_KEYWORD("on", ON, RESERVED_KEYWORD) PG_KEYWORD("only", ONLY, RESERVED_KEYWORD) PG_KEYWORD("operator", OPERATOR, UNRESERVED_KEYWORD) @@ -296,7 +314,9 @@ PG_KEYWORD("partial", PARTIAL, UNRESERVED_KEYWORD) PG_KEYWORD("partition", PARTITION, UNRESERVED_KEYWORD) PG_KEYWORD("passing", PASSING, UNRESERVED_KEYWORD) PG_KEYWORD("password", PASSWORD, UNRESERVED_KEYWORD) +PG_KEYWORD("path", PATH, UNRESERVED_KEYWORD) PG_KEYWORD("placing", PLACING, RESERVED_KEYWORD) +PG_KEYWORD("plan", PLAN, UNRESERVED_KEYWORD) PG_KEYWORD("plans", PLANS, UNRESERVED_KEYWORD) PG_KEYWORD("policy", POLICY, UNRESERVED_KEYWORD) PG_KEYWORD("position", POSITION, COL_NAME_KEYWORD) @@ -313,6 +333,7 @@ PG_KEYWORD("procedure", PROCEDURE, UNRESERVED_KEYWORD) PG_KEYWORD("program", PROGRAM, UNRESERVED_KEYWORD) PG_KEYWORD("publication", PUBLICATION, UNRESERVED_KEYWORD) PG_KEYWORD("quote", QUOTE, UNRESERVED_KEYWORD) +PG_KEYWORD("quotes", QUOTES, UNRESERVED_KEYWORD) PG_KEYWORD("range", RANGE, UNRESERVED_KEYWORD) PG_KEYWORD("read", READ, UNRESERVED_KEYWORD) PG_KEYWORD("real", REAL, COL_NAME_KEYWORD) @@ -344,6 +365,7 @@ PG_KEYWORD("row", ROW, COL_NAME_KEYWORD) PG_KEYWORD("rows", ROWS, UNRESERVED_KEYWORD) PG_KEYWORD("rule", RULE, UNRESERVED_KEYWORD) PG_KEYWORD("savepoint", SAVEPOINT, UNRESERVED_KEYWORD) +PG_KEYWORD("scalar", SCALAR, UNRESERVED_KEYWORD) PG_KEYWORD("schema", SCHEMA, UNRESERVED_KEYWORD) PG_KEYWORD("schemas", SCHEMAS, UNRESERVED_KEYWORD) PG_KEYWORD("scroll", SCROLL, UNRESERVED_KEYWORD) @@ -378,6 +400,7 @@ PG_KEYWORD("stdin", STDIN, UNRESERVED_KEYWORD) PG_KEYWORD("stdout", STDOUT, UNRESERVED_KEYWORD) PG_KEYWORD("storage", STORAGE, UNRESERVED_KEYWORD) PG_KEYWORD("strict", STRICT_P, UNRESERVED_KEYWORD) +PG_KEYWORD("string", STRING, COL_NAME_KEYWORD) PG_KEYWORD("strip", STRIP_P, UNRESERVED_KEYWORD) PG_KEYWORD("subscription", SUBSCRIPTION, UNRESERVED_KEYWORD) PG_KEYWORD("substring", SUBSTRING, COL_NAME_KEYWORD) @@ -409,6 +432,7 @@ PG_KEYWORD("type", TYPE_P, UNRESERVED_KEYWORD) PG_KEYWORD("types", TYPES_P, UNRESERVED_KEYWORD) PG_KEYWORD("unbounded", UNBOUNDED, UNRESERVED_KEYWORD) PG_KEYWORD("uncommitted", UNCOMMITTED, UNRESERVED_KEYWORD) +PG_KEYWORD("unconditional", UNCONDITIONAL, UNRESERVED_KEYWORD) PG_KEYWORD("unencrypted", UNENCRYPTED, UNRESERVED_KEYWORD) PG_KEYWORD("union", UNION, RESERVED_KEYWORD) PG_KEYWORD("unique", UNIQUE, RESERVED_KEYWORD) diff --git a/src/include/parser/parse_expr.h b/src/include/parser/parse_expr.h index 3af09b0056..2e128cfeba 100644 --- a/src/include/parser/parse_expr.h +++ b/src/include/parser/parse_expr.h @@ -23,4 +23,8 @@ extern Node *transformExpr(ParseState *pstate, Node *expr, ParseExprKind exprKin extern const char *ParseExprKindName(ParseExprKind exprKind); +extern Node *coerceJsonExpr(ParseState *pstate, Node *expr, + JsonReturning *returning, + bool *coerce_via_io, bool *coerce_via_populate); + #endif /* PARSE_EXPR_H */ diff --git a/src/include/regex/regex.h b/src/include/regex/regex.h index 27fdc09040..8a2bf0353a 100644 --- a/src/include/regex/regex.h +++ b/src/include/regex/regex.h @@ -173,4 +173,8 @@ extern int pg_regprefix(regex_t *, pg_wchar **, size_t *); extern void pg_regfree(regex_t *); extern size_t pg_regerror(int, const regex_t *, char *, size_t); +extern bool RE_compile_and_execute(text *text_re, char *dat, int dat_len, + int cflags, Oid collation, + int 
nmatch, regmatch_t *pmatch); + #endif /* _REGEX_H_ */ diff --git a/src/include/utils/date.h b/src/include/utils/date.h index 0736a72946..4c8f4f835d 100644 --- a/src/include/utils/date.h +++ b/src/include/utils/date.h @@ -17,6 +17,7 @@ #include #include "fmgr.h" +#include "utils/timestamp.h" typedef int32 DateADT; @@ -73,5 +74,8 @@ extern void EncodeSpecialDate(DateADT dt, char *str); extern DateADT GetSQLCurrentDate(void); extern TimeTzADT *GetSQLCurrentTime(int32 typmod); extern TimeADT GetSQLLocalTime(int32 typmod); +extern int tm2time(struct pg_tm *tm, fsec_t fsec, TimeADT *result); +extern int tm2timetz(struct pg_tm *tm, fsec_t fsec, int tz, TimeTzADT *result); +extern void AdjustTimeForTypmod(TimeADT *time, int32 typmod); #endif /* DATE_H */ diff --git a/src/include/utils/datetime.h b/src/include/utils/datetime.h index 7968569fda..57b263b2bb 100644 --- a/src/include/utils/datetime.h +++ b/src/include/utils/datetime.h @@ -338,4 +338,6 @@ extern TimeZoneAbbrevTable *ConvertTimeZoneAbbrevs(struct tzEntry *abbrevs, int n); extern void InstallTimeZoneAbbrevs(TimeZoneAbbrevTable *tbl); +extern void AdjustTimestampForTypmod(Timestamp *time, int32 typmod); + #endif /* DATETIME_H */ diff --git a/src/include/utils/formatting.h b/src/include/utils/formatting.h index 8eaf2c3052..119df00c50 100644 --- a/src/include/utils/formatting.h +++ b/src/include/utils/formatting.h @@ -28,4 +28,7 @@ extern char *asc_tolower(const char *buff, size_t nbytes); extern char *asc_toupper(const char *buff, size_t nbytes); extern char *asc_initcap(const char *buff, size_t nbytes); +extern Datum to_datetime(text *date_txt, const char *fmt, int fmt_len, + bool strict, Oid *typid, int32 *typmod); + #endif diff --git a/src/include/utils/jsonapi.h b/src/include/utils/jsonapi.h index 4336823de2..e86604f45a 100644 --- a/src/include/utils/jsonapi.h +++ b/src/include/utils/jsonapi.h @@ -15,6 +15,7 @@ #define JSONAPI_H #include "jsonb.h" +#include "access/htup.h" #include "lib/stringinfo.h" typedef enum @@ -93,6 +94,48 @@ typedef struct JsonSemAction json_scalar_action scalar; } JsonSemAction; +typedef enum +{ + JTI_ARRAY_START, + JTI_ARRAY_ELEM, + JTI_ARRAY_ELEM_SCALAR, + JTI_ARRAY_ELEM_AFTER, + JTI_ARRAY_END, + JTI_OBJECT_START, + JTI_OBJECT_KEY, + JTI_OBJECT_VALUE, + JTI_OBJECT_VALUE_AFTER, +} JsontIterState; + +typedef struct JsonContainerData +{ + uint32 header; + int len; + char *data; +} JsonContainerData; + +typedef const JsonContainerData JsonContainer; + +typedef struct Json +{ + JsonContainer root; +} Json; + +typedef struct JsonIterator +{ + struct JsonIterator *parent; + JsonContainer *container; + JsonLexContext *lex; + JsontIterState state; + bool isScalar; +} JsonIterator; + +#define DatumGetJsonP(datum) JsonCreate(DatumGetTextP(datum)) +#define DatumGetJsonPCopy(datum) JsonCreate(DatumGetTextPCopy(datum)) + +#define JsonPGetDatum(json) \ + PointerGetDatum(cstring_to_text_with_len((json)->root.data, (json)->root.len)) + /* * parse_json will parse the string in the lex calling the * action functions in sem at the appropriate points. 
It is @@ -147,4 +190,26 @@ extern Jsonb *transform_jsonb_string_values(Jsonb *jsonb, void *action_state, extern text *transform_json_string_values(text *json, void *action_state, JsonTransformStringValuesAction transform_action); +extern Datum json_populate_type(Datum json_val, Oid json_type, + Oid typid, int32 typmod, + void **cache, MemoryContext mcxt, bool *isnull); + +extern Json *JsonCreate(text *json); +extern JsonbIteratorToken JsonIteratorNext(JsonIterator **pit, JsonbValue *val, + bool skipNested); +extern JsonIterator *JsonIteratorInit(JsonContainer *jc); +extern void JsonIteratorFree(JsonIterator *it); +extern uint32 JsonGetArraySize(JsonContainer *jc); +extern Json *JsonbValueToJson(JsonbValue *jbv); +extern JsonbValue *JsonExtractScalar(JsonContainer *jbc, JsonbValue *res); +extern char *JsonUnquote(Json *jb); +extern char *JsonToCString(StringInfo out, JsonContainer *jc, + int estimated_len); +extern JsonbValue *pushJsonValue(JsonbParseState **pstate, + JsonbIteratorToken tok, JsonbValue *jbv); +extern JsonbValue *findJsonValueFromContainer(JsonContainer *jc, uint32 flags, + JsonbValue *key); +extern JsonbValue *getIthJsonValueFromContainer(JsonContainer *array, + uint32 index); + #endif /* JSONAPI_H */ diff --git a/src/include/utils/jsonb.h b/src/include/utils/jsonb.h index d639bbc960..cb89366d7a 100644 --- a/src/include/utils/jsonb.h +++ b/src/include/utils/jsonb.h @@ -34,6 +34,9 @@ typedef enum #define JsonbExistsStrategyNumber 9 #define JsonbExistsAnyStrategyNumber 10 #define JsonbExistsAllStrategyNumber 11 +#define JsonbJsonpathExistsStrategyNumber 15 +#define JsonbJsonpathPredicateStrategyNumber 16 + /* * In the standard jsonb_ops GIN opclass for jsonb, we choose to index both @@ -66,8 +69,10 @@ typedef enum /* Convenience macros */ #define DatumGetJsonbP(d) ((Jsonb *) PG_DETOAST_DATUM(d)) +#define DatumGetJsonbPCopy(d) ((Jsonb *) PG_DETOAST_DATUM_COPY(d)) #define JsonbPGetDatum(p) PointerGetDatum(p) #define PG_GETARG_JSONB_P(x) DatumGetJsonbP(PG_GETARG_DATUM(x)) +#define PG_GETARG_JSONB_P_COPY(x) DatumGetJsonbPCopy(PG_GETARG_DATUM(x)) #define PG_RETURN_JSONB_P(x) PG_RETURN_POINTER(x) typedef struct JsonbPair JsonbPair; @@ -219,10 +224,10 @@ typedef struct } Jsonb; /* convenience macros for accessing the root container in a Jsonb datum */ -#define JB_ROOT_COUNT(jbp_) (*(uint32 *) VARDATA(jbp_) & JB_CMASK) -#define JB_ROOT_IS_SCALAR(jbp_) ((*(uint32 *) VARDATA(jbp_) & JB_FSCALAR) != 0) -#define JB_ROOT_IS_OBJECT(jbp_) ((*(uint32 *) VARDATA(jbp_) & JB_FOBJECT) != 0) -#define JB_ROOT_IS_ARRAY(jbp_) ((*(uint32 *) VARDATA(jbp_) & JB_FARRAY) != 0) +#define JB_ROOT_COUNT(jbp_) JsonContainerSize(&(jbp_)->root) +#define JB_ROOT_IS_SCALAR(jbp_) JsonContainerIsScalar(&(jbp_)->root) +#define JB_ROOT_IS_OBJECT(jbp_) JsonContainerIsObject(&(jbp_)->root) +#define JB_ROOT_IS_ARRAY(jbp_) JsonContainerIsArray(&(jbp_)->root) enum jbvType @@ -236,7 +241,9 @@ enum jbvType jbvArray = 0x10, jbvObject, /* Binary (i.e. struct Jsonb) jbvArray/jbvObject */ - jbvBinary + jbvBinary, + /* Virtual types */ + jbvDatetime = 0x20, }; /* @@ -269,6 +276,8 @@ struct JsonbValue struct { int nPairs; /* 1 pair, 2 elements */ + bool uniquified; /* Should we sort pairs by key name and + * remove duplicate keys? 
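The two strategy numbers added above (15 and 16) line up with the new jsonpath operators exercised in the regression tests further down, so an existing GIN index on a jsonb column should be usable with them. A hypothetical sketch (table and column names are made up):

create table t (js jsonb);
create index t_js_idx on t using gin (js);                 -- default jsonb_ops
select * from t where js @? '$.tags[*] ? (@ == "sale")';   -- exists strategy
select * from t where js @~ '$.price > 100';               -- predicate strategy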
*/ JsonbPair *pairs; } object; /* Associative container type */ @@ -277,11 +286,19 @@ struct JsonbValue int len; JsonbContainer *data; } binary; /* Array or object, in on-disk format */ + + struct + { + Datum value; + Oid typid; + int32 typmod; + } datetime; } val; }; -#define IsAJsonbScalar(jsonbval) ((jsonbval)->type >= jbvNull && \ - (jsonbval)->type <= jbvBool) +#define IsAJsonbScalar(jsonbval) (((jsonbval)->type >= jbvNull && \ + (jsonbval)->type <= jbvBool) || \ + (jsonbval)->type == jbvDatetime) /* * Key/value pair within an Object. @@ -355,6 +372,8 @@ typedef struct JsonbIterator /* Support functions */ extern uint32 getJsonbOffset(const JsonbContainer *jc, int index); extern uint32 getJsonbLength(const JsonbContainer *jc, int index); +extern int lengthCompareJsonbStringValue(const void *a, const void *b); +extern bool equalsJsonbScalarValue(JsonbValue *a, JsonbValue *b); extern int compareJsonbContainers(JsonbContainer *a, JsonbContainer *b); extern JsonbValue *findJsonbValueFromContainer(JsonbContainer *sheader, uint32 flags, @@ -363,6 +382,8 @@ extern JsonbValue *getIthJsonbValueFromContainer(JsonbContainer *sheader, uint32 i); extern JsonbValue *pushJsonbValue(JsonbParseState **pstate, JsonbIteratorToken seq, JsonbValue *jbVal); +extern JsonbValue *pushJsonbValueScalar(JsonbParseState **pstate, + JsonbIteratorToken seq,JsonbValue *scalarVal); extern JsonbIterator *JsonbIteratorInit(JsonbContainer *container); extern JsonbIteratorToken JsonbIteratorNext(JsonbIterator **it, JsonbValue *val, bool skipNested); @@ -379,5 +400,9 @@ extern char *JsonbToCString(StringInfo out, JsonbContainer *in, extern char *JsonbToCStringIndent(StringInfo out, JsonbContainer *in, int estimated_len); +extern Jsonb *JsonbMakeEmptyArray(void); +extern Jsonb *JsonbMakeEmptyObject(void); +extern char *JsonbUnquote(Jsonb *jb); +extern JsonbValue *JsonbExtractScalar(JsonbContainer *jbc, JsonbValue *res); #endif /* __JSONB_H__ */ diff --git a/src/include/utils/jsonpath.h b/src/include/utils/jsonpath.h new file mode 100644 index 0000000000..c2710d6dbc --- /dev/null +++ b/src/include/utils/jsonpath.h @@ -0,0 +1,335 @@ +/*------------------------------------------------------------------------- + * + * jsonpath.h + * Definitions of jsonpath datatype + * + * Copyright (c) 2017, PostgreSQL Global Development Group + * + * IDENTIFICATION + * src/include/utils/jsonpath.h + * + *------------------------------------------------------------------------- + */ + +#ifndef JSONPATH_H +#define JSONPATH_H + +#include "fmgr.h" +#include "executor/tablefunc.h" +#include "nodes/pg_list.h" +#include "nodes/primnodes.h" +#include "utils/jsonb.h" + +typedef struct +{ + int32 vl_len_;/* varlena header (do not touch directly!) 
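jsonpath.h below introduces the on-disk form of the new jsonpath type: a varlena whose small header stores the format version and a lax/strict bit (JSONPATH_LAX), followed by the serialized path items. Assuming the type has the usual text input function, a path literal can be written directly, e.g.:

select 'lax $.a[*] ? (@ > 1 && @ < 5)'::jsonpath;
select 'strict $.a[0]'::jsonpath;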
*/ + uint32 header; /* just version, other bits are reserved for future use */ + char data[FLEXIBLE_ARRAY_MEMBER]; +} JsonPath; + +#define JSONPATH_VERSION (0x01) +#define JSONPATH_LAX (0x80000000) +#define JSONPATH_HDRSZ (offsetof(JsonPath, data)) + +#define DatumGetJsonPathP(d) ((JsonPath *) DatumGetPointer(PG_DETOAST_DATUM(d))) +#define DatumGetJsonPathPCopy(d) ((JsonPath *) DatumGetPointer(PG_DETOAST_DATUM_COPY(d))) +#define PG_GETARG_JSONPATH_P(x) DatumGetJsonPathP(PG_GETARG_DATUM(x)) +#define PG_GETARG_JSONPATH_P_COPY(x) DatumGetJsonPathPCopy(PG_GETARG_DATUM(x)) +#define PG_RETURN_JSONPATH_P(p) PG_RETURN_POINTER(p) + +#define jspIsScalar(type) ((type) >= jpiNull && (type) <= jpiBool) + +/* + * All node types of a jsonpath expression + */ +typedef enum JsonPathItemType { + jpiNull = jbvNull, + jpiString = jbvString, + jpiNumeric = jbvNumeric, + jpiBool = jbvBool, + jpiAnd, + jpiOr, + jpiNot, + jpiIsUnknown, + jpiEqual, + jpiNotEqual, + jpiLess, + jpiGreater, + jpiLessOrEqual, + jpiGreaterOrEqual, + jpiAdd, + jpiSub, + jpiMul, + jpiDiv, + jpiMod, + jpiPlus, + jpiMinus, + jpiAnyArray, + jpiAnyKey, + jpiIndexArray, + jpiAny, + jpiKey, + jpiCurrent, + jpiRoot, + jpiVariable, + jpiFilter, + jpiExists, + jpiType, + jpiSize, + jpiAbs, + jpiFloor, + jpiCeiling, + jpiDouble, + jpiDatetime, + jpiKeyValue, + jpiSubscript, + jpiLast, + jpiStartsWith, + jpiLikeRegex, + jpiMap, + jpiSequence, + jpiArray, + jpiObject, + jpiObjectField, + jpiReduce, + jpiFold, + jpiFoldl, + jpiFoldr, + jpiMin, + jpiMax, +} JsonPathItemType; + +/* XQuery regex mode flags for LIKE_REGEX predicate */ +#define JSP_REGEX_ICASE 0x01 /* i flag, case insensitive */ +#define JSP_REGEX_SLINE 0x02 /* s flag, single-line mode */ +#define JSP_REGEX_MLINE 0x04 /* m flag, multi-line mode */ +#define JSP_REGEX_WSPACE 0x08 /* x flag, expanded syntax */ + +/* + * Support functions to parse/construct binary value. + * Unlike many other expression representations, the first/main + * node is not an operation but the left operand of the expression.
This + * allows cheap follow-the-path descent through the jsonb + * structure before the operator is executed with the right operand. + */ + +typedef struct JsonPathItem { + JsonPathItemType type; + + /* position from base to next node */ + int32 nextPos; + + /* + * pointer into the JsonPath value to the current node; all + * positions in the current node are relative to this base + */ + char *base; + + union { + /* classic operator with two operands: and, or etc */ + struct { + int32 left; + int32 right; + } args; + + /* any unary operation */ + int32 arg; + + /* storage for jpiIndexArray: indexes of array */ + struct { + int32 nelems; + struct { + int32 from; + int32 to; + } *elems; + } array; + + /* jpiAny: levels */ + struct { + uint32 first; + uint32 last; + } anybounds; + + struct { + int32 nelems; + int32 *elems; + } sequence; + + struct { + int32 nfields; + struct { + int32 key; + int32 val; + } *fields; + } object; + + struct { + char *data; /* for bool, numeric and string/key */ + int32 datalen; /* filled only for string/key */ + } value; + + struct { + int32 expr; + char *pattern; + int32 patternlen; + uint32 flags; + } like_regex; + } content; +} JsonPathItem; + +#define jspHasNext(jsp) ((jsp)->nextPos > 0) + +extern void jspInit(JsonPathItem *v, JsonPath *js); +extern void jspInitByBuffer(JsonPathItem *v, char *base, int32 pos); +extern bool jspGetNext(JsonPathItem *v, JsonPathItem *a); +extern void jspGetArg(JsonPathItem *v, JsonPathItem *a); +extern void jspGetLeftArg(JsonPathItem *v, JsonPathItem *a); +extern void jspGetRightArg(JsonPathItem *v, JsonPathItem *a); +extern Numeric jspGetNumeric(JsonPathItem *v); +extern bool jspGetBool(JsonPathItem *v); +extern char * jspGetString(JsonPathItem *v, int32 *len); +extern bool jspGetArraySubscript(JsonPathItem *v, JsonPathItem *from, + JsonPathItem *to, int i); +extern void jspGetSequenceElement(JsonPathItem *v, int i, JsonPathItem *elem); +extern void jspGetObjectField(JsonPathItem *v, int i, + JsonPathItem *key, JsonPathItem *val); + +/* + * Parsing + */ + +typedef struct JsonPathParseItem JsonPathParseItem; + +struct JsonPathParseItem { + JsonPathItemType type; + JsonPathParseItem *next; /* next in path */ + + union { + + /* classic operator with two operands: and, or etc */ + struct { + JsonPathParseItem *left; + JsonPathParseItem *right; + } args; + + /* any unary operation */ + JsonPathParseItem *arg; + + /* storage for jpiIndexArray: indexes of array */ + struct { + int nelems; + struct + { + JsonPathParseItem *from; + JsonPathParseItem *to; + } *elems; + } array; + + /* jpiAny: levels */ + struct { + uint32 first; + uint32 last; + } anybounds; + + struct { + JsonPathParseItem *expr; + char *pattern; /* may not be null-terminated */ + uint32 patternlen; + uint32 flags; + } like_regex; + + struct { + List *elems; + } sequence; + + struct { + List *fields; + } object; + + /* scalars */ + Numeric numeric; + bool boolean; + struct { + uint32 len; + char *val; /* may not be null-terminated */ + } string; + } value; +}; + +typedef struct JsonPathParseResult +{ + JsonPathParseItem *expr; + bool lax; +} JsonPathParseResult; + +extern JsonPathParseResult* parsejsonpath(const char *str, int len); + +/* + * Evaluation of jsonpath + */ + +typedef enum JsonPathExecStatus +{ + jperOk = 0, + jperError, + jperFatalError, + jperNotFound +} JsonPathExecStatus; + +typedef uint64 JsonPathExecResult; + +#define jperStatus(jper) ((JsonPathExecStatus)(uint32)(jper)) +#define jperIsError(jper) (jperStatus(jper) == jperError) +#define jperGetError(jper)
((uint32)((jper) >> 32)) +#define jperMakeError(err) (((uint64)(err) << 32) | jperError) + +typedef Datum (*JsonPathVariable_cb)(void *, bool *); + +typedef struct JsonPathVariable { + text *varName; + Oid typid; + int32 typmod; + JsonPathVariable_cb cb; + void *cb_arg; +} JsonPathVariable; + +typedef struct JsonPathVariableEvalContext +{ + JsonPathVariable var; + struct ExprContext *econtext; + struct ExprState *estate; + MemoryContext mcxt; /* memory context for cached value */ + Datum value; + bool isnull; + bool evaluated; +} JsonPathVariableEvalContext; + +typedef struct JsonValueList +{ + JsonbValue *singleton; + List *list; +} JsonValueList; + +JsonPathExecResult executeJsonPath(JsonPath *path, + List *vars, /* list of JsonPathVariable */ + Jsonb *json, + JsonValueList *foundJson); + +extern bool JsonbPathExists(Datum jb, JsonPath *path, List *vars); +extern Datum JsonbPathQuery(Datum jb, JsonPath *jp, JsonWrapper wrapper, + bool *empty, List *vars); +extern JsonbValue *JsonbPathValue(Datum jb, JsonPath *jp, bool *empty, + List *vars); + +extern bool JsonPathExists(Datum json, JsonPath *path, List *vars); +extern JsonbValue *JsonPathValue(Datum json, JsonPath *jp, bool *empty, + List *vars); +extern Datum JsonPathQuery(Datum json, JsonPath *jp, JsonWrapper wrapper, + bool *empty, List *vars); + +extern Datum EvalJsonPathVar(void *cxt, bool *isnull); + +extern const TableFuncRoutine JsonTableRoutine; +extern const TableFuncRoutine JsonbTableRoutine; + +#endif diff --git a/src/include/utils/jsonpath_scanner.h b/src/include/utils/jsonpath_scanner.h new file mode 100644 index 0000000000..1c8447f6bf --- /dev/null +++ b/src/include/utils/jsonpath_scanner.h @@ -0,0 +1,30 @@ +/*------------------------------------------------------------------------- + * + * jsonpath_scanner.h + * jsonpath scanner & parser support + * + * Portions Copyright (c) 1996-2017, PostgreSQL Global Development Group + * + * src/include/utils/jsonpath_scanner.h + * + *------------------------------------------------------------------------- + */ + +#ifndef JSONPATH_SCANNER_H +#define JSONPATH_SCANNER_H + +/* struct string is shared between scan and gram */ +typedef struct string { + char *val; + int len; + int total; +} string; + +#include "utils/jsonpath.h" +#include "utils/jsonpath_gram.h" + +/* flex 2.5.4 doesn't bother with a decl for this */ +extern int jsonpath_yylex(YYSTYPE * yylval_param); +extern void jsonpath_yyerror(JsonPathParseResult **result, const char *message); + +#endif diff --git a/src/interfaces/ecpg/preproc/parse.pl b/src/interfaces/ecpg/preproc/parse.pl index 768df3a6b1..6af70b8cef 100644 --- a/src/interfaces/ecpg/preproc/parse.pl +++ b/src/interfaces/ecpg/preproc/parse.pl @@ -45,6 +45,8 @@ 'NOT_LA' => 'not', 'NULLS_LA' => 'nulls', 'WITH_LA' => 'with', + 'WITH_LA_UNIQUE' => 'with', + 'WITHOUT_LA' => 'without', 'TYPECAST' => '::', 'DOT_DOT' => '..', 'COLON_EQUALS' => ':=', diff --git a/src/interfaces/ecpg/preproc/parser.c b/src/interfaces/ecpg/preproc/parser.c index 0c2705cd2b..3455d7e02a 100644 --- a/src/interfaces/ecpg/preproc/parser.c +++ b/src/interfaces/ecpg/preproc/parser.c @@ -84,6 +84,9 @@ filtered_base_yylex(void) case WITH: cur_token_length = 4; break; + case WITHOUT: + cur_token_length = 7; + break; default: return cur_token; } @@ -155,8 +158,22 @@ filtered_base_yylex(void) case ORDINALITY: cur_token = WITH_LA; break; + case UNIQUE: + cur_token = WITH_LA_UNIQUE; + break; + } + break; + + case WITHOUT: + /* Replace WITHOUT by WITHOUT_LA if it's followed by TIME */ + switch (next_token) 
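The ecpg preprocessor changes here presumably mirror the backend's token filtering: WITH followed by UNIQUE becomes WITH_LA_UNIQUE, and WITHOUT followed by TIME becomes WITHOUT_LA, so the grammar can keep type names and the new JSON clauses apart. Roughly the distinction being drawn (hypothetical statements, SQL:2016 syntax assumed):

select '{"a": 1, "a": 2}' is json with unique keys;   -- WITH -> WITH_LA_UNIQUE
select cast(now() as timestamp without time zone);    -- WITHOUT -> WITHOUT_LA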
+ { + case TIME: + cur_token = WITHOUT_LA; + break; } break; + } return cur_token; diff --git a/src/test/regress/expected/horology.out b/src/test/regress/expected/horology.out index 7b3d058425..63e39198e6 100644 --- a/src/test/regress/expected/horology.out +++ b/src/test/regress/expected/horology.out @@ -2930,6 +2930,36 @@ SELECT to_timestamp('2011-12-18 11:38 PM', 'YYYY-MM-DD HH12:MI PM'); Sun Dec 18 23:38:00 2011 PST (1 row) +SELECT to_timestamp('2011-12-18 11:38 +05', 'YYYY-MM-DD HH12:MI TZH'); + to_timestamp +------------------------------ + Sat Dec 17 22:38:00 2011 PST +(1 row) + +SELECT to_timestamp('2011-12-18 11:38 -05', 'YYYY-MM-DD HH12:MI TZH'); + to_timestamp +------------------------------ + Sun Dec 18 08:38:00 2011 PST +(1 row) + +SELECT to_timestamp('2011-12-18 11:38 +05:20', 'YYYY-MM-DD HH12:MI TZH:TZM'); + to_timestamp +------------------------------ + Sat Dec 17 22:18:00 2011 PST +(1 row) + +SELECT to_timestamp('2011-12-18 11:38 -05:20', 'YYYY-MM-DD HH12:MI TZH:TZM'); + to_timestamp +------------------------------ + Sun Dec 18 08:58:00 2011 PST +(1 row) + +SELECT to_timestamp('2011-12-18 11:38 20', 'YYYY-MM-DD HH12:MI TZM'); + to_timestamp +------------------------------ + Sun Dec 18 03:18:00 2011 PST +(1 row) + -- -- Check handling of multiple spaces in format and/or input -- diff --git a/src/test/regress/expected/json_jsonpath.out b/src/test/regress/expected/json_jsonpath.out new file mode 100644 index 0000000000..47e03ac775 --- /dev/null +++ b/src/test/regress/expected/json_jsonpath.out @@ -0,0 +1,2015 @@ +select json '{"a": 12}' @? '$.a.b'; + ?column? +---------- + f +(1 row) + +select json '{"a": 12}' @? '$.b'; + ?column? +---------- + f +(1 row) + +select json '{"a": {"a": 12}}' @? '$.a.a'; + ?column? +---------- + t +(1 row) + +select json '{"a": {"a": 12}}' @? '$.*.a'; + ?column? +---------- + t +(1 row) + +select json '{"b": {"a": 12}}' @? '$.*.a'; + ?column? +---------- + t +(1 row) + +select json '{}' @? '$.*'; + ?column? +---------- + f +(1 row) + +select json '{"a": 1}' @? '$.*'; + ?column? +---------- + t +(1 row) + +select json '{"a": {"b": 1}}' @? 'lax $.**{1}'; + ?column? +---------- + t +(1 row) + +select json '{"a": {"b": 1}}' @? 'lax $.**{2}'; + ?column? +---------- + t +(1 row) + +select json '{"a": {"b": 1}}' @? 'lax $.**{3}'; + ?column? +---------- + f +(1 row) + +select json '[]' @? '$.[*]'; + ?column? +---------- + f +(1 row) + +select json '[1]' @? '$.[*]'; + ?column? +---------- + t +(1 row) + +select json '[1]' @? '$.[1]'; + ?column? +---------- + f +(1 row) + +select json '[1]' @? 'strict $.[1]'; +ERROR: Invalid SQL/JSON subscript +select json '[1]' @? '$.[0]'; + ?column? +---------- + t +(1 row) + +select json '[1]' @? '$.[0.3]'; + ?column? +---------- + t +(1 row) + +select json '[1]' @? '$.[0.5]'; + ?column? +---------- + t +(1 row) + +select json '[1]' @? '$.[0.9]'; + ?column? +---------- + t +(1 row) + +select json '[1]' @? '$.[1.2]'; + ?column? +---------- + f +(1 row) + +select json '[1]' @? 'strict $.[1.2]'; +ERROR: Invalid SQL/JSON subscript +select json '{}' @? 'strict $.[0.3]'; +ERROR: Invalid SQL/JSON subscript +select json '{}' @? 'lax $.[0.3]'; + ?column? +---------- + t +(1 row) + +select json '{}' @? 'strict $.[1.2]'; +ERROR: Invalid SQL/JSON subscript +select json '{}' @? 'lax $.[1.2]'; + ?column? +---------- + f +(1 row) + +select json '{}' @? 'strict $.[-2 to 3]'; +ERROR: Invalid SQL/JSON subscript +select json '{}' @? 'lax $.[-2 to 3]'; + ?column? +---------- + t +(1 row) + +select json '{"a": [1,2,3], "b": [3,4,5]}' @? '$ ? 
(@.a[*] > @.b[*])'; + ?column? +---------- + f +(1 row) + +select json '{"a": [1,2,3], "b": [3,4,5]}' @? '$ ? (@.a[*] >= @.b[*])'; + ?column? +---------- + t +(1 row) + +select json '{"a": [1,2,3], "b": [3,4,"5"]}' @? '$ ? (@.a[*] >= @.b[*])'; + ?column? +---------- + t +(1 row) + +select json '{"a": [1,2,3], "b": [3,4,"5"]}' @? 'strict $ ? (@.a[*] >= @.b[*])'; + ?column? +---------- + f +(1 row) + +select json '{"a": [1,2,3], "b": [3,4,null]}' @? '$ ? (@.a[*] >= @.b[*])'; + ?column? +---------- + t +(1 row) + +select json '1' @? '$ ? ((@ == "1") is unknown)'; + ?column? +---------- + t +(1 row) + +select json '1' @? '$ ? ((@ == 1) is unknown)'; + ?column? +---------- + f +(1 row) + +select json '[{"a": 1}, {"a": 2}]' @? '$[0 to 1] ? (@.a > 1)'; + ?column? +---------- + t +(1 row) + +select json '{"a": 12, "b": {"a": 13}}' @* '$.a'; + ?column? +---------- + 12 +(1 row) + +select json '{"a": 12, "b": {"a": 13}}' @* '$.b'; + ?column? +----------- + {"a": 13} +(1 row) + +select json '{"a": 12, "b": {"a": 13}}' @* '$.*'; + ?column? +----------- + 12 + {"a": 13} +(2 rows) + +select json '{"a": 12, "b": {"a": 13}}' @* 'lax $.*.a'; + ?column? +---------- + 13 +(1 row) + +select json '[12, {"a": 13}, {"b": 14}]' @* 'lax $.[*].a'; + ?column? +---------- + 13 +(1 row) + +select json '[12, {"a": 13}, {"b": 14}]' @* 'lax $.[*].*'; + ?column? +---------- + 13 + 14 +(2 rows) + +select json '[12, {"a": 13}, {"b": 14}]' @* 'lax $.[0].a'; + ?column? +---------- +(0 rows) + +select json '[12, {"a": 13}, {"b": 14}]' @* 'lax $.[1].a'; + ?column? +---------- + 13 +(1 row) + +select json '[12, {"a": 13}, {"b": 14}]' @* 'lax $.[2].a'; + ?column? +---------- +(0 rows) + +select json '[12, {"a": 13}, {"b": 14}]' @* 'lax $.[0,1].a'; + ?column? +---------- + 13 +(1 row) + +select json '[12, {"a": 13}, {"b": 14}]' @* 'lax $.[0 to 10].a'; + ?column? +---------- + 13 +(1 row) + +select json '[12, {"a": 13}, {"b": 14}, "ccc", true]' @* '$.[2.5 - 1 to @.size() - 2]'; + ?column? +----------- + {"a": 13} + {"b": 14} + "ccc" +(3 rows) + +select json '1' @* 'lax $[0]'; + ?column? +---------- + 1 +(1 row) + +select json '1' @* 'lax $[*]'; + ?column? +---------- + 1 +(1 row) + +select json '{}' @* 'lax $[0]'; + ?column? +---------- + {} +(1 row) + +select json '[1]' @* 'lax $[0]'; + ?column? +---------- + 1 +(1 row) + +select json '[1]' @* 'lax $[*]'; + ?column? +---------- + 1 +(1 row) + +select json '[1,2,3]' @* 'lax $[*]'; + ?column? +---------- + 1 + 2 + 3 +(3 rows) + +select json '[]' @* '$[last]'; + ?column? +---------- +(0 rows) + +select json '[]' @* 'strict $[last]'; +ERROR: Invalid SQL/JSON subscript +select json '[1]' @* '$[last]'; + ?column? +---------- + 1 +(1 row) + +select json '{}' @* 'lax $[last]'; + ?column? +---------- + {} +(1 row) + +select json '[1,2,3]' @* '$[last]'; + ?column? +---------- + 3 +(1 row) + +select json '[1,2,3]' @* '$[last - 1]'; + ?column? +---------- + 2 +(1 row) + +select json '[1,2,3]' @* '$[last ? (@.type() == "number")]'; + ?column? +---------- + 3 +(1 row) + +select json '[1,2,3]' @* '$[last ? (@.type() == "string")]'; +ERROR: Invalid SQL/JSON subscript +select * from jsonpath_query(json '{"a": 10}', '$'); + jsonpath_query +---------------- + {"a": 10} +(1 row) + +select * from jsonpath_query(json '{"a": 10}', '$ ? (.a < $value)'); +ERROR: could not find 'value' passed variable +select * from jsonpath_query(json '{"a": 10}', '$ ? (.a < $value)', '{"value" : 13}'); + jsonpath_query +---------------- + {"a": 10} +(1 row) + +select * from jsonpath_query(json '{"a": 10}', '$ ? 
(.a < $value)', '{"value" : 8}'); + jsonpath_query +---------------- +(0 rows) + +select * from jsonpath_query(json '{"a": 10}', '$.a ? (@ < $value)', '{"value" : 13}'); + jsonpath_query +---------------- + 10 +(1 row) + +select * from jsonpath_query(json '[10,11,12,13,14,15]', '$.[*] ? (@ < $value)', '{"value" : 13}'); + jsonpath_query +---------------- + 10 + 11 + 12 +(3 rows) + +select * from jsonpath_query(json '[10,11,12,13,14,15]', '$.[0,1] ? (@ < $value)', '{"value" : 13}'); + jsonpath_query +---------------- + 10 + 11 +(2 rows) + +select * from jsonpath_query(json '[10,11,12,13,14,15]', '$.[0 to 2] ? (@ < $value)', '{"value" : 15}'); + jsonpath_query +---------------- + 10 + 11 + 12 +(3 rows) + +select * from jsonpath_query(json '[1,"1",2,"2",null]', '$.[*] ? (@ == "1")'); + jsonpath_query +---------------- + "1" +(1 row) + +select * from jsonpath_query(json '[1,"1",2,"2",null]', '$.[*] ? (@ == $value)', '{"value" : "1"}'); + jsonpath_query +---------------- + "1" +(1 row) + +select json '[1, "2", null]' @* '$[*] ? (@ != null)'; + ?column? +---------- + 1 + "2" +(2 rows) + +select json '[1, "2", null]' @* '$[*] ? (@ == null)'; + ?column? +---------- + null +(1 row) + +select json '{"a": {"b": 1}}' @* 'lax $.**'; + ?column? +----------------- + {"a": {"b": 1}} + {"b": 1} + 1 +(3 rows) + +select json '{"a": {"b": 1}}' @* 'lax $.**{1}'; + ?column? +---------- + {"b": 1} +(1 row) + +select json '{"a": {"b": 1}}' @* 'lax $.**{1,}'; + ?column? +---------- + {"b": 1} + 1 +(2 rows) + +select json '{"a": {"b": 1}}' @* 'lax $.**{2}'; + ?column? +---------- + 1 +(1 row) + +select json '{"a": {"b": 1}}' @* 'lax $.**{2,}'; + ?column? +---------- + 1 +(1 row) + +select json '{"a": {"b": 1}}' @* 'lax $.**{3,}'; + ?column? +---------- +(0 rows) + +select json '{"a": {"b": 1}}' @* 'lax $.**.b ? (@ > 0)'; + ?column? +---------- + 1 +(1 row) + +select json '{"a": {"b": 1}}' @* 'lax $.**{0}.b ? (@ > 0)'; + ?column? +---------- +(0 rows) + +select json '{"a": {"b": 1}}' @* 'lax $.**{1}.b ? (@ > 0)'; + ?column? +---------- + 1 +(1 row) + +select json '{"a": {"b": 1}}' @* 'lax $.**{0,}.b ? (@ > 0)'; + ?column? +---------- + 1 +(1 row) + +select json '{"a": {"b": 1}}' @* 'lax $.**{1,}.b ? (@ > 0)'; + ?column? +---------- + 1 +(1 row) + +select json '{"a": {"b": 1}}' @* 'lax $.**{1,2}.b ? (@ > 0)'; + ?column? +---------- + 1 +(1 row) + +select json '{"a": {"c": {"b": 1}}}' @* 'lax $.**.b ? (@ > 0)'; + ?column? +---------- + 1 +(1 row) + +select json '{"a": {"c": {"b": 1}}}' @* 'lax $.**{0}.b ? (@ > 0)'; + ?column? +---------- +(0 rows) + +select json '{"a": {"c": {"b": 1}}}' @* 'lax $.**{1}.b ? (@ > 0)'; + ?column? +---------- +(0 rows) + +select json '{"a": {"c": {"b": 1}}}' @* 'lax $.**{0,}.b ? (@ > 0)'; + ?column? +---------- + 1 +(1 row) + +select json '{"a": {"c": {"b": 1}}}' @* 'lax $.**{1,}.b ? (@ > 0)'; + ?column? +---------- + 1 +(1 row) + +select json '{"a": {"c": {"b": 1}}}' @* 'lax $.**{1,2}.b ? (@ > 0)'; + ?column? +---------- + 1 +(1 row) + +select json '{"a": {"c": {"b": 1}}}' @* 'lax $.**{2,3}.b ? (@ > 0)'; + ?column? +---------- + 1 +(1 row) + +select json '{"a": {"b": 1}}' @? '$.**.b ? ( @ > 0)'; + ?column? +---------- + t +(1 row) + +select json '{"a": {"b": 1}}' @? '$.**{0}.b ? ( @ > 0)'; + ?column? +---------- + f +(1 row) + +select json '{"a": {"b": 1}}' @? '$.**{1}.b ? ( @ > 0)'; + ?column? +---------- + t +(1 row) + +select json '{"a": {"b": 1}}' @? '$.**{0,}.b ? ( @ > 0)'; + ?column? +---------- + t +(1 row) + +select json '{"a": {"b": 1}}' @? '$.**{1,}.b ? ( @ > 0)'; + ?column? 
+---------- + t +(1 row) + +select json '{"a": {"b": 1}}' @? '$.**{1,2}.b ? ( @ > 0)'; + ?column? +---------- + t +(1 row) + +select json '{"a": {"c": {"b": 1}}}' @? '$.**.b ? ( @ > 0)'; + ?column? +---------- + t +(1 row) + +select json '{"a": {"c": {"b": 1}}}' @? '$.**{0}.b ? ( @ > 0)'; + ?column? +---------- + f +(1 row) + +select json '{"a": {"c": {"b": 1}}}' @? '$.**{1}.b ? ( @ > 0)'; + ?column? +---------- + f +(1 row) + +select json '{"a": {"c": {"b": 1}}}' @? '$.**{0,}.b ? ( @ > 0)'; + ?column? +---------- + t +(1 row) + +select json '{"a": {"c": {"b": 1}}}' @? '$.**{1,}.b ? ( @ > 0)'; + ?column? +---------- + t +(1 row) + +select json '{"a": {"c": {"b": 1}}}' @? '$.**{1,2}.b ? ( @ > 0)'; + ?column? +---------- + t +(1 row) + +select json '{"a": {"c": {"b": 1}}}' @? '$.**{2,3}.b ? ( @ > 0)'; + ?column? +---------- + t +(1 row) + +select json '{"g": {"x": 2}}' @* '$.g ? (exists (@.x))'; + ?column? +---------- + {"x": 2} +(1 row) + +select json '{"g": {"x": 2}}' @* '$.g ? (exists (@.y))'; + ?column? +---------- +(0 rows) + +select json '{"g": {"x": 2}}' @* '$.g ? (exists (@.x ? (@ >= 2) ))'; + ?column? +---------- + {"x": 2} +(1 row) + +--test ternary logic +select + x, y, + jsonpath_query( + json '[true, false, null]', + '$[*] ? (@ == true && ($x == true && $y == true) || + @ == false && !($x == true && $y == true) || + @ == null && ($x == true && $y == true) is unknown)', + json_build_object('x', x, 'y', y) + ) as "x && y" +from + (values (json 'true'), ('false'), ('"null"')) x(x), + (values (json 'true'), ('false'), ('"null"')) y(y); + x | y | x && y +--------+--------+-------- + true | true | true + true | false | false + true | "null" | null + false | true | false + false | false | false + false | "null" | false + "null" | true | null + "null" | false | false + "null" | "null" | null +(9 rows) + +select + x, y, + jsonpath_query( + json '[true, false, null]', + '$[*] ? (@ == true && ($x == true || $y == true) || + @ == false && !($x == true || $y == true) || + @ == null && ($x == true || $y == true) is unknown)', + json_build_object('x', x, 'y', y) + ) as "x || y" +from + (values (json 'true'), ('false'), ('"null"')) x(x), + (values (json 'true'), ('false'), ('"null"')) y(y); + x | y | x || y +--------+--------+-------- + true | true | true + true | false | true + true | "null" | true + false | true | true + false | false | false + false | "null" | null + "null" | true | true + "null" | false | null + "null" | "null" | null +(9 rows) + +select json '{"a": 1, "b": 1}' @? '$ ? (.a == .b)'; + ?column? +---------- + t +(1 row) + +select json '{"c": {"a": 1, "b": 1}}' @? '$ ? (.a == .b)'; + ?column? +---------- + f +(1 row) + +select json '{"c": {"a": 1, "b": 1}}' @? '$.c ? (.a == .b)'; + ?column? +---------- + t +(1 row) + +select json '{"c": {"a": 1, "b": 1}}' @? '$.c ? ($.c.a == .b)'; + ?column? +---------- + t +(1 row) + +select json '{"c": {"a": 1, "b": 1}}' @? '$.* ? (.a == .b)'; + ?column? +---------- + t +(1 row) + +select json '{"a": 1, "b": 1}' @? '$.** ? (.a == .b)'; + ?column? +---------- + t +(1 row) + +select json '{"c": {"a": 1, "b": 1}}' @? '$.** ? (.a == .b)'; + ?column? +---------- + t +(1 row) + +select json '{"c": {"a": 2, "b": 1}}' @* '$.** ? (.a == 1 + 1)'; + ?column? +------------------ + {"a": 2, "b": 1} +(1 row) + +select json '{"c": {"a": 2, "b": 1}}' @* '$.** ? (.a == (1 + 1))'; + ?column? +------------------ + {"a": 2, "b": 1} +(1 row) + +select json '{"c": {"a": 2, "b": 1}}' @* '$.** ? (.a == .b + 1)'; + ?column? 
+------------------ + {"a": 2, "b": 1} +(1 row) + +select json '{"c": {"a": 2, "b": 1}}' @* '$.** ? (.a == (.b + 1))'; + ?column? +------------------ + {"a": 2, "b": 1} +(1 row) + +select json '{"c": {"a": -1, "b": 1}}' @? '$.** ? (.a == - 1)'; + ?column? +---------- + t +(1 row) + +select json '{"c": {"a": -1, "b": 1}}' @? '$.** ? (.a == -1)'; + ?column? +---------- + t +(1 row) + +select json '{"c": {"a": -1, "b": 1}}' @? '$.** ? (.a == -.b)'; + ?column? +---------- + t +(1 row) + +select json '{"c": {"a": -1, "b": 1}}' @? '$.** ? (.a == - .b)'; + ?column? +---------- + t +(1 row) + +select json '{"c": {"a": 0, "b": 1}}' @? '$.** ? (.a == 1 - .b)'; + ?column? +---------- + t +(1 row) + +select json '{"c": {"a": 2, "b": 1}}' @? '$.** ? (.a == 1 - - .b)'; + ?column? +---------- + t +(1 row) + +select json '{"c": {"a": 0, "b": 1}}' @? '$.** ? (.a == 1 - +.b)'; + ?column? +---------- + t +(1 row) + +select json '[1,2,3]' @? '$ ? (+@[*] > +2)'; + ?column? +---------- + t +(1 row) + +select json '[1,2,3]' @? '$ ? (+@[*] > +3)'; + ?column? +---------- + f +(1 row) + +select json '[1,2,3]' @? '$ ? (-@[*] < -2)'; + ?column? +---------- + t +(1 row) + +select json '[1,2,3]' @? '$ ? (-@[*] < -3)'; + ?column? +---------- + f +(1 row) + +select json '1' @? '$ ? ($ > 0)'; + ?column? +---------- + t +(1 row) + +-- unwrapping of operator arguments in lax mode +select json '{"a": [2]}' @* 'lax $.a * 3'; + ?column? +---------- + 6 +(1 row) + +select json '{"a": [2]}' @* 'lax $.a + 3'; + ?column? +---------- + 5 +(1 row) + +select json '{"a": [2, 3, 4]}' @* 'lax -$.a'; + ?column? +---------- + -2 + -3 + -4 +(3 rows) + +-- should fail +select json '{"a": [1, 2]}' @* 'lax $.a * 3'; +ERROR: Singleton SQL/JSON item required +-- extension: boolean expressions +select json '2' @* '$ > 1'; + ?column? +---------- + true +(1 row) + +select json '2' @* '$ <= 1'; + ?column? +---------- + false +(1 row) + +select json '2' @* '$ == "2"'; + ?column? +---------- + null +(1 row) + +select json '2' @~ '$ > 1'; + ?column? +---------- + t +(1 row) + +select json '2' @~ '$ <= 1'; + ?column? +---------- + f +(1 row) + +select json '2' @~ '$ == "2"'; + ?column? +---------- + +(1 row) + +select json '2' @~ '1'; + ?column? +---------- + +(1 row) + +select json '{}' @~ '$'; + ?column? +---------- + +(1 row) + +select json '[]' @~ '$'; + ?column? +---------- + +(1 row) + +select json '[1,2,3]' @~ '$[*]'; +ERROR: Singleton SQL/JSON item required +select json '[]' @~ '$[*]'; +ERROR: Singleton SQL/JSON item required +select jsonpath_predicate(json '[[1, true], [2, false]]', 'strict $[*] ? (@[0] > $x) [1]', '{"x": 1}'); + jsonpath_predicate +-------------------- + f +(1 row) + +select jsonpath_predicate(json '[[1, true], [2, false]]', 'strict $[*] ? (@[0] < $x) [1]', '{"x": 2}'); + jsonpath_predicate +-------------------- + t +(1 row) + +select json '[null,1,true,"a",[],{}]' @* '$.type()'; + ?column? +---------- + "array" +(1 row) + +select json '[null,1,true,"a",[],{}]' @* 'lax $.type()'; + ?column? +---------- + "array" +(1 row) + +select json '[null,1,true,"a",[],{}]' @* '$[*].type()'; + ?column? +----------- + "null" + "number" + "boolean" + "string" + "array" + "object" +(6 rows) + +select json 'null' @* 'null.type()'; + ?column? +---------- + "null" +(1 row) + +select json 'null' @* 'true.type()'; + ?column? +----------- + "boolean" +(1 row) + +select json 'null' @* '123.type()'; + ?column? +---------- + "number" +(1 row) + +select json 'null' @* '"123".type()'; + ?column? 
+---------- + "string" +(1 row) + +select json 'null' @* 'aaa.type()'; + ?column? +---------- + "string" +(1 row) + +select json '{"a": 2}' @* '($.a - 5).abs() + 10'; + ?column? +---------- + 13 +(1 row) + +select json '{"a": 2.5}' @* '-($.a * $.a).floor() + 10'; + ?column? +---------- + 4 +(1 row) + +select json '[1, 2, 3]' @* '($[*] > 2) ? (@ == true)'; + ?column? +---------- + true +(1 row) + +select json '[1, 2, 3]' @* '($[*] > 3).type()'; + ?column? +----------- + "boolean" +(1 row) + +select json '[1, 2, 3]' @* '($[*].a > 3).type()'; + ?column? +----------- + "boolean" +(1 row) + +select json '[1, 2, 3]' @* 'strict ($[*].a > 3).type()'; + ?column? +---------- + "null" +(1 row) + +select json '[1,null,true,"11",[],[1],[1,2,3],{},{"a":1,"b":2}]' @* 'strict $[*].size()'; +ERROR: SQL/JSON array not found +select json '[1,null,true,"11",[],[1],[1,2,3],{},{"a":1,"b":2}]' @* 'lax $[*].size()'; + ?column? +---------- + 1 + 1 + 1 + 1 + 0 + 1 + 3 + 1 + 1 +(9 rows) + +select json '[0, 1, -2, -3.4, 5.6]' @* '$[*].abs()'; + ?column? +---------- + 0 + 1 + 2 + 3.4 + 5.6 +(5 rows) + +select json '[0, 1, -2, -3.4, 5.6]' @* '$[*].floor()'; + ?column? +---------- + 0 + 1 + -2 + -4 + 5 +(5 rows) + +select json '[0, 1, -2, -3.4, 5.6]' @* '$[*].ceiling()'; + ?column? +---------- + 0 + 1 + -2 + -3 + 6 +(5 rows) + +select json '[0, 1, -2, -3.4, 5.6]' @* '$[*].ceiling().abs()'; + ?column? +---------- + 0 + 1 + 2 + 3 + 6 +(5 rows) + +select json '[0, 1, -2, -3.4, 5.6]' @* '$[*].ceiling().abs().type()'; + ?column? +---------- + "number" + "number" + "number" + "number" + "number" +(5 rows) + +select json '[{},1]' @* '$[*].keyvalue()'; +ERROR: SQL/JSON object not found +select json '{}' @* '$.keyvalue()'; + ?column? +---------- +(0 rows) + +select json '{"a": 1, "b": [1, 2], "c": {"a": "bbb"}}' @* '$.keyvalue()'; + ?column? +------------------------------------- + {"key": "a", "value": 1} + {"key": "b", "value": [1, 2]} + {"key": "c", "value": {"a": "bbb"}} +(3 rows) + +select json '[{"a": 1, "b": [1, 2]}, {"c": {"a": "bbb"}}]' @* '$[*].keyvalue()'; + ?column? +------------------------------------- + {"key": "a", "value": 1} + {"key": "b", "value": [1, 2]} + {"key": "c", "value": {"a": "bbb"}} +(3 rows) + +select json '[{"a": 1, "b": [1, 2]}, {"c": {"a": "bbb"}}]' @* 'strict $.keyvalue()'; +ERROR: SQL/JSON object not found +select json '[{"a": 1, "b": [1, 2]}, {"c": {"a": "bbb"}}]' @* 'lax $.keyvalue()'; + ?column? +------------------------------------- + {"key": "a", "value": 1} + {"key": "b", "value": [1, 2]} + {"key": "c", "value": {"a": "bbb"}} +(3 rows) + +select json 'null' @* '$.double()'; +ERROR: Non-numeric SQL/JSON item +select json 'true' @* '$.double()'; +ERROR: Non-numeric SQL/JSON item +select json '[]' @* '$.double()'; + ?column? +---------- +(0 rows) + +select json '[]' @* 'strict $.double()'; +ERROR: Non-numeric SQL/JSON item +select json '{}' @* '$.double()'; +ERROR: Non-numeric SQL/JSON item +select json '1.23' @* '$.double()'; + ?column? +---------- + 1.23 +(1 row) + +select json '"1.23"' @* '$.double()'; + ?column? +---------- + 1.23 +(1 row) + +select json '"1.23aaa"' @* '$.double()'; +ERROR: Non-numeric SQL/JSON item +select json '["", "a", "abc", "abcabc"]' @* '$[*] ? (@ starts with "abc")'; + ?column? +---------- + "abc" + "abcabc" +(2 rows) + +select json '["", "a", "abc", "abcabc"]' @* 'strict $ ? (@[*] starts with "abc")'; + ?column? +---------------------------- + ["", "a", "abc", "abcabc"] +(1 row) + +select json '["", "a", "abd", "abdabc"]' @* 'strict $ ? 
(@[*] starts with "abc")'; + ?column? +---------- +(0 rows) + +select json '["abc", "abcabc", null, 1]' @* 'strict $ ? (@[*] starts with "abc")'; + ?column? +---------- +(0 rows) + +select json '["abc", "abcabc", null, 1]' @* 'strict $ ? ((@[*] starts with "abc") is unknown)'; + ?column? +---------------------------- + ["abc", "abcabc", null, 1] +(1 row) + +select json '[[null, 1, "abc", "abcabc"]]' @* 'lax $ ? (@[*] starts with "abc")'; + ?column? +---------------------------- + [null, 1, "abc", "abcabc"] +(1 row) + +select json '[[null, 1, "abd", "abdabc"]]' @* 'lax $ ? ((@[*] starts with "abc") is unknown)'; + ?column? +---------------------------- + [null, 1, "abd", "abdabc"] +(1 row) + +select json '[null, 1, "abd", "abdabc"]' @* 'lax $[*] ? ((@ starts with "abc") is unknown)'; + ?column? +---------- + null + 1 +(2 rows) + +select json '[null, 1, "abc", "abd", "aBdC", "abdacb", "babc"]' @* 'lax $[*] ? (@ like_regex "^ab.*c")'; + ?column? +---------- + "abc" + "abdacb" +(2 rows) + +select json '[null, 1, "abc", "abd", "aBdC", "abdacb", "babc"]' @* 'lax $[*] ? (@ like_regex "^ab.*c" flag "i")'; + ?column? +---------- + "abc" + "aBdC" + "abdacb" +(3 rows) + +select json 'null' @* '$.datetime()'; +ERROR: Invalid argument for SQL/JSON datetime function +select json 'true' @* '$.datetime()'; +ERROR: Invalid argument for SQL/JSON datetime function +select json '[]' @* '$.datetime()'; + ?column? +---------- +(0 rows) + +select json '[]' @* 'strict $.datetime()'; +ERROR: Invalid argument for SQL/JSON datetime function +select json '{}' @* '$.datetime()'; +ERROR: Invalid argument for SQL/JSON datetime function +select json '""' @* '$.datetime()'; +ERROR: Invalid argument for SQL/JSON datetime function +-- Standard extension: UNIX epoch to timestamptz +select json '0' @* '$.datetime()'; + ?column? +-------------------------------- + "Wed Dec 31 16:00:00 1969 PST" +(1 row) + +select json '0' @* '$.datetime().type()'; + ?column? +---------------------------- + "timestamp with time zone" +(1 row) + +select json '1490216035.5' @* '$.datetime()'; + ?column? +---------------------------------- + "Wed Mar 22 13:53:55.5 2017 PDT" +(1 row) + +select json '"10-03-2017"' @* '$.datetime("dd-mm-yyyy")'; + ?column? +-------------- + "03-10-2017" +(1 row) + +select json '"10-03-2017"' @* '$.datetime("dd-mm-yyyy").type()'; + ?column? +---------- + "date" +(1 row) + +select json '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy")'; + ?column? +-------------- + "03-10-2017" +(1 row) + +select json '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy").type()'; + ?column? +---------- + "date" +(1 row) + +select json '"10-03-2017 12:34"' @* ' $.datetime("dd-mm-yyyy HH24:MI").type()'; + ?column? +------------------------------- + "timestamp without time zone" +(1 row) + +select json '"10-03-2017 12:34 +05:20"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH:TZM").type()'; + ?column? +---------------------------- + "timestamp with time zone" +(1 row) + +select json '"12:34:56"' @* '$.datetime("HH24:MI:SS").type()'; + ?column? +-------------------------- + "time without time zone" +(1 row) + +select json '"12:34:56 +05:20"' @* '$.datetime("HH24:MI:SS TZH:TZM").type()'; + ?column? +----------------------- + "time with time zone" +(1 row) + +set time zone '+00'; +select json '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy HH24:MI")'; + ?column? +---------------------------- + "Fri Mar 10 12:34:00 2017" +(1 row) + +select json '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH")'; + ?column? 
+-------------------------------- + "Fri Mar 10 12:34:00 2017 +00" +(1 row) + +select json '"10-03-2017 12:34 +05"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH")'; + ?column? +-------------------------------- + "Fri Mar 10 07:34:00 2017 +00" +(1 row) + +select json '"10-03-2017 12:34 -05"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH")'; + ?column? +-------------------------------- + "Fri Mar 10 17:34:00 2017 +00" +(1 row) + +select json '"10-03-2017 12:34 +05:20"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH:TZM")'; + ?column? +-------------------------------- + "Fri Mar 10 07:14:00 2017 +00" +(1 row) + +select json '"10-03-2017 12:34 -05:20"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH:TZM")'; + ?column? +-------------------------------- + "Fri Mar 10 17:54:00 2017 +00" +(1 row) + +select json '"12:34"' @* '$.datetime("HH24:MI")'; + ?column? +------------ + "12:34:00" +(1 row) + +select json '"12:34"' @* '$.datetime("HH24:MI TZH")'; + ?column? +--------------- + "12:34:00+00" +(1 row) + +select json '"12:34 +05"' @* '$.datetime("HH24:MI TZH")'; + ?column? +--------------- + "12:34:00+05" +(1 row) + +select json '"12:34 -05"' @* '$.datetime("HH24:MI TZH")'; + ?column? +--------------- + "12:34:00-05" +(1 row) + +select json '"12:34 +05:20"' @* '$.datetime("HH24:MI TZH:TZM")'; + ?column? +------------------ + "12:34:00+05:20" +(1 row) + +select json '"12:34 -05:20"' @* '$.datetime("HH24:MI TZH:TZM")'; + ?column? +------------------ + "12:34:00-05:20" +(1 row) + +set time zone '+10'; +select json '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy HH24:MI")'; + ?column? +---------------------------- + "Fri Mar 10 12:34:00 2017" +(1 row) + +select json '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH")'; + ?column? +-------------------------------- + "Fri Mar 10 12:34:00 2017 +10" +(1 row) + +select json '"10-03-2017 12:34 +05"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH")'; + ?column? +-------------------------------- + "Fri Mar 10 17:34:00 2017 +10" +(1 row) + +select json '"10-03-2017 12:34 -05"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH")'; + ?column? +-------------------------------- + "Sat Mar 11 03:34:00 2017 +10" +(1 row) + +select json '"10-03-2017 12:34 +05:20"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH:TZM")'; + ?column? +-------------------------------- + "Fri Mar 10 17:14:00 2017 +10" +(1 row) + +select json '"10-03-2017 12:34 -05:20"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH:TZM")'; + ?column? +-------------------------------- + "Sat Mar 11 03:54:00 2017 +10" +(1 row) + +select json '"12:34"' @* '$.datetime("HH24:MI")'; + ?column? +------------ + "12:34:00" +(1 row) + +select json '"12:34"' @* '$.datetime("HH24:MI TZH")'; + ?column? +--------------- + "12:34:00+10" +(1 row) + +select json '"12:34 +05"' @* '$.datetime("HH24:MI TZH")'; + ?column? +--------------- + "12:34:00+05" +(1 row) + +select json '"12:34 -05"' @* '$.datetime("HH24:MI TZH")'; + ?column? +--------------- + "12:34:00-05" +(1 row) + +select json '"12:34 +05:20"' @* '$.datetime("HH24:MI TZH:TZM")'; + ?column? +------------------ + "12:34:00+05:20" +(1 row) + +select json '"12:34 -05:20"' @* '$.datetime("HH24:MI TZH:TZM")'; + ?column? +------------------ + "12:34:00-05:20" +(1 row) + +set time zone default; +select json '"2017-03-10"' @* '$.datetime().type()'; + ?column? +---------- + "date" +(1 row) + +select json '"2017-03-10"' @* '$.datetime()'; + ?column? +-------------- + "03-10-2017" +(1 row) + +select json '"2017-03-10 12:34:56"' @* '$.datetime().type()'; + ?column? 
+------------------------------- + "timestamp without time zone" +(1 row) + +select json '"2017-03-10 12:34:56"' @* '$.datetime()'; + ?column? +---------------------------- + "Fri Mar 10 12:34:56 2017" +(1 row) + +select json '"2017-03-10 12:34:56 +3"' @* '$.datetime().type()'; + ?column? +---------------------------- + "timestamp with time zone" +(1 row) + +select json '"2017-03-10 12:34:56 +3"' @* '$.datetime()'; + ?column? +-------------------------------- + "Fri Mar 10 01:34:56 2017 PST" +(1 row) + +select json '"2017-03-10 12:34:56 +3:10"' @* '$.datetime().type()'; + ?column? +---------------------------- + "timestamp with time zone" +(1 row) + +select json '"2017-03-10 12:34:56 +3:10"' @* '$.datetime()'; + ?column? +-------------------------------- + "Fri Mar 10 01:24:56 2017 PST" +(1 row) + +select json '"12:34:56"' @* '$.datetime().type()'; + ?column? +-------------------------- + "time without time zone" +(1 row) + +select json '"12:34:56"' @* '$.datetime()'; + ?column? +------------ + "12:34:56" +(1 row) + +select json '"12:34:56 +3"' @* '$.datetime().type()'; + ?column? +----------------------- + "time with time zone" +(1 row) + +select json '"12:34:56 +3"' @* '$.datetime()'; + ?column? +--------------- + "12:34:56+03" +(1 row) + +select json '"12:34:56 +3:10"' @* '$.datetime().type()'; + ?column? +----------------------- + "time with time zone" +(1 row) + +select json '"12:34:56 +3:10"' @* '$.datetime()'; + ?column? +------------------ + "12:34:56+03:10" +(1 row) + +set time zone '+00'; +-- date comparison +select json '["2017-03-10", "2017-03-11", "2017-03-09", "12:34:56", "01:02:03 +04", "2017-03-10 00:00:00", "2017-03-10 12:34:56", "2017-03-10 01:02:03 +04", "2017-03-10 03:00:00 +03"]' + @* '$[*].datetime() ? (@ == "10.03.2017".datetime("dd.mm.yyyy"))'; + ?column? +-------------------------------- + "03-10-2017" + "Fri Mar 10 00:00:00 2017" + "Fri Mar 10 00:00:00 2017 +00" +(3 rows) + +select json '["2017-03-10", "2017-03-11", "2017-03-09", "12:34:56", "01:02:03 +04", "2017-03-10 00:00:00", "2017-03-10 12:34:56", "2017-03-10 01:02:03 +04", "2017-03-10 03:00:00 +03"]' + @* '$[*].datetime() ? (@ >= "10.03.2017".datetime("dd.mm.yyyy"))'; + ?column? +-------------------------------- + "03-10-2017" + "03-11-2017" + "Fri Mar 10 00:00:00 2017" + "Fri Mar 10 12:34:56 2017" + "Fri Mar 10 00:00:00 2017 +00" +(5 rows) + +select json '["2017-03-10", "2017-03-11", "2017-03-09", "12:34:56", "01:02:03 +04", "2017-03-10 00:00:00", "2017-03-10 12:34:56", "2017-03-10 01:02:03 +04", "2017-03-10 03:00:00 +03"]' + @* '$[*].datetime() ? (@ < "10.03.2017".datetime("dd.mm.yyyy"))'; + ?column? +-------------------------------- + "03-09-2017" + "Thu Mar 09 21:02:03 2017 +00" +(2 rows) + +-- time comparison +select json '["12:34:00", "12:35:00", "12:36:00", "12:35:00 +00", "12:35:00 +01", "13:35:00 +01", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00 +01"]' + @* '$[*].datetime() ? (@ == "12:35".datetime("HH24:MI"))'; + ?column? +--------------- + "12:35:00" + "12:35:00+00" +(2 rows) + +select json '["12:34:00", "12:35:00", "12:36:00", "12:35:00 +00", "12:35:00 +01", "13:35:00 +01", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00 +01"]' + @* '$[*].datetime() ? (@ >= "12:35".datetime("HH24:MI"))'; + ?column? +--------------- + "12:35:00" + "12:36:00" + "12:35:00+00" +(3 rows) + +select json '["12:34:00", "12:35:00", "12:36:00", "12:35:00 +00", "12:35:00 +01", "13:35:00 +01", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00 +01"]' + @* '$[*].datetime() ? 
(@ < "12:35".datetime("HH24:MI"))'; + ?column? +--------------- + "12:34:00" + "12:35:00+01" + "13:35:00+01" +(3 rows) + +-- timetz comparison +select json '["12:34:00 +01", "12:35:00 +01", "12:36:00 +01", "12:35:00 +02", "12:35:00 -02", "10:35:00", "11:35:00", "12:35:00", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00 +1"]' + @* '$[*].datetime() ? (@ == "12:35 +1".datetime("HH24:MI TZH"))'; + ?column? +--------------- + "12:35:00+01" +(1 row) + +select json '["12:34:00 +01", "12:35:00 +01", "12:36:00 +01", "12:35:00 +02", "12:35:00 -02", "10:35:00", "11:35:00", "12:35:00", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00 +1"]' + @* '$[*].datetime() ? (@ >= "12:35 +1".datetime("HH24:MI TZH"))'; + ?column? +--------------- + "12:35:00+01" + "12:36:00+01" + "12:35:00-02" + "11:35:00" + "12:35:00" +(5 rows) + +select json '["12:34:00 +01", "12:35:00 +01", "12:36:00 +01", "12:35:00 +02", "12:35:00 -02", "10:35:00", "11:35:00", "12:35:00", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00 +1"]' + @* '$[*].datetime() ? (@ < "12:35 +1".datetime("HH24:MI TZH"))'; + ?column? +--------------- + "12:34:00+01" + "12:35:00+02" + "10:35:00" +(3 rows) + +-- timestamp comparison +select json '["2017-03-10 12:34:00", "2017-03-10 12:35:00", "2017-03-10 12:36:00", "2017-03-10 12:35:00 +01", "2017-03-10 13:35:00 +01", "2017-03-10 12:35:00 -01", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56 +01"]' + @* '$[*].datetime() ? (@ == "10.03.2017 12:35".datetime("dd.mm.yyyy HH24:MI"))'; + ?column? +-------------------------------- + "Fri Mar 10 12:35:00 2017" + "Fri Mar 10 12:35:00 2017 +00" +(2 rows) + +select json '["2017-03-10 12:34:00", "2017-03-10 12:35:00", "2017-03-10 12:36:00", "2017-03-10 12:35:00 +01", "2017-03-10 13:35:00 +01", "2017-03-10 12:35:00 -01", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56 +01"]' + @* '$[*].datetime() ? (@ >= "10.03.2017 12:35".datetime("dd.mm.yyyy HH24:MI"))'; + ?column? +-------------------------------- + "Fri Mar 10 12:35:00 2017" + "Fri Mar 10 12:36:00 2017" + "Fri Mar 10 12:35:00 2017 +00" + "Fri Mar 10 13:35:00 2017 +00" + "03-11-2017" +(5 rows) + +select json '["2017-03-10 12:34:00", "2017-03-10 12:35:00", "2017-03-10 12:36:00", "2017-03-10 12:35:00 +01", "2017-03-10 13:35:00 +01", "2017-03-10 12:35:00 -01", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56 +01"]' + @* '$[*].datetime() ? (@ < "10.03.2017 12:35".datetime("dd.mm.yyyy HH24:MI"))'; + ?column? +-------------------------------- + "Fri Mar 10 12:34:00 2017" + "Fri Mar 10 11:35:00 2017 +00" + "03-10-2017" +(3 rows) + +-- timestamptz comparison +select json '["2017-03-10 12:34:00 +01", "2017-03-10 12:35:00 +01", "2017-03-10 12:36:00 +01", "2017-03-10 12:35:00 +02", "2017-03-10 12:35:00 -02", "2017-03-10 10:35:00", "2017-03-10 11:35:00", "2017-03-10 12:35:00", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56 +01"]' + @* '$[*].datetime() ? (@ == "10.03.2017 12:35 +1".datetime("dd.mm.yyyy HH24:MI TZH"))'; + ?column? +-------------------------------- + "Fri Mar 10 11:35:00 2017 +00" + "Fri Mar 10 11:35:00 2017" +(2 rows) + +select json '["2017-03-10 12:34:00 +01", "2017-03-10 12:35:00 +01", "2017-03-10 12:36:00 +01", "2017-03-10 12:35:00 +02", "2017-03-10 12:35:00 -02", "2017-03-10 10:35:00", "2017-03-10 11:35:00", "2017-03-10 12:35:00", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56 +01"]' + @* '$[*].datetime() ? (@ >= "10.03.2017 12:35 +1".datetime("dd.mm.yyyy HH24:MI TZH"))'; + ?column? 
+-------------------------------- + "Fri Mar 10 11:35:00 2017 +00" + "Fri Mar 10 11:36:00 2017 +00" + "Fri Mar 10 14:35:00 2017 +00" + "Fri Mar 10 11:35:00 2017" + "Fri Mar 10 12:35:00 2017" + "03-11-2017" +(6 rows) + +select json '["2017-03-10 12:34:00 +01", "2017-03-10 12:35:00 +01", "2017-03-10 12:36:00 +01", "2017-03-10 12:35:00 +02", "2017-03-10 12:35:00 -02", "2017-03-10 10:35:00", "2017-03-10 11:35:00", "2017-03-10 12:35:00", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56 +01"]' + @* '$[*].datetime() ? (@ < "10.03.2017 12:35 +1".datetime("dd.mm.yyyy HH24:MI TZH"))'; + ?column? +-------------------------------- + "Fri Mar 10 11:34:00 2017 +00" + "Fri Mar 10 10:35:00 2017 +00" + "Fri Mar 10 10:35:00 2017" + "03-10-2017" +(4 rows) + +set time zone default; +-- jsonpath operators +SELECT json '[{"a": 1}, {"a": 2}]' @* '$[*]'; + ?column? +---------- + {"a": 1} + {"a": 2} +(2 rows) + +SELECT json '[{"a": 1}, {"a": 2}]' @* '$[*] ? (@.a > 10)'; + ?column? +---------- +(0 rows) + +SELECT json '[{"a": 1}, {"a": 2}]' @* '[$[*].a]'; + ?column? +---------- + [1, 2] +(1 row) + +SELECT json '[{"a": 1}, {"a": 2}]' @? '$[*] ? (@.a > 1)'; + ?column? +---------- + t +(1 row) + +SELECT json '[{"a": 1}, {"a": 2}]' @? '$[*].a ? (@ > 2)'; + ?column? +---------- + f +(1 row) + +SELECT json '[{"a": 1}, {"a": 2}]' @~ '$[*].a > 1'; + ?column? +---------- + t +(1 row) + +SELECT json '[{"a": 1}, {"a": 2}]' @~ '$[*].a > 2'; + ?column? +---------- + f +(1 row) + +-- extension: map item method +select json '1' @* 'strict $.map(@ + 10)'; +ERROR: SQL/JSON array not found +select json '1' @* 'lax $.map(@ + 10)'; + ?column? +---------- + 11 +(1 row) + +select json '[1, 2, 3]' @* '$.map(@ + 10)'; + ?column? +-------------- + [11, 12, 13] +(1 row) + +select json '[[1, 2], [3, 4, 5], [], [6, 7]]' @* '$.map(@.map(@ + 10))'; + ?column? +---------------------------------------- + [[11, 12], [13, 14, 15], [], [16, 17]] +(1 row) + +-- extension: reduce/fold item methods +select json '1' @* 'strict $.reduce($1 + $2)'; +ERROR: SQL/JSON array not found +select json '1' @* 'lax $.reduce($1 + $2)'; + ?column? +---------- + 1 +(1 row) + +select json '1' @* 'strict $.fold($1 + $2, 10)'; +ERROR: SQL/JSON array not found +select json '1' @* 'lax $.fold($1 + $2, 10)'; + ?column? +---------- + 11 +(1 row) + +select json '[1, 2, 3]' @* '$.reduce($1 + $2)'; + ?column? +---------- + 6 +(1 row) + +select json '[1, 2, 3]' @* '$.fold($1 + $2, 100)'; + ?column? +---------- + 106 +(1 row) + +select json '[]' @* '$.reduce($1 + $2)'; + ?column? +---------- +(0 rows) + +select json '[]' @* '$.fold($1 + $2, 100)'; + ?column? +---------- + 100 +(1 row) + +select json '[1]' @* '$.reduce($1 + $2)'; + ?column? +---------- + 1 +(1 row) + +select json '[1, 2, 3]' @* '$.foldl([$1, $2], [])'; + ?column? +------------------- + [[[[], 1], 2], 3] +(1 row) + +select json '[1, 2, 3]' @* '$.foldr([$2, $1], [])'; + ?column? +------------------- + [[[[], 3], 2], 1] +(1 row) + +select json '[[1, 2], [3, 4, 5], [], [6, 7]]' @* '$.fold($1 + $2.fold($1 + $2, 100), 1000)'; + ?column? +---------- + 1428 +(1 row) + +-- extension: min/max item methods +select json '1' @* 'strict $.min()'; +ERROR: SQL/JSON array not found +select json '1' @* 'lax $.min()'; + ?column? +---------- + 1 +(1 row) + +select json '[]' @* '$.min()'; + ?column? +---------- +(0 rows) + +select json '[]' @* '$.max()'; + ?column? +---------- +(0 rows) + +select json '[null]' @* '$.min()'; + ?column? +---------- + null +(1 row) + +select json '[null]' @* '$.max()'; + ?column? 
+---------- + null +(1 row) + +select json '[1, 2, 3]' @* '$.min()'; + ?column? +---------- + 1 +(1 row) + +select json '[1, 2, 3]' @* '$.max()'; + ?column? +---------- + 3 +(1 row) + +select json '[2, 3, 5, null, 1, 4, null]' @* '$.min()'; + ?column? +---------- + 1 +(1 row) + +select json '[2, 3, 5, null, 1, 4, null]' @* '$.max()'; + ?column? +---------- + 5 +(1 row) + +select json '["aa", null, "a", "bbb"]' @* '$.min()'; + ?column? +---------- + "a" +(1 row) + +select json '["aa", null, "a", "bbb"]' @* '$.max()'; + ?column? +---------- + "bbb" +(1 row) + +select json '[1, null, "2"]' @* '$.max()'; +ERROR: SQL/JSON scalar required +-- extension: path sequences +select json '[1,2,3,4,5]' @* '10, 20, $[*], 30'; + ?column? +---------- + 10 + 20 + 1 + 2 + 3 + 4 + 5 + 30 +(8 rows) + +select json '[1,2,3,4,5]' @* 'lax 10, 20, $[*].a, 30'; + ?column? +---------- + 10 + 20 + 30 +(3 rows) + +select json '[1,2,3,4,5]' @* 'strict 10, 20, $[*].a, 30'; +ERROR: SQL/JSON member not found +select json '[1,2,3,4,5]' @* '-(10, 20, $[1 to 3], 30)'; + ?column? +---------- + -10 + -20 + -2 + -3 + -4 + -30 +(6 rows) + +select json '[1,2,3,4,5]' @* 'lax (10, 20, $[1 to 3], 30).map(@ + 100)'; + ?column? +---------- + 110 + 120 + 102 + 103 + 104 + 130 +(6 rows) + +select json '[1,2,3,4,5]' @* '$[(0, $[*], 5) ? (@ == 3)]'; + ?column? +---------- + 4 +(1 row) + +select json '[1,2,3,4,5]' @* '$[(0, $[*], 3) ? (@ == 3)]'; +ERROR: Invalid SQL/JSON subscript +-- extension: array constructors +select json '[1, 2, 3]' @* '[]'; + ?column? +---------- + [] +(1 row) + +select json '[1, 2, 3]' @* '[1, 2, $.map(@ + 100)[*], 4, 5]'; + ?column? +----------------------------- + [1, 2, 101, 102, 103, 4, 5] +(1 row) + +select json '[1, 2, 3]' @* '[1, 2, $.map(@ + 100)[*], 4, 5][*]'; + ?column? +---------- + 1 + 2 + 101 + 102 + 103 + 4 + 5 +(7 rows) + +select json '[1, 2, 3]' @* '[(1, (2, $.map(@ + 100)[*])), (4, 5)]'; + ?column? +----------------------------- + [1, 2, 101, 102, 103, 4, 5] +(1 row) + +select json '[1, 2, 3]' @* '[[1, 2], [$.map(@ + 100)[*], 4], 5, [(1,2)?(@ > 5)]]'; + ?column? +------------------------------------- + [[1, 2], [101, 102, 103, 4], 5, []] +(1 row) + +select json '[1, 2, 3]' @* 'strict [1, 2, $.map(@.a)[*], 4, 5]'; +ERROR: SQL/JSON member not found +select json '[[1, 2], [3, 4, 5], [], [6, 7]]' @* '[$[*].map(@ + 10)[*] ? (@ > 13)]'; + ?column? +------------------ + [14, 15, 16, 17] +(1 row) + +-- extension: object constructors +select json '[1, 2, 3]' @* '{}'; + ?column? +---------- + {} +(1 row) + +select json '[1, 2, 3]' @* '{a: 2 + 3, "b": [$[*], 4, 5]}'; + ?column? +-------------------------------- + {"a": 5, "b": [1, 2, 3, 4, 5]} +(1 row) + +select json '[1, 2, 3]' @* '{a: 2 + 3, "b": [$[*], 4, 5]}.*'; + ?column? +----------------- + 5 + [1, 2, 3, 4, 5] +(2 rows) + +select json '[1, 2, 3]' @* '{a: 2 + 3, "b": ($[*], 4, 5)}'; +ERROR: Singleton SQL/JSON item required +select json '[1, 2, 3]' @* '{a: 2 + 3, "b": [$.map({x: @, y: @ < 3})[*], {z: "foo"}]}'; + ?column? +----------------------------------------------------------------------------------------------- + {"a": 5, "b": [{"x": 1, "y": true}, {"x": 2, "y": true}, {"x": 3, "y": false}, {"z": "foo"}]} +(1 row) + +-- extension: object subscripting +select json '{"a": 1}' @? '$["a"]'; + ?column? +---------- + t +(1 row) + +select json '{"a": 1}' @? '$["b"]'; + ?column? +---------- + f +(1 row) + +select json '{"a": 1}' @? 'strict $["b"]'; +ERROR: SQL/JSON member not found +select json '{"a": 1}' @? '$["b", "a"]'; + ?column? 
+---------- + t +(1 row) + +select json '{"a": 1}' @* '$["a"]'; + ?column? +---------- + 1 +(1 row) + +select json '{"a": 1}' @* 'strict $["b"]'; +ERROR: SQL/JSON member not found +select json '{"a": 1}' @* 'lax $["b"]'; + ?column? +---------- +(0 rows) + +select json '{"a": 1, "b": 2}' @* 'lax $["b", "c", "b", "a", 0 to 3]'; + ?column? +------------------ + 2 + 2 + 1 + {"a": 1, "b": 2} +(4 rows) + +select json 'null' @* '{"a": 1}["a"]'; + ?column? +---------- + 1 +(1 row) + +select json 'null' @* '{"a": 1}["b"]'; + ?column? +---------- +(0 rows) + diff --git a/src/test/regress/expected/json_sqljson.out b/src/test/regress/expected/json_sqljson.out new file mode 100644 index 0000000000..a5c834b37d --- /dev/null +++ b/src/test/regress/expected/json_sqljson.out @@ -0,0 +1,1949 @@ +-- JSON_EXISTS +SELECT JSON_EXISTS(NULL FORMAT JSON, '$'); + ?column? +---------- + +(1 row) + +SELECT JSON_EXISTS(NULL::text FORMAT JSON, '$'); + ?column? +---------- + +(1 row) + +SELECT JSON_EXISTS(NULL::bytea FORMAT JSON, '$'); + ?column? +---------- + +(1 row) + +SELECT JSON_EXISTS(NULL::json FORMAT JSON, '$'); +WARNING: FORMAT JSON has no effect for json and jsonb types + ?column? +---------- + +(1 row) + +SELECT JSON_EXISTS(NULL::jsonb FORMAT JSON, '$'); +WARNING: FORMAT JSON has no effect for json and jsonb types + ?column? +---------- + +(1 row) + +SELECT JSON_EXISTS(NULL::json, '$'); + ?column? +---------- + +(1 row) + +SELECT JSON_EXISTS('' FORMAT JSON, '$'); + ?column? +---------- + f +(1 row) + +SELECT JSON_EXISTS('' FORMAT JSON, '$' TRUE ON ERROR); + ?column? +---------- + t +(1 row) + +SELECT JSON_EXISTS('' FORMAT JSON, '$' FALSE ON ERROR); + ?column? +---------- + f +(1 row) + +SELECT JSON_EXISTS('' FORMAT JSON, '$' UNKNOWN ON ERROR); + ?column? +---------- + +(1 row) + +SELECT JSON_EXISTS('' FORMAT JSON, '$' ERROR ON ERROR); +ERROR: invalid input syntax for type json +DETAIL: The input string ended unexpectedly. +CONTEXT: JSON data, line 1: +SELECT JSON_EXISTS(bytea '' FORMAT JSON, '$' ERROR ON ERROR); +ERROR: invalid input syntax for type json +DETAIL: The input string ended unexpectedly. +CONTEXT: JSON data, line 1: +SELECT JSON_EXISTS(json '[]', '$'); + ?column? +---------- + t +(1 row) + +SELECT JSON_EXISTS('[]' FORMAT JSON, '$'); + ?column? +---------- + t +(1 row) + +SELECT JSON_EXISTS(JSON_OBJECT(RETURNING bytea FORMAT JSON) FORMAT JSON, '$'); + ?column? +---------- + t +(1 row) + +SELECT JSON_EXISTS(json '1', '$'); + ?column? +---------- + t +(1 row) + +SELECT JSON_EXISTS(json 'null', '$'); + ?column? +---------- + t +(1 row) + +SELECT JSON_EXISTS(json '[]', '$'); + ?column? +---------- + t +(1 row) + +SELECT JSON_EXISTS(json '1', '$.a'); + ?column? +---------- + f +(1 row) + +SELECT JSON_EXISTS(json '1', 'strict $.a'); + ?column? +---------- + f +(1 row) + +SELECT JSON_EXISTS(json '1', 'strict $.a' ERROR ON ERROR); +ERROR: SQL/JSON member not found +SELECT JSON_EXISTS(json 'null', '$.a'); + ?column? +---------- + f +(1 row) + +SELECT JSON_EXISTS(json '[]', '$.a'); + ?column? +---------- + f +(1 row) + +SELECT JSON_EXISTS(json '[1, "aaa", {"a": 1}]', 'strict $.a'); + ?column? +---------- + f +(1 row) + +SELECT JSON_EXISTS(json '[1, "aaa", {"a": 1}]', 'lax $.a'); + ?column? +---------- + t +(1 row) + +SELECT JSON_EXISTS(json '{}', '$.a'); + ?column? +---------- + f +(1 row) + +SELECT JSON_EXISTS(json '{"b": 1, "a": 2}', '$.a'); + ?column? +---------- + t +(1 row) + +SELECT JSON_EXISTS(json '1', '$.a.b'); + ?column? 
+---------- + f +(1 row) + +SELECT JSON_EXISTS(json '{"a": {"b": 1}}', '$.a.b'); + ?column? +---------- + t +(1 row) + +SELECT JSON_EXISTS(json '{"a": 1, "b": 2}', '$.a.b'); + ?column? +---------- + f +(1 row) + +SELECT JSON_EXISTS(json '{"a": 1, "b": 2}', '$.* ? (@ > $x)' PASSING 1 AS x); + ?column? +---------- + t +(1 row) + +SELECT JSON_EXISTS(json '{"a": 1, "b": 2}', '$.* ? (@ > $x)' PASSING '1' AS x); + ?column? +---------- + f +(1 row) + +SELECT JSON_EXISTS(json '{"a": 1, "b": 2}', '$.* ? (@ > $x && @ < $y)' PASSING 0 AS x, 2 AS y); + ?column? +---------- + t +(1 row) + +SELECT JSON_EXISTS(json '{"a": 1, "b": 2}', '$.* ? (@ > $x && @ < $y)' PASSING 0 AS x, 1 AS y); + ?column? +---------- + f +(1 row) + +-- extension: boolean expressions +SELECT JSON_EXISTS(json '1', '$ > 2'); + ?column? +---------- + t +(1 row) + +SELECT JSON_EXISTS(json '1', '$.a > 2' ERROR ON ERROR); + ?column? +---------- + t +(1 row) + +-- JSON_VALUE +SELECT JSON_VALUE(NULL, '$'); + ?column? +---------- + +(1 row) + +SELECT JSON_VALUE(NULL FORMAT JSON, '$'); + ?column? +---------- + +(1 row) + +SELECT JSON_VALUE(NULL::text, '$'); + ?column? +---------- + +(1 row) + +SELECT JSON_VALUE(NULL::bytea, '$'); + ?column? +---------- + +(1 row) + +SELECT JSON_VALUE(NULL::json, '$'); + ?column? +---------- + +(1 row) + +SELECT JSON_VALUE(NULL::jsonb FORMAT JSON, '$'); +WARNING: FORMAT JSON has no effect for json and jsonb types + ?column? +---------- + +(1 row) + +SELECT JSON_VALUE('' FORMAT JSON, '$'); + ?column? +---------- + +(1 row) + +SELECT JSON_VALUE('' FORMAT JSON, '$' NULL ON ERROR); + ?column? +---------- + +(1 row) + +SELECT JSON_VALUE('' FORMAT JSON, '$' DEFAULT '"default value"' ON ERROR); + ?column? +----------------- + "default value" +(1 row) + +SELECT JSON_VALUE('' FORMAT JSON, '$' ERROR ON ERROR); +ERROR: invalid input syntax for type json +DETAIL: The input string ended unexpectedly. +CONTEXT: JSON data, line 1: +SELECT JSON_VALUE(json 'null', '$'); + ?column? +---------- + +(1 row) + +SELECT JSON_VALUE(json 'null', '$' RETURNING int); + ?column? +---------- + +(1 row) + +SELECT JSON_VALUE(json 'true', '$'); + ?column? +---------- + true +(1 row) + +SELECT JSON_VALUE(json 'true', '$' RETURNING bool); + ?column? +---------- + t +(1 row) + +SELECT JSON_VALUE(json '123', '$'); + ?column? +---------- + 123 +(1 row) + +SELECT JSON_VALUE(json '123', '$' RETURNING int) + 234; + ?column? +---------- + 357 +(1 row) + +SELECT JSON_VALUE(json '123', '$' RETURNING text); + ?column? +---------- + 123 +(1 row) + +/* jsonb bytea ??? */ +SELECT JSON_VALUE(json '123', '$' RETURNING bytea); + ?column? +---------- + \x313233 +(1 row) + +SELECT JSON_VALUE(json '1.23', '$'); + ?column? +---------- + 1.23 +(1 row) + +SELECT JSON_VALUE(json '1.23', '$' RETURNING int); + ?column? +---------- + 1 +(1 row) + +SELECT JSON_VALUE(json '"1.23"', '$' RETURNING numeric); + ?column? +---------- + 1.23 +(1 row) + +SELECT JSON_VALUE(json '"1.23"', '$' RETURNING int ERROR ON ERROR); +ERROR: invalid input syntax for integer: "1.23" +SELECT JSON_VALUE(json '"aaa"', '$'); + ?column? +---------- + aaa +(1 row) + +SELECT JSON_VALUE(json '"aaa"', '$' RETURNING text); + ?column? +---------- + aaa +(1 row) + +SELECT JSON_VALUE(json '"aaa"', '$' RETURNING char(5)); + ?column? +---------- + aaa +(1 row) + +SELECT JSON_VALUE(json '"aaa"', '$' RETURNING char(2)); + ?column? +---------- + aa +(1 row) + +SELECT JSON_VALUE(json '"aaa"', '$' RETURNING json); + ?column? 
+---------- + "aaa" +(1 row) + +SELECT JSON_VALUE(json '"aaa"', '$' RETURNING jsonb); + ?column? +---------- + "aaa" +(1 row) + +SELECT JSON_VALUE(json '"aaa"', '$' RETURNING json ERROR ON ERROR); + ?column? +---------- + "aaa" +(1 row) + +SELECT JSON_VALUE(json '"aaa"', '$' RETURNING jsonb ERROR ON ERROR); + ?column? +---------- + "aaa" +(1 row) + +SELECT JSON_VALUE(json '"\"aaa\""', '$' RETURNING json); + ?column? +----------- + "\"aaa\"" +(1 row) + +SELECT JSON_VALUE(json '"\"aaa\""', '$' RETURNING jsonb); + ?column? +----------- + "\"aaa\"" +(1 row) + +SELECT JSON_VALUE(json '"aaa"', '$' RETURNING int); + ?column? +---------- + +(1 row) + +SELECT JSON_VALUE(json '"aaa"', '$' RETURNING int ERROR ON ERROR); +ERROR: invalid input syntax for integer: "aaa" +SELECT JSON_VALUE(json '"aaa"', '$' RETURNING int DEFAULT 111 ON ERROR); + ?column? +---------- + 111 +(1 row) + +SELECT JSON_VALUE(json '"123"', '$' RETURNING int) + 234; + ?column? +---------- + 357 +(1 row) + +SELECT JSON_VALUE(json '"2017-02-20"', '$' RETURNING date) + 9; + ?column? +------------ + 03-01-2017 +(1 row) + +-- Test NULL checks execution in domain types +CREATE DOMAIN sqljson_int_not_null AS int NOT NULL; +SELECT JSON_VALUE(json '1', '$.a' RETURNING sqljson_int_not_null); +ERROR: domain sqljson_int_not_null does not allow null values +SELECT JSON_VALUE(json '1', '$.a' RETURNING sqljson_int_not_null NULL ON ERROR); +ERROR: domain sqljson_int_not_null does not allow null values +SELECT JSON_VALUE(json '1', '$.a' RETURNING sqljson_int_not_null DEFAULT NULL ON ERROR); +ERROR: domain sqljson_int_not_null does not allow null values +SELECT JSON_VALUE(json '[]', '$'); + ?column? +---------- + +(1 row) + +SELECT JSON_VALUE(json '[]', '$' ERROR ON ERROR); +ERROR: SQL/JSON scalar required +SELECT JSON_VALUE(json '{}', '$'); + ?column? +---------- + +(1 row) + +SELECT JSON_VALUE(json '{}', '$' ERROR ON ERROR); +ERROR: SQL/JSON scalar required +SELECT JSON_VALUE(json '1', '$.a'); + ?column? +---------- + +(1 row) + +SELECT JSON_VALUE(json '1', 'strict $.a' ERROR ON ERROR); +ERROR: SQL/JSON member not found +SELECT JSON_VALUE(json '1', 'strict $.a' DEFAULT 'error' ON ERROR); + ?column? +---------- + error +(1 row) + +SELECT JSON_VALUE(json '1', 'lax $.a' ERROR ON ERROR); + ?column? +---------- + +(1 row) + +SELECT JSON_VALUE(json '1', 'lax $.a' ERROR ON EMPTY ERROR ON ERROR); +ERROR: no SQL/JSON item +SELECT JSON_VALUE(json '1', 'strict $.a' DEFAULT 2 ON ERROR); + ?column? +---------- + 2 +(1 row) + +SELECT JSON_VALUE(json '1', 'lax $.a' DEFAULT 2 ON ERROR); + ?column? +---------- + +(1 row) + +SELECT JSON_VALUE(json '1', 'lax $.a' DEFAULT '2' ON ERROR); + ?column? +---------- + +(1 row) + +SELECT JSON_VALUE(json '1', 'lax $.a' NULL ON EMPTY DEFAULT '2' ON ERROR); + ?column? +---------- + +(1 row) + +SELECT JSON_VALUE(json '1', 'lax $.a' DEFAULT '2' ON EMPTY DEFAULT '3' ON ERROR); + ?column? +---------- + 2 +(1 row) + +SELECT JSON_VALUE(json '1', 'lax $.a' ERROR ON EMPTY DEFAULT '3' ON ERROR); + ?column? +---------- + 3 +(1 row) + +SELECT JSON_VALUE(json '[1,2]', '$[*]' ERROR ON ERROR); +ERROR: more than one SQL/JSON item +SELECT JSON_VALUE(json '[1,2]', '$[*]' DEFAULT '0' ON ERROR); + ?column? +---------- + 0 +(1 row) + +SELECT JSON_VALUE(json '[" "]', '$[*]' RETURNING int ERROR ON ERROR); +ERROR: invalid input syntax for integer: " " +SELECT JSON_VALUE(json '[" "]', '$[*]' RETURNING int DEFAULT 2 + 3 ON ERROR); + ?column? 
+---------- + 5 +(1 row) + +SELECT JSON_VALUE(json '["1"]', '$[*]' RETURNING int DEFAULT 2 + 3 ON ERROR); + ?column? +---------- + 1 +(1 row) + +SELECT + x, + JSON_VALUE( + json '{"a": 1, "b": 2}', + '$.* ? (@ > $x)' PASSING x AS x + RETURNING int + DEFAULT -1 ON EMPTY + DEFAULT -2 ON ERROR + ) y +FROM + generate_series(0, 2) x; + x | y +---+---- + 0 | -2 + 1 | 2 + 2 | -1 +(3 rows) + +SELECT JSON_VALUE(json 'null', '$a' PASSING point ' (1, 2 )' AS a); + ?column? +---------- + (1,2) +(1 row) + +SELECT JSON_VALUE(json 'null', '$a' PASSING point ' (1, 2 )' AS a RETURNING point); + ?column? +---------- + (1,2) +(1 row) + +-- JSON_QUERY +SELECT + JSON_QUERY(js FORMAT JSON, '$'), + JSON_QUERY(js FORMAT JSON, '$' WITHOUT WRAPPER), + JSON_QUERY(js FORMAT JSON, '$' WITH CONDITIONAL WRAPPER), + JSON_QUERY(js FORMAT JSON, '$' WITH UNCONDITIONAL ARRAY WRAPPER), + JSON_QUERY(js FORMAT JSON, '$' WITH ARRAY WRAPPER) +FROM + (VALUES + ('null'), + ('12.3'), + ('true'), + ('"aaa"'), + ('[1, null, "2"]'), + ('{"a": 1, "b": [2]}') + ) foo(js); + ?column? | ?column? | ?column? | ?column? | ?column? +--------------------+--------------------+--------------------+----------------------+---------------------- + null | null | [null] | [null] | [null] + 12.3 | 12.3 | [12.3] | [12.3] | [12.3] + true | true | [true] | [true] | [true] + "aaa" | "aaa" | ["aaa"] | ["aaa"] | ["aaa"] + [1, null, "2"] | [1, null, "2"] | [1, null, "2"] | [[1, null, "2"]] | [[1, null, "2"]] + {"a": 1, "b": [2]} | {"a": 1, "b": [2]} | {"a": 1, "b": [2]} | [{"a": 1, "b": [2]}] | [{"a": 1, "b": [2]}] +(6 rows) + +SELECT + JSON_QUERY(js FORMAT JSON, 'strict $[*]') AS "unspec", + JSON_QUERY(js FORMAT JSON, 'strict $[*]' WITHOUT WRAPPER) AS "without", + JSON_QUERY(js FORMAT JSON, 'strict $[*]' WITH CONDITIONAL WRAPPER) AS "with cond", + JSON_QUERY(js FORMAT JSON, 'strict $[*]' WITH UNCONDITIONAL ARRAY WRAPPER) AS "with uncond", + JSON_QUERY(js FORMAT JSON, 'strict $[*]' WITH ARRAY WRAPPER) AS "with" +FROM + (VALUES + ('1'), + ('[]'), + ('[null]'), + ('[12.3]'), + ('[true]'), + ('["aaa"]'), + ('[[1, 2, 3]]'), + ('[{"a": 1, "b": [2]}]'), + ('[1, "2", null, [3]]') + ) foo(js); + unspec | without | with cond | with uncond | with +--------------------+--------------------+---------------------+----------------------+---------------------- + | | | | + | | | | + null | null | [null] | [null] | [null] + 12.3 | 12.3 | [12.3] | [12.3] | [12.3] + true | true | [true] | [true] | [true] + "aaa" | "aaa" | ["aaa"] | ["aaa"] | ["aaa"] + [1, 2, 3] | [1, 2, 3] | [1, 2, 3] | [[1, 2, 3]] | [[1, 2, 3]] + {"a": 1, "b": [2]} | {"a": 1, "b": [2]} | {"a": 1, "b": [2]} | [{"a": 1, "b": [2]}] | [{"a": 1, "b": [2]}] + | | [1, "2", null, [3]] | [1, "2", null, [3]] | [1, "2", null, [3]] +(9 rows) + +SELECT JSON_QUERY('"aaa"' FORMAT JSON, '$' RETURNING text); + ?column? +---------- + "aaa" +(1 row) + +SELECT JSON_QUERY('"aaa"' FORMAT JSON, '$' RETURNING text KEEP QUOTES); + ?column? +---------- + "aaa" +(1 row) + +SELECT JSON_QUERY('"aaa"' FORMAT JSON, '$' RETURNING text KEEP QUOTES ON SCALAR STRING); + ?column? +---------- + "aaa" +(1 row) + +SELECT JSON_QUERY('"aaa"' FORMAT JSON, '$' RETURNING text OMIT QUOTES); + ?column? +---------- + aaa +(1 row) + +SELECT JSON_QUERY('"aaa"' FORMAT JSON, '$' RETURNING text OMIT QUOTES ON SCALAR STRING); + ?column? +---------- + aaa +(1 row) + +SELECT JSON_QUERY('"aaa"' FORMAT JSON, '$' OMIT QUOTES ERROR ON ERROR); +ERROR: invalid input syntax for type json +DETAIL: Token "aaa" is invalid. 
+CONTEXT: JSON data, line 1: aaa +SELECT JSON_QUERY('"aaa"' FORMAT JSON, '$' RETURNING json OMIT QUOTES ERROR ON ERROR); +ERROR: invalid input syntax for type json +DETAIL: Token "aaa" is invalid. +CONTEXT: JSON data, line 1: aaa +SELECT JSON_QUERY('"aaa"' FORMAT JSON, '$' RETURNING bytea FORMAT JSON OMIT QUOTES ERROR ON ERROR); + ?column? +---------- + \x616161 +(1 row) + +-- QUOTES behavior should not be specified when WITH WRAPPER used: +-- Should fail +SELECT JSON_QUERY(json '[1]', '$' WITH WRAPPER OMIT QUOTES); +ERROR: SQL/JSON QUOTES behavior shall not be specified when WITH WRAPPER is used +LINE 1: SELECT JSON_QUERY(json '[1]', '$' WITH WRAPPER OMIT QUOTES); + ^ +SELECT JSON_QUERY(json '[1]', '$' WITH WRAPPER KEEP QUOTES); +ERROR: SQL/JSON QUOTES behavior shall not be specified when WITH WRAPPER is used +LINE 1: SELECT JSON_QUERY(json '[1]', '$' WITH WRAPPER KEEP QUOTES); + ^ +SELECT JSON_QUERY(json '[1]', '$' WITH CONDITIONAL WRAPPER KEEP QUOTES); +ERROR: SQL/JSON QUOTES behavior shall not be specified when WITH WRAPPER is used +LINE 1: ...ON_QUERY(json '[1]', '$' WITH CONDITIONAL WRAPPER KEEP QUOTE... + ^ +SELECT JSON_QUERY(json '[1]', '$' WITH CONDITIONAL WRAPPER OMIT QUOTES); +ERROR: SQL/JSON QUOTES behavior shall not be specified when WITH WRAPPER is used +LINE 1: ...ON_QUERY(json '[1]', '$' WITH CONDITIONAL WRAPPER OMIT QUOTE... + ^ +-- Should succeed +SELECT JSON_QUERY(json '[1]', '$' WITHOUT WRAPPER OMIT QUOTES); + ?column? +---------- + [1] +(1 row) + +SELECT JSON_QUERY(json '[1]', '$' WITHOUT WRAPPER KEEP QUOTES); + ?column? +---------- + [1] +(1 row) + +SELECT JSON_QUERY('[]' FORMAT JSON, '$[*]'); + ?column? +---------- + +(1 row) + +SELECT JSON_QUERY('[]' FORMAT JSON, '$[*]' NULL ON EMPTY); + ?column? +---------- + +(1 row) + +SELECT JSON_QUERY('[]' FORMAT JSON, '$[*]' EMPTY ARRAY ON EMPTY); + ?column? +---------- + [] +(1 row) + +SELECT JSON_QUERY('[]' FORMAT JSON, '$[*]' EMPTY OBJECT ON EMPTY); + ?column? +---------- + {} +(1 row) + +SELECT JSON_QUERY('[]' FORMAT JSON, '$[*]' ERROR ON EMPTY); + ?column? +---------- + +(1 row) + +SELECT JSON_QUERY('[]' FORMAT JSON, '$[*]' ERROR ON EMPTY NULL ON ERROR); + ?column? +---------- + +(1 row) + +SELECT JSON_QUERY('[]' FORMAT JSON, '$[*]' ERROR ON EMPTY EMPTY ARRAY ON ERROR); + ?column? +---------- + [] +(1 row) + +SELECT JSON_QUERY('[]' FORMAT JSON, '$[*]' ERROR ON EMPTY EMPTY OBJECT ON ERROR); + ?column? +---------- + {} +(1 row) + +SELECT JSON_QUERY('[]' FORMAT JSON, '$[*]' ERROR ON EMPTY ERROR ON ERROR); +ERROR: no SQL/JSON item +SELECT JSON_QUERY('[]' FORMAT JSON, '$[*]' ERROR ON ERROR); + ?column? +---------- + +(1 row) + +SELECT JSON_QUERY('[1,2]' FORMAT JSON, '$[*]' ERROR ON ERROR); +ERROR: more than one SQL/JSON item +SELECT JSON_QUERY(json '[1,2]', '$' RETURNING json); + ?column? +---------- + [1,2] +(1 row) + +SELECT JSON_QUERY(json '[1,2]', '$' RETURNING json FORMAT JSON); + ?column? +---------- + [1,2] +(1 row) + +SELECT JSON_QUERY(json '[1,2]', '$' RETURNING jsonb); + ?column? +---------- + [1, 2] +(1 row) + +SELECT JSON_QUERY(json '[1,2]', '$' RETURNING jsonb FORMAT JSON); + ?column? +---------- + [1, 2] +(1 row) + +SELECT JSON_QUERY(json '[1,2]', '$' RETURNING text); + ?column? +---------- + [1,2] +(1 row) + +SELECT JSON_QUERY(json '[1,2]', '$' RETURNING char(10)); + ?column? +------------ + [1,2] +(1 row) + +SELECT JSON_QUERY(json '[1,2]', '$' RETURNING char(3)); + ?column? +---------- + [1, +(1 row) + +SELECT JSON_QUERY(json '[1,2]', '$' RETURNING text FORMAT JSON); + ?column? 
+---------- + [1,2] +(1 row) + +SELECT JSON_QUERY(json '[1,2]', '$' RETURNING bytea); + ?column? +-------------- + \x5b312c325d +(1 row) + +SELECT JSON_QUERY(json '[1,2]', '$' RETURNING bytea FORMAT JSON); + ?column? +-------------- + \x5b312c325d +(1 row) + +SELECT JSON_QUERY(json '[1,2]', '$[*]' RETURNING bytea EMPTY OBJECT ON ERROR); + ?column? +---------- + \x7b7d +(1 row) + +SELECT JSON_QUERY(json '[1,2]', '$[*]' RETURNING bytea FORMAT JSON EMPTY OBJECT ON ERROR); + ?column? +---------- + \x7b7d +(1 row) + +SELECT JSON_QUERY(json '[1,2]', '$[*]' RETURNING json EMPTY OBJECT ON ERROR); + ?column? +---------- + {} +(1 row) + +SELECT JSON_QUERY(json '[1,2]', '$[*]' RETURNING jsonb EMPTY OBJECT ON ERROR); + ?column? +---------- + {} +(1 row) + +SELECT + x, y, + JSON_QUERY( + json '[1,2,3,4,5,null]', + '$[*] ? (@ >= $x && @ <= $y)' + PASSING x AS x, y AS y + WITH CONDITIONAL WRAPPER + EMPTY ARRAY ON EMPTY + ) list +FROM + generate_series(0, 4) x, + generate_series(0, 4) y; + x | y | list +---+---+-------------- + 0 | 0 | [] + 0 | 1 | [1] + 0 | 2 | [1, 2] + 0 | 3 | [1, 2, 3] + 0 | 4 | [1, 2, 3, 4] + 1 | 0 | [] + 1 | 1 | [1] + 1 | 2 | [1, 2] + 1 | 3 | [1, 2, 3] + 1 | 4 | [1, 2, 3, 4] + 2 | 0 | [] + 2 | 1 | [] + 2 | 2 | [2] + 2 | 3 | [2, 3] + 2 | 4 | [2, 3, 4] + 3 | 0 | [] + 3 | 1 | [] + 3 | 2 | [] + 3 | 3 | [3] + 3 | 4 | [3, 4] + 4 | 0 | [] + 4 | 1 | [] + 4 | 2 | [] + 4 | 3 | [] + 4 | 4 | [4] +(25 rows) + +-- Conversion to record types +CREATE TYPE sqljson_rec AS (a int, t text, js json, jb jsonb, jsa json[]); +CREATE TYPE sqljson_reca AS (reca sqljson_rec[]); +SELECT JSON_QUERY(json '[{"a": 1, "b": "foo", "t": "aaa", "js": [1, "2", {}], "jb": {"x": [1, "2", {}]}}, {"a": 2}]', '$[0]' RETURNING sqljson_rec); + ?column? +----------------------------------------------------- + (1,aaa,"[1, ""2"", {}]","{""x"": [1, ""2"", {}]}",) +(1 row) + +SELECT * FROM unnest((JSON_QUERY(json '{"jsa": [{"a": 1, "b": ["foo"]}, {"a": 2, "c": {}}, 123]}', '$' RETURNING sqljson_rec)).jsa); + unnest +------------------------ + {"a": 1, "b": ["foo"]} + {"a": 2, "c": {}} + 123 +(3 rows) + +SELECT * FROM unnest((JSON_QUERY(json '{"reca": [{"a": 1, "t": ["foo", []]}, {"a": 2, "jb": [{}, true]}]}', '$' RETURNING sqljson_reca)).reca); + a | t | js | jb | jsa +---+-------------+----+------------+----- + 1 | ["foo", []] | | | + 2 | | | [{}, true] | +(2 rows) + +-- Conversion to array types +SELECT JSON_QUERY(json '[1,2,null,"3"]', '$[*]' RETURNING int[] WITH WRAPPER); + ?column? +-------------- + {1,2,NULL,3} +(1 row) + +SELECT * FROM unnest(JSON_QUERY(json '[{"a": 1, "t": ["foo", []]}, {"a": 2, "jb": [{}, true]}]', '$' RETURNING sqljson_rec[])); + a | t | js | jb | jsa +---+-------------+----+------------+----- + 1 | ["foo", []] | | | + 2 | | | [{}, true] | +(2 rows) + +-- Conversion to domain types +SELECT JSON_QUERY(json '{"a": 1}', '$.a' RETURNING sqljson_int_not_null); + ?column? 
+---------- + 1 +(1 row) + +SELECT JSON_QUERY(json '{"a": 1}', '$.b' RETURNING sqljson_int_not_null); +ERROR: domain sqljson_int_not_null does not allow null values +-- Test constraints +CREATE TABLE test_json_constraints ( + js text, + i int, + x jsonb DEFAULT JSON_QUERY(json '[1,2]', '$[*]' WITH WRAPPER) + CONSTRAINT test_json_constraint1 + CHECK (js IS JSON) + CONSTRAINT test_json_constraint2 + CHECK (JSON_EXISTS(js FORMAT JSON, '$.a' PASSING i + 5 AS int, i::text AS txt, array[1,2,3] as arr)) + CONSTRAINT test_json_constraint3 + CHECK (JSON_VALUE(js::json, '$.a' RETURNING int DEFAULT ('12' || i)::int ON EMPTY ERROR ON ERROR) > i) + CONSTRAINT test_json_constraint4 + CHECK (JSON_QUERY(js FORMAT JSON, '$.a' RETURNING jsonb WITH CONDITIONAL WRAPPER EMPTY OBJECT ON ERROR) < jsonb '[10]') + CONSTRAINT test_json_constraint5 + CHECK (JSON_QUERY(js FORMAT JSON, '$.a' RETURNING char(5) OMIT QUOTES EMPTY ARRAY ON EMPTY) > 'a') +); +\d test_json_constraints + Table "public.test_json_constraints" + Column | Type | Collation | Nullable | Default +--------+---------+-----------+----------+--------------------------------------------------------------------------------------------------------- + js | text | | | + i | integer | | | + x | jsonb | | | JSON_QUERY('[1,2]'::json, '$[*]' RETURNING json WITH UNCONDITIONAL WRAPPER NULL ON EMPTY NULL ON ERROR) +Check constraints: + "test_json_constraint1" CHECK (pg_catalog.json_is_valid(js, 'any'::text, false)) + "test_json_constraint2" CHECK (JSON_EXISTS(js FORMAT JSON, '$."a"' PASSING i + 5 AS int, i::text AS txt, to_json(ARRAY[1, 2, 3]) AS arr FALSE ON ERROR)) + "test_json_constraint3" CHECK ((JSON_VALUE(js::json, '$."a"' RETURNING integer DEFAULT ('12'::text || i)::integer ON EMPTY ERROR ON ERROR)) > i) + "test_json_constraint4" CHECK ((JSON_QUERY(js FORMAT JSON, '$."a"' RETURNING jsonb WITH CONDITIONAL WRAPPER NULL ON EMPTY EMPTY OBJECT ON ERROR)) < '[10]'::jsonb) + "test_json_constraint5" CHECK ((JSON_QUERY(js FORMAT JSON, '$."a"' RETURNING character(5) OMIT QUOTES EMPTY ARRAY ON EMPTY NULL ON ERROR)) > 'a'::bpchar) + +SELECT check_clause +FROM information_schema.check_constraints +WHERE constraint_name LIKE 'test_json_constraint%'; + check_clause +-------------------------------------------------------------------------------------------------------------------------------------- + (pg_catalog.json_is_valid(js, 'any'::text, false)) + (JSON_EXISTS(js FORMAT JSON, '$."a"' PASSING (i + 5) AS int, (i)::text AS txt, to_json(ARRAY[1, 2, 3]) AS arr FALSE ON ERROR)) + ((JSON_VALUE((js)::json, '$."a"' RETURNING integer DEFAULT (('12'::text || i))::integer ON EMPTY ERROR ON ERROR) > i)) + ((JSON_QUERY(js FORMAT JSON, '$."a"' RETURNING jsonb WITH CONDITIONAL WRAPPER NULL ON EMPTY EMPTY OBJECT ON ERROR) < '[10]'::jsonb)) + ((JSON_QUERY(js FORMAT JSON, '$."a"' RETURNING character(5) OMIT QUOTES EMPTY ARRAY ON EMPTY NULL ON ERROR) > 'a'::bpchar)) +(5 rows) + +SELECT adsrc FROM pg_attrdef WHERE adrelid = 'test_json_constraints'::regclass; + adsrc +--------------------------------------------------------------------------------------------------------- + JSON_QUERY('[1,2]'::json, '$[*]' RETURNING json WITH UNCONDITIONAL WRAPPER NULL ON EMPTY NULL ON ERROR) +(1 row) + +INSERT INTO test_json_constraints VALUES ('', 1); +ERROR: new row for relation "test_json_constraints" violates check constraint "test_json_constraint1" +DETAIL: Failing row contains (, 1, [1, 2]). 
+INSERT INTO test_json_constraints VALUES ('1', 1); +ERROR: new row for relation "test_json_constraints" violates check constraint "test_json_constraint2" +DETAIL: Failing row contains (1, 1, [1, 2]). +INSERT INTO test_json_constraints VALUES ('[]'); +ERROR: new row for relation "test_json_constraints" violates check constraint "test_json_constraint2" +DETAIL: Failing row contains ([], null, [1, 2]). +INSERT INTO test_json_constraints VALUES ('{"b": 1}', 1); +ERROR: new row for relation "test_json_constraints" violates check constraint "test_json_constraint2" +DETAIL: Failing row contains ({"b": 1}, 1, [1, 2]). +INSERT INTO test_json_constraints VALUES ('{"a": 1}', 1); +ERROR: new row for relation "test_json_constraints" violates check constraint "test_json_constraint3" +DETAIL: Failing row contains ({"a": 1}, 1, [1, 2]). +INSERT INTO test_json_constraints VALUES ('{"a": 7}', 1); +ERROR: new row for relation "test_json_constraints" violates check constraint "test_json_constraint5" +DETAIL: Failing row contains ({"a": 7}, 1, [1, 2]). +INSERT INTO test_json_constraints VALUES ('{"a": 10}', 1); +ERROR: new row for relation "test_json_constraints" violates check constraint "test_json_constraint4" +DETAIL: Failing row contains ({"a": 10}, 1, [1, 2]). +DROP TABLE test_json_constraints; +-- JSON_TABLE +-- Should fail (JSON_TABLE can be used only in FROM clause) +SELECT JSON_TABLE('[]', '$'); +ERROR: syntax error at or near "(" +LINE 1: SELECT JSON_TABLE('[]', '$'); + ^ +-- Should fail (no columns) +SELECT * FROM JSON_TABLE(NULL, '$' COLUMNS ()); +ERROR: syntax error at or near ")" +LINE 1: SELECT * FROM JSON_TABLE(NULL, '$' COLUMNS ()); + ^ +-- NULL => empty table +SELECT * FROM JSON_TABLE(NULL, '$' COLUMNS (foo int)) bar; + foo +----- +(0 rows) + +-- invalid json => empty table +SELECT * FROM JSON_TABLE('', '$' COLUMNS (foo int)) bar; + foo +----- +(0 rows) + +SELECT * FROM JSON_TABLE('' FORMAT JSON, '$' COLUMNS (foo int)) bar; + foo +----- +(0 rows) + +-- invalid json => error +SELECT * FROM JSON_TABLE('' FORMAT JSON, '$' COLUMNS (foo int) ERROR ON ERROR) bar; +ERROR: invalid input syntax for type json +DETAIL: The input string ended unexpectedly. 
+CONTEXT: JSON data, line 1: +-- +SELECT * FROM JSON_TABLE('123' FORMAT JSON, '$' + COLUMNS (item int PATH '$', foo int)) bar; + item | foo +------+----- + 123 | +(1 row) + +SELECT * FROM JSON_TABLE(json '123', '$' + COLUMNS (item int PATH '$', foo int)) bar; + item | foo +------+----- + 123 | +(1 row) + +-- JSON_TABLE: basic functionality +SELECT * +FROM + (VALUES + ('1'), + ('[]'), + ('{}'), + ('[1, 1.23, "2", "aaaaaaa", null, false, true, {"aaa": 123}, "[1,2]", "\"str\""]'), + ('err') + ) vals(js) + LEFT OUTER JOIN +-- JSON_TABLE is implicitly lateral + JSON_TABLE( + vals.js FORMAT json, 'lax $[*]' + COLUMNS ( + id FOR ORDINALITY, + id2 FOR ORDINALITY, -- allowed additional ordinality columns + "int" int PATH '$', + "text" text PATH '$', + "char(4)" char(4) PATH '$', + "bool" bool PATH '$', + "numeric" numeric PATH '$', + js json PATH '$', + jb jsonb PATH '$', + jst text FORMAT JSON PATH '$', + jsc char(4) FORMAT JSON PATH '$', + jsv varchar(4) FORMAT JSON PATH '$', + jsb jsonb FORMAT JSON PATH '$', + aaa int, -- implicit path '$."aaa"', + aaa1 int PATH '$.aaa' + ) + ) jt + ON true; + js | id | id2 | int | text | char(4) | bool | numeric | js | jb | jst | jsc | jsv | jsb | aaa | aaa1 +--------------------------------------------------------------------------------+----+-----+-----+---------+---------+------+---------+-----------+-----------+--------------+------+------+--------------+-----+------ + 1 | 1 | 1 | 1 | 1 | 1 | t | 1 | 1 | 1 | 1 | 1 | 1 | 1 | | + [] | | | | | | | | | | | | | | | + {} | 1 | 1 | | | | | | | | {} | {} | {} | {} | | + [1, 1.23, "2", "aaaaaaa", null, false, true, {"aaa": 123}, "[1,2]", "\"str\""] | 1 | 1 | 1 | 1 | 1 | t | 1 | 1 | 1 | 1 | 1 | 1 | 1 | | + [1, 1.23, "2", "aaaaaaa", null, false, true, {"aaa": 123}, "[1,2]", "\"str\""] | 2 | 2 | 1 | 1.23 | 1.23 | | 1.23 | 1.23 | 1.23 | 1.23 | 1.23 | 1.23 | 1.23 | | + [1, 1.23, "2", "aaaaaaa", null, false, true, {"aaa": 123}, "[1,2]", "\"str\""] | 3 | 3 | 2 | 2 | 2 | | 2 | "2" | "2" | "2" | "2" | "2" | "2" | | + [1, 1.23, "2", "aaaaaaa", null, false, true, {"aaa": 123}, "[1,2]", "\"str\""] | 4 | 4 | | aaaaaaa | aaaa | | | "aaaaaaa" | "aaaaaaa" | "aaaaaaa" | "aaa | "aaa | "aaaaaaa" | | + [1, 1.23, "2", "aaaaaaa", null, false, true, {"aaa": 123}, "[1,2]", "\"str\""] | 5 | 5 | | | | | | | | null | null | null | null | | + [1, 1.23, "2", "aaaaaaa", null, false, true, {"aaa": 123}, "[1,2]", "\"str\""] | 6 | 6 | 0 | false | fals | f | | false | false | false | fals | fals | false | | + [1, 1.23, "2", "aaaaaaa", null, false, true, {"aaa": 123}, "[1,2]", "\"str\""] | 7 | 7 | 1 | true | true | t | | true | true | true | true | true | true | | + [1, 1.23, "2", "aaaaaaa", null, false, true, {"aaa": 123}, "[1,2]", "\"str\""] | 8 | 8 | | | | | | | | {"aaa": 123} | {"aa | {"aa | {"aaa": 123} | 123 | 123 + [1, 1.23, "2", "aaaaaaa", null, false, true, {"aaa": 123}, "[1,2]", "\"str\""] | 9 | 9 | | [1,2] | [1,2 | | | "[1,2]" | "[1,2]" | "[1,2]" | "[1, | "[1, | "[1,2]" | | + [1, 1.23, "2", "aaaaaaa", null, false, true, {"aaa": 123}, "[1,2]", "\"str\""] | 10 | 10 | | "str" | "str | | | "\"str\"" | "\"str\"" | "\"str\"" | "\"s | "\"s | "\"str\"" | | + err | | | | | | | | | | | | | | | +(14 rows) + +-- JSON_TABLE: Test backward parsing +CREATE VIEW json_table_view AS +SELECT * FROM + JSON_TABLE( + 'null' FORMAT JSON, 'lax $[*]' PASSING 1 + 2 AS a, json '"foo"' AS "b c" + COLUMNS ( + id FOR ORDINALITY, + id2 FOR ORDINALITY, -- allowed additional ordinality columns + "int" int PATH '$', + "text" text PATH '$', + "char(4)" char(4) PATH '$', + 
"bool" bool PATH '$', + "numeric" numeric PATH '$', + js json PATH '$', + jb jsonb PATH '$', + jst text FORMAT JSON PATH '$', + jsc char(4) FORMAT JSON PATH '$', + jsv varchar(4) FORMAT JSON PATH '$', + jsb jsonb FORMAT JSON PATH '$', + aaa int, -- implicit path '$."aaa"', + aaa1 int PATH '$.aaa', + NESTED PATH '$[1]' AS p1 COLUMNS ( + a1 int, + NESTED PATH '$[*]' AS "p1 1" COLUMNS ( + a11 text + ), + b1 text + ), + NESTED PATH '$[2]' AS p2 COLUMNS ( + NESTED PATH '$[*]' AS "p2:1" COLUMNS ( + a21 text + ), + NESTED PATH '$[*]' AS p22 COLUMNS ( + a22 text + ) + ) + ) + ); +\sv json_table_view +CREATE OR REPLACE VIEW public.json_table_view AS + SELECT "json_table".id, + "json_table".id2, + "json_table"."int", + "json_table".text, + "json_table"."char(4)", + "json_table".bool, + "json_table"."numeric", + "json_table".js, + "json_table".jb, + "json_table".jst, + "json_table".jsc, + "json_table".jsv, + "json_table".jsb, + "json_table".aaa, + "json_table".aaa1, + "json_table".a1, + "json_table".b1, + "json_table".a11, + "json_table".a21, + "json_table".a22 + FROM JSON_TABLE( + 'null'::text FORMAT JSON, '$[*]' AS json_table_path_1 + PASSING + 1 + 2 AS a, + '"foo"'::json AS "b c" + COLUMNS ( + id FOR ORDINALITY, + id2 FOR ORDINALITY, + "int" integer PATH '$', + text text PATH '$', + "char(4)" character(4) PATH '$', + bool boolean PATH '$', + "numeric" numeric PATH '$', + js json PATH '$', + jb jsonb PATH '$', + jst text FORMAT JSON PATH '$', + jsc character(4) FORMAT JSON PATH '$', + jsv character varying(4) FORMAT JSON PATH '$', + jsb jsonb FORMAT JSON PATH '$', + aaa integer PATH '$."aaa"', + aaa1 integer PATH '$."aaa"', + NESTED PATH '$[1]' AS p1 + COLUMNS ( + a1 integer PATH '$."a1"', + b1 text PATH '$."b1"', + NESTED PATH '$[*]' AS "p1 1" + COLUMNS ( + a11 text PATH '$."a11"' + ) + ), + NESTED PATH '$[2]' AS p2 + COLUMNS ( + NESTED PATH '$[*]' AS "p2:1" + COLUMNS ( + a21 text PATH '$."a21"' + ), + NESTED PATH '$[*]' AS p22 + COLUMNS ( + a22 text PATH '$."a22"' + ) + ) + ) + PLAN (json_table_path_1 OUTER ((p1 OUTER "p1 1") UNION (p2 OUTER ("p2:1" UNION p22)))) + ) +EXPLAIN (COSTS OFF, VERBOSE) SELECT * FROM json_table_view; + QUERY PLAN +-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + Table Function Scan on "json_table" + Output: "json_table".id, "json_table".id2, "json_table"."int", "json_table".text, "json_table"."char(4)", "json_table".bool, "json_table"."numeric", "json_table".js, "json_table".jb, "json_table".jst, "json_table".jsc, "json_table".jsv, "json_table".jsb, "json_table".aaa, "json_table".aaa1, "json_table".a1, "json_table".b1, "json_table".a11, "json_table".a21, "json_table".a22 + Table Function 
Call: JSON_TABLE('null'::text FORMAT JSON, '$[*]' AS json_table_path_1 PASSING 3 AS a, '"foo"'::json AS "b c" COLUMNS (id FOR ORDINALITY, id2 FOR ORDINALITY, "int" integer PATH '$', text text PATH '$', "char(4)" character(4) PATH '$', bool boolean PATH '$', "numeric" numeric PATH '$', js json PATH '$', jb jsonb PATH '$', jst text FORMAT JSON PATH '$', jsc character(4) FORMAT JSON PATH '$', jsv character varying(4) FORMAT JSON PATH '$', jsb jsonb FORMAT JSON PATH '$', aaa integer PATH '$."aaa"', aaa1 integer PATH '$."aaa"', NESTED PATH '$[1]' AS p1 COLUMNS (a1 integer PATH '$."a1"', b1 text PATH '$."b1"', NESTED PATH '$[*]' AS "p1 1" COLUMNS (a11 text PATH '$."a11"')), NESTED PATH '$[2]' AS p2 COLUMNS ( NESTED PATH '$[*]' AS "p2:1" COLUMNS (a21 text PATH '$."a21"'), NESTED PATH '$[*]' AS p22 COLUMNS (a22 text PATH '$."a22"'))) PLAN (json_table_path_1 OUTER ((p1 OUTER "p1 1") UNION (p2 OUTER ("p2:1" UNION p22))))) +(3 rows) + +-- JSON_TABLE: ON EMPTY/ON ERROR behavior +SELECT * +FROM + (VALUES ('1'), ('err'), ('"err"')) vals(js), + JSON_TABLE(vals.js FORMAT JSON, '$' COLUMNS (a int PATH '$')) jt; + js | a +-------+--- + 1 | 1 + "err" | +(2 rows) + +SELECT * +FROM + (VALUES ('1'), ('err'), ('"err"')) vals(js) + LEFT OUTER JOIN + JSON_TABLE(vals.js FORMAT JSON, '$' COLUMNS (a int PATH '$') ERROR ON ERROR) jt + ON true; +ERROR: invalid input syntax for type json +DETAIL: Token "err" is invalid. +CONTEXT: JSON data, line 1: err +SELECT * +FROM + (VALUES ('1'), ('err'), ('"err"')) vals(js) + LEFT OUTER JOIN + JSON_TABLE(vals.js FORMAT JSON, '$' COLUMNS (a int PATH '$' ERROR ON ERROR)) jt + ON true; +ERROR: invalid input syntax for integer: "err" +SELECT * FROM JSON_TABLE('1', '$' COLUMNS (a int PATH '$.a' ERROR ON EMPTY)) jt; + a +--- + +(1 row) + +SELECT * FROM JSON_TABLE('1', '$' COLUMNS (a int PATH 'strict $.a' ERROR ON EMPTY) ERROR ON ERROR) jt; +ERROR: SQL/JSON member not found +SELECT * FROM JSON_TABLE('1', '$' COLUMNS (a int PATH 'lax $.a' ERROR ON EMPTY) ERROR ON ERROR) jt; +ERROR: no SQL/JSON item +SELECT * FROM JSON_TABLE(json '"a"', '$' COLUMNS (a int PATH '$' DEFAULT 1 ON EMPTY DEFAULT 2 ON ERROR)) jt; + a +--- + 2 +(1 row) + +SELECT * FROM JSON_TABLE(json '"a"', '$' COLUMNS (a int PATH 'strict $.a' DEFAULT 1 ON EMPTY DEFAULT 2 ON ERROR)) jt; + a +--- + 2 +(1 row) + +SELECT * FROM JSON_TABLE(json '"a"', '$' COLUMNS (a int PATH 'lax $.a' DEFAULT 1 ON EMPTY DEFAULT 2 ON ERROR)) jt; + a +--- + 1 +(1 row) + +-- JSON_TABLE: nested paths and plans +-- Should fail (JSON_TABLE columns shall contain explicit AS path +-- specifications if explicit PLAN clause is used) +SELECT * FROM JSON_TABLE( + json '[]', '$' -- AS required here + COLUMNS ( + foo int PATH '$' + ) + PLAN DEFAULT (UNION) +) jt; +ERROR: invalid JSON_TABLE expression +LINE 2: json '[]', '$' + ^ +DETAIL: JSON_TABLE columns shall contain explicit AS pathname specification if explicit PLAN clause is used +SELECT * FROM JSON_TABLE( + json '[]', '$' AS path1 + COLUMNS ( + NESTED PATH '$' COLUMNS ( -- AS required here + foo int PATH '$' + ) + ) + PLAN DEFAULT (UNION) +) jt; +ERROR: invalid JSON_TABLE expression +LINE 4: NESTED PATH '$' COLUMNS ( + ^ +DETAIL: JSON_TABLE columns shall contain explicit AS pathname specification if explicit PLAN clause is used +-- Should fail (column names anf path names shall be distinct) +SELECT * FROM JSON_TABLE( + json '[]', '$' AS a + COLUMNS ( + a int + ) +) jt; +ERROR: duplicate JSON_TABLE column name: a +HINT: JSON_TABLE path names and column names shall be distinct from one another +SELECT * FROM 
JSON_TABLE( + json '[]', '$' AS a + COLUMNS ( + b int, + NESTED PATH '$' AS a + COLUMNS ( + c int + ) + ) +) jt; +ERROR: duplicate JSON_TABLE column name: a +HINT: JSON_TABLE path names and column names shall be distinct from one another +SELECT * FROM JSON_TABLE( + json '[]', '$' + COLUMNS ( + b int, + NESTED PATH '$' AS b + COLUMNS ( + c int + ) + ) +) jt; +ERROR: duplicate JSON_TABLE column name: b +HINT: JSON_TABLE path names and column names shall be distinct from one another +SELECT * FROM JSON_TABLE( + json '[]', '$' + COLUMNS ( + NESTED PATH '$' AS a + COLUMNS ( + b int + ), + NESTED PATH '$' + COLUMNS ( + NESTED PATH '$' AS a + COLUMNS ( + c int + ) + ) + ) +) jt; +ERROR: duplicate JSON_TABLE column name: a +HINT: JSON_TABLE path names and column names shall be distinct from one another +-- JSON_TABLE: plan validation +SELECT * FROM JSON_TABLE( + json 'null', '$[*]' AS p0 + COLUMNS ( + NESTED PATH '$' AS p1 COLUMNS ( + NESTED PATH '$' AS p11 COLUMNS ( foo int ), + NESTED PATH '$' AS p12 COLUMNS ( bar int ) + ), + NESTED PATH '$' AS p2 COLUMNS ( + NESTED PATH '$' AS p21 COLUMNS ( baz int ) + ) + ) + PLAN (p1) +) jt; +ERROR: invalid JSON_TABLE plan +LINE 12: PLAN (p1) + ^ +DETAIL: path name mismatch: expected p0 but p1 is given +SELECT * FROM JSON_TABLE( + json 'null', '$[*]' AS p0 + COLUMNS ( + NESTED PATH '$' AS p1 COLUMNS ( + NESTED PATH '$' AS p11 COLUMNS ( foo int ), + NESTED PATH '$' AS p12 COLUMNS ( bar int ) + ), + NESTED PATH '$' AS p2 COLUMNS ( + NESTED PATH '$' AS p21 COLUMNS ( baz int ) + ) + ) + PLAN (p0) +) jt; +ERROR: invalid JSON_TABLE plan +LINE 4: NESTED PATH '$' AS p1 COLUMNS ( + ^ +DETAIL: plan node for nested path p1 was not found in plan +SELECT * FROM JSON_TABLE( + json 'null', '$[*]' AS p0 + COLUMNS ( + NESTED PATH '$' AS p1 COLUMNS ( + NESTED PATH '$' AS p11 COLUMNS ( foo int ), + NESTED PATH '$' AS p12 COLUMNS ( bar int ) + ), + NESTED PATH '$' AS p2 COLUMNS ( + NESTED PATH '$' AS p21 COLUMNS ( baz int ) + ) + ) + PLAN (p0 OUTER p3) +) jt; +ERROR: invalid JSON_TABLE plan +LINE 4: NESTED PATH '$' AS p1 COLUMNS ( + ^ +DETAIL: plan node for nested path p1 was not found in plan +SELECT * FROM JSON_TABLE( + json 'null', '$[*]' AS p0 + COLUMNS ( + NESTED PATH '$' AS p1 COLUMNS ( + NESTED PATH '$' AS p11 COLUMNS ( foo int ), + NESTED PATH '$' AS p12 COLUMNS ( bar int ) + ), + NESTED PATH '$' AS p2 COLUMNS ( + NESTED PATH '$' AS p21 COLUMNS ( baz int ) + ) + ) + PLAN (p0 OUTER (p1 CROSS p13)) +) jt; +ERROR: invalid JSON_TABLE plan +LINE 8: NESTED PATH '$' AS p2 COLUMNS ( + ^ +DETAIL: plan node for nested path p2 was not found in plan +SELECT * FROM JSON_TABLE( + json 'null', '$[*]' AS p0 + COLUMNS ( + NESTED PATH '$' AS p1 COLUMNS ( + NESTED PATH '$' AS p11 COLUMNS ( foo int ), + NESTED PATH '$' AS p12 COLUMNS ( bar int ) + ), + NESTED PATH '$' AS p2 COLUMNS ( + NESTED PATH '$' AS p21 COLUMNS ( baz int ) + ) + ) + PLAN (p0 OUTER (p1 CROSS p2)) +) jt; +ERROR: invalid JSON_TABLE plan +LINE 5: NESTED PATH '$' AS p11 COLUMNS ( foo int ), + ^ +DETAIL: plan node for nested path p11 was not found in plan +SELECT * FROM JSON_TABLE( + json 'null', '$[*]' AS p0 + COLUMNS ( + NESTED PATH '$' AS p1 COLUMNS ( + NESTED PATH '$' AS p11 COLUMNS ( foo int ), + NESTED PATH '$' AS p12 COLUMNS ( bar int ) + ), + NESTED PATH '$' AS p2 COLUMNS ( + NESTED PATH '$' AS p21 COLUMNS ( baz int ) + ) + ) + PLAN (p0 OUTER ((p1 UNION p11) CROSS p2)) +) jt; +ERROR: invalid JSON_TABLE plan +LINE 12: PLAN (p0 OUTER ((p1 UNION p11) CROSS p2)) + ^ +DETAIL: plan node contains some extra or duplicate 
sibling nodes +SELECT * FROM JSON_TABLE( + json 'null', '$[*]' AS p0 + COLUMNS ( + NESTED PATH '$' AS p1 COLUMNS ( + NESTED PATH '$' AS p11 COLUMNS ( foo int ), + NESTED PATH '$' AS p12 COLUMNS ( bar int ) + ), + NESTED PATH '$' AS p2 COLUMNS ( + NESTED PATH '$' AS p21 COLUMNS ( baz int ) + ) + ) + PLAN (p0 OUTER ((p1 INNER p11) CROSS p2)) +) jt; +ERROR: invalid JSON_TABLE plan +LINE 6: NESTED PATH '$' AS p12 COLUMNS ( bar int ) + ^ +DETAIL: plan node for nested path p12 was not found in plan +SELECT * FROM JSON_TABLE( + json 'null', '$[*]' AS p0 + COLUMNS ( + NESTED PATH '$' AS p1 COLUMNS ( + NESTED PATH '$' AS p11 COLUMNS ( foo int ), + NESTED PATH '$' AS p12 COLUMNS ( bar int ) + ), + NESTED PATH '$' AS p2 COLUMNS ( + NESTED PATH '$' AS p21 COLUMNS ( baz int ) + ) + ) + PLAN (p0 OUTER ((p1 INNER (p12 CROSS p11)) CROSS p2)) +) jt; +ERROR: invalid JSON_TABLE plan +LINE 9: NESTED PATH '$' AS p21 COLUMNS ( baz int ) + ^ +DETAIL: plan node for nested path p21 was not found in plan +SELECT * FROM JSON_TABLE( + json 'null', 'strict $[*]' AS p0 + COLUMNS ( + NESTED PATH '$' AS p1 COLUMNS ( + NESTED PATH '$' AS p11 COLUMNS ( foo int ), + NESTED PATH '$' AS p12 COLUMNS ( bar int ) + ), + NESTED PATH '$' AS p2 COLUMNS ( + NESTED PATH '$' AS p21 COLUMNS ( baz int ) + ) + ) + PLAN (p0 OUTER ((p1 INNER (p12 CROSS p11)) CROSS (p2 INNER p21))) +) jt; + bar | foo | baz +-----+-----+----- +(0 rows) + +SELECT * FROM JSON_TABLE( + json 'null', 'strict $[*]' -- without root path name + COLUMNS ( + NESTED PATH '$' AS p1 COLUMNS ( + NESTED PATH '$' AS p11 COLUMNS ( foo int ), + NESTED PATH '$' AS p12 COLUMNS ( bar int ) + ), + NESTED PATH '$' AS p2 COLUMNS ( + NESTED PATH '$' AS p21 COLUMNS ( baz int ) + ) + ) + PLAN ((p1 INNER (p12 CROSS p11)) CROSS (p2 INNER p21)) +) jt; + bar | foo | baz +-----+-----+----- +(0 rows) + +-- JSON_TABLE: plan execution +CREATE TEMP TABLE json_table_test (js text); +INSERT INTO json_table_test +VALUES ( + '[ + {"a": 1, "b": [], "c": []}, + {"a": 2, "b": [1, 2, 3], "c": [10, null, 20]}, + {"a": 3, "b": [1, 2], "c": []}, + {"x": "4", "b": [1, 2], "c": 123} + ]' +); +-- unspecified plan (outer, union) +select + jt.* +from + json_table_test jtt, + json_table ( + jtt.js,'strict $[*]' as p + columns ( + n for ordinality, + a int path 'lax $.a' default -1 on empty, + nested path 'strict $.b[*]' as pb columns ( b int path '$' ), + nested path 'strict $.c[*]' as pc columns ( c int path '$' ) + ) + ) jt; + n | a | b | c +---+----+---+---- + 1 | 1 | | + 2 | 2 | 1 | + 2 | 2 | 2 | + 2 | 2 | 3 | + 2 | 2 | | 10 + 2 | 2 | | + 2 | 2 | | 20 + 3 | 3 | 1 | + 3 | 3 | 2 | + 4 | -1 | 1 | + 4 | -1 | 2 | +(11 rows) + +-- default plan (outer, union) +select + jt.* +from + json_table_test jtt, + json_table ( + jtt.js FORMAT JSON,'strict $[*]' as p + columns ( + n for ordinality, + a int path 'lax $.a' default -1 on empty, + nested path 'strict $.b[*]' as pb columns ( b int path '$' ), + nested path 'strict $.c[*]' as pc columns ( c int path '$' ) + ) + plan default (outer, union) + ) jt; + n | a | b | c +---+----+---+---- + 1 | 1 | | + 2 | 2 | 1 | + 2 | 2 | 2 | + 2 | 2 | 3 | + 2 | 2 | | 10 + 2 | 2 | | + 2 | 2 | | 20 + 3 | 3 | 1 | + 3 | 3 | 2 | + 4 | -1 | 1 | + 4 | -1 | 2 | +(11 rows) + +-- specific plan (p outer (pb union pc)) +select + jt.* +from + json_table_test jtt, + json_table ( + jtt.js FORMAT JSON,'strict $[*]' as p + columns ( + n for ordinality, + a int path 'lax $.a' default -1 on empty, + nested path 'strict $.b[*]' as pb columns ( b int path '$' ), + nested path 'strict $.c[*]' as pc columns 
( c int path '$' ) + ) + plan (p outer (pb union pc)) + ) jt; + n | a | b | c +---+----+---+---- + 1 | 1 | | + 2 | 2 | 1 | + 2 | 2 | 2 | + 2 | 2 | 3 | + 2 | 2 | | 10 + 2 | 2 | | + 2 | 2 | | 20 + 3 | 3 | 1 | + 3 | 3 | 2 | + 4 | -1 | 1 | + 4 | -1 | 2 | +(11 rows) + +-- specific plan (p outer (pc union pb)) +select + jt.* +from + json_table_test jtt, + json_table ( + jtt.js FORMAT JSON,'strict $[*]' as p + columns ( + n for ordinality, + a int path 'lax $.a' default -1 on empty, + nested path 'strict $.b[*]' as pb columns ( b int path '$' ), + nested path 'strict $.c[*]' as pc columns ( c int path '$' ) + ) + plan (p outer (pc union pb)) + ) jt; + n | a | c | b +---+----+----+--- + 1 | 1 | | + 2 | 2 | 10 | + 2 | 2 | | + 2 | 2 | 20 | + 2 | 2 | | 1 + 2 | 2 | | 2 + 2 | 2 | | 3 + 3 | 3 | | 1 + 3 | 3 | | 2 + 4 | -1 | | 1 + 4 | -1 | | 2 +(11 rows) + +-- default plan (inner, union) +select + jt.* +from + json_table_test jtt, + json_table ( + jtt.js,'strict $[*]' as p + columns ( + n for ordinality, + a int path 'lax $.a' default -1 on empty, + nested path 'strict $.b[*]' as pb columns ( b int path '$' ), + nested path 'strict $.c[*]' as pc columns ( c int path '$' ) + ) + plan default (inner) + ) jt; + n | a | b | c +---+----+---+---- + 2 | 2 | 1 | + 2 | 2 | 2 | + 2 | 2 | 3 | + 2 | 2 | | 10 + 2 | 2 | | + 2 | 2 | | 20 + 3 | 3 | 1 | + 3 | 3 | 2 | + 4 | -1 | 1 | + 4 | -1 | 2 | +(10 rows) + +-- specific plan (p inner (pb union pc)) +select + jt.* +from + json_table_test jtt, + json_table ( + jtt.js FORMAT JSON,'strict $[*]' as p + columns ( + n for ordinality, + a int path 'lax $.a' default -1 on empty, + nested path 'strict $.b[*]' as pb columns ( b int path '$' ), + nested path 'strict $.c[*]' as pc columns ( c int path '$' ) + ) + plan (p inner (pb union pc)) + ) jt; + n | a | b | c +---+----+---+---- + 2 | 2 | 1 | + 2 | 2 | 2 | + 2 | 2 | 3 | + 2 | 2 | | 10 + 2 | 2 | | + 2 | 2 | | 20 + 3 | 3 | 1 | + 3 | 3 | 2 | + 4 | -1 | 1 | + 4 | -1 | 2 | +(10 rows) + +-- default plan (inner, cross) +select + jt.* +from + json_table_test jtt, + json_table ( + jtt.js FORMAT JSON,'strict $[*]' as p + columns ( + n for ordinality, + a int path 'lax $.a' default -1 on empty, + nested path 'strict $.b[*]' as pb columns ( b int path '$' ), + nested path 'strict $.c[*]' as pc columns ( c int path '$' ) + ) + plan default (cross, inner) + ) jt; + n | a | b | c +---+---+---+---- + 2 | 2 | 1 | 10 + 2 | 2 | 1 | + 2 | 2 | 1 | 20 + 2 | 2 | 2 | 10 + 2 | 2 | 2 | + 2 | 2 | 2 | 20 + 2 | 2 | 3 | 10 + 2 | 2 | 3 | + 2 | 2 | 3 | 20 +(9 rows) + +-- specific plan (p inner (pb cross pc)) +select + jt.* +from + json_table_test jtt, + json_table ( + jtt.js FORMAT JSON,'strict $[*]' as p + columns ( + n for ordinality, + a int path 'lax $.a' default -1 on empty, + nested path 'strict $.b[*]' as pb columns ( b int path '$' ), + nested path 'strict $.c[*]' as pc columns ( c int path '$' ) + ) + plan (p inner (pb cross pc)) + ) jt; + n | a | b | c +---+---+---+---- + 2 | 2 | 1 | 10 + 2 | 2 | 1 | + 2 | 2 | 1 | 20 + 2 | 2 | 2 | 10 + 2 | 2 | 2 | + 2 | 2 | 2 | 20 + 2 | 2 | 3 | 10 + 2 | 2 | 3 | + 2 | 2 | 3 | 20 +(9 rows) + +-- default plan (outer, cross) +select + jt.* +from + json_table_test jtt, + json_table ( + jtt.js FORMAT JSON,'strict $[*]' as p + columns ( + n for ordinality, + a int path 'lax $.a' default -1 on empty, + nested path 'strict $.b[*]' as pb columns ( b int path '$' ), + nested path 'strict $.c[*]' as pc columns ( c int path '$' ) + ) + plan default (outer, cross) + ) jt; + n | a | b | c +---+----+---+---- + 1 | 1 | | + 2 | 2 | 1 
| 10 + 2 | 2 | 1 | + 2 | 2 | 1 | 20 + 2 | 2 | 2 | 10 + 2 | 2 | 2 | + 2 | 2 | 2 | 20 + 2 | 2 | 3 | 10 + 2 | 2 | 3 | + 2 | 2 | 3 | 20 + 3 | 3 | | + 4 | -1 | | +(12 rows) + +-- specific plan (p outer (pb cross pc)) +select + jt.* +from + json_table_test jtt, + json_table ( + jtt.js FORMAT JSON,'strict $[*]' as p + columns ( + n for ordinality, + a int path 'lax $.a' default -1 on empty, + nested path 'strict $.b[*]' as pb columns ( b int path '$' ), + nested path 'strict $.c[*]' as pc columns ( c int path '$' ) + ) + plan (p outer (pb cross pc)) + ) jt; + n | a | b | c +---+----+---+---- + 1 | 1 | | + 2 | 2 | 1 | 10 + 2 | 2 | 1 | + 2 | 2 | 1 | 20 + 2 | 2 | 2 | 10 + 2 | 2 | 2 | + 2 | 2 | 2 | 20 + 2 | 2 | 3 | 10 + 2 | 2 | 3 | + 2 | 2 | 3 | 20 + 3 | 3 | | + 4 | -1 | | +(12 rows) + +select + jt.*, b1 + 100 as b +from + json_table (json + '[ + {"a": 1, "b": [[1, 10], [2], [3, 30, 300]], "c": [1, null, 2]}, + {"a": 2, "b": [10, 20], "c": [1, null, 2]}, + {"x": "3", "b": [11, 22, 33, 44]} + ]', + '$[*]' as p + columns ( + n for ordinality, + a int path 'lax $.a' default -1 on error, + nested path 'strict $.b[*]' as pb columns ( + b text format json path '$', + nested path 'strict $[*]' as pb1 columns ( + b1 int path '$' + ) + ), + nested path 'strict $.c[*]' as pc columns ( + c text format json path '$', + nested path 'strict $[*]' as pc1 columns ( + c1 int path '$' + ) + ) + ) + --plan default(outer, cross) + plan(p outer ((pb inner pb1) cross (pc outer pc1))) + ) jt; + n | a | b | b1 | c | c1 | b +---+---+--------------+-----+------+----+----- + 1 | 1 | [1, 10] | 1 | 1 | | 101 + 1 | 1 | [1, 10] | 1 | null | | 101 + 1 | 1 | [1, 10] | 1 | 2 | | 101 + 1 | 1 | [1, 10] | 10 | 1 | | 110 + 1 | 1 | [1, 10] | 10 | null | | 110 + 1 | 1 | [1, 10] | 10 | 2 | | 110 + 1 | 1 | [2] | 2 | 1 | | 102 + 1 | 1 | [2] | 2 | null | | 102 + 1 | 1 | [2] | 2 | 2 | | 102 + 1 | 1 | [3, 30, 300] | 3 | 1 | | 103 + 1 | 1 | [3, 30, 300] | 3 | null | | 103 + 1 | 1 | [3, 30, 300] | 3 | 2 | | 103 + 1 | 1 | [3, 30, 300] | 30 | 1 | | 130 + 1 | 1 | [3, 30, 300] | 30 | null | | 130 + 1 | 1 | [3, 30, 300] | 30 | 2 | | 130 + 1 | 1 | [3, 30, 300] | 300 | 1 | | 400 + 1 | 1 | [3, 30, 300] | 300 | null | | 400 + 1 | 1 | [3, 30, 300] | 300 | 2 | | 400 + 2 | 2 | | | | | + 3 | | | | | | +(20 rows) + +-- Should succeed (JSON arguments are passed to root and nested paths) +SELECT * +FROM + generate_series(1, 4) x, + generate_series(1, 3) y, + JSON_TABLE(json + '[[1,2,3],[2,3,4,5],[3,4,5,6]]', + 'strict $[*] ? (@.[*] < $x)' + PASSING x AS x, y AS y + COLUMNS ( + y text FORMAT JSON PATH '$', + NESTED PATH 'strict $[*] ? 
(@ >= $y)' + COLUMNS ( + z int PATH '$' + ) + ) + ) jt; + x | y | y | z +---+---+-----------+--- + 2 | 1 | [1,2,3] | 1 + 2 | 1 | [1,2,3] | 2 + 2 | 1 | [1,2,3] | 3 + 3 | 1 | [1,2,3] | 1 + 3 | 1 | [1,2,3] | 2 + 3 | 1 | [1,2,3] | 3 + 3 | 1 | [2,3,4,5] | 2 + 3 | 1 | [2,3,4,5] | 3 + 3 | 1 | [2,3,4,5] | 4 + 3 | 1 | [2,3,4,5] | 5 + 4 | 1 | [1,2,3] | 1 + 4 | 1 | [1,2,3] | 2 + 4 | 1 | [1,2,3] | 3 + 4 | 1 | [2,3,4,5] | 2 + 4 | 1 | [2,3,4,5] | 3 + 4 | 1 | [2,3,4,5] | 4 + 4 | 1 | [2,3,4,5] | 5 + 4 | 1 | [3,4,5,6] | 3 + 4 | 1 | [3,4,5,6] | 4 + 4 | 1 | [3,4,5,6] | 5 + 4 | 1 | [3,4,5,6] | 6 + 2 | 2 | [1,2,3] | 2 + 2 | 2 | [1,2,3] | 3 + 3 | 2 | [1,2,3] | 2 + 3 | 2 | [1,2,3] | 3 + 3 | 2 | [2,3,4,5] | 2 + 3 | 2 | [2,3,4,5] | 3 + 3 | 2 | [2,3,4,5] | 4 + 3 | 2 | [2,3,4,5] | 5 + 4 | 2 | [1,2,3] | 2 + 4 | 2 | [1,2,3] | 3 + 4 | 2 | [2,3,4,5] | 2 + 4 | 2 | [2,3,4,5] | 3 + 4 | 2 | [2,3,4,5] | 4 + 4 | 2 | [2,3,4,5] | 5 + 4 | 2 | [3,4,5,6] | 3 + 4 | 2 | [3,4,5,6] | 4 + 4 | 2 | [3,4,5,6] | 5 + 4 | 2 | [3,4,5,6] | 6 + 2 | 3 | [1,2,3] | 3 + 3 | 3 | [1,2,3] | 3 + 3 | 3 | [2,3,4,5] | 3 + 3 | 3 | [2,3,4,5] | 4 + 3 | 3 | [2,3,4,5] | 5 + 4 | 3 | [1,2,3] | 3 + 4 | 3 | [2,3,4,5] | 3 + 4 | 3 | [2,3,4,5] | 4 + 4 | 3 | [2,3,4,5] | 5 + 4 | 3 | [3,4,5,6] | 3 + 4 | 3 | [3,4,5,6] | 4 + 4 | 3 | [3,4,5,6] | 5 + 4 | 3 | [3,4,5,6] | 6 +(52 rows) + +-- Should fail (JSON arguments are not passed to column paths) +SELECT * +FROM JSON_TABLE( + json '[1,2,3]', + '$[*] ? (@ < $x)' + PASSING 10 AS x + COLUMNS (y text FORMAT JSON PATH '$ ? (@ < $x)') + ) jt; +ERROR: could not find 'x' passed variable diff --git a/src/test/regress/expected/jsonb.out b/src/test/regress/expected/jsonb.out index cf16a15c0f..a8625e2741 100644 --- a/src/test/regress/expected/jsonb.out +++ b/src/test/regress/expected/jsonb.out @@ -2695,6 +2695,114 @@ SELECT count(*) FROM testjsonb WHERE j ?& ARRAY['public','disabled']; 42 (1 row) +SELECT count(*) FROM testjsonb WHERE j @~ '$.wait == null'; + count +------- + 1 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ '"CC" == $.wait'; + count +------- + 15 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ '$.wait == "CC" && true == $.public'; + count +------- + 2 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ '$.age == 25'; + count +------- + 2 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ '$.age == 25.0'; + count +------- + 2 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($)'; + count +------- + 1012 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.public)'; + count +------- + 194 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.bar)'; + count +------- + 0 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.public) || exists($.disabled)'; + count +------- + 337 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.public) && exists($.disabled)'; + count +------- + 42 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? '$.wait ? (@ == null)'; + count +------- + 1 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? '$.wait ? ("CC" == @)'; + count +------- + 15 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? '$ ? (@.wait == "CC" && true == @.public)'; + count +------- + 2 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? '$.age ? (@ == 25)'; + count +------- + 2 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? '$ ? (@.age == 25.0)'; + count +------- + 2 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? '$'; + count +------- + 1012 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? 
'$.public'; + count +------- + 194 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? '$.bar'; + count +------- + 0 +(1 row) + CREATE INDEX jidx ON testjsonb USING gin (j); SET enable_seqscan = off; SELECT count(*) FROM testjsonb WHERE j @> '{"wait":null}'; @@ -2770,6 +2878,196 @@ SELECT count(*) FROM testjsonb WHERE j ?& ARRAY['public','disabled']; 42 (1 row) +EXPLAIN (COSTS OFF) +SELECT count(*) FROM testjsonb WHERE j @~ '$.wait == null'; + QUERY PLAN +----------------------------------------------------------------- + Aggregate + -> Bitmap Heap Scan on testjsonb + Recheck Cond: (j @~ '($."wait" == null)'::jsonpath) + -> Bitmap Index Scan on jidx + Index Cond: (j @~ '($."wait" == null)'::jsonpath) +(5 rows) + +SELECT count(*) FROM testjsonb WHERE j @~ '$.wait == null'; + count +------- + 1 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($ ? (@.wait == null))'; + count +------- + 1 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.wait ? (@ == null))'; + count +------- + 1 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ '"CC" == $.wait'; + count +------- + 15 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ '$.wait == "CC" && true == $.public'; + count +------- + 2 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ '$.age == 25'; + count +------- + 2 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ '$.age == 25.0'; + count +------- + 2 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ '$.array[*] == "foo"'; + count +------- + 3 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ '$.array[*] == "bar"'; + count +------- + 3 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($ ? (@.array[*] == "bar"))'; + count +------- + 3 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.array ? (@[*] == "bar"))'; + count +------- + 3 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.array[*] ? (@ == "bar"))'; + count +------- + 3 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($)'; + count +------- + 1012 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.public)'; + count +------- + 194 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.bar)'; + count +------- + 0 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.public) || exists($.disabled)'; + count +------- + 337 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.public) && exists($.disabled)'; + count +------- + 42 +(1 row) + +EXPLAIN (COSTS OFF) +SELECT count(*) FROM testjsonb WHERE j @? '$.wait ? (@ == null)'; + QUERY PLAN +------------------------------------------------------------------- + Aggregate + -> Bitmap Heap Scan on testjsonb + Recheck Cond: (j @? '$."wait"?(@ == null)'::jsonpath) + -> Bitmap Index Scan on jidx + Index Cond: (j @? '$."wait"?(@ == null)'::jsonpath) +(5 rows) + +SELECT count(*) FROM testjsonb WHERE j @? '$.wait ? (@ == null)'; + count +------- + 1 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? '$.wait ? ("CC" == @)'; + count +------- + 15 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? '$ ? (@.wait == "CC" && true == @.public)'; + count +------- + 2 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? '$.age ? (@ == 25)'; + count +------- + 2 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? '$ ? (@.age == 25.0)'; + count +------- + 2 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? '$ ? (@.array[*] == "bar")'; + count +------- + 3 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? '$.array ? 
(@[*] == "bar")'; + count +------- + 3 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? '$.array[*] ? (@ == "bar")'; + count +------- + 3 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? '$'; + count +------- + 1012 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? '$.public'; + count +------- + 194 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? '$.bar'; + count +------- + 0 +(1 row) + -- array exists - array elements should behave as keys (for GIN index scans too) CREATE INDEX jidx_array ON testjsonb USING gin((j->'array')); SELECT count(*) from testjsonb WHERE j->'array' ? 'bar'; @@ -2920,6 +3218,161 @@ SELECT count(*) FROM testjsonb WHERE j @> '{}'; 1012 (1 row) +SELECT count(*) FROM testjsonb WHERE j @~ '$.wait == null'; + count +------- + 1 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($ ? (@.wait == null))'; + count +------- + 1 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.wait ? (@ == null))'; + count +------- + 1 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ '"CC" == $.wait'; + count +------- + 15 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ '$.wait == "CC" && true == $.public'; + count +------- + 2 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ '$.age == 25'; + count +------- + 2 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ '$.age == 25.0'; + count +------- + 2 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ '$.array[*] == "foo"'; + count +------- + 3 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ '$.array[*] == "bar"'; + count +------- + 3 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($ ? (@.array[*] == "bar"))'; + count +------- + 3 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.array ? (@[*] == "bar"))'; + count +------- + 3 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.array[*] ? (@ == "bar"))'; + count +------- + 3 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($)'; + count +------- + 1012 +(1 row) + +EXPLAIN (COSTS OFF) +SELECT count(*) FROM testjsonb WHERE j @? '$.wait ? (@ == null)'; + QUERY PLAN +------------------------------------------------------------------- + Aggregate + -> Bitmap Heap Scan on testjsonb + Recheck Cond: (j @? '$."wait"?(@ == null)'::jsonpath) + -> Bitmap Index Scan on jidx + Index Cond: (j @? '$."wait"?(@ == null)'::jsonpath) +(5 rows) + +SELECT count(*) FROM testjsonb WHERE j @? '$.wait ? (@ == null)'; + count +------- + 1 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? '$.wait ? ("CC" == @)'; + count +------- + 15 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? '$ ? (@.wait == "CC" && true == @.public)'; + count +------- + 2 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? '$.age ? (@ == 25)'; + count +------- + 2 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? '$ ? (@.age == 25.0)'; + count +------- + 2 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? '$ ? (@.array[*] == "bar")'; + count +------- + 3 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? '$.array ? (@[*] == "bar")'; + count +------- + 3 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? '$.array[*] ? (@ == "bar")'; + count +------- + 3 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? '$'; + count +------- + 1012 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? '$.public'; + count +------- + 194 +(1 row) + +SELECT count(*) FROM testjsonb WHERE j @? 
'$.bar'; + count +------- + 0 +(1 row) + RESET enable_seqscan; DROP INDEX jidx; -- nested tests diff --git a/src/test/regress/expected/jsonb_jsonpath.out b/src/test/regress/expected/jsonb_jsonpath.out new file mode 100644 index 0000000000..fdc80eedad --- /dev/null +++ b/src/test/regress/expected/jsonb_jsonpath.out @@ -0,0 +1,2030 @@ +select jsonb '{"a": 12}' @? '$.a.b'; + ?column? +---------- + f +(1 row) + +select jsonb '{"a": 12}' @? '$.b'; + ?column? +---------- + f +(1 row) + +select jsonb '{"a": {"a": 12}}' @? '$.a.a'; + ?column? +---------- + t +(1 row) + +select jsonb '{"a": {"a": 12}}' @? '$.*.a'; + ?column? +---------- + t +(1 row) + +select jsonb '{"b": {"a": 12}}' @? '$.*.a'; + ?column? +---------- + t +(1 row) + +select jsonb '{}' @? '$.*'; + ?column? +---------- + f +(1 row) + +select jsonb '{"a": 1}' @? '$.*'; + ?column? +---------- + t +(1 row) + +select jsonb '{"a": {"b": 1}}' @? 'lax $.**{1}'; + ?column? +---------- + t +(1 row) + +select jsonb '{"a": {"b": 1}}' @? 'lax $.**{2}'; + ?column? +---------- + t +(1 row) + +select jsonb '{"a": {"b": 1}}' @? 'lax $.**{3}'; + ?column? +---------- + f +(1 row) + +select jsonb '[]' @? '$.[*]'; + ?column? +---------- + f +(1 row) + +select jsonb '[1]' @? '$.[*]'; + ?column? +---------- + t +(1 row) + +select jsonb '[1]' @? '$.[1]'; + ?column? +---------- + f +(1 row) + +select jsonb '[1]' @? 'strict $.[1]'; +ERROR: Invalid SQL/JSON subscript +select jsonb '[1]' @? '$.[0]'; + ?column? +---------- + t +(1 row) + +select jsonb '[1]' @? '$.[0.3]'; + ?column? +---------- + t +(1 row) + +select jsonb '[1]' @? '$.[0.5]'; + ?column? +---------- + t +(1 row) + +select jsonb '[1]' @? '$.[0.9]'; + ?column? +---------- + t +(1 row) + +select jsonb '[1]' @? '$.[1.2]'; + ?column? +---------- + f +(1 row) + +select jsonb '[1]' @? 'strict $.[1.2]'; +ERROR: Invalid SQL/JSON subscript +select jsonb '{}' @? 'strict $.[0.3]'; +ERROR: Invalid SQL/JSON subscript +select jsonb '{}' @? 'lax $.[0.3]'; + ?column? +---------- + t +(1 row) + +select jsonb '{}' @? 'strict $.[1.2]'; +ERROR: Invalid SQL/JSON subscript +select jsonb '{}' @? 'lax $.[1.2]'; + ?column? +---------- + f +(1 row) + +select jsonb '{}' @? 'strict $.[-2 to 3]'; +ERROR: Invalid SQL/JSON subscript +select jsonb '{}' @? 'lax $.[-2 to 3]'; + ?column? +---------- + t +(1 row) + +select jsonb '{"a": [1,2,3], "b": [3,4,5]}' @? '$ ? (@.a[*] > @.b[*])'; + ?column? +---------- + f +(1 row) + +select jsonb '{"a": [1,2,3], "b": [3,4,5]}' @? '$ ? (@.a[*] >= @.b[*])'; + ?column? +---------- + t +(1 row) + +select jsonb '{"a": [1,2,3], "b": [3,4,"5"]}' @? '$ ? (@.a[*] >= @.b[*])'; + ?column? +---------- + t +(1 row) + +select jsonb '{"a": [1,2,3], "b": [3,4,"5"]}' @? 'strict $ ? (@.a[*] >= @.b[*])'; + ?column? +---------- + f +(1 row) + +select jsonb '{"a": [1,2,3], "b": [3,4,null]}' @? '$ ? (@.a[*] >= @.b[*])'; + ?column? +---------- + t +(1 row) + +select jsonb '1' @? '$ ? ((@ == "1") is unknown)'; + ?column? +---------- + t +(1 row) + +select jsonb '1' @? '$ ? ((@ == 1) is unknown)'; + ?column? +---------- + f +(1 row) + +select jsonb '[{"a": 1}, {"a": 2}]' @? '$[0 to 1] ? (@.a > 1)'; + ?column? +---------- + t +(1 row) + +select jsonb '{"a": 12, "b": {"a": 13}}' @* '$.a'; + ?column? +---------- + 12 +(1 row) + +select jsonb '{"a": 12, "b": {"a": 13}}' @* '$.b'; + ?column? +----------- + {"a": 13} +(1 row) + +select jsonb '{"a": 12, "b": {"a": 13}}' @* '$.*'; + ?column? +----------- + 12 + {"a": 13} +(2 rows) + +select jsonb '{"a": 12, "b": {"a": 13}}' @* 'lax $.*.a'; + ?column? 
+---------- + 13 +(1 row) + +select jsonb '[12, {"a": 13}, {"b": 14}]' @* 'lax $.[*].a'; + ?column? +---------- + 13 +(1 row) + +select jsonb '[12, {"a": 13}, {"b": 14}]' @* 'lax $.[*].*'; + ?column? +---------- + 13 + 14 +(2 rows) + +select jsonb '[12, {"a": 13}, {"b": 14}]' @* 'lax $.[0].a'; + ?column? +---------- +(0 rows) + +select jsonb '[12, {"a": 13}, {"b": 14}]' @* 'lax $.[1].a'; + ?column? +---------- + 13 +(1 row) + +select jsonb '[12, {"a": 13}, {"b": 14}]' @* 'lax $.[2].a'; + ?column? +---------- +(0 rows) + +select jsonb '[12, {"a": 13}, {"b": 14}]' @* 'lax $.[0,1].a'; + ?column? +---------- + 13 +(1 row) + +select jsonb '[12, {"a": 13}, {"b": 14}]' @* 'lax $.[0 to 10].a'; + ?column? +---------- + 13 +(1 row) + +select jsonb '[12, {"a": 13}, {"b": 14}, "ccc", true]' @* '$.[2.5 - 1 to @.size() - 2]'; + ?column? +----------- + {"a": 13} + {"b": 14} + "ccc" +(3 rows) + +select jsonb '1' @* 'lax $[0]'; + ?column? +---------- + 1 +(1 row) + +select jsonb '1' @* 'lax $[*]'; + ?column? +---------- + 1 +(1 row) + +select jsonb '{}' @* 'lax $[0]'; + ?column? +---------- + {} +(1 row) + +select jsonb '[1]' @* 'lax $[0]'; + ?column? +---------- + 1 +(1 row) + +select jsonb '[1]' @* 'lax $[*]'; + ?column? +---------- + 1 +(1 row) + +select jsonb '[1,2,3]' @* 'lax $[*]'; + ?column? +---------- + 1 + 2 + 3 +(3 rows) + +select jsonb '[]' @* '$[last]'; + ?column? +---------- +(0 rows) + +select jsonb '[]' @* 'strict $[last]'; +ERROR: Invalid SQL/JSON subscript +select jsonb '[1]' @* '$[last]'; + ?column? +---------- + 1 +(1 row) + +select jsonb '{}' @* 'lax $[last]'; + ?column? +---------- + {} +(1 row) + +select jsonb '[1,2,3]' @* '$[last]'; + ?column? +---------- + 3 +(1 row) + +select jsonb '[1,2,3]' @* '$[last - 1]'; + ?column? +---------- + 2 +(1 row) + +select jsonb '[1,2,3]' @* '$[last ? (@.type() == "number")]'; + ?column? +---------- + 3 +(1 row) + +select jsonb '[1,2,3]' @* '$[last ? (@.type() == "string")]'; +ERROR: Invalid SQL/JSON subscript +select * from jsonpath_query(jsonb '{"a": 10}', '$'); + jsonpath_query +---------------- + {"a": 10} +(1 row) + +select * from jsonpath_query(jsonb '{"a": 10}', '$ ? (.a < $value)'); +ERROR: could not find 'value' passed variable +select * from jsonpath_query(jsonb '{"a": 10}', '$ ? (.a < $value)', '{"value" : 13}'); + jsonpath_query +---------------- + {"a": 10} +(1 row) + +select * from jsonpath_query(jsonb '{"a": 10}', '$ ? (.a < $value)', '{"value" : 8}'); + jsonpath_query +---------------- +(0 rows) + +select * from jsonpath_query(jsonb '{"a": 10}', '$.a ? (@ < $value)', '{"value" : 13}'); + jsonpath_query +---------------- + 10 +(1 row) + +select * from jsonpath_query(jsonb '[10,11,12,13,14,15]', '$.[*] ? (@ < $value)', '{"value" : 13}'); + jsonpath_query +---------------- + 10 + 11 + 12 +(3 rows) + +select * from jsonpath_query(jsonb '[10,11,12,13,14,15]', '$.[0,1] ? (@ < $value)', '{"value" : 13}'); + jsonpath_query +---------------- + 10 + 11 +(2 rows) + +select * from jsonpath_query(jsonb '[10,11,12,13,14,15]', '$.[0 to 2] ? (@ < $value)', '{"value" : 15}'); + jsonpath_query +---------------- + 10 + 11 + 12 +(3 rows) + +select * from jsonpath_query(jsonb '[1,"1",2,"2",null]', '$.[*] ? (@ == "1")'); + jsonpath_query +---------------- + "1" +(1 row) + +select * from jsonpath_query(jsonb '[1,"1",2,"2",null]', '$.[*] ? (@ == $value)', '{"value" : "1"}'); + jsonpath_query +---------------- + "1" +(1 row) + +select * from jsonpath_query(jsonb '[1, "2", null]', '$[*] ? 
(@ != null)'); + jsonpath_query +---------------- + 1 + "2" +(2 rows) + +select * from jsonpath_query(jsonb '[1, "2", null]', '$[*] ? (@ == null)'); + jsonpath_query +---------------- + null +(1 row) + +select jsonb '{"a": {"b": 1}}' @* 'lax $.**'; + ?column? +----------------- + {"a": {"b": 1}} + {"b": 1} + 1 +(3 rows) + +select jsonb '{"a": {"b": 1}}' @* 'lax $.**{1}'; + ?column? +---------- + {"b": 1} +(1 row) + +select jsonb '{"a": {"b": 1}}' @* 'lax $.**{1,}'; + ?column? +---------- + {"b": 1} + 1 +(2 rows) + +select jsonb '{"a": {"b": 1}}' @* 'lax $.**{2}'; + ?column? +---------- + 1 +(1 row) + +select jsonb '{"a": {"b": 1}}' @* 'lax $.**{2,}'; + ?column? +---------- + 1 +(1 row) + +select jsonb '{"a": {"b": 1}}' @* 'lax $.**{3,}'; + ?column? +---------- +(0 rows) + +select jsonb '{"a": {"b": 1}}' @* 'lax $.**.b ? (@ > 0)'; + ?column? +---------- + 1 +(1 row) + +select jsonb '{"a": {"b": 1}}' @* 'lax $.**{0}.b ? (@ > 0)'; + ?column? +---------- +(0 rows) + +select jsonb '{"a": {"b": 1}}' @* 'lax $.**{1}.b ? (@ > 0)'; + ?column? +---------- + 1 +(1 row) + +select jsonb '{"a": {"b": 1}}' @* 'lax $.**{0,}.b ? (@ > 0)'; + ?column? +---------- + 1 +(1 row) + +select jsonb '{"a": {"b": 1}}' @* 'lax $.**{1,}.b ? (@ > 0)'; + ?column? +---------- + 1 +(1 row) + +select jsonb '{"a": {"b": 1}}' @* 'lax $.**{1,2}.b ? (@ > 0)'; + ?column? +---------- + 1 +(1 row) + +select jsonb '{"a": {"c": {"b": 1}}}' @* 'lax $.**.b ? (@ > 0)'; + ?column? +---------- + 1 +(1 row) + +select jsonb '{"a": {"c": {"b": 1}}}' @* 'lax $.**{0}.b ? (@ > 0)'; + ?column? +---------- +(0 rows) + +select jsonb '{"a": {"c": {"b": 1}}}' @* 'lax $.**{1}.b ? (@ > 0)'; + ?column? +---------- +(0 rows) + +select jsonb '{"a": {"c": {"b": 1}}}' @* 'lax $.**{0,}.b ? (@ > 0)'; + ?column? +---------- + 1 +(1 row) + +select jsonb '{"a": {"c": {"b": 1}}}' @* 'lax $.**{1,}.b ? (@ > 0)'; + ?column? +---------- + 1 +(1 row) + +select jsonb '{"a": {"c": {"b": 1}}}' @* 'lax $.**{1,2}.b ? (@ > 0)'; + ?column? +---------- + 1 +(1 row) + +select jsonb '{"a": {"c": {"b": 1}}}' @* 'lax $.**{2,3}.b ? (@ > 0)'; + ?column? +---------- + 1 +(1 row) + +select jsonb '{"a": {"b": 1}}' @? '$.**.b ? ( @ > 0)'; + ?column? +---------- + t +(1 row) + +select jsonb '{"a": {"b": 1}}' @? '$.**{0}.b ? ( @ > 0)'; + ?column? +---------- + f +(1 row) + +select jsonb '{"a": {"b": 1}}' @? '$.**{1}.b ? ( @ > 0)'; + ?column? +---------- + t +(1 row) + +select jsonb '{"a": {"b": 1}}' @? '$.**{0,}.b ? ( @ > 0)'; + ?column? +---------- + t +(1 row) + +select jsonb '{"a": {"b": 1}}' @? '$.**{1,}.b ? ( @ > 0)'; + ?column? +---------- + t +(1 row) + +select jsonb '{"a": {"b": 1}}' @? '$.**{1,2}.b ? ( @ > 0)'; + ?column? +---------- + t +(1 row) + +select jsonb '{"a": {"c": {"b": 1}}}' @? '$.**.b ? ( @ > 0)'; + ?column? +---------- + t +(1 row) + +select jsonb '{"a": {"c": {"b": 1}}}' @? '$.**{0}.b ? ( @ > 0)'; + ?column? +---------- + f +(1 row) + +select jsonb '{"a": {"c": {"b": 1}}}' @? '$.**{1}.b ? ( @ > 0)'; + ?column? +---------- + f +(1 row) + +select jsonb '{"a": {"c": {"b": 1}}}' @? '$.**{0,}.b ? ( @ > 0)'; + ?column? +---------- + t +(1 row) + +select jsonb '{"a": {"c": {"b": 1}}}' @? '$.**{1,}.b ? ( @ > 0)'; + ?column? +---------- + t +(1 row) + +select jsonb '{"a": {"c": {"b": 1}}}' @? '$.**{1,2}.b ? ( @ > 0)'; + ?column? +---------- + t +(1 row) + +select jsonb '{"a": {"c": {"b": 1}}}' @? '$.**{2,3}.b ? ( @ > 0)'; + ?column? +---------- + t +(1 row) + +select jsonb '{"g": {"x": 2}}' @* '$.g ? (exists (@.x))'; + ?column? 
+---------- + {"x": 2} +(1 row) + +select jsonb '{"g": {"x": 2}}' @* '$.g ? (exists (@.y))'; + ?column? +---------- +(0 rows) + +select jsonb '{"g": {"x": 2}}' @* '$.g ? (exists (@.x ? (@ >= 2) ))'; + ?column? +---------- + {"x": 2} +(1 row) + +--test ternary logic +select + x, y, + jsonpath_query( + jsonb '[true, false, null]', + '$[*] ? (@ == true && ($x == true && $y == true) || + @ == false && !($x == true && $y == true) || + @ == null && ($x == true && $y == true) is unknown)', + jsonb_build_object('x', x, 'y', y) + ) as "x && y" +from + (values (jsonb 'true'), ('false'), ('"null"')) x(x), + (values (jsonb 'true'), ('false'), ('"null"')) y(y); + x | y | x && y +--------+--------+-------- + true | true | true + true | false | false + true | "null" | null + false | true | false + false | false | false + false | "null" | false + "null" | true | null + "null" | false | false + "null" | "null" | null +(9 rows) + +select + x, y, + jsonpath_query( + jsonb '[true, false, null]', + '$[*] ? (@ == true && ($x == true || $y == true) || + @ == false && !($x == true || $y == true) || + @ == null && ($x == true || $y == true) is unknown)', + jsonb_build_object('x', x, 'y', y) + ) as "x || y" +from + (values (jsonb 'true'), ('false'), ('"null"')) x(x), + (values (jsonb 'true'), ('false'), ('"null"')) y(y); + x | y | x || y +--------+--------+-------- + true | true | true + true | false | true + true | "null" | true + false | true | true + false | false | false + false | "null" | null + "null" | true | true + "null" | false | null + "null" | "null" | null +(9 rows) + +select jsonb '{"a": 1, "b":1}' @? '$ ? (.a == .b)'; + ?column? +---------- + t +(1 row) + +select jsonb '{"c": {"a": 1, "b":1}}' @? '$ ? (.a == .b)'; + ?column? +---------- + f +(1 row) + +select jsonb '{"c": {"a": 1, "b":1}}' @? '$.c ? (.a == .b)'; + ?column? +---------- + t +(1 row) + +select jsonb '{"c": {"a": 1, "b":1}}' @? '$.c ? ($.c.a == .b)'; + ?column? +---------- + t +(1 row) + +select jsonb '{"c": {"a": 1, "b":1}}' @? '$.* ? (.a == .b)'; + ?column? +---------- + t +(1 row) + +select jsonb '{"a": 1, "b":1}' @? '$.** ? (.a == .b)'; + ?column? +---------- + t +(1 row) + +select jsonb '{"c": {"a": 1, "b":1}}' @? '$.** ? (.a == .b)'; + ?column? +---------- + t +(1 row) + +select jsonb '{"c": {"a": 2, "b":1}}' @* '$.** ? (.a == 1 + 1)'; + ?column? +------------------ + {"a": 2, "b": 1} +(1 row) + +select jsonb '{"c": {"a": 2, "b":1}}' @* '$.** ? (.a == (1 + 1))'; + ?column? +------------------ + {"a": 2, "b": 1} +(1 row) + +select jsonb '{"c": {"a": 2, "b":1}}' @* '$.** ? (.a == .b + 1)'; + ?column? +------------------ + {"a": 2, "b": 1} +(1 row) + +select jsonb '{"c": {"a": 2, "b":1}}' @* '$.** ? (.a == (.b + 1))'; + ?column? +------------------ + {"a": 2, "b": 1} +(1 row) + +select jsonb '{"c": {"a": -1, "b":1}}' @? '$.** ? (.a == - 1)'; + ?column? +---------- + t +(1 row) + +select jsonb '{"c": {"a": -1, "b":1}}' @? '$.** ? (.a == -1)'; + ?column? +---------- + t +(1 row) + +select jsonb '{"c": {"a": -1, "b":1}}' @? '$.** ? (.a == -.b)'; + ?column? +---------- + t +(1 row) + +select jsonb '{"c": {"a": -1, "b":1}}' @? '$.** ? (.a == - .b)'; + ?column? +---------- + t +(1 row) + +select jsonb '{"c": {"a": 0, "b":1}}' @? '$.** ? (.a == 1 - .b)'; + ?column? +---------- + t +(1 row) + +select jsonb '{"c": {"a": 2, "b":1}}' @? '$.** ? (.a == 1 - - .b)'; + ?column? +---------- + t +(1 row) + +select jsonb '{"c": {"a": 0, "b":1}}' @? '$.** ? (.a == 1 - +.b)'; + ?column? +---------- + t +(1 row) + +select jsonb '[1,2,3]' @? '$ ? 
(+@[*] > +2)'; + ?column? +---------- + t +(1 row) + +select jsonb '[1,2,3]' @? '$ ? (+@[*] > +3)'; + ?column? +---------- + f +(1 row) + +select jsonb '[1,2,3]' @? '$ ? (-@[*] < -2)'; + ?column? +---------- + t +(1 row) + +select jsonb '[1,2,3]' @? '$ ? (-@[*] < -3)'; + ?column? +---------- + f +(1 row) + +select jsonb '1' @? '$ ? ($ > 0)'; + ?column? +---------- + t +(1 row) + +-- unwrapping of operator arguments in lax mode +select jsonb '{"a": [2]}' @* 'lax $.a * 3'; + ?column? +---------- + 6 +(1 row) + +select jsonb '{"a": [2]}' @* 'lax $.a + 3'; + ?column? +---------- + 5 +(1 row) + +select jsonb '{"a": [2, 3, 4]}' @* 'lax -$.a'; + ?column? +---------- + -2 + -3 + -4 +(3 rows) + +-- should fail +select jsonb '{"a": [1, 2]}' @* 'lax $.a * 3'; +ERROR: Singleton SQL/JSON item required +-- extension: boolean expressions +select jsonb '2' @* '$ > 1'; + ?column? +---------- + true +(1 row) + +select jsonb '2' @* '$ <= 1'; + ?column? +---------- + false +(1 row) + +select jsonb '2' @* '$ == "2"'; + ?column? +---------- + null +(1 row) + +select jsonb '2' @~ '$ > 1'; + ?column? +---------- + t +(1 row) + +select jsonb '2' @~ '$ <= 1'; + ?column? +---------- + f +(1 row) + +select jsonb '2' @~ '$ == "2"'; + ?column? +---------- + +(1 row) + +select jsonb '2' @~ '1'; + ?column? +---------- + +(1 row) + +select jsonb '{}' @~ '$'; + ?column? +---------- + +(1 row) + +select jsonb '[]' @~ '$'; + ?column? +---------- + +(1 row) + +select jsonb '[1,2,3]' @~ '$[*]'; +ERROR: Singleton SQL/JSON item required +select jsonb '[]' @~ '$[*]'; +ERROR: Singleton SQL/JSON item required +select jsonpath_predicate(jsonb '[[1, true], [2, false]]', 'strict $[*] ? (@[0] > $x) [1]', '{"x": 1}'); + jsonpath_predicate +-------------------- + f +(1 row) + +select jsonpath_predicate(jsonb '[[1, true], [2, false]]', 'strict $[*] ? (@[0] < $x) [1]', '{"x": 2}'); + jsonpath_predicate +-------------------- + t +(1 row) + +select jsonb '[null,1,true,"a",[],{}]' @* '$.type()'; + ?column? +---------- + "array" +(1 row) + +select jsonb '[null,1,true,"a",[],{}]' @* 'lax $.type()'; + ?column? +---------- + "array" +(1 row) + +select jsonb '[null,1,true,"a",[],{}]' @* '$[*].type()'; + ?column? +----------- + "null" + "number" + "boolean" + "string" + "array" + "object" +(6 rows) + +select jsonb 'null' @* 'null.type()'; + ?column? +---------- + "null" +(1 row) + +select jsonb 'null' @* 'true.type()'; + ?column? +----------- + "boolean" +(1 row) + +select jsonb 'null' @* '123.type()'; + ?column? +---------- + "number" +(1 row) + +select jsonb 'null' @* '"123".type()'; + ?column? +---------- + "string" +(1 row) + +select jsonb 'null' @* 'aaa.type()'; + ?column? +---------- + "string" +(1 row) + +select jsonb '{"a": 2}' @* '($.a - 5).abs() + 10'; + ?column? +---------- + 13 +(1 row) + +select jsonb '{"a": 2.5}' @* '-($.a * $.a).floor() + 10'; + ?column? +---------- + 4 +(1 row) + +select jsonb '[1, 2, 3]' @* '($[*] > 2) ? (@ == true)'; + ?column? +---------- + true +(1 row) + +select jsonb '[1, 2, 3]' @* '($[*] > 3).type()'; + ?column? +----------- + "boolean" +(1 row) + +select jsonb '[1, 2, 3]' @* '($[*].a > 3).type()'; + ?column? +----------- + "boolean" +(1 row) + +select jsonb '[1, 2, 3]' @* 'strict ($[*].a > 3).type()'; + ?column? +---------- + "null" +(1 row) + +select jsonb '[1,null,true,"11",[],[1],[1,2,3],{},{"a":1,"b":2}]' @* 'strict $[*].size()'; +ERROR: SQL/JSON array not found +select jsonb '[1,null,true,"11",[],[1],[1,2,3],{},{"a":1,"b":2}]' @* 'lax $[*].size()'; + ?column? 
+---------- + 1 + 1 + 1 + 1 + 0 + 1 + 3 + 1 + 1 +(9 rows) + +select jsonb '[0, 1, -2, -3.4, 5.6]' @* '$[*].abs()'; + ?column? +---------- + 0 + 1 + 2 + 3.4 + 5.6 +(5 rows) + +select jsonb '[0, 1, -2, -3.4, 5.6]' @* '$[*].floor()'; + ?column? +---------- + 0 + 1 + -2 + -4 + 5 +(5 rows) + +select jsonb '[0, 1, -2, -3.4, 5.6]' @* '$[*].ceiling()'; + ?column? +---------- + 0 + 1 + -2 + -3 + 6 +(5 rows) + +select jsonb '[0, 1, -2, -3.4, 5.6]' @* '$[*].ceiling().abs()'; + ?column? +---------- + 0 + 1 + 2 + 3 + 6 +(5 rows) + +select jsonb '[0, 1, -2, -3.4, 5.6]' @* '$[*].ceiling().abs().type()'; + ?column? +---------- + "number" + "number" + "number" + "number" + "number" +(5 rows) + +select jsonb '[{},1]' @* '$[*].keyvalue()'; +ERROR: SQL/JSON object not found +select jsonb '{}' @* '$.keyvalue()'; + ?column? +---------- +(0 rows) + +select jsonb '{"a": 1, "b": [1, 2], "c": {"a": "bbb"}}' @* '$.keyvalue()'; + ?column? +------------------------------------- + {"key": "a", "value": 1} + {"key": "b", "value": [1, 2]} + {"key": "c", "value": {"a": "bbb"}} +(3 rows) + +select jsonb '[{"a": 1, "b": [1, 2]}, {"c": {"a": "bbb"}}]' @* '$[*].keyvalue()'; + ?column? +------------------------------------- + {"key": "a", "value": 1} + {"key": "b", "value": [1, 2]} + {"key": "c", "value": {"a": "bbb"}} +(3 rows) + +select jsonb '[{"a": 1, "b": [1, 2]}, {"c": {"a": "bbb"}}]' @* 'strict $.keyvalue()'; +ERROR: SQL/JSON object not found +select jsonb '[{"a": 1, "b": [1, 2]}, {"c": {"a": "bbb"}}]' @* 'lax $.keyvalue()'; + ?column? +------------------------------------- + {"key": "a", "value": 1} + {"key": "b", "value": [1, 2]} + {"key": "c", "value": {"a": "bbb"}} +(3 rows) + +select jsonb 'null' @* '$.double()'; +ERROR: Non-numeric SQL/JSON item +select jsonb 'true' @* '$.double()'; +ERROR: Non-numeric SQL/JSON item +select jsonb '[]' @* '$.double()'; + ?column? +---------- +(0 rows) + +select jsonb '[]' @* 'strict $.double()'; +ERROR: Non-numeric SQL/JSON item +select jsonb '{}' @* '$.double()'; +ERROR: Non-numeric SQL/JSON item +select jsonb '1.23' @* '$.double()'; + ?column? +---------- + 1.23 +(1 row) + +select jsonb '"1.23"' @* '$.double()'; + ?column? +---------- + 1.23 +(1 row) + +select jsonb '"1.23aaa"' @* '$.double()'; +ERROR: Non-numeric SQL/JSON item +select jsonb '["", "a", "abc", "abcabc"]' @* '$[*] ? (@ starts with "abc")'; + ?column? +---------- + "abc" + "abcabc" +(2 rows) + +select jsonb '["", "a", "abc", "abcabc"]' @* 'strict $ ? (@[*] starts with "abc")'; + ?column? +---------------------------- + ["", "a", "abc", "abcabc"] +(1 row) + +select jsonb '["", "a", "abd", "abdabc"]' @* 'strict $ ? (@[*] starts with "abc")'; + ?column? +---------- +(0 rows) + +select jsonb '["abc", "abcabc", null, 1]' @* 'strict $ ? (@[*] starts with "abc")'; + ?column? +---------- +(0 rows) + +select jsonb '["abc", "abcabc", null, 1]' @* 'strict $ ? ((@[*] starts with "abc") is unknown)'; + ?column? +---------------------------- + ["abc", "abcabc", null, 1] +(1 row) + +select jsonb '[[null, 1, "abc", "abcabc"]]' @* 'lax $ ? (@[*] starts with "abc")'; + ?column? +---------------------------- + [null, 1, "abc", "abcabc"] +(1 row) + +select jsonb '[[null, 1, "abd", "abdabc"]]' @* 'lax $ ? ((@[*] starts with "abc") is unknown)'; + ?column? +---------------------------- + [null, 1, "abd", "abdabc"] +(1 row) + +select jsonb '[null, 1, "abd", "abdabc"]' @* 'lax $[*] ? ((@ starts with "abc") is unknown)'; + ?column? 
+---------- + null + 1 +(2 rows) + +select jsonb '[null, 1, "abc", "abd", "aBdC", "abdacb", "babc"]' @* 'lax $[*] ? (@ like_regex "^ab.*c")'; + ?column? +---------- + "abc" + "abdacb" +(2 rows) + +select jsonb '[null, 1, "abc", "abd", "aBdC", "abdacb", "babc"]' @* 'lax $[*] ? (@ like_regex "^ab.*c" flag "i")'; + ?column? +---------- + "abc" + "aBdC" + "abdacb" +(3 rows) + +select jsonb 'null' @* '$.datetime()'; +ERROR: Invalid argument for SQL/JSON datetime function +select jsonb 'true' @* '$.datetime()'; +ERROR: Invalid argument for SQL/JSON datetime function +select jsonb '[]' @* '$.datetime()'; + ?column? +---------- +(0 rows) + +select jsonb '[]' @* 'strict $.datetime()'; +ERROR: Invalid argument for SQL/JSON datetime function +select jsonb '{}' @* '$.datetime()'; +ERROR: Invalid argument for SQL/JSON datetime function +select jsonb '""' @* '$.datetime()'; +ERROR: Invalid argument for SQL/JSON datetime function +-- Standard extension: UNIX epoch to timestamptz +select jsonb '0' @* '$.datetime()'; + ?column? +-------------------------------- + "Wed Dec 31 16:00:00 1969 PST" +(1 row) + +select jsonb '0' @* '$.datetime().type()'; + ?column? +---------------------------- + "timestamp with time zone" +(1 row) + +select jsonb '1490216035.5' @* '$.datetime()'; + ?column? +---------------------------------- + "Wed Mar 22 13:53:55.5 2017 PDT" +(1 row) + +select jsonb '"10-03-2017"' @* '$.datetime("dd-mm-yyyy")'; + ?column? +-------------- + "03-10-2017" +(1 row) + +select jsonb '"10-03-2017"' @* '$.datetime("dd-mm-yyyy").type()'; + ?column? +---------- + "date" +(1 row) + +select jsonb '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy")'; + ?column? +-------------- + "03-10-2017" +(1 row) + +select jsonb '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy").type()'; + ?column? +---------- + "date" +(1 row) + +select jsonb '"10-03-2017 12:34"' @* ' $.datetime("dd-mm-yyyy HH24:MI").type()'; + ?column? +------------------------------- + "timestamp without time zone" +(1 row) + +select jsonb '"10-03-2017 12:34 +05:20"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH:TZM").type()'; + ?column? +---------------------------- + "timestamp with time zone" +(1 row) + +select jsonb '"12:34:56"' @* '$.datetime("HH24:MI:SS").type()'; + ?column? +-------------------------- + "time without time zone" +(1 row) + +select jsonb '"12:34:56 +05:20"' @* '$.datetime("HH24:MI:SS TZH:TZM").type()'; + ?column? +----------------------- + "time with time zone" +(1 row) + +set time zone '+00'; +select jsonb '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy HH24:MI")'; + ?column? +---------------------------- + "Fri Mar 10 12:34:00 2017" +(1 row) + +select jsonb '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH")'; + ?column? +-------------------------------- + "Fri Mar 10 12:34:00 2017 +00" +(1 row) + +select jsonb '"10-03-2017 12:34 +05"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH")'; + ?column? +-------------------------------- + "Fri Mar 10 07:34:00 2017 +00" +(1 row) + +select jsonb '"10-03-2017 12:34 -05"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH")'; + ?column? +-------------------------------- + "Fri Mar 10 17:34:00 2017 +00" +(1 row) + +select jsonb '"10-03-2017 12:34 +05:20"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH:TZM")'; + ?column? +-------------------------------- + "Fri Mar 10 07:14:00 2017 +00" +(1 row) + +select jsonb '"10-03-2017 12:34 -05:20"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH:TZM")'; + ?column? 
+-------------------------------- + "Fri Mar 10 17:54:00 2017 +00" +(1 row) + +select jsonb '"12:34"' @* '$.datetime("HH24:MI")'; + ?column? +------------ + "12:34:00" +(1 row) + +select jsonb '"12:34"' @* '$.datetime("HH24:MI TZH")'; + ?column? +--------------- + "12:34:00+00" +(1 row) + +select jsonb '"12:34 +05"' @* '$.datetime("HH24:MI TZH")'; + ?column? +--------------- + "12:34:00+05" +(1 row) + +select jsonb '"12:34 -05"' @* '$.datetime("HH24:MI TZH")'; + ?column? +--------------- + "12:34:00-05" +(1 row) + +select jsonb '"12:34 +05:20"' @* '$.datetime("HH24:MI TZH:TZM")'; + ?column? +------------------ + "12:34:00+05:20" +(1 row) + +select jsonb '"12:34 -05:20"' @* '$.datetime("HH24:MI TZH:TZM")'; + ?column? +------------------ + "12:34:00-05:20" +(1 row) + +set time zone '+10'; +select jsonb '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy HH24:MI")'; + ?column? +---------------------------- + "Fri Mar 10 12:34:00 2017" +(1 row) + +select jsonb '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH")'; + ?column? +-------------------------------- + "Fri Mar 10 12:34:00 2017 +10" +(1 row) + +select jsonb '"10-03-2017 12:34 +05"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH")'; + ?column? +-------------------------------- + "Fri Mar 10 17:34:00 2017 +10" +(1 row) + +select jsonb '"10-03-2017 12:34 -05"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH")'; + ?column? +-------------------------------- + "Sat Mar 11 03:34:00 2017 +10" +(1 row) + +select jsonb '"10-03-2017 12:34 +05:20"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH:TZM")'; + ?column? +-------------------------------- + "Fri Mar 10 17:14:00 2017 +10" +(1 row) + +select jsonb '"10-03-2017 12:34 -05:20"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH:TZM")'; + ?column? +-------------------------------- + "Sat Mar 11 03:54:00 2017 +10" +(1 row) + +select jsonb '"12:34"' @* '$.datetime("HH24:MI")'; + ?column? +------------ + "12:34:00" +(1 row) + +select jsonb '"12:34"' @* '$.datetime("HH24:MI TZH")'; + ?column? +--------------- + "12:34:00+10" +(1 row) + +select jsonb '"12:34 +05"' @* '$.datetime("HH24:MI TZH")'; + ?column? +--------------- + "12:34:00+05" +(1 row) + +select jsonb '"12:34 -05"' @* '$.datetime("HH24:MI TZH")'; + ?column? +--------------- + "12:34:00-05" +(1 row) + +select jsonb '"12:34 +05:20"' @* '$.datetime("HH24:MI TZH:TZM")'; + ?column? +------------------ + "12:34:00+05:20" +(1 row) + +select jsonb '"12:34 -05:20"' @* '$.datetime("HH24:MI TZH:TZM")'; + ?column? +------------------ + "12:34:00-05:20" +(1 row) + +set time zone default; +select jsonb '"2017-03-10"' @* '$.datetime().type()'; + ?column? +---------- + "date" +(1 row) + +select jsonb '"2017-03-10"' @* '$.datetime()'; + ?column? +-------------- + "03-10-2017" +(1 row) + +select jsonb '"2017-03-10 12:34:56"' @* '$.datetime().type()'; + ?column? +------------------------------- + "timestamp without time zone" +(1 row) + +select jsonb '"2017-03-10 12:34:56"' @* '$.datetime()'; + ?column? +---------------------------- + "Fri Mar 10 12:34:56 2017" +(1 row) + +select jsonb '"2017-03-10 12:34:56 +3"' @* '$.datetime().type()'; + ?column? +---------------------------- + "timestamp with time zone" +(1 row) + +select jsonb '"2017-03-10 12:34:56 +3"' @* '$.datetime()'; + ?column? +-------------------------------- + "Fri Mar 10 01:34:56 2017 PST" +(1 row) + +select jsonb '"2017-03-10 12:34:56 +3:10"' @* '$.datetime().type()'; + ?column? +---------------------------- + "timestamp with time zone" +(1 row) + +select jsonb '"2017-03-10 12:34:56 +3:10"' @* '$.datetime()'; + ?column? 
+-------------------------------- + "Fri Mar 10 01:24:56 2017 PST" +(1 row) + +select jsonb '"12:34:56"' @* '$.datetime().type()'; + ?column? +-------------------------- + "time without time zone" +(1 row) + +select jsonb '"12:34:56"' @* '$.datetime()'; + ?column? +------------ + "12:34:56" +(1 row) + +select jsonb '"12:34:56 +3"' @* '$.datetime().type()'; + ?column? +----------------------- + "time with time zone" +(1 row) + +select jsonb '"12:34:56 +3"' @* '$.datetime()'; + ?column? +--------------- + "12:34:56+03" +(1 row) + +select jsonb '"12:34:56 +3:10"' @* '$.datetime().type()'; + ?column? +----------------------- + "time with time zone" +(1 row) + +select jsonb '"12:34:56 +3:10"' @* '$.datetime()'; + ?column? +------------------ + "12:34:56+03:10" +(1 row) + +set time zone '+00'; +-- date comparison +select jsonb + '["2017-03-10", "2017-03-11", "2017-03-09", "12:34:56", "01:02:03 +04", "2017-03-10 00:00:00", "2017-03-10 12:34:56", "2017-03-10 01:02:03 +04", "2017-03-10 03:00:00 +03"]' @* + '$[*].datetime() ? (@ == "10.03.2017".datetime("dd.mm.yyyy"))'; + ?column? +-------------------------------- + "03-10-2017" + "Fri Mar 10 00:00:00 2017" + "Fri Mar 10 00:00:00 2017 +00" +(3 rows) + +select jsonb + '["2017-03-10", "2017-03-11", "2017-03-09", "12:34:56", "01:02:03 +04", "2017-03-10 00:00:00", "2017-03-10 12:34:56", "2017-03-10 01:02:03 +04", "2017-03-10 03:00:00 +03"]' @* + '$[*].datetime() ? (@ >= "10.03.2017".datetime("dd.mm.yyyy"))'; + ?column? +-------------------------------- + "03-10-2017" + "03-11-2017" + "Fri Mar 10 00:00:00 2017" + "Fri Mar 10 12:34:56 2017" + "Fri Mar 10 00:00:00 2017 +00" +(5 rows) + +select jsonb + '["2017-03-10", "2017-03-11", "2017-03-09", "12:34:56", "01:02:03 +04", "2017-03-10 00:00:00", "2017-03-10 12:34:56", "2017-03-10 01:02:03 +04", "2017-03-10 03:00:00 +03"]' @* + '$[*].datetime() ? (@ < "10.03.2017".datetime("dd.mm.yyyy"))'; + ?column? +-------------------------------- + "03-09-2017" + "Thu Mar 09 21:02:03 2017 +00" +(2 rows) + +-- time comparison +select jsonb + '["12:34:00", "12:35:00", "12:36:00", "12:35:00 +00", "12:35:00 +01", "13:35:00 +01", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00 +01"]' @* + '$[*].datetime() ? (@ == "12:35".datetime("HH24:MI"))'; + ?column? +--------------- + "12:35:00" + "12:35:00+00" +(2 rows) + +select jsonb + '["12:34:00", "12:35:00", "12:36:00", "12:35:00 +00", "12:35:00 +01", "13:35:00 +01", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00 +01"]' @* + '$[*].datetime() ? (@ >= "12:35".datetime("HH24:MI"))'; + ?column? +--------------- + "12:35:00" + "12:36:00" + "12:35:00+00" +(3 rows) + +select jsonb + '["12:34:00", "12:35:00", "12:36:00", "12:35:00 +00", "12:35:00 +01", "13:35:00 +01", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00 +01"]' @* + '$[*].datetime() ? (@ < "12:35".datetime("HH24:MI"))'; + ?column? +--------------- + "12:34:00" + "12:35:00+01" + "13:35:00+01" +(3 rows) + +-- timetz comparison +select jsonb + '["12:34:00 +01", "12:35:00 +01", "12:36:00 +01", "12:35:00 +02", "12:35:00 -02", "10:35:00", "11:35:00", "12:35:00", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00 +1"]' @* + '$[*].datetime() ? (@ == "12:35 +1".datetime("HH24:MI TZH"))'; + ?column? +--------------- + "12:35:00+01" +(1 row) + +select jsonb + '["12:34:00 +01", "12:35:00 +01", "12:36:00 +01", "12:35:00 +02", "12:35:00 -02", "10:35:00", "11:35:00", "12:35:00", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00 +1"]' @* + '$[*].datetime() ? 
(@ >= "12:35 +1".datetime("HH24:MI TZH"))'; + ?column? +--------------- + "12:35:00+01" + "12:36:00+01" + "12:35:00-02" + "11:35:00" + "12:35:00" +(5 rows) + +select jsonb + '["12:34:00 +01", "12:35:00 +01", "12:36:00 +01", "12:35:00 +02", "12:35:00 -02", "10:35:00", "11:35:00", "12:35:00", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00 +1"]' @* + '$[*].datetime() ? (@ < "12:35 +1".datetime("HH24:MI TZH"))'; + ?column? +--------------- + "12:34:00+01" + "12:35:00+02" + "10:35:00" +(3 rows) + +-- timestamp comparison +select jsonb + '["2017-03-10 12:34:00", "2017-03-10 12:35:00", "2017-03-10 12:36:00", "2017-03-10 12:35:00 +01", "2017-03-10 13:35:00 +01", "2017-03-10 12:35:00 -01", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56 +01"]' @* + '$[*].datetime() ? (@ == "10.03.2017 12:35".datetime("dd.mm.yyyy HH24:MI"))'; + ?column? +-------------------------------- + "Fri Mar 10 12:35:00 2017" + "Fri Mar 10 12:35:00 2017 +00" +(2 rows) + +select jsonb + '["2017-03-10 12:34:00", "2017-03-10 12:35:00", "2017-03-10 12:36:00", "2017-03-10 12:35:00 +01", "2017-03-10 13:35:00 +01", "2017-03-10 12:35:00 -01", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56 +01"]' @* + '$[*].datetime() ? (@ >= "10.03.2017 12:35".datetime("dd.mm.yyyy HH24:MI"))'; + ?column? +-------------------------------- + "Fri Mar 10 12:35:00 2017" + "Fri Mar 10 12:36:00 2017" + "Fri Mar 10 12:35:00 2017 +00" + "Fri Mar 10 13:35:00 2017 +00" + "03-11-2017" +(5 rows) + +select jsonb + '["2017-03-10 12:34:00", "2017-03-10 12:35:00", "2017-03-10 12:36:00", "2017-03-10 12:35:00 +01", "2017-03-10 13:35:00 +01", "2017-03-10 12:35:00 -01", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56 +01"]' @* + '$[*].datetime() ? (@ < "10.03.2017 12:35".datetime("dd.mm.yyyy HH24:MI"))'; + ?column? +-------------------------------- + "Fri Mar 10 12:34:00 2017" + "Fri Mar 10 11:35:00 2017 +00" + "03-10-2017" +(3 rows) + +-- timestamptz comparison +select jsonb + '["2017-03-10 12:34:00 +01", "2017-03-10 12:35:00 +01", "2017-03-10 12:36:00 +01", "2017-03-10 12:35:00 +02", "2017-03-10 12:35:00 -02", "2017-03-10 10:35:00", "2017-03-10 11:35:00", "2017-03-10 12:35:00", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56 +01"]' @* + '$[*].datetime() ? (@ == "10.03.2017 12:35 +1".datetime("dd.mm.yyyy HH24:MI TZH"))'; + ?column? +-------------------------------- + "Fri Mar 10 11:35:00 2017 +00" + "Fri Mar 10 11:35:00 2017" +(2 rows) + +select jsonb + '["2017-03-10 12:34:00 +01", "2017-03-10 12:35:00 +01", "2017-03-10 12:36:00 +01", "2017-03-10 12:35:00 +02", "2017-03-10 12:35:00 -02", "2017-03-10 10:35:00", "2017-03-10 11:35:00", "2017-03-10 12:35:00", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56 +01"]' @* + '$[*].datetime() ? (@ >= "10.03.2017 12:35 +1".datetime("dd.mm.yyyy HH24:MI TZH"))'; + ?column? +-------------------------------- + "Fri Mar 10 11:35:00 2017 +00" + "Fri Mar 10 11:36:00 2017 +00" + "Fri Mar 10 14:35:00 2017 +00" + "Fri Mar 10 11:35:00 2017" + "Fri Mar 10 12:35:00 2017" + "03-11-2017" +(6 rows) + +select jsonb + '["2017-03-10 12:34:00 +01", "2017-03-10 12:35:00 +01", "2017-03-10 12:36:00 +01", "2017-03-10 12:35:00 +02", "2017-03-10 12:35:00 -02", "2017-03-10 10:35:00", "2017-03-10 11:35:00", "2017-03-10 12:35:00", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56 +01"]' @* + '$[*].datetime() ? (@ < "10.03.2017 12:35 +1".datetime("dd.mm.yyyy HH24:MI TZH"))'; + ?column? 
+-------------------------------- + "Fri Mar 10 11:34:00 2017 +00" + "Fri Mar 10 10:35:00 2017 +00" + "Fri Mar 10 10:35:00 2017" + "03-10-2017" +(4 rows) + +set time zone default; +-- jsonpath operators +SELECT jsonb '[{"a": 1}, {"a": 2}]' @* '$[*]'; + ?column? +---------- + {"a": 1} + {"a": 2} +(2 rows) + +SELECT jsonb '[{"a": 1}, {"a": 2}]' @* '$[*] ? (@.a > 10)'; + ?column? +---------- +(0 rows) + +SELECT jsonb '[{"a": 1}, {"a": 2}]' @* '[$[*].a]'; + ?column? +---------- + [1, 2] +(1 row) + +SELECT jsonb '[{"a": 1}, {"a": 2}]' @? '$[*].a ? (@ > 1)'; + ?column? +---------- + t +(1 row) + +SELECT jsonb '[{"a": 1}, {"a": 2}]' @? '$[*] ? (@.a > 2)'; + ?column? +---------- + f +(1 row) + +SELECT jsonb '[{"a": 1}, {"a": 2}]' @~ '$[*].a > 1'; + ?column? +---------- + t +(1 row) + +SELECT jsonb '[{"a": 1}, {"a": 2}]' @~ '$[*].a > 2'; + ?column? +---------- + f +(1 row) + +-- extension: map item method +select jsonb '1' @* 'strict $.map(@ + 10)'; +ERROR: SQL/JSON array not found +select jsonb '1' @* 'lax $.map(@ + 10)'; + ?column? +---------- + 11 +(1 row) + +select jsonb '[1, 2, 3]' @* '$.map(@ + 10)'; + ?column? +-------------- + [11, 12, 13] +(1 row) + +select jsonb '[[1, 2], [3, 4, 5], [], [6, 7]]' @* '$.map(@.map(@ + 10))'; + ?column? +---------------------------------------- + [[11, 12], [13, 14, 15], [], [16, 17]] +(1 row) + +-- extension: reduce/fold item methods +select jsonb '1' @* 'strict $.reduce($1 + $2)'; +ERROR: SQL/JSON array not found +select jsonb '1' @* 'lax $.reduce($1 + $2)'; + ?column? +---------- + 1 +(1 row) + +select jsonb '1' @* 'strict $.fold($1 + $2, 10)'; +ERROR: SQL/JSON array not found +select jsonb '1' @* 'lax $.fold($1 + $2, 10)'; + ?column? +---------- + 11 +(1 row) + +select jsonb '[1, 2, 3]' @* '$.reduce($1 + $2)'; + ?column? +---------- + 6 +(1 row) + +select jsonb '[1, 2, 3]' @* '$.fold($1 + $2, 100)'; + ?column? +---------- + 106 +(1 row) + +select jsonb '[]' @* '$.reduce($1 + $2)'; + ?column? +---------- +(0 rows) + +select jsonb '[]' @* '$.fold($1 + $2, 100)'; + ?column? +---------- + 100 +(1 row) + +select jsonb '[1]' @* '$.reduce($1 + $2)'; + ?column? +---------- + 1 +(1 row) + +select jsonb '[1, 2, 3]' @* '$.foldl([$1, $2], [])'; + ?column? +------------------- + [[[[], 1], 2], 3] +(1 row) + +select jsonb '[1, 2, 3]' @* '$.foldr([$2, $1], [])'; + ?column? +------------------- + [[[[], 3], 2], 1] +(1 row) + +select jsonb '[[1, 2], [3, 4, 5], [], [6, 7]]' @* '$.fold($1 + $2.fold($1 + $2, 100), 1000)'; + ?column? +---------- + 1428 +(1 row) + +-- extension: min/max item methods +select jsonb '1' @* 'strict $.min()'; +ERROR: SQL/JSON array not found +select jsonb '1' @* 'lax $.min()'; + ?column? +---------- + 1 +(1 row) + +select jsonb '[]' @* '$.min()'; + ?column? +---------- +(0 rows) + +select jsonb '[]' @* '$.max()'; + ?column? +---------- +(0 rows) + +select jsonb '[null]' @* '$.min()'; + ?column? +---------- + null +(1 row) + +select jsonb '[null]' @* '$.max()'; + ?column? +---------- + null +(1 row) + +select jsonb '[1, 2, 3]' @* '$.min()'; + ?column? +---------- + 1 +(1 row) + +select jsonb '[1, 2, 3]' @* '$.max()'; + ?column? +---------- + 3 +(1 row) + +select jsonb '[2, 3, 5, null, 1, 4, null]' @* '$.min()'; + ?column? +---------- + 1 +(1 row) + +select jsonb '[2, 3, 5, null, 1, 4, null]' @* '$.max()'; + ?column? +---------- + 5 +(1 row) + +select jsonb '["aa", null, "a", "bbb"]' @* '$.min()'; + ?column? +---------- + "a" +(1 row) + +select jsonb '["aa", null, "a", "bbb"]' @* '$.max()'; + ?column? 
+---------- + "bbb" +(1 row) + +select jsonb '[1, null, "2"]' @* '$.max()'; +ERROR: SQL/JSON scalar required +-- extension: path sequences +select jsonb '[1,2,3,4,5]' @* '10, 20, $[*], 30'; + ?column? +---------- + 10 + 20 + 1 + 2 + 3 + 4 + 5 + 30 +(8 rows) + +select jsonb '[1,2,3,4,5]' @* 'lax 10, 20, $[*].a, 30'; + ?column? +---------- + 10 + 20 + 30 +(3 rows) + +select jsonb '[1,2,3,4,5]' @* 'strict 10, 20, $[*].a, 30'; +ERROR: SQL/JSON member not found +select jsonb '[1,2,3,4,5]' @* '-(10, 20, $[1 to 3], 30)'; + ?column? +---------- + -10 + -20 + -2 + -3 + -4 + -30 +(6 rows) + +select jsonb '[1,2,3,4,5]' @* 'lax (10, 20, $[1 to 3], 30).map(@ + 100)'; + ?column? +---------- + 110 + 120 + 102 + 103 + 104 + 130 +(6 rows) + +select jsonb '[1,2,3,4,5]' @* '$[(0, $[*], 5) ? (@ == 3)]'; + ?column? +---------- + 4 +(1 row) + +select jsonb '[1,2,3,4,5]' @* '$[(0, $[*], 3) ? (@ == 3)]'; +ERROR: Invalid SQL/JSON subscript +-- extension: array constructors +select jsonb '[1, 2, 3]' @* '[]'; + ?column? +---------- + [] +(1 row) + +select jsonb '[1, 2, 3]' @* '[1, 2, $.map(@ + 100)[*], 4, 5]'; + ?column? +----------------------------- + [1, 2, 101, 102, 103, 4, 5] +(1 row) + +select jsonb '[1, 2, 3]' @* '[1, 2, $.map(@ + 100)[*], 4, 5][*]'; + ?column? +---------- + 1 + 2 + 101 + 102 + 103 + 4 + 5 +(7 rows) + +select jsonb '[1, 2, 3]' @* '[(1, (2, $.map(@ + 100)[*])), (4, 5)]'; + ?column? +----------------------------- + [1, 2, 101, 102, 103, 4, 5] +(1 row) + +select jsonb '[1, 2, 3]' @* '[[1, 2], [$.map(@ + 100)[*], 4], 5, [(1,2)?(@ > 5)]]'; + ?column? +------------------------------------- + [[1, 2], [101, 102, 103, 4], 5, []] +(1 row) + +select jsonb '[1, 2, 3]' @* 'strict [1, 2, $.map(@.a)[*], 4, 5]'; +ERROR: SQL/JSON member not found +select jsonb '[[1, 2], [3, 4, 5], [], [6, 7]]' @* '[$[*].map(@ + 10)[*] ? (@ > 13)]'; + ?column? +------------------ + [14, 15, 16, 17] +(1 row) + +-- extension: object constructors +select jsonb '[1, 2, 3]' @* '{}'; + ?column? +---------- + {} +(1 row) + +select jsonb '[1, 2, 3]' @* '{a: 2 + 3, "b": [$[*], 4, 5]}'; + ?column? +-------------------------------- + {"a": 5, "b": [1, 2, 3, 4, 5]} +(1 row) + +select jsonb '[1, 2, 3]' @* '{a: 2 + 3, "b": [$[*], 4, 5]}.*'; + ?column? +----------------- + 5 + [1, 2, 3, 4, 5] +(2 rows) + +select jsonb '[1, 2, 3]' @* '{a: 2 + 3, "b": ($[*], 4, 5)}'; +ERROR: Singleton SQL/JSON item required +select jsonb '[1, 2, 3]' @* '{a: 2 + 3, "b": [$.map({x: @, y: @ < 3})[*], {z: "foo"}]}'; + ?column? +----------------------------------------------------------------------------------------------- + {"a": 5, "b": [{"x": 1, "y": true}, {"x": 2, "y": true}, {"x": 3, "y": false}, {"z": "foo"}]} +(1 row) + +-- extension: object subscripting +select jsonb '{"a": 1}' @? '$["a"]'; + ?column? +---------- + t +(1 row) + +select jsonb '{"a": 1}' @? '$["b"]'; + ?column? +---------- + f +(1 row) + +select jsonb '{"a": 1}' @? 'strict $["b"]'; +ERROR: SQL/JSON member not found +select jsonb '{"a": 1}' @? '$["b", "a"]'; + ?column? +---------- + t +(1 row) + +select jsonb '{"a": 1}' @* '$["a"]'; + ?column? +---------- + 1 +(1 row) + +select jsonb '{"a": 1}' @* 'strict $["b"]'; +ERROR: SQL/JSON member not found +select jsonb '{"a": 1}' @* 'lax $["b"]'; + ?column? +---------- +(0 rows) + +select jsonb '{"a": 1, "b": 2}' @* 'lax $["b", "c", "b", "a", 0 to 3]'; + ?column? +------------------ + 2 + 2 + 1 + {"a": 1, "b": 2} +(4 rows) + +select jsonb 'null' @* '{"a": 1}["a"]'; + ?column? 
+---------- + 1 +(1 row) + +select jsonb 'null' @* '{"a": 1}["b"]'; + ?column? +---------- +(0 rows) + diff --git a/src/test/regress/expected/jsonb_sqljson.out b/src/test/regress/expected/jsonb_sqljson.out new file mode 100644 index 0000000000..e792a7609b --- /dev/null +++ b/src/test/regress/expected/jsonb_sqljson.out @@ -0,0 +1,1799 @@ +-- JSON_EXISTS +SELECT JSON_EXISTS(NULL::jsonb, '$'); + ?column? +---------- + +(1 row) + +SELECT JSON_EXISTS(jsonb '[]', '$'); + ?column? +---------- + t +(1 row) + +SELECT JSON_EXISTS(JSON_OBJECT(RETURNING jsonb), '$'); + ?column? +---------- + t +(1 row) + +SELECT JSON_EXISTS(jsonb '1', '$'); + ?column? +---------- + t +(1 row) + +SELECT JSON_EXISTS(jsonb 'null', '$'); + ?column? +---------- + t +(1 row) + +SELECT JSON_EXISTS(jsonb '[]', '$'); + ?column? +---------- + t +(1 row) + +SELECT JSON_EXISTS(jsonb '1', '$.a'); + ?column? +---------- + f +(1 row) + +SELECT JSON_EXISTS(jsonb '1', 'strict $.a'); + ?column? +---------- + f +(1 row) + +SELECT JSON_EXISTS(jsonb '1', 'strict $.a' ERROR ON ERROR); +ERROR: SQL/JSON member not found +SELECT JSON_EXISTS(jsonb 'null', '$.a'); + ?column? +---------- + f +(1 row) + +SELECT JSON_EXISTS(jsonb '[]', '$.a'); + ?column? +---------- + f +(1 row) + +SELECT JSON_EXISTS(jsonb '[1, "aaa", {"a": 1}]', 'strict $.a'); + ?column? +---------- + f +(1 row) + +SELECT JSON_EXISTS(jsonb '[1, "aaa", {"a": 1}]', 'lax $.a'); + ?column? +---------- + t +(1 row) + +SELECT JSON_EXISTS(jsonb '{}', '$.a'); + ?column? +---------- + f +(1 row) + +SELECT JSON_EXISTS(jsonb '{"b": 1, "a": 2}', '$.a'); + ?column? +---------- + t +(1 row) + +SELECT JSON_EXISTS(jsonb '1', '$.a.b'); + ?column? +---------- + f +(1 row) + +SELECT JSON_EXISTS(jsonb '{"a": {"b": 1}}', '$.a.b'); + ?column? +---------- + t +(1 row) + +SELECT JSON_EXISTS(jsonb '{"a": 1, "b": 2}', '$.a.b'); + ?column? +---------- + f +(1 row) + +SELECT JSON_EXISTS(jsonb '{"a": 1, "b": 2}', '$.* ? (@ > $x)' PASSING 1 AS x); + ?column? +---------- + t +(1 row) + +SELECT JSON_EXISTS(jsonb '{"a": 1, "b": 2}', '$.* ? (@ > $x)' PASSING '1' AS x); + ?column? +---------- + f +(1 row) + +SELECT JSON_EXISTS(jsonb '{"a": 1, "b": 2}', '$.* ? (@ > $x && @ < $y)' PASSING 0 AS x, 2 AS y); + ?column? +---------- + t +(1 row) + +SELECT JSON_EXISTS(jsonb '{"a": 1, "b": 2}', '$.* ? (@ > $x && @ < $y)' PASSING 0 AS x, 1 AS y); + ?column? +---------- + f +(1 row) + +-- extension: boolean expressions +SELECT JSON_EXISTS(jsonb '1', '$ > 2'); + ?column? +---------- + t +(1 row) + +SELECT JSON_EXISTS(jsonb '1', '$.a > 2' ERROR ON ERROR); + ?column? +---------- + t +(1 row) + +-- JSON_VALUE +SELECT JSON_VALUE(NULL::jsonb, '$'); + ?column? +---------- + +(1 row) + +SELECT JSON_VALUE(jsonb 'null', '$'); + ?column? +---------- + +(1 row) + +SELECT JSON_VALUE(jsonb 'null', '$' RETURNING int); + ?column? +---------- + +(1 row) + +SELECT JSON_VALUE(jsonb 'true', '$'); + ?column? +---------- + true +(1 row) + +SELECT JSON_VALUE(jsonb 'true', '$' RETURNING bool); + ?column? +---------- + t +(1 row) + +SELECT JSON_VALUE(jsonb '123', '$'); + ?column? +---------- + 123 +(1 row) + +SELECT JSON_VALUE(jsonb '123', '$' RETURNING int) + 234; + ?column? +---------- + 357 +(1 row) + +SELECT JSON_VALUE(jsonb '123', '$' RETURNING text); + ?column? +---------- + 123 +(1 row) + +/* jsonb bytea ??? */ +SELECT JSON_VALUE(jsonb '123', '$' RETURNING bytea); + ?column? +---------- + \x313233 +(1 row) + +SELECT JSON_VALUE(jsonb '1.23', '$'); + ?column? 
+---------- + 1.23 +(1 row) + +SELECT JSON_VALUE(jsonb '1.23', '$' RETURNING int); + ?column? +---------- + 1 +(1 row) + +SELECT JSON_VALUE(jsonb '"1.23"', '$' RETURNING numeric); + ?column? +---------- + 1.23 +(1 row) + +SELECT JSON_VALUE(jsonb '"1.23"', '$' RETURNING int ERROR ON ERROR); +ERROR: invalid input syntax for integer: "1.23" +SELECT JSON_VALUE(jsonb '"aaa"', '$'); + ?column? +---------- + aaa +(1 row) + +SELECT JSON_VALUE(jsonb '"aaa"', '$' RETURNING text); + ?column? +---------- + aaa +(1 row) + +SELECT JSON_VALUE(jsonb '"aaa"', '$' RETURNING char(5)); + ?column? +---------- + aaa +(1 row) + +SELECT JSON_VALUE(jsonb '"aaa"', '$' RETURNING char(2)); + ?column? +---------- + aa +(1 row) + +SELECT JSON_VALUE(jsonb '"aaa"', '$' RETURNING json); + ?column? +---------- + "aaa" +(1 row) + +SELECT JSON_VALUE(jsonb '"aaa"', '$' RETURNING jsonb); + ?column? +---------- + "aaa" +(1 row) + +SELECT JSON_VALUE(jsonb '"aaa"', '$' RETURNING json ERROR ON ERROR); + ?column? +---------- + "aaa" +(1 row) + +SELECT JSON_VALUE(jsonb '"aaa"', '$' RETURNING jsonb ERROR ON ERROR); + ?column? +---------- + "aaa" +(1 row) + +SELECT JSON_VALUE(jsonb '"\"aaa\""', '$' RETURNING json); + ?column? +----------- + "\"aaa\"" +(1 row) + +SELECT JSON_VALUE(jsonb '"\"aaa\""', '$' RETURNING jsonb); + ?column? +----------- + "\"aaa\"" +(1 row) + +SELECT JSON_VALUE(jsonb '"aaa"', '$' RETURNING int); + ?column? +---------- + +(1 row) + +SELECT JSON_VALUE(jsonb '"aaa"', '$' RETURNING int ERROR ON ERROR); +ERROR: invalid input syntax for integer: "aaa" +SELECT JSON_VALUE(jsonb '"aaa"', '$' RETURNING int DEFAULT 111 ON ERROR); + ?column? +---------- + 111 +(1 row) + +SELECT JSON_VALUE(jsonb '"123"', '$' RETURNING int) + 234; + ?column? +---------- + 357 +(1 row) + +SELECT JSON_VALUE(jsonb '"2017-02-20"', '$' RETURNING date) + 9; + ?column? +------------ + 03-01-2017 +(1 row) + +-- Test NULL checks execution in domain types +CREATE DOMAIN sqljsonb_int_not_null AS int NOT NULL; +SELECT JSON_VALUE(jsonb '1', '$.a' RETURNING sqljsonb_int_not_null); +ERROR: domain sqljsonb_int_not_null does not allow null values +SELECT JSON_VALUE(jsonb '1', '$.a' RETURNING sqljsonb_int_not_null NULL ON ERROR); +ERROR: domain sqljsonb_int_not_null does not allow null values +SELECT JSON_VALUE(jsonb '1', '$.a' RETURNING sqljsonb_int_not_null DEFAULT NULL ON ERROR); +ERROR: domain sqljsonb_int_not_null does not allow null values +SELECT JSON_VALUE(jsonb '[]', '$'); + ?column? +---------- + +(1 row) + +SELECT JSON_VALUE(jsonb '[]', '$' ERROR ON ERROR); +ERROR: SQL/JSON scalar required +SELECT JSON_VALUE(jsonb '{}', '$'); + ?column? +---------- + +(1 row) + +SELECT JSON_VALUE(jsonb '{}', '$' ERROR ON ERROR); +ERROR: SQL/JSON scalar required +SELECT JSON_VALUE(jsonb '1', '$.a'); + ?column? +---------- + +(1 row) + +SELECT JSON_VALUE(jsonb '1', 'strict $.a' ERROR ON ERROR); +ERROR: SQL/JSON member not found +SELECT JSON_VALUE(jsonb '1', 'strict $.a' DEFAULT 'error' ON ERROR); + ?column? +---------- + error +(1 row) + +SELECT JSON_VALUE(jsonb '1', 'lax $.a' ERROR ON ERROR); + ?column? +---------- + +(1 row) + +SELECT JSON_VALUE(jsonb '1', 'lax $.a' ERROR ON EMPTY ERROR ON ERROR); +ERROR: no SQL/JSON item +SELECT JSON_VALUE(jsonb '1', 'strict $.a' DEFAULT 2 ON ERROR); + ?column? +---------- + 2 +(1 row) + +SELECT JSON_VALUE(jsonb '1', 'lax $.a' DEFAULT 2 ON ERROR); + ?column? +---------- + +(1 row) + +SELECT JSON_VALUE(jsonb '1', 'lax $.a' DEFAULT '2' ON ERROR); + ?column? 
+---------- + +(1 row) + +SELECT JSON_VALUE(jsonb '1', 'lax $.a' NULL ON EMPTY DEFAULT '2' ON ERROR); + ?column? +---------- + +(1 row) + +SELECT JSON_VALUE(jsonb '1', 'lax $.a' DEFAULT '2' ON EMPTY DEFAULT '3' ON ERROR); + ?column? +---------- + 2 +(1 row) + +SELECT JSON_VALUE(jsonb '1', 'lax $.a' ERROR ON EMPTY DEFAULT '3' ON ERROR); + ?column? +---------- + 3 +(1 row) + +SELECT JSON_VALUE(jsonb '[1,2]', '$[*]' ERROR ON ERROR); +ERROR: more than one SQL/JSON item +SELECT JSON_VALUE(jsonb '[1,2]', '$[*]' DEFAULT '0' ON ERROR); + ?column? +---------- + 0 +(1 row) + +SELECT JSON_VALUE(jsonb '[" "]', '$[*]' RETURNING int ERROR ON ERROR); +ERROR: invalid input syntax for integer: " " +SELECT JSON_VALUE(jsonb '[" "]', '$[*]' RETURNING int DEFAULT 2 + 3 ON ERROR); + ?column? +---------- + 5 +(1 row) + +SELECT JSON_VALUE(jsonb '["1"]', '$[*]' RETURNING int DEFAULT 2 + 3 ON ERROR); + ?column? +---------- + 1 +(1 row) + +SELECT + x, + JSON_VALUE( + jsonb '{"a": 1, "b": 2}', + '$.* ? (@ > $x)' PASSING x AS x + RETURNING int + DEFAULT -1 ON EMPTY + DEFAULT -2 ON ERROR + ) y +FROM + generate_series(0, 2) x; + x | y +---+---- + 0 | -2 + 1 | 2 + 2 | -1 +(3 rows) + +SELECT JSON_VALUE(jsonb 'null', '$a' PASSING point ' (1, 2 )' AS a); + ?column? +---------- + (1,2) +(1 row) + +SELECT JSON_VALUE(jsonb 'null', '$a' PASSING point ' (1, 2 )' AS a RETURNING point); + ?column? +---------- + (1,2) +(1 row) + +-- JSON_QUERY +SELECT + JSON_QUERY(js, '$'), + JSON_QUERY(js, '$' WITHOUT WRAPPER), + JSON_QUERY(js, '$' WITH CONDITIONAL WRAPPER), + JSON_QUERY(js, '$' WITH UNCONDITIONAL ARRAY WRAPPER), + JSON_QUERY(js, '$' WITH ARRAY WRAPPER) +FROM + (VALUES + (jsonb 'null'), + ('12.3'), + ('true'), + ('"aaa"'), + ('[1, null, "2"]'), + ('{"a": 1, "b": [2]}') + ) foo(js); + ?column? | ?column? | ?column? | ?column? | ?column? +--------------------+--------------------+--------------------+----------------------+---------------------- + null | null | [null] | [null] | [null] + 12.3 | 12.3 | [12.3] | [12.3] | [12.3] + true | true | [true] | [true] | [true] + "aaa" | "aaa" | ["aaa"] | ["aaa"] | ["aaa"] + [1, null, "2"] | [1, null, "2"] | [1, null, "2"] | [[1, null, "2"]] | [[1, null, "2"]] + {"a": 1, "b": [2]} | {"a": 1, "b": [2]} | {"a": 1, "b": [2]} | [{"a": 1, "b": [2]}] | [{"a": 1, "b": [2]}] +(6 rows) + +SELECT + JSON_QUERY(js, 'strict $[*]') AS "unspec", + JSON_QUERY(js, 'strict $[*]' WITHOUT WRAPPER) AS "without", + JSON_QUERY(js, 'strict $[*]' WITH CONDITIONAL WRAPPER) AS "with cond", + JSON_QUERY(js, 'strict $[*]' WITH UNCONDITIONAL ARRAY WRAPPER) AS "with uncond", + JSON_QUERY(js, 'strict $[*]' WITH ARRAY WRAPPER) AS "with" +FROM + (VALUES + (jsonb '1'), + ('[]'), + ('[null]'), + ('[12.3]'), + ('[true]'), + ('["aaa"]'), + ('[[1, 2, 3]]'), + ('[{"a": 1, "b": [2]}]'), + ('[1, "2", null, [3]]') + ) foo(js); + unspec | without | with cond | with uncond | with +--------------------+--------------------+---------------------+----------------------+---------------------- + | | | | + | | | | + null | null | [null] | [null] | [null] + 12.3 | 12.3 | [12.3] | [12.3] | [12.3] + true | true | [true] | [true] | [true] + "aaa" | "aaa" | ["aaa"] | ["aaa"] | ["aaa"] + [1, 2, 3] | [1, 2, 3] | [1, 2, 3] | [[1, 2, 3]] | [[1, 2, 3]] + {"a": 1, "b": [2]} | {"a": 1, "b": [2]} | {"a": 1, "b": [2]} | [{"a": 1, "b": [2]}] | [{"a": 1, "b": [2]}] + | | [1, "2", null, [3]] | [1, "2", null, [3]] | [1, "2", null, [3]] +(9 rows) + +SELECT JSON_QUERY(jsonb '"aaa"', '$' RETURNING text); + ?column? 
+---------- + "aaa" +(1 row) + +SELECT JSON_QUERY(jsonb '"aaa"', '$' RETURNING text KEEP QUOTES); + ?column? +---------- + "aaa" +(1 row) + +SELECT JSON_QUERY(jsonb '"aaa"', '$' RETURNING text KEEP QUOTES ON SCALAR STRING); + ?column? +---------- + "aaa" +(1 row) + +SELECT JSON_QUERY(jsonb '"aaa"', '$' RETURNING text OMIT QUOTES); + ?column? +---------- + aaa +(1 row) + +SELECT JSON_QUERY(jsonb '"aaa"', '$' RETURNING text OMIT QUOTES ON SCALAR STRING); + ?column? +---------- + aaa +(1 row) + +SELECT JSON_QUERY(jsonb '"aaa"', '$' OMIT QUOTES ERROR ON ERROR); +ERROR: invalid input syntax for type json +DETAIL: Token "aaa" is invalid. +CONTEXT: JSON data, line 1: aaa +SELECT JSON_QUERY(jsonb '"aaa"', '$' RETURNING json OMIT QUOTES ERROR ON ERROR); +ERROR: invalid input syntax for type json +DETAIL: Token "aaa" is invalid. +CONTEXT: JSON data, line 1: aaa +SELECT JSON_QUERY(jsonb '"aaa"', '$' RETURNING bytea FORMAT JSON OMIT QUOTES ERROR ON ERROR); + ?column? +---------- + \x616161 +(1 row) + +-- QUOTES behavior should not be specified when WITH WRAPPER used: +-- Should fail +SELECT JSON_QUERY(jsonb '[1]', '$' WITH WRAPPER OMIT QUOTES); +ERROR: SQL/JSON QUOTES behavior shall not be specified when WITH WRAPPER is used +LINE 1: SELECT JSON_QUERY(jsonb '[1]', '$' WITH WRAPPER OMIT QUOTES)... + ^ +SELECT JSON_QUERY(jsonb '[1]', '$' WITH WRAPPER KEEP QUOTES); +ERROR: SQL/JSON QUOTES behavior shall not be specified when WITH WRAPPER is used +LINE 1: SELECT JSON_QUERY(jsonb '[1]', '$' WITH WRAPPER KEEP QUOTES)... + ^ +SELECT JSON_QUERY(jsonb '[1]', '$' WITH CONDITIONAL WRAPPER KEEP QUOTES); +ERROR: SQL/JSON QUOTES behavior shall not be specified when WITH WRAPPER is used +LINE 1: ...N_QUERY(jsonb '[1]', '$' WITH CONDITIONAL WRAPPER KEEP QUOTE... + ^ +SELECT JSON_QUERY(jsonb '[1]', '$' WITH CONDITIONAL WRAPPER OMIT QUOTES); +ERROR: SQL/JSON QUOTES behavior shall not be specified when WITH WRAPPER is used +LINE 1: ...N_QUERY(jsonb '[1]', '$' WITH CONDITIONAL WRAPPER OMIT QUOTE... + ^ +-- Should succeed +SELECT JSON_QUERY(jsonb '[1]', '$' WITHOUT WRAPPER OMIT QUOTES); + ?column? +---------- + [1] +(1 row) + +SELECT JSON_QUERY(jsonb '[1]', '$' WITHOUT WRAPPER KEEP QUOTES); + ?column? +---------- + [1] +(1 row) + +SELECT JSON_QUERY(jsonb '[]', '$[*]'); + ?column? +---------- + +(1 row) + +SELECT JSON_QUERY(jsonb '[]', '$[*]' NULL ON EMPTY); + ?column? +---------- + +(1 row) + +SELECT JSON_QUERY(jsonb '[]', '$[*]' EMPTY ARRAY ON EMPTY); + ?column? +---------- + [] +(1 row) + +SELECT JSON_QUERY(jsonb '[]', '$[*]' EMPTY OBJECT ON EMPTY); + ?column? +---------- + {} +(1 row) + +SELECT JSON_QUERY(jsonb '[]', '$[*]' ERROR ON EMPTY); + ?column? +---------- + +(1 row) + +SELECT JSON_QUERY(jsonb '[]', '$[*]' ERROR ON EMPTY NULL ON ERROR); + ?column? +---------- + +(1 row) + +SELECT JSON_QUERY(jsonb '[]', '$[*]' ERROR ON EMPTY EMPTY ARRAY ON ERROR); + ?column? +---------- + [] +(1 row) + +SELECT JSON_QUERY(jsonb '[]', '$[*]' ERROR ON EMPTY EMPTY OBJECT ON ERROR); + ?column? +---------- + {} +(1 row) + +SELECT JSON_QUERY(jsonb '[]', '$[*]' ERROR ON EMPTY ERROR ON ERROR); +ERROR: no SQL/JSON item +SELECT JSON_QUERY(jsonb '[]', '$[*]' ERROR ON ERROR); + ?column? +---------- + +(1 row) + +SELECT JSON_QUERY(jsonb '[1,2]', '$[*]' ERROR ON ERROR); +ERROR: more than one SQL/JSON item +SELECT JSON_QUERY(jsonb '[1,2]', '$' RETURNING json); + ?column? +---------- + [1, 2] +(1 row) + +SELECT JSON_QUERY(jsonb '[1,2]', '$' RETURNING json FORMAT JSON); + ?column? 
+---------- + [1, 2] +(1 row) + +SELECT JSON_QUERY(jsonb '[1,2]', '$' RETURNING jsonb); + ?column? +---------- + [1, 2] +(1 row) + +SELECT JSON_QUERY(jsonb '[1,2]', '$' RETURNING jsonb FORMAT JSON); + ?column? +---------- + [1, 2] +(1 row) + +SELECT JSON_QUERY(jsonb '[1,2]', '$' RETURNING text); + ?column? +---------- + [1, 2] +(1 row) + +SELECT JSON_QUERY(jsonb '[1,2]', '$' RETURNING char(10)); + ?column? +------------ + [1, 2] +(1 row) + +SELECT JSON_QUERY(jsonb '[1,2]', '$' RETURNING char(3)); + ?column? +---------- + [1, +(1 row) + +SELECT JSON_QUERY(jsonb '[1,2]', '$' RETURNING text FORMAT JSON); + ?column? +---------- + [1, 2] +(1 row) + +SELECT JSON_QUERY(jsonb '[1,2]', '$' RETURNING bytea); + ?column? +---------------- + \x5b312c20325d +(1 row) + +SELECT JSON_QUERY(jsonb '[1,2]', '$' RETURNING bytea FORMAT JSON); + ?column? +---------------- + \x5b312c20325d +(1 row) + +SELECT JSON_QUERY(jsonb '[1,2]', '$[*]' RETURNING bytea EMPTY OBJECT ON ERROR); + ?column? +---------- + \x7b7d +(1 row) + +SELECT JSON_QUERY(jsonb '[1,2]', '$[*]' RETURNING bytea FORMAT JSON EMPTY OBJECT ON ERROR); + ?column? +---------- + \x7b7d +(1 row) + +SELECT JSON_QUERY(jsonb '[1,2]', '$[*]' RETURNING json EMPTY OBJECT ON ERROR); + ?column? +---------- + {} +(1 row) + +SELECT JSON_QUERY(jsonb '[1,2]', '$[*]' RETURNING jsonb EMPTY OBJECT ON ERROR); + ?column? +---------- + {} +(1 row) + +SELECT + x, y, + JSON_QUERY( + jsonb '[1,2,3,4,5,null]', + '$[*] ? (@ >= $x && @ <= $y)' + PASSING x AS x, y AS y + WITH CONDITIONAL WRAPPER + EMPTY ARRAY ON EMPTY + ) list +FROM + generate_series(0, 4) x, + generate_series(0, 4) y; + x | y | list +---+---+-------------- + 0 | 0 | [] + 0 | 1 | [1] + 0 | 2 | [1, 2] + 0 | 3 | [1, 2, 3] + 0 | 4 | [1, 2, 3, 4] + 1 | 0 | [] + 1 | 1 | [1] + 1 | 2 | [1, 2] + 1 | 3 | [1, 2, 3] + 1 | 4 | [1, 2, 3, 4] + 2 | 0 | [] + 2 | 1 | [] + 2 | 2 | [2] + 2 | 3 | [2, 3] + 2 | 4 | [2, 3, 4] + 3 | 0 | [] + 3 | 1 | [] + 3 | 2 | [] + 3 | 3 | [3] + 3 | 4 | [3, 4] + 4 | 0 | [] + 4 | 1 | [] + 4 | 2 | [] + 4 | 3 | [] + 4 | 4 | [4] +(25 rows) + +-- Conversion to record types +CREATE TYPE sqljsonb_rec AS (a int, t text, js json, jb jsonb, jsa json[]); +CREATE TYPE sqljsonb_reca AS (reca sqljsonb_rec[]); +SELECT JSON_QUERY(jsonb '[{"a": 1, "b": "foo", "t": "aaa", "js": [1, "2", {}], "jb": {"x": [1, "2", {}]}}, {"a": 2}]', '$[0]' RETURNING sqljsonb_rec); + ?column? +----------------------------------------------------- + (1,aaa,"[1, ""2"", {}]","{""x"": [1, ""2"", {}]}",) +(1 row) + +SELECT * FROM unnest((JSON_QUERY(jsonb '{"jsa": [{"a": 1, "b": ["foo"]}, {"a": 2, "c": {}}, 123]}', '$' RETURNING sqljsonb_rec)).jsa); + unnest +------------------------ + {"a": 1, "b": ["foo"]} + {"a": 2, "c": {}} + 123 +(3 rows) + +SELECT * FROM unnest((JSON_QUERY(jsonb '{"reca": [{"a": 1, "t": ["foo", []]}, {"a": 2, "jb": [{}, true]}]}', '$' RETURNING sqljsonb_reca)).reca); + a | t | js | jb | jsa +---+-------------+----+------------+----- + 1 | ["foo", []] | | | + 2 | | | [{}, true] | +(2 rows) + +-- Conversion to array types +SELECT JSON_QUERY(jsonb '[1,2,null,"3"]', '$[*]' RETURNING int[] WITH WRAPPER); + ?column? +-------------- + {1,2,NULL,3} +(1 row) + +SELECT * FROM unnest(JSON_QUERY(jsonb '[{"a": 1, "t": ["foo", []]}, {"a": 2, "jb": [{}, true]}]', '$' RETURNING sqljsonb_rec[])); + a | t | js | jb | jsa +---+-------------+----+------------+----- + 1 | ["foo", []] | | | + 2 | | | [{}, true] | +(2 rows) + +-- Conversion to domain types +SELECT JSON_QUERY(jsonb '{"a": 1}', '$.a' RETURNING sqljsonb_int_not_null); + ?column? 
+---------- + 1 +(1 row) + +SELECT JSON_QUERY(jsonb '{"a": 1}', '$.b' RETURNING sqljsonb_int_not_null); +ERROR: domain sqljsonb_int_not_null does not allow null values +-- Test constraints +CREATE TABLE test_jsonb_constraints ( + js text, + i int, + x jsonb DEFAULT JSON_QUERY(jsonb '[1,2]', '$[*]' WITH WRAPPER) + CONSTRAINT test_jsonb_constraint1 + CHECK (js IS JSON) + CONSTRAINT test_jsonb_constraint2 + CHECK (JSON_EXISTS(js::jsonb, '$.a' PASSING i + 5 AS int, i::text AS txt, array[1,2,3] as arr)) + CONSTRAINT test_jsonb_constraint3 + CHECK (JSON_VALUE(js::jsonb, '$.a' RETURNING int DEFAULT ('12' || i)::int ON EMPTY ERROR ON ERROR) > i) + CONSTRAINT test_jsonb_constraint4 + CHECK (JSON_QUERY(js::jsonb, '$.a' WITH CONDITIONAL WRAPPER EMPTY OBJECT ON ERROR) < jsonb '[10]') + CONSTRAINT test_jsonb_constraint5 + CHECK (JSON_QUERY(js::jsonb, '$.a' RETURNING char(5) OMIT QUOTES EMPTY ARRAY ON EMPTY) > 'a') +); +\d test_jsonb_constraints + Table "public.test_jsonb_constraints" + Column | Type | Collation | Nullable | Default +--------+---------+-----------+----------+------------------------------------------------------------------------------------------------------------ + js | text | | | + i | integer | | | + x | jsonb | | | JSON_QUERY('[1, 2]'::jsonb, '$[*]' RETURNING jsonb WITH UNCONDITIONAL WRAPPER NULL ON EMPTY NULL ON ERROR) +Check constraints: + "test_jsonb_constraint1" CHECK (pg_catalog.json_is_valid(js, 'any'::text, false)) + "test_jsonb_constraint2" CHECK (JSON_EXISTS(js::jsonb, '$."a"' PASSING i + 5 AS int, i::text AS txt, to_jsonb(ARRAY[1, 2, 3]) AS arr FALSE ON ERROR)) + "test_jsonb_constraint3" CHECK ((JSON_VALUE(js::jsonb, '$."a"' RETURNING integer DEFAULT ('12'::text || i)::integer ON EMPTY ERROR ON ERROR)) > i) + "test_jsonb_constraint4" CHECK ((JSON_QUERY(js::jsonb, '$."a"' RETURNING jsonb WITH CONDITIONAL WRAPPER NULL ON EMPTY EMPTY OBJECT ON ERROR)) < '[10]'::jsonb) + "test_jsonb_constraint5" CHECK ((JSON_QUERY(js::jsonb, '$."a"' RETURNING character(5) OMIT QUOTES EMPTY ARRAY ON EMPTY NULL ON ERROR)) > 'a'::bpchar) + +SELECT check_clause +FROM information_schema.check_constraints +WHERE constraint_name LIKE 'test_jsonb_constraint%'; + check_clause +----------------------------------------------------------------------------------------------------------------------------------- + (pg_catalog.json_is_valid(js, 'any'::text, false)) + (JSON_EXISTS((js)::jsonb, '$."a"' PASSING (i + 5) AS int, (i)::text AS txt, to_jsonb(ARRAY[1, 2, 3]) AS arr FALSE ON ERROR)) + ((JSON_VALUE((js)::jsonb, '$."a"' RETURNING integer DEFAULT (('12'::text || i))::integer ON EMPTY ERROR ON ERROR) > i)) + ((JSON_QUERY((js)::jsonb, '$."a"' RETURNING jsonb WITH CONDITIONAL WRAPPER NULL ON EMPTY EMPTY OBJECT ON ERROR) < '[10]'::jsonb)) + ((JSON_QUERY((js)::jsonb, '$."a"' RETURNING character(5) OMIT QUOTES EMPTY ARRAY ON EMPTY NULL ON ERROR) > 'a'::bpchar)) +(5 rows) + +SELECT adsrc FROM pg_attrdef WHERE adrelid = 'test_jsonb_constraints'::regclass; + adsrc +------------------------------------------------------------------------------------------------------------ + JSON_QUERY('[1, 2]'::jsonb, '$[*]' RETURNING jsonb WITH UNCONDITIONAL WRAPPER NULL ON EMPTY NULL ON ERROR) +(1 row) + +INSERT INTO test_jsonb_constraints VALUES ('', 1); +ERROR: new row for relation "test_jsonb_constraints" violates check constraint "test_jsonb_constraint1" +DETAIL: Failing row contains (, 1, [1, 2]). 
+INSERT INTO test_jsonb_constraints VALUES ('1', 1); +ERROR: new row for relation "test_jsonb_constraints" violates check constraint "test_jsonb_constraint2" +DETAIL: Failing row contains (1, 1, [1, 2]). +INSERT INTO test_jsonb_constraints VALUES ('[]'); +ERROR: new row for relation "test_jsonb_constraints" violates check constraint "test_jsonb_constraint2" +DETAIL: Failing row contains ([], null, [1, 2]). +INSERT INTO test_jsonb_constraints VALUES ('{"b": 1}', 1); +ERROR: new row for relation "test_jsonb_constraints" violates check constraint "test_jsonb_constraint2" +DETAIL: Failing row contains ({"b": 1}, 1, [1, 2]). +INSERT INTO test_jsonb_constraints VALUES ('{"a": 1}', 1); +ERROR: new row for relation "test_jsonb_constraints" violates check constraint "test_jsonb_constraint3" +DETAIL: Failing row contains ({"a": 1}, 1, [1, 2]). +INSERT INTO test_jsonb_constraints VALUES ('{"a": 7}', 1); +ERROR: new row for relation "test_jsonb_constraints" violates check constraint "test_jsonb_constraint5" +DETAIL: Failing row contains ({"a": 7}, 1, [1, 2]). +INSERT INTO test_jsonb_constraints VALUES ('{"a": 10}', 1); +ERROR: new row for relation "test_jsonb_constraints" violates check constraint "test_jsonb_constraint4" +DETAIL: Failing row contains ({"a": 10}, 1, [1, 2]). +DROP TABLE test_jsonb_constraints; +-- JSON_TABLE +-- Should fail (JSON_TABLE can be used only in FROM clause) +SELECT JSON_TABLE('[]', '$'); +ERROR: syntax error at or near "(" +LINE 1: SELECT JSON_TABLE('[]', '$'); + ^ +-- Should fail (no columns) +SELECT * FROM JSON_TABLE(NULL, '$' COLUMNS ()); +ERROR: syntax error at or near ")" +LINE 1: SELECT * FROM JSON_TABLE(NULL, '$' COLUMNS ()); + ^ +-- NULL => empty table +SELECT * FROM JSON_TABLE(NULL::jsonb, '$' COLUMNS (foo int)) bar; + foo +----- +(0 rows) + +-- +SELECT * FROM JSON_TABLE(jsonb '123', '$' + COLUMNS (item int PATH '$', foo int)) bar; + item | foo +------+----- + 123 | +(1 row) + +-- JSON_TABLE: basic functionality +SELECT * +FROM + (VALUES + ('1'), + ('[]'), + ('{}'), + ('[1, 1.23, "2", "aaaaaaa", null, false, true, {"aaa": 123}, "[1,2]", "\"str\""]') + ) vals(js) + LEFT OUTER JOIN +-- JSON_TABLE is implicitly lateral + JSON_TABLE( + vals.js::jsonb, 'lax $[*]' + COLUMNS ( + id FOR ORDINALITY, + id2 FOR ORDINALITY, -- allowed additional ordinality columns + "int" int PATH '$', + "text" text PATH '$', + "char(4)" char(4) PATH '$', + "bool" bool PATH '$', + "numeric" numeric PATH '$', + js json PATH '$', + jb jsonb PATH '$', + jst text FORMAT JSON PATH '$', + jsc char(4) FORMAT JSON PATH '$', + jsv varchar(4) FORMAT JSON PATH '$', + jsb jsonb FORMAT JSON PATH '$', + aaa int, -- implicit path '$."aaa"', + aaa1 int PATH '$.aaa' + ) + ) jt + ON true; + js | id | id2 | int | text | char(4) | bool | numeric | js | jb | jst | jsc | jsv | jsb | aaa | aaa1 +--------------------------------------------------------------------------------+----+-----+-----+---------+---------+------+---------+-----------+-----------+--------------+------+------+--------------+-----+------ + 1 | 1 | 1 | 1 | 1 | 1 | t | 1 | 1 | 1 | 1 | 1 | 1 | 1 | | + [] | | | | | | | | | | | | | | | + {} | 1 | 1 | | | | | | | | {} | {} | {} | {} | | + [1, 1.23, "2", "aaaaaaa", null, false, true, {"aaa": 123}, "[1,2]", "\"str\""] | 1 | 1 | 1 | 1 | 1 | t | 1 | 1 | 1 | 1 | 1 | 1 | 1 | | + [1, 1.23, "2", "aaaaaaa", null, false, true, {"aaa": 123}, "[1,2]", "\"str\""] | 2 | 2 | 1 | 1.23 | 1.23 | | 1.23 | 1.23 | 1.23 | 1.23 | 1.23 | 1.23 | 1.23 | | + [1, 1.23, "2", "aaaaaaa", null, false, true, {"aaa": 123}, "[1,2]", 
"\"str\""] | 3 | 3 | 2 | 2 | 2 | | 2 | "2" | "2" | "2" | "2" | "2" | "2" | | + [1, 1.23, "2", "aaaaaaa", null, false, true, {"aaa": 123}, "[1,2]", "\"str\""] | 4 | 4 | | aaaaaaa | aaaa | | | "aaaaaaa" | "aaaaaaa" | "aaaaaaa" | "aaa | "aaa | "aaaaaaa" | | + [1, 1.23, "2", "aaaaaaa", null, false, true, {"aaa": 123}, "[1,2]", "\"str\""] | 5 | 5 | | | | | | | | null | null | null | null | | + [1, 1.23, "2", "aaaaaaa", null, false, true, {"aaa": 123}, "[1,2]", "\"str\""] | 6 | 6 | 0 | false | fals | f | | false | false | false | fals | fals | false | | + [1, 1.23, "2", "aaaaaaa", null, false, true, {"aaa": 123}, "[1,2]", "\"str\""] | 7 | 7 | 1 | true | true | t | | true | true | true | true | true | true | | + [1, 1.23, "2", "aaaaaaa", null, false, true, {"aaa": 123}, "[1,2]", "\"str\""] | 8 | 8 | | | | | | | | {"aaa": 123} | {"aa | {"aa | {"aaa": 123} | 123 | 123 + [1, 1.23, "2", "aaaaaaa", null, false, true, {"aaa": 123}, "[1,2]", "\"str\""] | 9 | 9 | | [1,2] | [1,2 | | | "[1,2]" | "[1,2]" | "[1,2]" | "[1, | "[1, | "[1,2]" | | + [1, 1.23, "2", "aaaaaaa", null, false, true, {"aaa": 123}, "[1,2]", "\"str\""] | 10 | 10 | | "str" | "str | | | "\"str\"" | "\"str\"" | "\"str\"" | "\"s | "\"s | "\"str\"" | | +(13 rows) + +-- JSON_TABLE: Test backward parsing +CREATE VIEW jsonb_table_view AS +SELECT * FROM + JSON_TABLE( + jsonb 'null', 'lax $[*]' PASSING 1 + 2 AS a, json '"foo"' AS "b c" + COLUMNS ( + id FOR ORDINALITY, + id2 FOR ORDINALITY, -- allowed additional ordinality columns + "int" int PATH '$', + "text" text PATH '$', + "char(4)" char(4) PATH '$', + "bool" bool PATH '$', + "numeric" numeric PATH '$', + js json PATH '$', + jb jsonb PATH '$', + jst text FORMAT JSON PATH '$', + jsc char(4) FORMAT JSON PATH '$', + jsv varchar(4) FORMAT JSON PATH '$', + jsb jsonb FORMAT JSON PATH '$', + aaa int, -- implicit path '$."aaa"', + aaa1 int PATH '$.aaa', + NESTED PATH '$[1]' AS p1 COLUMNS ( + a1 int, + NESTED PATH '$[*]' AS "p1 1" COLUMNS ( + a11 text + ), + b1 text + ), + NESTED PATH '$[2]' AS p2 COLUMNS ( + NESTED PATH '$[*]' AS "p2:1" COLUMNS ( + a21 text + ), + NESTED PATH '$[*]' AS p22 COLUMNS ( + a22 text + ) + ) + ) + ); +\sv jsonb_table_view +CREATE OR REPLACE VIEW public.jsonb_table_view AS + SELECT "json_table".id, + "json_table".id2, + "json_table"."int", + "json_table".text, + "json_table"."char(4)", + "json_table".bool, + "json_table"."numeric", + "json_table".js, + "json_table".jb, + "json_table".jst, + "json_table".jsc, + "json_table".jsv, + "json_table".jsb, + "json_table".aaa, + "json_table".aaa1, + "json_table".a1, + "json_table".b1, + "json_table".a11, + "json_table".a21, + "json_table".a22 + FROM JSON_TABLE( + 'null'::jsonb, '$[*]' AS json_table_path_1 + PASSING + 1 + 2 AS a, + '"foo"'::json::jsonb AS "b c" + COLUMNS ( + id FOR ORDINALITY, + id2 FOR ORDINALITY, + "int" integer PATH '$', + text text PATH '$', + "char(4)" character(4) PATH '$', + bool boolean PATH '$', + "numeric" numeric PATH '$', + js json PATH '$', + jb jsonb PATH '$', + jst text FORMAT JSON PATH '$', + jsc character(4) FORMAT JSON PATH '$', + jsv character varying(4) FORMAT JSON PATH '$', + jsb jsonb FORMAT JSON PATH '$', + aaa integer PATH '$."aaa"', + aaa1 integer PATH '$."aaa"', + NESTED PATH '$[1]' AS p1 + COLUMNS ( + a1 integer PATH '$."a1"', + b1 text PATH '$."b1"', + NESTED PATH '$[*]' AS "p1 1" + COLUMNS ( + a11 text PATH '$."a11"' + ) + ), + NESTED PATH '$[2]' AS p2 + COLUMNS ( + NESTED PATH '$[*]' AS "p2:1" + COLUMNS ( + a21 text PATH '$."a21"' + ), + NESTED PATH '$[*]' AS p22 + COLUMNS ( + a22 text PATH 
'$."a22"' + ) + ) + ) + PLAN (json_table_path_1 OUTER ((p1 OUTER "p1 1") UNION (p2 OUTER ("p2:1" UNION p22)))) + ) +EXPLAIN (COSTS OFF, VERBOSE) SELECT * FROM jsonb_table_view; + QUERY PLAN +---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + Table Function Scan on "json_table" + Output: "json_table".id, "json_table".id2, "json_table"."int", "json_table".text, "json_table"."char(4)", "json_table".bool, "json_table"."numeric", "json_table".js, "json_table".jb, "json_table".jst, "json_table".jsc, "json_table".jsv, "json_table".jsb, "json_table".aaa, "json_table".aaa1, "json_table".a1, "json_table".b1, "json_table".a11, "json_table".a21, "json_table".a22 + Table Function Call: JSON_TABLE('null'::jsonb, '$[*]' AS json_table_path_1 PASSING 3 AS a, '"foo"'::jsonb AS "b c" COLUMNS (id FOR ORDINALITY, id2 FOR ORDINALITY, "int" integer PATH '$', text text PATH '$', "char(4)" character(4) PATH '$', bool boolean PATH '$', "numeric" numeric PATH '$', js json PATH '$', jb jsonb PATH '$', jst text FORMAT JSON PATH '$', jsc character(4) FORMAT JSON PATH '$', jsv character varying(4) FORMAT JSON PATH '$', jsb jsonb FORMAT JSON PATH '$', aaa integer PATH '$."aaa"', aaa1 integer PATH '$."aaa"', NESTED PATH '$[1]' AS p1 COLUMNS (a1 integer PATH '$."a1"', b1 text PATH '$."b1"', NESTED PATH '$[*]' AS "p1 1" COLUMNS (a11 text PATH '$."a11"')), NESTED PATH '$[2]' AS p2 COLUMNS ( NESTED PATH '$[*]' AS "p2:1" COLUMNS (a21 text PATH '$."a21"'), NESTED PATH '$[*]' AS p22 COLUMNS (a22 text PATH '$."a22"'))) PLAN (json_table_path_1 OUTER ((p1 OUTER "p1 1") UNION (p2 OUTER ("p2:1" UNION p22))))) +(3 rows) + +-- JSON_TABLE: ON EMPTY/ON ERROR behavior +SELECT * +FROM + (VALUES ('1'), ('"err"')) vals(js), + JSON_TABLE(vals.js::jsonb, '$' COLUMNS (a int PATH '$')) jt; + js | a +-------+--- + 1 | 1 + "err" | +(2 rows) + +SELECT * +FROM + (VALUES ('1'), ('"err"')) vals(js) + LEFT OUTER JOIN + JSON_TABLE(vals.js::jsonb, '$' COLUMNS (a int PATH '$') ERROR ON ERROR) jt + ON true; +ERROR: invalid input syntax for integer: "err" +SELECT * +FROM + (VALUES ('1'), ('"err"')) vals(js) + LEFT OUTER JOIN + JSON_TABLE(vals.js::jsonb, '$' COLUMNS (a int PATH '$' ERROR ON ERROR)) jt + ON true; +ERROR: invalid input syntax for integer: "err" +SELECT * FROM JSON_TABLE(jsonb '1', '$' COLUMNS (a int PATH '$.a' ERROR ON EMPTY)) jt; + a +--- + +(1 row) + +SELECT * FROM JSON_TABLE(jsonb '1', '$' COLUMNS (a int PATH 'strict $.a' ERROR ON EMPTY) ERROR ON ERROR) jt; +ERROR: SQL/JSON member not found +SELECT * FROM JSON_TABLE(jsonb '1', '$' COLUMNS (a int PATH 'lax $.a' ERROR ON EMPTY) ERROR ON ERROR) jt; +ERROR: no SQL/JSON item +SELECT * FROM JSON_TABLE(jsonb '"a"', '$' COLUMNS 
(a int PATH '$' DEFAULT 1 ON EMPTY DEFAULT 2 ON ERROR)) jt; + a +--- + 2 +(1 row) + +SELECT * FROM JSON_TABLE(jsonb '"a"', '$' COLUMNS (a int PATH 'strict $.a' DEFAULT 1 ON EMPTY DEFAULT 2 ON ERROR)) jt; + a +--- + 2 +(1 row) + +SELECT * FROM JSON_TABLE(jsonb '"a"', '$' COLUMNS (a int PATH 'lax $.a' DEFAULT 1 ON EMPTY DEFAULT 2 ON ERROR)) jt; + a +--- + 1 +(1 row) + +-- JSON_TABLE: nested paths and plans +-- Should fail (JSON_TABLE columns shall contain explicit AS path +-- specifications if explicit PLAN clause is used) +SELECT * FROM JSON_TABLE( + jsonb '[]', '$' -- AS required here + COLUMNS ( + foo int PATH '$' + ) + PLAN DEFAULT (UNION) +) jt; +ERROR: invalid JSON_TABLE expression +LINE 2: jsonb '[]', '$' + ^ +DETAIL: JSON_TABLE columns shall contain explicit AS pathname specification if explicit PLAN clause is used +SELECT * FROM JSON_TABLE( + jsonb '[]', '$' AS path1 + COLUMNS ( + NESTED PATH '$' COLUMNS ( -- AS required here + foo int PATH '$' + ) + ) + PLAN DEFAULT (UNION) +) jt; +ERROR: invalid JSON_TABLE expression +LINE 4: NESTED PATH '$' COLUMNS ( + ^ +DETAIL: JSON_TABLE columns shall contain explicit AS pathname specification if explicit PLAN clause is used +-- Should fail (column names anf path names shall be distinct) +SELECT * FROM JSON_TABLE( + jsonb '[]', '$' AS a + COLUMNS ( + a int + ) +) jt; +ERROR: duplicate JSON_TABLE column name: a +HINT: JSON_TABLE path names and column names shall be distinct from one another +SELECT * FROM JSON_TABLE( + jsonb '[]', '$' AS a + COLUMNS ( + b int, + NESTED PATH '$' AS a + COLUMNS ( + c int + ) + ) +) jt; +ERROR: duplicate JSON_TABLE column name: a +HINT: JSON_TABLE path names and column names shall be distinct from one another +SELECT * FROM JSON_TABLE( + jsonb '[]', '$' + COLUMNS ( + b int, + NESTED PATH '$' AS b + COLUMNS ( + c int + ) + ) +) jt; +ERROR: duplicate JSON_TABLE column name: b +HINT: JSON_TABLE path names and column names shall be distinct from one another +SELECT * FROM JSON_TABLE( + jsonb '[]', '$' + COLUMNS ( + NESTED PATH '$' AS a + COLUMNS ( + b int + ), + NESTED PATH '$' + COLUMNS ( + NESTED PATH '$' AS a + COLUMNS ( + c int + ) + ) + ) +) jt; +ERROR: duplicate JSON_TABLE column name: a +HINT: JSON_TABLE path names and column names shall be distinct from one another +-- JSON_TABLE: plan validation +SELECT * FROM JSON_TABLE( + jsonb 'null', '$[*]' AS p0 + COLUMNS ( + NESTED PATH '$' AS p1 COLUMNS ( + NESTED PATH '$' AS p11 COLUMNS ( foo int ), + NESTED PATH '$' AS p12 COLUMNS ( bar int ) + ), + NESTED PATH '$' AS p2 COLUMNS ( + NESTED PATH '$' AS p21 COLUMNS ( baz int ) + ) + ) + PLAN (p1) +) jt; +ERROR: invalid JSON_TABLE plan +LINE 12: PLAN (p1) + ^ +DETAIL: path name mismatch: expected p0 but p1 is given +SELECT * FROM JSON_TABLE( + jsonb 'null', '$[*]' AS p0 + COLUMNS ( + NESTED PATH '$' AS p1 COLUMNS ( + NESTED PATH '$' AS p11 COLUMNS ( foo int ), + NESTED PATH '$' AS p12 COLUMNS ( bar int ) + ), + NESTED PATH '$' AS p2 COLUMNS ( + NESTED PATH '$' AS p21 COLUMNS ( baz int ) + ) + ) + PLAN (p0) +) jt; +ERROR: invalid JSON_TABLE plan +LINE 4: NESTED PATH '$' AS p1 COLUMNS ( + ^ +DETAIL: plan node for nested path p1 was not found in plan +SELECT * FROM JSON_TABLE( + jsonb 'null', '$[*]' AS p0 + COLUMNS ( + NESTED PATH '$' AS p1 COLUMNS ( + NESTED PATH '$' AS p11 COLUMNS ( foo int ), + NESTED PATH '$' AS p12 COLUMNS ( bar int ) + ), + NESTED PATH '$' AS p2 COLUMNS ( + NESTED PATH '$' AS p21 COLUMNS ( baz int ) + ) + ) + PLAN (p0 OUTER p3) +) jt; +ERROR: invalid JSON_TABLE plan +LINE 4: NESTED PATH '$' AS p1 
COLUMNS ( + ^ +DETAIL: plan node for nested path p1 was not found in plan +SELECT * FROM JSON_TABLE( + jsonb 'null', '$[*]' AS p0 + COLUMNS ( + NESTED PATH '$' AS p1 COLUMNS ( + NESTED PATH '$' AS p11 COLUMNS ( foo int ), + NESTED PATH '$' AS p12 COLUMNS ( bar int ) + ), + NESTED PATH '$' AS p2 COLUMNS ( + NESTED PATH '$' AS p21 COLUMNS ( baz int ) + ) + ) + PLAN (p0 OUTER (p1 CROSS p13)) +) jt; +ERROR: invalid JSON_TABLE plan +LINE 8: NESTED PATH '$' AS p2 COLUMNS ( + ^ +DETAIL: plan node for nested path p2 was not found in plan +SELECT * FROM JSON_TABLE( + jsonb 'null', '$[*]' AS p0 + COLUMNS ( + NESTED PATH '$' AS p1 COLUMNS ( + NESTED PATH '$' AS p11 COLUMNS ( foo int ), + NESTED PATH '$' AS p12 COLUMNS ( bar int ) + ), + NESTED PATH '$' AS p2 COLUMNS ( + NESTED PATH '$' AS p21 COLUMNS ( baz int ) + ) + ) + PLAN (p0 OUTER (p1 CROSS p2)) +) jt; +ERROR: invalid JSON_TABLE plan +LINE 5: NESTED PATH '$' AS p11 COLUMNS ( foo int ), + ^ +DETAIL: plan node for nested path p11 was not found in plan +SELECT * FROM JSON_TABLE( + jsonb 'null', '$[*]' AS p0 + COLUMNS ( + NESTED PATH '$' AS p1 COLUMNS ( + NESTED PATH '$' AS p11 COLUMNS ( foo int ), + NESTED PATH '$' AS p12 COLUMNS ( bar int ) + ), + NESTED PATH '$' AS p2 COLUMNS ( + NESTED PATH '$' AS p21 COLUMNS ( baz int ) + ) + ) + PLAN (p0 OUTER ((p1 UNION p11) CROSS p2)) +) jt; +ERROR: invalid JSON_TABLE plan +LINE 12: PLAN (p0 OUTER ((p1 UNION p11) CROSS p2)) + ^ +DETAIL: plan node contains some extra or duplicate sibling nodes +SELECT * FROM JSON_TABLE( + jsonb 'null', '$[*]' AS p0 + COLUMNS ( + NESTED PATH '$' AS p1 COLUMNS ( + NESTED PATH '$' AS p11 COLUMNS ( foo int ), + NESTED PATH '$' AS p12 COLUMNS ( bar int ) + ), + NESTED PATH '$' AS p2 COLUMNS ( + NESTED PATH '$' AS p21 COLUMNS ( baz int ) + ) + ) + PLAN (p0 OUTER ((p1 INNER p11) CROSS p2)) +) jt; +ERROR: invalid JSON_TABLE plan +LINE 6: NESTED PATH '$' AS p12 COLUMNS ( bar int ) + ^ +DETAIL: plan node for nested path p12 was not found in plan +SELECT * FROM JSON_TABLE( + jsonb 'null', '$[*]' AS p0 + COLUMNS ( + NESTED PATH '$' AS p1 COLUMNS ( + NESTED PATH '$' AS p11 COLUMNS ( foo int ), + NESTED PATH '$' AS p12 COLUMNS ( bar int ) + ), + NESTED PATH '$' AS p2 COLUMNS ( + NESTED PATH '$' AS p21 COLUMNS ( baz int ) + ) + ) + PLAN (p0 OUTER ((p1 INNER (p12 CROSS p11)) CROSS p2)) +) jt; +ERROR: invalid JSON_TABLE plan +LINE 9: NESTED PATH '$' AS p21 COLUMNS ( baz int ) + ^ +DETAIL: plan node for nested path p21 was not found in plan +SELECT * FROM JSON_TABLE( + jsonb 'null', 'strict $[*]' AS p0 + COLUMNS ( + NESTED PATH '$' AS p1 COLUMNS ( + NESTED PATH '$' AS p11 COLUMNS ( foo int ), + NESTED PATH '$' AS p12 COLUMNS ( bar int ) + ), + NESTED PATH '$' AS p2 COLUMNS ( + NESTED PATH '$' AS p21 COLUMNS ( baz int ) + ) + ) + PLAN (p0 OUTER ((p1 INNER (p12 CROSS p11)) CROSS (p2 INNER p21))) +) jt; + bar | foo | baz +-----+-----+----- +(0 rows) + +SELECT * FROM JSON_TABLE( + jsonb 'null', 'strict $[*]' -- without root path name + COLUMNS ( + NESTED PATH '$' AS p1 COLUMNS ( + NESTED PATH '$' AS p11 COLUMNS ( foo int ), + NESTED PATH '$' AS p12 COLUMNS ( bar int ) + ), + NESTED PATH '$' AS p2 COLUMNS ( + NESTED PATH '$' AS p21 COLUMNS ( baz int ) + ) + ) + PLAN ((p1 INNER (p12 CROSS p11)) CROSS (p2 INNER p21)) +) jt; + bar | foo | baz +-----+-----+----- +(0 rows) + +-- JSON_TABLE: plan execution +CREATE TEMP TABLE jsonb_table_test (js jsonb); +INSERT INTO jsonb_table_test +VALUES ( + '[ + {"a": 1, "b": [], "c": []}, + {"a": 2, "b": [1, 2, 3], "c": [10, null, 20]}, + {"a": 3, "b": [1, 2], "c": 
[]}, + {"x": "4", "b": [1, 2], "c": 123} + ]' +); +-- unspecified plan (outer, union) +select + jt.* +from + jsonb_table_test jtt, + json_table ( + jtt.js,'strict $[*]' as p + columns ( + n for ordinality, + a int path 'lax $.a' default -1 on empty, + nested path 'strict $.b[*]' as pb columns ( b int path '$' ), + nested path 'strict $.c[*]' as pc columns ( c int path '$' ) + ) + ) jt; + n | a | b | c +---+----+---+---- + 1 | 1 | | + 2 | 2 | 1 | + 2 | 2 | 2 | + 2 | 2 | 3 | + 2 | 2 | | 10 + 2 | 2 | | + 2 | 2 | | 20 + 3 | 3 | 1 | + 3 | 3 | 2 | + 4 | -1 | 1 | + 4 | -1 | 2 | +(11 rows) + +-- default plan (outer, union) +select + jt.* +from + jsonb_table_test jtt, + json_table ( + jtt.js,'strict $[*]' as p + columns ( + n for ordinality, + a int path 'lax $.a' default -1 on empty, + nested path 'strict $.b[*]' as pb columns ( b int path '$' ), + nested path 'strict $.c[*]' as pc columns ( c int path '$' ) + ) + plan default (outer, union) + ) jt; + n | a | b | c +---+----+---+---- + 1 | 1 | | + 2 | 2 | 1 | + 2 | 2 | 2 | + 2 | 2 | 3 | + 2 | 2 | | 10 + 2 | 2 | | + 2 | 2 | | 20 + 3 | 3 | 1 | + 3 | 3 | 2 | + 4 | -1 | 1 | + 4 | -1 | 2 | +(11 rows) + +-- specific plan (p outer (pb union pc)) +select + jt.* +from + jsonb_table_test jtt, + json_table ( + jtt.js,'strict $[*]' as p + columns ( + n for ordinality, + a int path 'lax $.a' default -1 on empty, + nested path 'strict $.b[*]' as pb columns ( b int path '$' ), + nested path 'strict $.c[*]' as pc columns ( c int path '$' ) + ) + plan (p outer (pb union pc)) + ) jt; + n | a | b | c +---+----+---+---- + 1 | 1 | | + 2 | 2 | 1 | + 2 | 2 | 2 | + 2 | 2 | 3 | + 2 | 2 | | 10 + 2 | 2 | | + 2 | 2 | | 20 + 3 | 3 | 1 | + 3 | 3 | 2 | + 4 | -1 | 1 | + 4 | -1 | 2 | +(11 rows) + +-- specific plan (p outer (pc union pb)) +select + jt.* +from + jsonb_table_test jtt, + json_table ( + jtt.js,'strict $[*]' as p + columns ( + n for ordinality, + a int path 'lax $.a' default -1 on empty, + nested path 'strict $.b[*]' as pb columns ( b int path '$' ), + nested path 'strict $.c[*]' as pc columns ( c int path '$' ) + ) + plan (p outer (pc union pb)) + ) jt; + n | a | c | b +---+----+----+--- + 1 | 1 | | + 2 | 2 | 10 | + 2 | 2 | | + 2 | 2 | 20 | + 2 | 2 | | 1 + 2 | 2 | | 2 + 2 | 2 | | 3 + 3 | 3 | | 1 + 3 | 3 | | 2 + 4 | -1 | | 1 + 4 | -1 | | 2 +(11 rows) + +-- default plan (inner, union) +select + jt.* +from + jsonb_table_test jtt, + json_table ( + jtt.js,'strict $[*]' as p + columns ( + n for ordinality, + a int path 'lax $.a' default -1 on empty, + nested path 'strict $.b[*]' as pb columns ( b int path '$' ), + nested path 'strict $.c[*]' as pc columns ( c int path '$' ) + ) + plan default (inner) + ) jt; + n | a | b | c +---+----+---+---- + 2 | 2 | 1 | + 2 | 2 | 2 | + 2 | 2 | 3 | + 2 | 2 | | 10 + 2 | 2 | | + 2 | 2 | | 20 + 3 | 3 | 1 | + 3 | 3 | 2 | + 4 | -1 | 1 | + 4 | -1 | 2 | +(10 rows) + +-- specific plan (p inner (pb union pc)) +select + jt.* +from + jsonb_table_test jtt, + json_table ( + jtt.js,'strict $[*]' as p + columns ( + n for ordinality, + a int path 'lax $.a' default -1 on empty, + nested path 'strict $.b[*]' as pb columns ( b int path '$' ), + nested path 'strict $.c[*]' as pc columns ( c int path '$' ) + ) + plan (p inner (pb union pc)) + ) jt; + n | a | b | c +---+----+---+---- + 2 | 2 | 1 | + 2 | 2 | 2 | + 2 | 2 | 3 | + 2 | 2 | | 10 + 2 | 2 | | + 2 | 2 | | 20 + 3 | 3 | 1 | + 3 | 3 | 2 | + 4 | -1 | 1 | + 4 | -1 | 2 | +(10 rows) + +-- default plan (inner, cross) +select + jt.* +from + jsonb_table_test jtt, + json_table ( + jtt.js,'strict $[*]' as p + 
columns ( + n for ordinality, + a int path 'lax $.a' default -1 on empty, + nested path 'strict $.b[*]' as pb columns ( b int path '$' ), + nested path 'strict $.c[*]' as pc columns ( c int path '$' ) + ) + plan default (cross, inner) + ) jt; + n | a | b | c +---+---+---+---- + 2 | 2 | 1 | 10 + 2 | 2 | 1 | + 2 | 2 | 1 | 20 + 2 | 2 | 2 | 10 + 2 | 2 | 2 | + 2 | 2 | 2 | 20 + 2 | 2 | 3 | 10 + 2 | 2 | 3 | + 2 | 2 | 3 | 20 +(9 rows) + +-- specific plan (p inner (pb cross pc)) +select + jt.* +from + jsonb_table_test jtt, + json_table ( + jtt.js,'strict $[*]' as p + columns ( + n for ordinality, + a int path 'lax $.a' default -1 on empty, + nested path 'strict $.b[*]' as pb columns ( b int path '$' ), + nested path 'strict $.c[*]' as pc columns ( c int path '$' ) + ) + plan (p inner (pb cross pc)) + ) jt; + n | a | b | c +---+---+---+---- + 2 | 2 | 1 | 10 + 2 | 2 | 1 | + 2 | 2 | 1 | 20 + 2 | 2 | 2 | 10 + 2 | 2 | 2 | + 2 | 2 | 2 | 20 + 2 | 2 | 3 | 10 + 2 | 2 | 3 | + 2 | 2 | 3 | 20 +(9 rows) + +-- default plan (outer, cross) +select + jt.* +from + jsonb_table_test jtt, + json_table ( + jtt.js,'strict $[*]' as p + columns ( + n for ordinality, + a int path 'lax $.a' default -1 on empty, + nested path 'strict $.b[*]' as pb columns ( b int path '$' ), + nested path 'strict $.c[*]' as pc columns ( c int path '$' ) + ) + plan default (outer, cross) + ) jt; + n | a | b | c +---+----+---+---- + 1 | 1 | | + 2 | 2 | 1 | 10 + 2 | 2 | 1 | + 2 | 2 | 1 | 20 + 2 | 2 | 2 | 10 + 2 | 2 | 2 | + 2 | 2 | 2 | 20 + 2 | 2 | 3 | 10 + 2 | 2 | 3 | + 2 | 2 | 3 | 20 + 3 | 3 | | + 4 | -1 | | +(12 rows) + +-- specific plan (p outer (pb cross pc)) +select + jt.* +from + jsonb_table_test jtt, + json_table ( + jtt.js,'strict $[*]' as p + columns ( + n for ordinality, + a int path 'lax $.a' default -1 on empty, + nested path 'strict $.b[*]' as pb columns ( b int path '$' ), + nested path 'strict $.c[*]' as pc columns ( c int path '$' ) + ) + plan (p outer (pb cross pc)) + ) jt; + n | a | b | c +---+----+---+---- + 1 | 1 | | + 2 | 2 | 1 | 10 + 2 | 2 | 1 | + 2 | 2 | 1 | 20 + 2 | 2 | 2 | 10 + 2 | 2 | 2 | + 2 | 2 | 2 | 20 + 2 | 2 | 3 | 10 + 2 | 2 | 3 | + 2 | 2 | 3 | 20 + 3 | 3 | | + 4 | -1 | | +(12 rows) + +select + jt.*, b1 + 100 as b +from + json_table (jsonb + '[ + {"a": 1, "b": [[1, 10], [2], [3, 30, 300]], "c": [1, null, 2]}, + {"a": 2, "b": [10, 20], "c": [1, null, 2]}, + {"x": "3", "b": [11, 22, 33, 44]} + ]', + '$[*]' as p + columns ( + n for ordinality, + a int path 'lax $.a' default -1 on error, + nested path 'strict $.b[*]' as pb columns ( + b text format json path '$', + nested path 'strict $[*]' as pb1 columns ( + b1 int path '$' + ) + ), + nested path 'strict $.c[*]' as pc columns ( + c text format json path '$', + nested path 'strict $[*]' as pc1 columns ( + c1 int path '$' + ) + ) + ) + --plan default(outer, cross) + plan(p outer ((pb inner pb1) cross (pc outer pc1))) + ) jt; + n | a | b | b1 | c | c1 | b +---+---+--------------+-----+------+----+----- + 1 | 1 | [1, 10] | 1 | 1 | | 101 + 1 | 1 | [1, 10] | 1 | null | | 101 + 1 | 1 | [1, 10] | 1 | 2 | | 101 + 1 | 1 | [1, 10] | 10 | 1 | | 110 + 1 | 1 | [1, 10] | 10 | null | | 110 + 1 | 1 | [1, 10] | 10 | 2 | | 110 + 1 | 1 | [2] | 2 | 1 | | 102 + 1 | 1 | [2] | 2 | null | | 102 + 1 | 1 | [2] | 2 | 2 | | 102 + 1 | 1 | [3, 30, 300] | 3 | 1 | | 103 + 1 | 1 | [3, 30, 300] | 3 | null | | 103 + 1 | 1 | [3, 30, 300] | 3 | 2 | | 103 + 1 | 1 | [3, 30, 300] | 30 | 1 | | 130 + 1 | 1 | [3, 30, 300] | 30 | null | | 130 + 1 | 1 | [3, 30, 300] | 30 | 2 | | 130 + 1 | 1 | [3, 30, 300] | 300 | 
1 | | 400 + 1 | 1 | [3, 30, 300] | 300 | null | | 400 + 1 | 1 | [3, 30, 300] | 300 | 2 | | 400 + 2 | 2 | | | | | + 3 | | | | | | +(20 rows) + +-- Should succeed (JSON arguments are passed to root and nested paths) +SELECT * +FROM + generate_series(1, 4) x, + generate_series(1, 3) y, + JSON_TABLE(jsonb + '[[1,2,3],[2,3,4,5],[3,4,5,6]]', + 'strict $[*] ? (@.[*] < $x)' + PASSING x AS x, y AS y + COLUMNS ( + y text FORMAT JSON PATH '$', + NESTED PATH 'strict $[*] ? (@ >= $y)' + COLUMNS ( + z int PATH '$' + ) + ) + ) jt; + x | y | y | z +---+---+--------------+--- + 2 | 1 | [1, 2, 3] | 1 + 2 | 1 | [1, 2, 3] | 2 + 2 | 1 | [1, 2, 3] | 3 + 3 | 1 | [1, 2, 3] | 1 + 3 | 1 | [1, 2, 3] | 2 + 3 | 1 | [1, 2, 3] | 3 + 3 | 1 | [2, 3, 4, 5] | 2 + 3 | 1 | [2, 3, 4, 5] | 3 + 3 | 1 | [2, 3, 4, 5] | 4 + 3 | 1 | [2, 3, 4, 5] | 5 + 4 | 1 | [1, 2, 3] | 1 + 4 | 1 | [1, 2, 3] | 2 + 4 | 1 | [1, 2, 3] | 3 + 4 | 1 | [2, 3, 4, 5] | 2 + 4 | 1 | [2, 3, 4, 5] | 3 + 4 | 1 | [2, 3, 4, 5] | 4 + 4 | 1 | [2, 3, 4, 5] | 5 + 4 | 1 | [3, 4, 5, 6] | 3 + 4 | 1 | [3, 4, 5, 6] | 4 + 4 | 1 | [3, 4, 5, 6] | 5 + 4 | 1 | [3, 4, 5, 6] | 6 + 2 | 2 | [1, 2, 3] | 2 + 2 | 2 | [1, 2, 3] | 3 + 3 | 2 | [1, 2, 3] | 2 + 3 | 2 | [1, 2, 3] | 3 + 3 | 2 | [2, 3, 4, 5] | 2 + 3 | 2 | [2, 3, 4, 5] | 3 + 3 | 2 | [2, 3, 4, 5] | 4 + 3 | 2 | [2, 3, 4, 5] | 5 + 4 | 2 | [1, 2, 3] | 2 + 4 | 2 | [1, 2, 3] | 3 + 4 | 2 | [2, 3, 4, 5] | 2 + 4 | 2 | [2, 3, 4, 5] | 3 + 4 | 2 | [2, 3, 4, 5] | 4 + 4 | 2 | [2, 3, 4, 5] | 5 + 4 | 2 | [3, 4, 5, 6] | 3 + 4 | 2 | [3, 4, 5, 6] | 4 + 4 | 2 | [3, 4, 5, 6] | 5 + 4 | 2 | [3, 4, 5, 6] | 6 + 2 | 3 | [1, 2, 3] | 3 + 3 | 3 | [1, 2, 3] | 3 + 3 | 3 | [2, 3, 4, 5] | 3 + 3 | 3 | [2, 3, 4, 5] | 4 + 3 | 3 | [2, 3, 4, 5] | 5 + 4 | 3 | [1, 2, 3] | 3 + 4 | 3 | [2, 3, 4, 5] | 3 + 4 | 3 | [2, 3, 4, 5] | 4 + 4 | 3 | [2, 3, 4, 5] | 5 + 4 | 3 | [3, 4, 5, 6] | 3 + 4 | 3 | [3, 4, 5, 6] | 4 + 4 | 3 | [3, 4, 5, 6] | 5 + 4 | 3 | [3, 4, 5, 6] | 6 +(52 rows) + +-- Should fail (JSON arguments are not passed to column paths) +SELECT * +FROM JSON_TABLE( + jsonb '[1,2,3]', + '$[*] ? (@ < $x)' + PASSING 10 AS x + COLUMNS (y text FORMAT JSON PATH '$ ? 
(@ < $x)') + ) jt; +ERROR: could not find 'x' passed variable diff --git a/src/test/regress/expected/jsonpath.out b/src/test/regress/expected/jsonpath.out new file mode 100644 index 0000000000..1d313d0d28 --- /dev/null +++ b/src/test/regress/expected/jsonpath.out @@ -0,0 +1,886 @@ +--jsonpath io +select ''::jsonpath; +ERROR: invalid input syntax for jsonpath: "" +LINE 1: select ''::jsonpath; + ^ +select '$'::jsonpath; + jsonpath +---------- + $ +(1 row) + +select 'strict $'::jsonpath; + jsonpath +---------- + strict $ +(1 row) + +select 'lax $'::jsonpath; + jsonpath +---------- + $ +(1 row) + +select '$.a'::jsonpath; + jsonpath +---------- + $."a" +(1 row) + +select '$.a.v'::jsonpath; + jsonpath +----------- + $."a"."v" +(1 row) + +select '$.a.*'::jsonpath; + jsonpath +---------- + $."a".* +(1 row) + +select '$.*.[*]'::jsonpath; + jsonpath +---------- + $.*[*] +(1 row) + +select '$.*[*]'::jsonpath; + jsonpath +---------- + $.*[*] +(1 row) + +select '$.a.[*]'::jsonpath; + jsonpath +---------- + $."a"[*] +(1 row) + +select '$.a[*]'::jsonpath; + jsonpath +---------- + $."a"[*] +(1 row) + +select '$.a.[*][*]'::jsonpath; + jsonpath +------------- + $."a"[*][*] +(1 row) + +select '$.a.[*].[*]'::jsonpath; + jsonpath +------------- + $."a"[*][*] +(1 row) + +select '$.a[*][*]'::jsonpath; + jsonpath +------------- + $."a"[*][*] +(1 row) + +select '$.a[*].[*]'::jsonpath; + jsonpath +------------- + $."a"[*][*] +(1 row) + +select '$[*]'::jsonpath; + jsonpath +---------- + $[*] +(1 row) + +select '$[0]'::jsonpath; + jsonpath +---------- + $[0] +(1 row) + +select '$[*][0]'::jsonpath; + jsonpath +---------- + $[*][0] +(1 row) + +select '$[*].a'::jsonpath; + jsonpath +---------- + $[*]."a" +(1 row) + +select '$[*][0].a.b'::jsonpath; + jsonpath +----------------- + $[*][0]."a"."b" +(1 row) + +select '$.a.**.b'::jsonpath; + jsonpath +-------------- + $."a".**."b" +(1 row) + +select '$.a.**{2}.b'::jsonpath; + jsonpath +----------------- + $."a".**{2}."b" +(1 row) + +select '$.a.**{2,2}.b'::jsonpath; + jsonpath +----------------- + $."a".**{2}."b" +(1 row) + +select '$.a.**{2,5}.b'::jsonpath; + jsonpath +------------------- + $."a".**{2,5}."b" +(1 row) + +select '$.a.**{,5}.b'::jsonpath; + jsonpath +------------------ + $."a".**{,5}."b" +(1 row) + +select '$.a.**{5,}.b'::jsonpath; + jsonpath +------------------ + $."a".**{5,}."b" +(1 row) + +select '$+1'::jsonpath; + jsonpath +---------- + ($ + 1) +(1 row) + +select '$-1'::jsonpath; + jsonpath +---------- + ($ - 1) +(1 row) + +select '$--+1'::jsonpath; + jsonpath +---------- + ($ - -1) +(1 row) + +select '$.a/+-1'::jsonpath; + jsonpath +-------------- + ($."a" / -1) +(1 row) + +select '$.g ? ($.a == 1)'::jsonpath; + jsonpath +-------------------- + $."g"?($."a" == 1) +(1 row) + +select '$.g ? (@ == 1)'::jsonpath; + jsonpath +---------------- + $."g"?(@ == 1) +(1 row) + +select '$.g ? (a == 1)'::jsonpath; + jsonpath +------------------ + $."g"?("a" == 1) +(1 row) + +select '$.g ? (.a == 1)'::jsonpath; + jsonpath +-------------------- + $."g"?(@."a" == 1) +(1 row) + +select '$.g ? (@.a == 1)'::jsonpath; + jsonpath +-------------------- + $."g"?(@."a" == 1) +(1 row) + +select '$.g ? (@.a == 1 || a == 4)'::jsonpath; + jsonpath +-------------------------------- + $."g"?(@."a" == 1 || "a" == 4) +(1 row) + +select '$.g ? (@.a == 1 && a == 4)'::jsonpath; + jsonpath +-------------------------------- + $."g"?(@."a" == 1 && "a" == 4) +(1 row) + +select '$.g ? 
(@.a == 1 || a == 4 && b == 7)'::jsonpath; + jsonpath +-------------------------------------------- + $."g"?(@."a" == 1 || "a" == 4 && "b" == 7) +(1 row) + +select '$.g ? (@.a == 1 || !(a == 4) && b == 7)'::jsonpath; + jsonpath +----------------------------------------------- + $."g"?(@."a" == 1 || !("a" == 4) && "b" == 7) +(1 row) + +select '$.g ? (@.a == 1 || !(x >= 123 || a == 4) && b == 7)'::jsonpath; + jsonpath +------------------------------------------------------------- + $."g"?(@."a" == 1 || !("x" >= 123 || "a" == 4) && "b" == 7) +(1 row) + +select '$.g ? (.x >= @[*]?(@.a > "abc"))'::jsonpath; + jsonpath +--------------------------------------- + $."g"?(@."x" >= @[*]?(@."a" > "abc")) +(1 row) + +select '$.g ? ((x >= 123 || a == 4) is unknown)'::jsonpath; + jsonpath +--------------------------------------------- + $."g"?(("x" >= 123 || "a" == 4) is unknown) +(1 row) + +select '$.g ? (exists (.x))'::jsonpath; + jsonpath +------------------------ + $."g"?(exists (@."x")) +(1 row) + +select '$.g ? (exists (@.x ? (@ == 14)))'::jsonpath; + jsonpath +---------------------------------- + $."g"?(exists (@."x"?(@ == 14))) +(1 row) + +select '$.g ? (exists (.x ? (@ == 14)))'::jsonpath; + jsonpath +---------------------------------- + $."g"?(exists (@."x"?(@ == 14))) +(1 row) + +select '$.g ? ((x >= 123 || a == 4) && exists (.x ? (@ == 14)))'::jsonpath; + jsonpath +-------------------------------------------------------------- + $."g"?(("x" >= 123 || "a" == 4) && exists (@."x"?(@ == 14))) +(1 row) + +select '$.g ? (+x >= +-(+a + 2))'::jsonpath; + jsonpath +-------------------------------- + $."g"?(+"x" >= +(-(+"a" + 2))) +(1 row) + +select '$a'::jsonpath; + jsonpath +---------- + $"a" +(1 row) + +select '$a.b'::jsonpath; + jsonpath +---------- + $"a"."b" +(1 row) + +select '$a[*]'::jsonpath; + jsonpath +---------- + $"a"[*] +(1 row) + +select '$.g ? (zip == $zip)'::jsonpath; + jsonpath +------------------------- + $."g"?("zip" == $"zip") +(1 row) + +select '$.a.[1,2, 3 to 16]'::jsonpath; + jsonpath +-------------------- + $."a"[1,2,3 to 16] +(1 row) + +select '$.a[1,2, 3 to 16]'::jsonpath; + jsonpath +-------------------- + $."a"[1,2,3 to 16] +(1 row) + +select '$.a[$a + 1, ($b[*]) to -(@[0] * 2)]'::jsonpath; + jsonpath +---------------------------------------- + $."a"[$"a" + 1,$"b"[*] to -(@[0] * 2)] +(1 row) + +select '$.a[$.a.size() - 3]'::jsonpath; + jsonpath +------------------------- + $."a"[$."a".size() - 3] +(1 row) + +select 'last'::jsonpath; +ERROR: LAST is allowed only in array subscripts +LINE 1: select 'last'::jsonpath; + ^ +select '"last"'::jsonpath; + jsonpath +---------- + "last" +(1 row) + +select '$.last'::jsonpath; + jsonpath +---------- + $."last" +(1 row) + +select '$ ? (last > 0)'::jsonpath; +ERROR: LAST is allowed only in array subscripts +LINE 1: select '$ ? (last > 0)'::jsonpath; + ^ +select '$[last]'::jsonpath; + jsonpath +---------- + $[last] +(1 row) + +select '$[@ ? 
(last > 0)]'::jsonpath; + jsonpath +----------------- + $[@?(last > 0)] +(1 row) + +select 'null.type()'::jsonpath; + jsonpath +------------- + null.type() +(1 row) + +select '1.type()'::jsonpath; + jsonpath +---------- + 1.type() +(1 row) + +select '"aaa".type()'::jsonpath; + jsonpath +-------------- + "aaa".type() +(1 row) + +select 'aaa.type()'::jsonpath; + jsonpath +-------------- + "aaa".type() +(1 row) + +select 'true.type()'::jsonpath; + jsonpath +------------- + true.type() +(1 row) + +select '$.datetime()'::jsonpath; + jsonpath +-------------- + $.datetime() +(1 row) + +select '$.datetime("datetime template")'::jsonpath; + jsonpath +--------------------------------- + $.datetime("datetime template") +(1 row) + +select '$.reduce($1 + $2 + @[1])'::jsonpath; + jsonpath +-------------------------------- + $.reduce(($"1" + $"2") + @[1]) +(1 row) + +select '$.fold($1 + $2 + @[1], 2 + 3)'::jsonpath; + jsonpath +------------------------------------- + $.fold(($"1" + $"2") + @[1], 2 + 3) +(1 row) + +select '$.min().abs() + 5'::jsonpath; + jsonpath +--------------------- + ($.min().abs() + 5) +(1 row) + +select '$.max().floor()'::jsonpath; + jsonpath +----------------- + $.max().floor() +(1 row) + +select '$ ? (@ starts with "abc")'::jsonpath; + jsonpath +------------------------- + $?(@ starts with "abc") +(1 row) + +select '$ ? (@ starts with $var)'::jsonpath; + jsonpath +-------------------------- + $?(@ starts with $"var") +(1 row) + +select '$ ? (@ like_regex "pattern")'::jsonpath; + jsonpath +---------------------------- + $?(@ like_regex "pattern") +(1 row) + +select '$ ? (@ like_regex "pattern" flag "")'::jsonpath; + jsonpath +---------------------------- + $?(@ like_regex "pattern") +(1 row) + +select '$ ? (@ like_regex "pattern" flag "i")'::jsonpath; + jsonpath +------------------------------------- + $?(@ like_regex "pattern" flag "i") +(1 row) + +select '$ ? (@ like_regex "pattern" flag "is")'::jsonpath; + jsonpath +-------------------------------------- + $?(@ like_regex "pattern" flag "is") +(1 row) + +select '$ ? (@ like_regex "pattern" flag "isim")'::jsonpath; + jsonpath +-------------------------------------- + $?(@ like_regex "pattern" flag "im") +(1 row) + +select '$ ? (@ like_regex "pattern" flag "xsms")'::jsonpath; + jsonpath +-------------------------------------- + $?(@ like_regex "pattern" flag "sx") +(1 row) + +select '$ ? (@ like_regex "pattern" flag "a")'::jsonpath; +ERROR: bad jsonpath representation +LINE 1: select '$ ? 
(@ like_regex "pattern" flag "a")'::jsonpath; + ^ +DETAIL: unrecognized flag of LIKE_REGEX predicate at or near """ +select '$ < 1'::jsonpath; + jsonpath +---------- + ($ < 1) +(1 row) + +select '($ < 1) || $.a.b <= $x'::jsonpath; + jsonpath +------------------------------ + ($ < 1 || $."a"."b" <= $"x") +(1 row) + +select '@ + 1'::jsonpath; +ERROR: @ is not allowed in root expressions +LINE 1: select '@ + 1'::jsonpath; + ^ +select '($).a.b'::jsonpath; + jsonpath +----------- + $."a"."b" +(1 row) + +select '($.a.b).c.d'::jsonpath; + jsonpath +------------------- + $."a"."b"."c"."d" +(1 row) + +select '($.a.b + -$.x.y).c.d'::jsonpath; + jsonpath +---------------------------------- + ($."a"."b" + -$."x"."y")."c"."d" +(1 row) + +select '(-+$.a.b).c.d'::jsonpath; + jsonpath +------------------------- + (-(+$."a"."b"))."c"."d" +(1 row) + +select '1 + ($.a.b + 2).c.d'::jsonpath; + jsonpath +------------------------------- + (1 + ($."a"."b" + 2)."c"."d") +(1 row) + +select '1 + ($.a.b > 2).c.d'::jsonpath; + jsonpath +------------------------------- + (1 + ($."a"."b" > 2)."c"."d") +(1 row) + +select '1, 2 + 3, $.a[*] + 5'::jsonpath; + jsonpath +------------------------ + 1, 2 + 3, $."a"[*] + 5 +(1 row) + +select '(1, 2, $.a)'::jsonpath; + jsonpath +------------- + 1, 2, $."a" +(1 row) + +select '(1, 2, $.a).a[*]'::jsonpath; + jsonpath +---------------------- + (1, 2, $."a")."a"[*] +(1 row) + +select '(1, 2, $.a) == 5'::jsonpath; + jsonpath +---------------------- + ((1, 2, $."a") == 5) +(1 row) + +select '$[(1, 2, $.a) to (3, 4)]'::jsonpath; + jsonpath +---------------------------- + $[(1, 2, $."a") to (3, 4)] +(1 row) + +select '$[(1, (2, $.a)), 3, (4, 5)]'::jsonpath; + jsonpath +----------------------------- + $[(1, (2, $."a")),3,(4, 5)] +(1 row) + +select '[]'::jsonpath; + jsonpath +---------- + [] +(1 row) + +select '[[1, 2], ([(3, 4, 5), 6], []), $.a[*]]'::jsonpath; + jsonpath +------------------------------------------ + [[1, 2], ([(3, 4, 5), 6], []), $."a"[*]] +(1 row) + +select '{}'::jsonpath; + jsonpath +---------- + {} +(1 row) + +select '{a: 1 + 2}'::jsonpath; + jsonpath +-------------- + {"a": 1 + 2} +(1 row) + +select '{a: 1 + 2, b : (1,2), c: [$[*],4,5], d: { "e e e": "f f f" }}'::jsonpath; + jsonpath +----------------------------------------------------------------------- + {"a": 1 + 2, "b": (1, 2), "c": [$[*], 4, 5], "d": {"e e e": "f f f"}} +(1 row) + +select '$ ? (a < 1)'::jsonpath; + jsonpath +------------- + $?("a" < 1) +(1 row) + +select '$ ? (a < -1)'::jsonpath; + jsonpath +-------------- + $?("a" < -1) +(1 row) + +select '$ ? (a < +1)'::jsonpath; + jsonpath +------------- + $?("a" < 1) +(1 row) + +select '$ ? (a < .1)'::jsonpath; + jsonpath +--------------- + $?("a" < 0.1) +(1 row) + +select '$ ? (a < -.1)'::jsonpath; + jsonpath +---------------- + $?("a" < -0.1) +(1 row) + +select '$ ? (a < +.1)'::jsonpath; + jsonpath +--------------- + $?("a" < 0.1) +(1 row) + +select '$ ? (a < 0.1)'::jsonpath; + jsonpath +--------------- + $?("a" < 0.1) +(1 row) + +select '$ ? (a < -0.1)'::jsonpath; + jsonpath +---------------- + $?("a" < -0.1) +(1 row) + +select '$ ? (a < +0.1)'::jsonpath; + jsonpath +--------------- + $?("a" < 0.1) +(1 row) + +select '$ ? (a < 10.1)'::jsonpath; + jsonpath +---------------- + $?("a" < 10.1) +(1 row) + +select '$ ? (a < -10.1)'::jsonpath; + jsonpath +----------------- + $?("a" < -10.1) +(1 row) + +select '$ ? (a < +10.1)'::jsonpath; + jsonpath +---------------- + $?("a" < 10.1) +(1 row) + +select '$ ? 
(a < 1e1)'::jsonpath; + jsonpath +-------------- + $?("a" < 10) +(1 row) + +select '$ ? (a < -1e1)'::jsonpath; + jsonpath +--------------- + $?("a" < -10) +(1 row) + +select '$ ? (a < +1e1)'::jsonpath; + jsonpath +-------------- + $?("a" < 10) +(1 row) + +select '$ ? (a < .1e1)'::jsonpath; + jsonpath +------------- + $?("a" < 1) +(1 row) + +select '$ ? (a < -.1e1)'::jsonpath; + jsonpath +-------------- + $?("a" < -1) +(1 row) + +select '$ ? (a < +.1e1)'::jsonpath; + jsonpath +------------- + $?("a" < 1) +(1 row) + +select '$ ? (a < 0.1e1)'::jsonpath; + jsonpath +------------- + $?("a" < 1) +(1 row) + +select '$ ? (a < -0.1e1)'::jsonpath; + jsonpath +-------------- + $?("a" < -1) +(1 row) + +select '$ ? (a < +0.1e1)'::jsonpath; + jsonpath +------------- + $?("a" < 1) +(1 row) + +select '$ ? (a < 10.1e1)'::jsonpath; + jsonpath +--------------- + $?("a" < 101) +(1 row) + +select '$ ? (a < -10.1e1)'::jsonpath; + jsonpath +---------------- + $?("a" < -101) +(1 row) + +select '$ ? (a < +10.1e1)'::jsonpath; + jsonpath +--------------- + $?("a" < 101) +(1 row) + +select '$ ? (a < 1e-1)'::jsonpath; + jsonpath +--------------- + $?("a" < 0.1) +(1 row) + +select '$ ? (a < -1e-1)'::jsonpath; + jsonpath +---------------- + $?("a" < -0.1) +(1 row) + +select '$ ? (a < +1e-1)'::jsonpath; + jsonpath +--------------- + $?("a" < 0.1) +(1 row) + +select '$ ? (a < .1e-1)'::jsonpath; + jsonpath +---------------- + $?("a" < 0.01) +(1 row) + +select '$ ? (a < -.1e-1)'::jsonpath; + jsonpath +----------------- + $?("a" < -0.01) +(1 row) + +select '$ ? (a < +.1e-1)'::jsonpath; + jsonpath +---------------- + $?("a" < 0.01) +(1 row) + +select '$ ? (a < 0.1e-1)'::jsonpath; + jsonpath +---------------- + $?("a" < 0.01) +(1 row) + +select '$ ? (a < -0.1e-1)'::jsonpath; + jsonpath +----------------- + $?("a" < -0.01) +(1 row) + +select '$ ? (a < +0.1e-1)'::jsonpath; + jsonpath +---------------- + $?("a" < 0.01) +(1 row) + +select '$ ? (a < 10.1e-1)'::jsonpath; + jsonpath +---------------- + $?("a" < 1.01) +(1 row) + +select '$ ? (a < -10.1e-1)'::jsonpath; + jsonpath +----------------- + $?("a" < -1.01) +(1 row) + +select '$ ? (a < +10.1e-1)'::jsonpath; + jsonpath +---------------- + $?("a" < 1.01) +(1 row) + +select '$ ? (a < 1e+1)'::jsonpath; + jsonpath +-------------- + $?("a" < 10) +(1 row) + +select '$ ? (a < -1e+1)'::jsonpath; + jsonpath +--------------- + $?("a" < -10) +(1 row) + +select '$ ? (a < +1e+1)'::jsonpath; + jsonpath +-------------- + $?("a" < 10) +(1 row) + +select '$ ? (a < .1e+1)'::jsonpath; + jsonpath +------------- + $?("a" < 1) +(1 row) + +select '$ ? (a < -.1e+1)'::jsonpath; + jsonpath +-------------- + $?("a" < -1) +(1 row) + +select '$ ? (a < +.1e+1)'::jsonpath; + jsonpath +------------- + $?("a" < 1) +(1 row) + +select '$ ? (a < 0.1e+1)'::jsonpath; + jsonpath +------------- + $?("a" < 1) +(1 row) + +select '$ ? (a < -0.1e+1)'::jsonpath; + jsonpath +-------------- + $?("a" < -1) +(1 row) + +select '$ ? (a < +0.1e+1)'::jsonpath; + jsonpath +------------- + $?("a" < 1) +(1 row) + +select '$ ? (a < 10.1e+1)'::jsonpath; + jsonpath +--------------- + $?("a" < 101) +(1 row) + +select '$ ? (a < -10.1e+1)'::jsonpath; + jsonpath +---------------- + $?("a" < -101) +(1 row) + +select '$ ? 
(a < +10.1e+1)'::jsonpath; + jsonpath +--------------- + $?("a" < 101) +(1 row) + diff --git a/src/test/regress/expected/opr_sanity.out b/src/test/regress/expected/opr_sanity.out index 684f7f20a8..cc82b6a64e 100644 --- a/src/test/regress/expected/opr_sanity.out +++ b/src/test/regress/expected/opr_sanity.out @@ -205,11 +205,12 @@ WHERE p1.oid != p2.oid AND ORDER BY 1, 2; proargtypes | proargtypes -------------+------------- + 25 | 114 25 | 1042 25 | 1043 1114 | 1184 1560 | 1562 -(4 rows) +(5 rows) SELECT DISTINCT p1.proargtypes[1], p2.proargtypes[1] FROM pg_proc AS p1, pg_proc AS p2 @@ -1817,6 +1818,8 @@ ORDER BY 1, 2, 3; 2742 | 9 | ? 2742 | 10 | ?| 2742 | 11 | ?& + 2742 | 15 | @? + 2742 | 16 | @~ 3580 | 1 | < 3580 | 1 | << 3580 | 2 | &< @@ -1880,7 +1883,7 @@ ORDER BY 1, 2, 3; 4000 | 25 | <<= 4000 | 26 | >> 4000 | 27 | >>= -(121 rows) +(123 rows) -- Check that all opclass search operators have selectivity estimators. -- This is not absolutely required, but it seems a reasonable thing diff --git a/src/test/regress/expected/sqljson.out b/src/test/regress/expected/sqljson.out new file mode 100644 index 0000000000..167bfe4b51 --- /dev/null +++ b/src/test/regress/expected/sqljson.out @@ -0,0 +1,796 @@ +-- JSON_OBJECT() +SELECT JSON_OBJECT(); + ?column? +---------- + {} +(1 row) + +SELECT JSON_OBJECT(RETURNING json); + ?column? +---------- + {} +(1 row) + +SELECT JSON_OBJECT(RETURNING json FORMAT JSON); + ?column? +---------- + {} +(1 row) + +SELECT JSON_OBJECT(RETURNING jsonb); + ?column? +---------- + {} +(1 row) + +SELECT JSON_OBJECT(RETURNING jsonb FORMAT JSON); + ?column? +---------- + {} +(1 row) + +SELECT JSON_OBJECT(RETURNING text); + ?column? +---------- + {} +(1 row) + +SELECT JSON_OBJECT(RETURNING text FORMAT JSON); + ?column? +---------- + {} +(1 row) + +SELECT JSON_OBJECT(RETURNING text FORMAT JSON ENCODING UTF8); +ERROR: cannot set JSON encoding for non-bytea output types +LINE 1: SELECT JSON_OBJECT(RETURNING text FORMAT JSON ENCODING UTF8)... + ^ +SELECT JSON_OBJECT(RETURNING text FORMAT JSON ENCODING INVALID_ENCODING); +ERROR: unrecognized JSON encoding: invalid_encoding +SELECT JSON_OBJECT(RETURNING bytea); + ?column? +---------- + \x7b7d +(1 row) + +SELECT JSON_OBJECT(RETURNING bytea FORMAT JSON); + ?column? +---------- + \x7b7d +(1 row) + +SELECT JSON_OBJECT(RETURNING bytea FORMAT JSON ENCODING UTF8); + ?column? +---------- + \x7b7d +(1 row) + +SELECT JSON_OBJECT(RETURNING bytea FORMAT JSON ENCODING UTF16); +ERROR: unsupported JSON encoding +LINE 1: SELECT JSON_OBJECT(RETURNING bytea FORMAT JSON ENCODING UTF1... + ^ +HINT: only UTF8 JSON encoding is supported +SELECT JSON_OBJECT(RETURNING bytea FORMAT JSON ENCODING UTF32); +ERROR: unsupported JSON encoding +LINE 1: SELECT JSON_OBJECT(RETURNING bytea FORMAT JSON ENCODING UTF3... + ^ +HINT: only UTF8 JSON encoding is supported +SELECT JSON_OBJECT('foo': NULL::int FORMAT JSON); +ERROR: cannot use non-string types with explicit FORMAT JSON clause +LINE 1: SELECT JSON_OBJECT('foo': NULL::int FORMAT JSON); + ^ +SELECT JSON_OBJECT('foo': NULL::int FORMAT JSON ENCODING UTF8); +ERROR: JSON ENCODING clause is only allowed for bytea input type +LINE 1: SELECT JSON_OBJECT('foo': NULL::int FORMAT JSON ENCODING UTF... + ^ +SELECT JSON_OBJECT('foo': NULL::json FORMAT JSON); +WARNING: FORMAT JSON has no effect for json and jsonb types + ?column? 
+---------------- + {"foo" : null} +(1 row) + +SELECT JSON_OBJECT('foo': NULL::json FORMAT JSON ENCODING UTF8); +ERROR: JSON ENCODING clause is only allowed for bytea input type +LINE 1: SELECT JSON_OBJECT('foo': NULL::json FORMAT JSON ENCODING UT... + ^ +SELECT JSON_OBJECT('foo': NULL::jsonb FORMAT JSON); +WARNING: FORMAT JSON has no effect for json and jsonb types + ?column? +---------------- + {"foo" : null} +(1 row) + +SELECT JSON_OBJECT('foo': NULL::jsonb FORMAT JSON ENCODING UTF8); +ERROR: JSON ENCODING clause is only allowed for bytea input type +LINE 1: SELECT JSON_OBJECT('foo': NULL::jsonb FORMAT JSON ENCODING U... + ^ +SELECT JSON_OBJECT(NULL: 1); +ERROR: argument 3 cannot be null +HINT: Object keys should be text. +SELECT JSON_OBJECT('a': 2 + 3); + ?column? +----------- + {"a" : 5} +(1 row) + +SELECT JSON_OBJECT('a' VALUE 2 + 3); + ?column? +----------- + {"a" : 5} +(1 row) + +--SELECT JSON_OBJECT(KEY 'a' VALUE 2 + 3); +SELECT JSON_OBJECT('a' || 2: 1); + ?column? +------------ + {"a2" : 1} +(1 row) + +SELECT JSON_OBJECT(('a' || 2) VALUE 1); + ?column? +------------ + {"a2" : 1} +(1 row) + +--SELECT JSON_OBJECT('a' || 2 VALUE 1); +--SELECT JSON_OBJECT(KEY 'a' || 2 VALUE 1); +SELECT JSON_OBJECT('a': 2::text); + ?column? +------------- + {"a" : "2"} +(1 row) + +SELECT JSON_OBJECT('a' VALUE 2::text); + ?column? +------------- + {"a" : "2"} +(1 row) + +--SELECT JSON_OBJECT(KEY 'a' VALUE 2::text); +SELECT JSON_OBJECT(1::text: 2); + ?column? +----------- + {"1" : 2} +(1 row) + +SELECT JSON_OBJECT((1::text) VALUE 2); + ?column? +----------- + {"1" : 2} +(1 row) + +--SELECT JSON_OBJECT(1::text VALUE 2); +--SELECT JSON_OBJECT(KEY 1::text VALUE 2); +SELECT JSON_OBJECT(json '[1]': 123); +ERROR: key value must be scalar, not array, composite, or json +SELECT JSON_OBJECT(ARRAY[1,2,3]: 'aaa'); +ERROR: key value must be scalar, not array, composite, or json +SELECT JSON_OBJECT( + 'a': '123', + 1.23: 123, + 'c': json '[ 1,true,{ } ]', + 'd': jsonb '{ "x" : 123.45 }' +); + ?column? +------------------------------------------------------------------------ + {"a" : "123", "1.23" : 123, "c" : [ 1,true,{ } ], "d" : {"x": 123.45}} +(1 row) + +SELECT JSON_OBJECT( + 'a': '123', + 1.23: 123, + 'c': json '[ 1,true,{ } ]', + 'd': jsonb '{ "x" : 123.45 }' + RETURNING jsonb +); + ?column? +------------------------------------------------------------------- + {"a": "123", "c": [1, true, {}], "d": {"x": 123.45}, "1.23": 123} +(1 row) + +/* +SELECT JSON_OBJECT( + 'a': '123', + KEY 1.23 VALUE 123, + 'c' VALUE json '[1, true, {}]' +); +*/ +SELECT JSON_OBJECT('a': '123', 'b': JSON_OBJECT('a': 111, 'b': 'aaa')); + ?column? +----------------------------------------------- + {"a" : "123", "b" : {"a" : 111, "b" : "aaa"}} +(1 row) + +SELECT JSON_OBJECT('a': '123', 'b': JSON_OBJECT('a': 111, 'b': 'aaa' RETURNING jsonb)); + ?column? +--------------------------------------------- + {"a" : "123", "b" : {"a": 111, "b": "aaa"}} +(1 row) + +SELECT JSON_OBJECT('a': JSON_OBJECT('b': 1 RETURNING text)); + ?column? +----------------------- + {"a" : "{\"b\" : 1}"} +(1 row) + +SELECT JSON_OBJECT('a': JSON_OBJECT('b': 1 RETURNING text) FORMAT JSON); + ?column? +------------------- + {"a" : {"b" : 1}} +(1 row) + +SELECT JSON_OBJECT('a': JSON_OBJECT('b': 1 RETURNING bytea)); + ?column? +--------------------------------- + {"a" : "\\x7b226222203a20317d"} +(1 row) + +SELECT JSON_OBJECT('a': JSON_OBJECT('b': 1 RETURNING bytea) FORMAT JSON); + ?column? 
+------------------- + {"a" : {"b" : 1}} +(1 row) + +SELECT JSON_OBJECT('a': '1', 'b': NULL, 'c': 2); + ?column? +---------------------------------- + {"a" : "1", "b" : null, "c" : 2} +(1 row) + +SELECT JSON_OBJECT('a': '1', 'b': NULL, 'c': 2 NULL ON NULL); + ?column? +---------------------------------- + {"a" : "1", "b" : null, "c" : 2} +(1 row) + +SELECT JSON_OBJECT('a': '1', 'b': NULL, 'c': 2 ABSENT ON NULL); + ?column? +---------------------- + {"a" : "1", "c" : 2} +(1 row) + +SELECT JSON_OBJECT(1: 1, '1': NULL WITH UNIQUE); +ERROR: duplicate JSON key "1" +SELECT JSON_OBJECT(1: 1, '1': NULL ABSENT ON NULL WITH UNIQUE); +ERROR: duplicate JSON key "1" +SELECT JSON_OBJECT(1: 1, '1': NULL NULL ON NULL WITH UNIQUE RETURNING jsonb); +ERROR: duplicate JSON key "1" +SELECT JSON_OBJECT(1: 1, '1': NULL ABSENT ON NULL WITH UNIQUE RETURNING jsonb); +ERROR: duplicate JSON key "1" +SELECT JSON_OBJECT(1: 1, '2': NULL, '1': 1 NULL ON NULL WITH UNIQUE); +ERROR: duplicate JSON key "1" +SELECT JSON_OBJECT(1: 1, '2': NULL, '1': 1 ABSENT ON NULL WITH UNIQUE); +ERROR: duplicate JSON key "1" +SELECT JSON_OBJECT(1: 1, '2': NULL, '1': 1 ABSENT ON NULL WITHOUT UNIQUE); + ?column? +-------------------- + {"1" : 1, "1" : 1} +(1 row) + +SELECT JSON_OBJECT(1: 1, '2': NULL, '1': 1 ABSENT ON NULL WITH UNIQUE RETURNING jsonb); +ERROR: duplicate JSON key "1" +SELECT JSON_OBJECT(1: 1, '2': NULL, '1': 1 ABSENT ON NULL WITHOUT UNIQUE RETURNING jsonb); + ?column? +---------- + {"1": 1} +(1 row) + +SELECT JSON_OBJECT(1: 1, '2': NULL, '3': 1, 4: NULL, '5': 'a' ABSENT ON NULL WITH UNIQUE RETURNING jsonb); + ?column? +---------------------------- + {"1": 1, "3": 1, "5": "a"} +(1 row) + +-- JSON_ARRAY() +SELECT JSON_ARRAY(); + ?column? +---------- + [] +(1 row) + +SELECT JSON_ARRAY(RETURNING json); + ?column? +---------- + [] +(1 row) + +SELECT JSON_ARRAY(RETURNING json FORMAT JSON); + ?column? +---------- + [] +(1 row) + +SELECT JSON_ARRAY(RETURNING jsonb); + ?column? +---------- + [] +(1 row) + +SELECT JSON_ARRAY(RETURNING jsonb FORMAT JSON); + ?column? +---------- + [] +(1 row) + +SELECT JSON_ARRAY(RETURNING text); + ?column? +---------- + [] +(1 row) + +SELECT JSON_ARRAY(RETURNING text FORMAT JSON); + ?column? +---------- + [] +(1 row) + +SELECT JSON_ARRAY(RETURNING text FORMAT JSON ENCODING UTF8); +ERROR: cannot set JSON encoding for non-bytea output types +LINE 1: SELECT JSON_ARRAY(RETURNING text FORMAT JSON ENCODING UTF8); + ^ +SELECT JSON_ARRAY(RETURNING text FORMAT JSON ENCODING INVALID_ENCODING); +ERROR: unrecognized JSON encoding: invalid_encoding +SELECT JSON_ARRAY(RETURNING bytea); + ?column? +---------- + \x5b5d +(1 row) + +SELECT JSON_ARRAY(RETURNING bytea FORMAT JSON); + ?column? +---------- + \x5b5d +(1 row) + +SELECT JSON_ARRAY(RETURNING bytea FORMAT JSON ENCODING UTF8); + ?column? +---------- + \x5b5d +(1 row) + +SELECT JSON_ARRAY(RETURNING bytea FORMAT JSON ENCODING UTF16); +ERROR: unsupported JSON encoding +LINE 1: SELECT JSON_ARRAY(RETURNING bytea FORMAT JSON ENCODING UTF16... + ^ +HINT: only UTF8 JSON encoding is supported +SELECT JSON_ARRAY(RETURNING bytea FORMAT JSON ENCODING UTF32); +ERROR: unsupported JSON encoding +LINE 1: SELECT JSON_ARRAY(RETURNING bytea FORMAT JSON ENCODING UTF32... + ^ +HINT: only UTF8 JSON encoding is supported +SELECT JSON_ARRAY('aaa', 111, true, array[1,2,3], NULL, json '{"a": [1]}', jsonb '["a",3]'); + ?column? 
+--------------------------------------------------- + ["aaa", 111, true, [1,2,3], {"a": [1]}, ["a", 3]] +(1 row) + +SELECT JSON_ARRAY('a', NULL, 'b' NULL ON NULL); + ?column? +------------------ + ["a", null, "b"] +(1 row) + +SELECT JSON_ARRAY('a', NULL, 'b' ABSENT ON NULL); + ?column? +------------ + ["a", "b"] +(1 row) + +SELECT JSON_ARRAY(NULL, NULL, 'b' ABSENT ON NULL); + ?column? +---------- + ["b"] +(1 row) + +SELECT JSON_ARRAY('a', NULL, 'b' NULL ON NULL RETURNING jsonb); + ?column? +------------------ + ["a", null, "b"] +(1 row) + +SELECT JSON_ARRAY('a', NULL, 'b' ABSENT ON NULL RETURNING jsonb); + ?column? +------------ + ["a", "b"] +(1 row) + +SELECT JSON_ARRAY(NULL, NULL, 'b' ABSENT ON NULL RETURNING jsonb); + ?column? +---------- + ["b"] +(1 row) + +SELECT JSON_ARRAY(JSON_ARRAY('{ "a" : 123 }' RETURNING text)); + ?column? +------------------------------- + ["[\"{ \\\"a\\\" : 123 }\"]"] +(1 row) + +SELECT JSON_ARRAY(JSON_ARRAY('{ "a" : 123 }' FORMAT JSON RETURNING text)); + ?column? +----------------------- + ["[{ \"a\" : 123 }]"] +(1 row) + +SELECT JSON_ARRAY(JSON_ARRAY('{ "a" : 123 }' FORMAT JSON RETURNING text) FORMAT JSON); + ?column? +------------------- + [[{ "a" : 123 }]] +(1 row) + +SELECT JSON_ARRAY(SELECT i FROM (VALUES (1), (2), (NULL), (4)) foo(i)); + ?column? +----------- + [1, 2, 4] +(1 row) + +SELECT JSON_ARRAY(SELECT i FROM (VALUES (NULL::int[]), ('{1,2}'), (NULL), (NULL), ('{3,4}'), (NULL)) foo(i)); + ?column? +---------- + [[1,2], + + [3,4]] +(1 row) + +SELECT JSON_ARRAY(SELECT i FROM (VALUES (NULL::int[]), ('{1,2}'), (NULL), (NULL), ('{3,4}'), (NULL)) foo(i) RETURNING jsonb); + ?column? +------------------ + [[1, 2], [3, 4]] +(1 row) + +--SELECT JSON_ARRAY(SELECT i FROM (VALUES (NULL::int[]), ('{1,2}'), (NULL), (NULL), ('{3,4}'), (NULL)) foo(i) NULL ON NULL); +--SELECT JSON_ARRAY(SELECT i FROM (VALUES (NULL::int[]), ('{1,2}'), (NULL), (NULL), ('{3,4}'), (NULL)) foo(i) NULL ON NULL RETURNING jsonb); +SELECT JSON_ARRAY(SELECT i FROM (VALUES (3), (1), (NULL), (2)) foo(i) ORDER BY i); + ?column? +----------- + [1, 2, 3] +(1 row) + +-- Should fail +SELECT JSON_ARRAY(SELECT FROM (VALUES (1)) foo(i)); +ERROR: subquery must return only one column +LINE 1: SELECT JSON_ARRAY(SELECT FROM (VALUES (1)) foo(i)); + ^ +SELECT JSON_ARRAY(SELECT i, i FROM (VALUES (1)) foo(i)); +ERROR: subquery must return only one column +LINE 1: SELECT JSON_ARRAY(SELECT i, i FROM (VALUES (1)) foo(i)); + ^ +SELECT JSON_ARRAY(SELECT * FROM (VALUES (1, 2)) foo(i, j)); +ERROR: subquery must return only one column +LINE 1: SELECT JSON_ARRAY(SELECT * FROM (VALUES (1, 2)) foo(i, j)); + ^ +-- JSON_ARRAYAGG() +SELECT JSON_ARRAYAGG(i) IS NULL, + JSON_ARRAYAGG(i RETURNING jsonb) IS NULL +FROM generate_series(1, 0) i; + ?column? | ?column? +----------+---------- + t | t +(1 row) + +SELECT JSON_ARRAYAGG(i), + JSON_ARRAYAGG(i RETURNING jsonb) +FROM generate_series(1, 5) i; + ?column? | ?column? +-----------------+----------------- + [1, 2, 3, 4, 5] | [1, 2, 3, 4, 5] +(1 row) + +SELECT JSON_ARRAYAGG(i ORDER BY i DESC) +FROM generate_series(1, 5) i; + ?column? +----------------- + [5, 4, 3, 2, 1] +(1 row) + +SELECT JSON_ARRAYAGG(i::text::json) +FROM generate_series(1, 5) i; + ?column? +----------------- + [1, 2, 3, 4, 5] +(1 row) + +SELECT JSON_ARRAYAGG(JSON_ARRAY(i, i + 1 RETURNING text) FORMAT JSON) +FROM generate_series(1, 5) i; + ?column? 
+------------------------------------------ + [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6]] +(1 row) + +SELECT JSON_ARRAYAGG(NULL), + JSON_ARRAYAGG(NULL RETURNING jsonb) +FROM generate_series(1, 5); + ?column? | ?column? +----------+---------- + [] | [] +(1 row) + +SELECT JSON_ARRAYAGG(NULL NULL ON NULL), + JSON_ARRAYAGG(NULL NULL ON NULL RETURNING jsonb) +FROM generate_series(1, 5); + ?column? | ?column? +--------------------------------+-------------------------------- + [null, null, null, null, null] | [null, null, null, null, null] +(1 row) + +SELECT + JSON_ARRAYAGG(bar), + JSON_ARRAYAGG(bar RETURNING jsonb), + JSON_ARRAYAGG(bar ABSENT ON NULL), + JSON_ARRAYAGG(bar ABSENT ON NULL RETURNING jsonb), + JSON_ARRAYAGG(bar NULL ON NULL), + JSON_ARRAYAGG(bar NULL ON NULL RETURNING jsonb), + JSON_ARRAYAGG(foo), + JSON_ARRAYAGG(foo RETURNING jsonb), + JSON_ARRAYAGG(foo ORDER BY bar) FILTER (WHERE bar > 2), + JSON_ARRAYAGG(foo ORDER BY bar RETURNING jsonb) FILTER (WHERE bar > 2) +FROM + (VALUES (NULL), (3), (1), (NULL), (NULL), (5), (2), (4), (NULL)) foo(bar); + ?column? | ?column? | ?column? | ?column? | ?column? | ?column? | ?column? | ?column? | ?column? | ?column? +-----------------+-----------------+-----------------+-----------------+-----------------------------------------+-----------------------------------------+-----------------+--------------------------------------------------------------------------------------------------------------------------+--------------+-------------------------------------- + [3, 1, 5, 2, 4] | [3, 1, 5, 2, 4] | [3, 1, 5, 2, 4] | [3, 1, 5, 2, 4] | [null, 3, 1, null, null, 5, 2, 4, null] | [null, 3, 1, null, null, 5, 2, 4, null] | [{"bar":null}, +| [{"bar": null}, {"bar": 3}, {"bar": 1}, {"bar": null}, {"bar": null}, {"bar": 5}, {"bar": 2}, {"bar": 4}, {"bar": null}] | [{"bar":3}, +| [{"bar": 3}, {"bar": 4}, {"bar": 5}] + | | | | | | {"bar":3}, +| | {"bar":4}, +| + | | | | | | {"bar":1}, +| | {"bar":5}] | + | | | | | | {"bar":null}, +| | | + | | | | | | {"bar":null}, +| | | + | | | | | | {"bar":5}, +| | | + | | | | | | {"bar":2}, +| | | + | | | | | | {"bar":4}, +| | | + | | | | | | {"bar":null}] | | | +(1 row) + +SELECT + bar, JSON_ARRAYAGG(bar) FILTER (WHERE bar > 2) OVER (PARTITION BY foo.bar % 2) +FROM + (VALUES (NULL), (3), (1), (NULL), (NULL), (5), (2), (4), (NULL), (5), (4)) foo(bar); + bar | ?column? +-----+----------- + 4 | [4, 4] + 4 | [4, 4] + 2 | [4, 4] + 5 | [5, 3, 5] + 3 | [5, 3, 5] + 1 | [5, 3, 5] + 5 | [5, 3, 5] + | + | + | + | +(11 rows) + +-- JSON_OBJECTAGG() +SELECT JSON_OBJECTAGG('key': 1) IS NULL, + JSON_OBJECTAGG('key': 1 RETURNING jsonb) IS NULL +WHERE FALSE; + ?column? | ?column? +----------+---------- + t | t +(1 row) + +SELECT JSON_OBJECTAGG(NULL: 1); +ERROR: field name must not be null +SELECT JSON_OBJECTAGG(NULL: 1 RETURNING jsonb); +ERROR: field name must not be null +SELECT + JSON_OBJECTAGG(i: i), +-- JSON_OBJECTAGG(i VALUE i), +-- JSON_OBJECTAGG(KEY i VALUE i), + JSON_OBJECTAGG(i: i RETURNING jsonb) +FROM + generate_series(1, 5) i; + ?column? | ?column? 
+-------------------------------------------------+------------------------------------------ + { "1" : 1, "2" : 2, "3" : 3, "4" : 4, "5" : 5 } | {"1": 1, "2": 2, "3": 3, "4": 4, "5": 5} +(1 row) + +SELECT + JSON_OBJECTAGG(k: v), + JSON_OBJECTAGG(k: v NULL ON NULL), + JSON_OBJECTAGG(k: v ABSENT ON NULL), + JSON_OBJECTAGG(k: v RETURNING jsonb), + JSON_OBJECTAGG(k: v NULL ON NULL RETURNING jsonb), + JSON_OBJECTAGG(k: v ABSENT ON NULL RETURNING jsonb) +FROM + (VALUES (1, 1), (1, NULL), (2, NULL), (3, 3)) foo(k, v); + ?column? | ?column? | ?column? | ?column? | ?column? | ?column? +----------------------------------------------+----------------------------------------------+----------------------+--------------------------------+--------------------------------+------------------ + { "1" : 1, "1" : null, "2" : null, "3" : 3 } | { "1" : 1, "1" : null, "2" : null, "3" : 3 } | { "1" : 1, "3" : 3 } | {"1": null, "2": null, "3": 3} | {"1": null, "2": null, "3": 3} | {"1": 1, "3": 3} +(1 row) + +SELECT JSON_OBJECTAGG(k: v WITH UNIQUE KEYS) +FROM (VALUES (1, 1), (1, NULL), (2, 2)) foo(k, v); +ERROR: duplicate JSON key "1" +SELECT JSON_OBJECTAGG(k: v ABSENT ON NULL WITH UNIQUE KEYS) +FROM (VALUES (1, 1), (1, NULL), (2, 2)) foo(k, v); +ERROR: duplicate JSON key "1" +SELECT JSON_OBJECTAGG(k: v ABSENT ON NULL WITH UNIQUE KEYS) +FROM (VALUES (1, 1), (0, NULL), (3, NULL), (2, 2), (4, NULL)) foo(k, v); + ?column? +---------------------- + { "1" : 1, "2" : 2 } +(1 row) + +SELECT JSON_OBJECTAGG(k: v WITH UNIQUE KEYS RETURNING jsonb) +FROM (VALUES (1, 1), (1, NULL), (2, 2)) foo(k, v); +ERROR: duplicate JSON key "1" +SELECT JSON_OBJECTAGG(k: v ABSENT ON NULL WITH UNIQUE KEYS RETURNING jsonb) +FROM (VALUES (1, 1), (1, NULL), (2, 2)) foo(k, v); +ERROR: duplicate JSON key "1" +-- IS JSON predicate +SELECT NULL IS JSON; + ?column? +---------- + +(1 row) + +SELECT NULL IS NOT JSON; + ?column? +---------- + +(1 row) + +SELECT NULL::json IS JSON; + ?column? +---------- + +(1 row) + +SELECT NULL::jsonb IS JSON; + ?column? +---------- + +(1 row) + +SELECT NULL::text IS JSON; + ?column? +---------- + +(1 row) + +SELECT NULL::bytea IS JSON; + ?column? +---------- + +(1 row) + +SELECT NULL::int IS JSON; +ERROR: cannot use type integer in IS JSON predicate +SELECT '' IS JSON; + ?column? 
+---------- + f +(1 row) + +SELECT bytea '\x00' IS JSON; +ERROR: invalid byte sequence for encoding "UTF8": 0x00 +CREATE TABLE test_is_json (js text); +INSERT INTO test_is_json VALUES + (NULL), + (''), + ('123'), + ('"aaa "'), + ('true'), + ('null'), + ('[]'), + ('[1, "2", {}]'), + ('{}'), + ('{ "a": 1, "b": null }'), + ('{ "a": 1, "a": null }'), + ('{ "a": 1, "b": [{ "a": 1 }, { "a": 2 }] }'), + ('{ "a": 1, "b": [{ "a": 1, "b": 0, "a": 2 }] }'), + ('aaa'), + ('{a:1}'), + ('["a",]'); +SELECT + js, + js IS JSON "IS JSON", + js IS NOT JSON "IS NOT JSON", + js IS JSON VALUE "IS VALUE", + js IS JSON OBJECT "IS OBJECT", + js IS JSON ARRAY "IS ARRAY", + js IS JSON SCALAR "IS SCALAR", + js IS JSON WITHOUT UNIQUE KEYS "WITHOUT UNIQUE", + js IS JSON WITH UNIQUE KEYS "WITH UNIQUE" +FROM + test_is_json; + js | IS JSON | IS NOT JSON | IS VALUE | IS OBJECT | IS ARRAY | IS SCALAR | WITHOUT UNIQUE | WITH UNIQUE +-----------------------------------------------+---------+-------------+----------+-----------+----------+-----------+----------------+------------- + | | | | | | | | + | f | t | f | f | f | f | f | f + 123 | t | f | t | f | f | t | t | t + "aaa " | t | f | t | f | f | t | t | t + true | t | f | t | f | f | t | t | t + null | t | f | t | f | f | t | t | t + [] | t | f | t | f | t | f | t | t + [1, "2", {}] | t | f | t | f | t | f | t | t + {} | t | f | t | t | f | f | t | t + { "a": 1, "b": null } | t | f | t | t | f | f | t | t + { "a": 1, "a": null } | t | f | t | t | f | f | t | f + { "a": 1, "b": [{ "a": 1 }, { "a": 2 }] } | t | f | t | t | f | f | t | t + { "a": 1, "b": [{ "a": 1, "b": 0, "a": 2 }] } | t | f | t | t | f | f | t | f + aaa | f | t | f | f | f | f | f | f + {a:1} | f | t | f | f | f | f | f | f + ["a",] | f | t | f | f | f | f | f | f +(16 rows) + +SELECT + js, + js IS JSON "IS JSON", + js IS NOT JSON "IS NOT JSON", + js IS JSON VALUE "IS VALUE", + js IS JSON OBJECT "IS OBJECT", + js IS JSON ARRAY "IS ARRAY", + js IS JSON SCALAR "IS SCALAR", + js IS JSON WITHOUT UNIQUE KEYS "WITHOUT UNIQUE", + js IS JSON WITH UNIQUE KEYS "WITH UNIQUE" +FROM + (SELECT js::json FROM test_is_json WHERE js IS JSON) foo(js); + js | IS JSON | IS NOT JSON | IS VALUE | IS OBJECT | IS ARRAY | IS SCALAR | WITHOUT UNIQUE | WITH UNIQUE +-----------------------------------------------+---------+-------------+----------+-----------+----------+-----------+----------------+------------- + 123 | t | f | t | f | f | t | t | t + "aaa " | t | f | t | f | f | t | t | t + true | t | f | t | f | f | t | t | t + null | t | f | t | f | f | t | t | t + [] | t | f | t | f | t | f | t | t + [1, "2", {}] | t | f | t | f | t | f | t | t + {} | t | f | t | t | f | f | t | t + { "a": 1, "b": null } | t | f | t | t | f | f | t | t + { "a": 1, "a": null } | t | f | t | t | f | f | t | f + { "a": 1, "b": [{ "a": 1 }, { "a": 2 }] } | t | f | t | t | f | f | t | t + { "a": 1, "b": [{ "a": 1, "b": 0, "a": 2 }] } | t | f | t | t | f | f | t | f +(11 rows) + +SELECT + js0, + js IS JSON "IS JSON", + js IS NOT JSON "IS NOT JSON", + js IS JSON VALUE "IS VALUE", + js IS JSON OBJECT "IS OBJECT", + js IS JSON ARRAY "IS ARRAY", + js IS JSON SCALAR "IS SCALAR", + js IS JSON WITHOUT UNIQUE KEYS "WITHOUT UNIQUE", + js IS JSON WITH UNIQUE KEYS "WITH UNIQUE" +FROM + (SELECT js, js::bytea FROM test_is_json WHERE js IS JSON) foo(js0, js); + js0 | IS JSON | IS NOT JSON | IS VALUE | IS OBJECT | IS ARRAY | IS SCALAR | WITHOUT UNIQUE | WITH UNIQUE 
+-----------------------------------------------+---------+-------------+----------+-----------+----------+-----------+----------------+------------- + 123 | t | f | t | f | f | t | t | t + "aaa " | t | f | t | f | f | t | t | t + true | t | f | t | f | f | t | t | t + null | t | f | t | f | f | t | t | t + [] | t | f | t | f | t | f | t | t + [1, "2", {}] | t | f | t | f | t | f | t | t + {} | t | f | t | t | f | f | t | t + { "a": 1, "b": null } | t | f | t | t | f | f | t | t + { "a": 1, "a": null } | t | f | t | t | f | f | t | f + { "a": 1, "b": [{ "a": 1 }, { "a": 2 }] } | t | f | t | t | f | f | t | t + { "a": 1, "b": [{ "a": 1, "b": 0, "a": 2 }] } | t | f | t | t | f | f | t | f +(11 rows) + +SELECT + js, + js IS JSON "IS JSON", + js IS NOT JSON "IS NOT JSON", + js IS JSON VALUE "IS VALUE", + js IS JSON OBJECT "IS OBJECT", + js IS JSON ARRAY "IS ARRAY", + js IS JSON SCALAR "IS SCALAR", + js IS JSON WITHOUT UNIQUE KEYS "WITHOUT UNIQUE", + js IS JSON WITH UNIQUE KEYS "WITH UNIQUE" +FROM + (SELECT js::jsonb FROM test_is_json WHERE js IS JSON) foo(js); + js | IS JSON | IS NOT JSON | IS VALUE | IS OBJECT | IS ARRAY | IS SCALAR | WITHOUT UNIQUE | WITH UNIQUE +-------------------------------------+---------+-------------+----------+-----------+----------+-----------+----------------+------------- + 123 | t | f | t | f | f | t | t | t + "aaa " | t | f | t | f | f | t | t | t + true | t | f | t | f | f | t | t | t + null | t | f | t | f | f | t | t | t + [] | t | f | t | f | t | f | t | t + [1, "2", {}] | t | f | t | f | t | f | t | t + {} | t | f | t | t | f | f | t | t + {"a": 1, "b": null} | t | f | t | t | f | f | t | t + {"a": null} | t | f | t | t | f | f | t | t + {"a": 1, "b": [{"a": 1}, {"a": 2}]} | t | f | t | t | f | f | t | t + {"a": 1, "b": [{"a": 2, "b": 0}]} | t | f | t | t | f | f | t | t +(11 rows) + diff --git a/src/test/regress/index_schedule b/src/test/regress/index_schedule new file mode 100644 index 0000000000..76248d03fe --- /dev/null +++ b/src/test/regress/index_schedule @@ -0,0 +1,12 @@ +test: tablespace +test: create_function_1 +test: create_type +test: create_table +test: copy +test: create_misc +test: polygon +test: point +test: circle +test: create_index +test: btree_index +test: gist diff --git a/src/test/regress/parallel_schedule b/src/test/regress/parallel_schedule index aa5e6af621..6683abdbab 100644 --- a/src/test/regress/parallel_schedule +++ b/src/test/regress/parallel_schedule @@ -104,7 +104,12 @@ test: publication subscription # ---------- # Another group of parallel tests # ---------- -test: select_views portals_p2 foreign_key cluster dependency guc bitmapops combocid tsearch tsdicts foreign_data window xmlmap functional_deps advisory_lock json jsonb json_encoding indirect_toast equivclass +test: select_views portals_p2 foreign_key cluster dependency guc bitmapops combocid tsearch tsdicts foreign_data window xmlmap functional_deps advisory_lock indirect_toast equivclass + +# ---------- +# Another group of parallel tests +# ---------- +test: json jsonb json_encoding jsonpath json_jsonpath jsonb_jsonpath jsonb_sqljson sqljson # ---------- # Another group of parallel tests diff --git a/src/test/regress/serial_schedule b/src/test/regress/serial_schedule index 3866314a92..c67a8eaffb 100644 --- a/src/test/regress/serial_schedule +++ b/src/test/regress/serial_schedule @@ -157,6 +157,12 @@ test: advisory_lock test: json test: jsonb test: json_encoding +test: jsonpath +test: json_jsonpath +test: jsonb_jsonpath +test: sqljson +test: json_sqljson +test: 
jsonb_sqljson test: indirect_toast test: equivclass test: plancache diff --git a/src/test/regress/sql/horology.sql b/src/test/regress/sql/horology.sql index a7bc9dcfc4..ebb196a1cf 100644 --- a/src/test/regress/sql/horology.sql +++ b/src/test/regress/sql/horology.sql @@ -446,6 +446,12 @@ SELECT to_timestamp(' 20050302', 'YYYYMMDD'); SELECT to_timestamp('2011-12-18 11:38 AM', 'YYYY-MM-DD HH12:MI PM'); SELECT to_timestamp('2011-12-18 11:38 PM', 'YYYY-MM-DD HH12:MI PM'); +SELECT to_timestamp('2011-12-18 11:38 +05', 'YYYY-MM-DD HH12:MI TZH'); +SELECT to_timestamp('2011-12-18 11:38 -05', 'YYYY-MM-DD HH12:MI TZH'); +SELECT to_timestamp('2011-12-18 11:38 +05:20', 'YYYY-MM-DD HH12:MI TZH:TZM'); +SELECT to_timestamp('2011-12-18 11:38 -05:20', 'YYYY-MM-DD HH12:MI TZH:TZM'); +SELECT to_timestamp('2011-12-18 11:38 20', 'YYYY-MM-DD HH12:MI TZM'); + -- -- Check handling of multiple spaces in format and/or input -- diff --git a/src/test/regress/sql/json_jsonpath.sql b/src/test/regress/sql/json_jsonpath.sql new file mode 100644 index 0000000000..3530265ed6 --- /dev/null +++ b/src/test/regress/sql/json_jsonpath.sql @@ -0,0 +1,440 @@ +select json '{"a": 12}' @? '$.a.b'; +select json '{"a": 12}' @? '$.b'; +select json '{"a": {"a": 12}}' @? '$.a.a'; +select json '{"a": {"a": 12}}' @? '$.*.a'; +select json '{"b": {"a": 12}}' @? '$.*.a'; +select json '{}' @? '$.*'; +select json '{"a": 1}' @? '$.*'; +select json '{"a": {"b": 1}}' @? 'lax $.**{1}'; +select json '{"a": {"b": 1}}' @? 'lax $.**{2}'; +select json '{"a": {"b": 1}}' @? 'lax $.**{3}'; +select json '[]' @? '$.[*]'; +select json '[1]' @? '$.[*]'; +select json '[1]' @? '$.[1]'; +select json '[1]' @? 'strict $.[1]'; +select json '[1]' @? '$.[0]'; +select json '[1]' @? '$.[0.3]'; +select json '[1]' @? '$.[0.5]'; +select json '[1]' @? '$.[0.9]'; +select json '[1]' @? '$.[1.2]'; +select json '[1]' @? 'strict $.[1.2]'; +select json '{}' @? 'strict $.[0.3]'; +select json '{}' @? 'lax $.[0.3]'; +select json '{}' @? 'strict $.[1.2]'; +select json '{}' @? 'lax $.[1.2]'; +select json '{}' @? 'strict $.[-2 to 3]'; +select json '{}' @? 'lax $.[-2 to 3]'; + +select json '{"a": [1,2,3], "b": [3,4,5]}' @? '$ ? (@.a[*] > @.b[*])'; +select json '{"a": [1,2,3], "b": [3,4,5]}' @? '$ ? (@.a[*] >= @.b[*])'; +select json '{"a": [1,2,3], "b": [3,4,"5"]}' @? '$ ? (@.a[*] >= @.b[*])'; +select json '{"a": [1,2,3], "b": [3,4,"5"]}' @? 'strict $ ? (@.a[*] >= @.b[*])'; +select json '{"a": [1,2,3], "b": [3,4,null]}' @? '$ ? (@.a[*] >= @.b[*])'; +select json '1' @? '$ ? ((@ == "1") is unknown)'; +select json '1' @? '$ ? ((@ == 1) is unknown)'; +select json '[{"a": 1}, {"a": 2}]' @? '$[0 to 1] ? 
(@.a > 1)'; + +select json '{"a": 12, "b": {"a": 13}}' @* '$.a'; +select json '{"a": 12, "b": {"a": 13}}' @* '$.b'; +select json '{"a": 12, "b": {"a": 13}}' @* '$.*'; +select json '{"a": 12, "b": {"a": 13}}' @* 'lax $.*.a'; +select json '[12, {"a": 13}, {"b": 14}]' @* 'lax $.[*].a'; +select json '[12, {"a": 13}, {"b": 14}]' @* 'lax $.[*].*'; +select json '[12, {"a": 13}, {"b": 14}]' @* 'lax $.[0].a'; +select json '[12, {"a": 13}, {"b": 14}]' @* 'lax $.[1].a'; +select json '[12, {"a": 13}, {"b": 14}]' @* 'lax $.[2].a'; +select json '[12, {"a": 13}, {"b": 14}]' @* 'lax $.[0,1].a'; +select json '[12, {"a": 13}, {"b": 14}]' @* 'lax $.[0 to 10].a'; +select json '[12, {"a": 13}, {"b": 14}, "ccc", true]' @* '$.[2.5 - 1 to @.size() - 2]'; +select json '1' @* 'lax $[0]'; +select json '1' @* 'lax $[*]'; +select json '{}' @* 'lax $[0]'; +select json '[1]' @* 'lax $[0]'; +select json '[1]' @* 'lax $[*]'; +select json '[1,2,3]' @* 'lax $[*]'; +select json '[]' @* '$[last]'; +select json '[]' @* 'strict $[last]'; +select json '[1]' @* '$[last]'; +select json '{}' @* 'lax $[last]'; +select json '[1,2,3]' @* '$[last]'; +select json '[1,2,3]' @* '$[last - 1]'; +select json '[1,2,3]' @* '$[last ? (@.type() == "number")]'; +select json '[1,2,3]' @* '$[last ? (@.type() == "string")]'; + +select * from jsonpath_query(json '{"a": 10}', '$'); +select * from jsonpath_query(json '{"a": 10}', '$ ? (.a < $value)'); +select * from jsonpath_query(json '{"a": 10}', '$ ? (.a < $value)', '{"value" : 13}'); +select * from jsonpath_query(json '{"a": 10}', '$ ? (.a < $value)', '{"value" : 8}'); +select * from jsonpath_query(json '{"a": 10}', '$.a ? (@ < $value)', '{"value" : 13}'); +select * from jsonpath_query(json '[10,11,12,13,14,15]', '$.[*] ? (@ < $value)', '{"value" : 13}'); +select * from jsonpath_query(json '[10,11,12,13,14,15]', '$.[0,1] ? (@ < $value)', '{"value" : 13}'); +select * from jsonpath_query(json '[10,11,12,13,14,15]', '$.[0 to 2] ? (@ < $value)', '{"value" : 15}'); +select * from jsonpath_query(json '[1,"1",2,"2",null]', '$.[*] ? (@ == "1")'); +select * from jsonpath_query(json '[1,"1",2,"2",null]', '$.[*] ? (@ == $value)', '{"value" : "1"}'); +select json '[1, "2", null]' @* '$[*] ? (@ != null)'; +select json '[1, "2", null]' @* '$[*] ? (@ == null)'; + +select json '{"a": {"b": 1}}' @* 'lax $.**'; +select json '{"a": {"b": 1}}' @* 'lax $.**{1}'; +select json '{"a": {"b": 1}}' @* 'lax $.**{1,}'; +select json '{"a": {"b": 1}}' @* 'lax $.**{2}'; +select json '{"a": {"b": 1}}' @* 'lax $.**{2,}'; +select json '{"a": {"b": 1}}' @* 'lax $.**{3,}'; +select json '{"a": {"b": 1}}' @* 'lax $.**.b ? (@ > 0)'; +select json '{"a": {"b": 1}}' @* 'lax $.**{0}.b ? (@ > 0)'; +select json '{"a": {"b": 1}}' @* 'lax $.**{1}.b ? (@ > 0)'; +select json '{"a": {"b": 1}}' @* 'lax $.**{0,}.b ? (@ > 0)'; +select json '{"a": {"b": 1}}' @* 'lax $.**{1,}.b ? (@ > 0)'; +select json '{"a": {"b": 1}}' @* 'lax $.**{1,2}.b ? (@ > 0)'; +select json '{"a": {"c": {"b": 1}}}' @* 'lax $.**.b ? (@ > 0)'; +select json '{"a": {"c": {"b": 1}}}' @* 'lax $.**{0}.b ? (@ > 0)'; +select json '{"a": {"c": {"b": 1}}}' @* 'lax $.**{1}.b ? (@ > 0)'; +select json '{"a": {"c": {"b": 1}}}' @* 'lax $.**{0,}.b ? (@ > 0)'; +select json '{"a": {"c": {"b": 1}}}' @* 'lax $.**{1,}.b ? (@ > 0)'; +select json '{"a": {"c": {"b": 1}}}' @* 'lax $.**{1,2}.b ? (@ > 0)'; +select json '{"a": {"c": {"b": 1}}}' @* 'lax $.**{2,3}.b ? (@ > 0)'; + +select json '{"a": {"b": 1}}' @? '$.**.b ? ( @ > 0)'; +select json '{"a": {"b": 1}}' @? '$.**{0}.b ? 
( @ > 0)'; +select json '{"a": {"b": 1}}' @? '$.**{1}.b ? ( @ > 0)'; +select json '{"a": {"b": 1}}' @? '$.**{0,}.b ? ( @ > 0)'; +select json '{"a": {"b": 1}}' @? '$.**{1,}.b ? ( @ > 0)'; +select json '{"a": {"b": 1}}' @? '$.**{1,2}.b ? ( @ > 0)'; +select json '{"a": {"c": {"b": 1}}}' @? '$.**.b ? ( @ > 0)'; +select json '{"a": {"c": {"b": 1}}}' @? '$.**{0}.b ? ( @ > 0)'; +select json '{"a": {"c": {"b": 1}}}' @? '$.**{1}.b ? ( @ > 0)'; +select json '{"a": {"c": {"b": 1}}}' @? '$.**{0,}.b ? ( @ > 0)'; +select json '{"a": {"c": {"b": 1}}}' @? '$.**{1,}.b ? ( @ > 0)'; +select json '{"a": {"c": {"b": 1}}}' @? '$.**{1,2}.b ? ( @ > 0)'; +select json '{"a": {"c": {"b": 1}}}' @? '$.**{2,3}.b ? ( @ > 0)'; + +select json '{"g": {"x": 2}}' @* '$.g ? (exists (@.x))'; +select json '{"g": {"x": 2}}' @* '$.g ? (exists (@.y))'; +select json '{"g": {"x": 2}}' @* '$.g ? (exists (@.x ? (@ >= 2) ))'; + +--test ternary logic +select + x, y, + jsonpath_query( + json '[true, false, null]', + '$[*] ? (@ == true && ($x == true && $y == true) || + @ == false && !($x == true && $y == true) || + @ == null && ($x == true && $y == true) is unknown)', + json_build_object('x', x, 'y', y) + ) as "x && y" +from + (values (json 'true'), ('false'), ('"null"')) x(x), + (values (json 'true'), ('false'), ('"null"')) y(y); + +select + x, y, + jsonpath_query( + json '[true, false, null]', + '$[*] ? (@ == true && ($x == true || $y == true) || + @ == false && !($x == true || $y == true) || + @ == null && ($x == true || $y == true) is unknown)', + json_build_object('x', x, 'y', y) + ) as "x || y" +from + (values (json 'true'), ('false'), ('"null"')) x(x), + (values (json 'true'), ('false'), ('"null"')) y(y); + +select json '{"a": 1, "b": 1}' @? '$ ? (.a == .b)'; +select json '{"c": {"a": 1, "b": 1}}' @? '$ ? (.a == .b)'; +select json '{"c": {"a": 1, "b": 1}}' @? '$.c ? (.a == .b)'; +select json '{"c": {"a": 1, "b": 1}}' @? '$.c ? ($.c.a == .b)'; +select json '{"c": {"a": 1, "b": 1}}' @? '$.* ? (.a == .b)'; +select json '{"a": 1, "b": 1}' @? '$.** ? (.a == .b)'; +select json '{"c": {"a": 1, "b": 1}}' @? '$.** ? (.a == .b)'; + +select json '{"c": {"a": 2, "b": 1}}' @* '$.** ? (.a == 1 + 1)'; +select json '{"c": {"a": 2, "b": 1}}' @* '$.** ? (.a == (1 + 1))'; +select json '{"c": {"a": 2, "b": 1}}' @* '$.** ? (.a == .b + 1)'; +select json '{"c": {"a": 2, "b": 1}}' @* '$.** ? (.a == (.b + 1))'; +select json '{"c": {"a": -1, "b": 1}}' @? '$.** ? (.a == - 1)'; +select json '{"c": {"a": -1, "b": 1}}' @? '$.** ? (.a == -1)'; +select json '{"c": {"a": -1, "b": 1}}' @? '$.** ? (.a == -.b)'; +select json '{"c": {"a": -1, "b": 1}}' @? '$.** ? (.a == - .b)'; +select json '{"c": {"a": 0, "b": 1}}' @? '$.** ? (.a == 1 - .b)'; +select json '{"c": {"a": 2, "b": 1}}' @? '$.** ? (.a == 1 - - .b)'; +select json '{"c": {"a": 0, "b": 1}}' @? '$.** ? (.a == 1 - +.b)'; +select json '[1,2,3]' @? '$ ? (+@[*] > +2)'; +select json '[1,2,3]' @? '$ ? (+@[*] > +3)'; +select json '[1,2,3]' @? '$ ? (-@[*] < -2)'; +select json '[1,2,3]' @? '$ ? (-@[*] < -3)'; +select json '1' @? '$ ? 
($ > 0)'; + +-- unwrapping of operator arguments in lax mode +select json '{"a": [2]}' @* 'lax $.a * 3'; +select json '{"a": [2]}' @* 'lax $.a + 3'; +select json '{"a": [2, 3, 4]}' @* 'lax -$.a'; +-- should fail +select json '{"a": [1, 2]}' @* 'lax $.a * 3'; + +-- extension: boolean expressions +select json '2' @* '$ > 1'; +select json '2' @* '$ <= 1'; +select json '2' @* '$ == "2"'; + +select json '2' @~ '$ > 1'; +select json '2' @~ '$ <= 1'; +select json '2' @~ '$ == "2"'; +select json '2' @~ '1'; +select json '{}' @~ '$'; +select json '[]' @~ '$'; +select json '[1,2,3]' @~ '$[*]'; +select json '[]' @~ '$[*]'; +select jsonpath_predicate(json '[[1, true], [2, false]]', 'strict $[*] ? (@[0] > $x) [1]', '{"x": 1}'); +select jsonpath_predicate(json '[[1, true], [2, false]]', 'strict $[*] ? (@[0] < $x) [1]', '{"x": 2}'); + +select json '[null,1,true,"a",[],{}]' @* '$.type()'; +select json '[null,1,true,"a",[],{}]' @* 'lax $.type()'; +select json '[null,1,true,"a",[],{}]' @* '$[*].type()'; +select json 'null' @* 'null.type()'; +select json 'null' @* 'true.type()'; +select json 'null' @* '123.type()'; +select json 'null' @* '"123".type()'; +select json 'null' @* 'aaa.type()'; + +select json '{"a": 2}' @* '($.a - 5).abs() + 10'; +select json '{"a": 2.5}' @* '-($.a * $.a).floor() + 10'; +select json '[1, 2, 3]' @* '($[*] > 2) ? (@ == true)'; +select json '[1, 2, 3]' @* '($[*] > 3).type()'; +select json '[1, 2, 3]' @* '($[*].a > 3).type()'; +select json '[1, 2, 3]' @* 'strict ($[*].a > 3).type()'; + +select json '[1,null,true,"11",[],[1],[1,2,3],{},{"a":1,"b":2}]' @* 'strict $[*].size()'; +select json '[1,null,true,"11",[],[1],[1,2,3],{},{"a":1,"b":2}]' @* 'lax $[*].size()'; + +select json '[0, 1, -2, -3.4, 5.6]' @* '$[*].abs()'; +select json '[0, 1, -2, -3.4, 5.6]' @* '$[*].floor()'; +select json '[0, 1, -2, -3.4, 5.6]' @* '$[*].ceiling()'; +select json '[0, 1, -2, -3.4, 5.6]' @* '$[*].ceiling().abs()'; +select json '[0, 1, -2, -3.4, 5.6]' @* '$[*].ceiling().abs().type()'; + +select json '[{},1]' @* '$[*].keyvalue()'; +select json '{}' @* '$.keyvalue()'; +select json '{"a": 1, "b": [1, 2], "c": {"a": "bbb"}}' @* '$.keyvalue()'; +select json '[{"a": 1, "b": [1, 2]}, {"c": {"a": "bbb"}}]' @* '$[*].keyvalue()'; +select json '[{"a": 1, "b": [1, 2]}, {"c": {"a": "bbb"}}]' @* 'strict $.keyvalue()'; +select json '[{"a": 1, "b": [1, 2]}, {"c": {"a": "bbb"}}]' @* 'lax $.keyvalue()'; + +select json 'null' @* '$.double()'; +select json 'true' @* '$.double()'; +select json '[]' @* '$.double()'; +select json '[]' @* 'strict $.double()'; +select json '{}' @* '$.double()'; +select json '1.23' @* '$.double()'; +select json '"1.23"' @* '$.double()'; +select json '"1.23aaa"' @* '$.double()'; + +select json '["", "a", "abc", "abcabc"]' @* '$[*] ? (@ starts with "abc")'; +select json '["", "a", "abc", "abcabc"]' @* 'strict $ ? (@[*] starts with "abc")'; +select json '["", "a", "abd", "abdabc"]' @* 'strict $ ? (@[*] starts with "abc")'; +select json '["abc", "abcabc", null, 1]' @* 'strict $ ? (@[*] starts with "abc")'; +select json '["abc", "abcabc", null, 1]' @* 'strict $ ? ((@[*] starts with "abc") is unknown)'; +select json '[[null, 1, "abc", "abcabc"]]' @* 'lax $ ? (@[*] starts with "abc")'; +select json '[[null, 1, "abd", "abdabc"]]' @* 'lax $ ? ((@[*] starts with "abc") is unknown)'; +select json '[null, 1, "abd", "abdabc"]' @* 'lax $[*] ? ((@ starts with "abc") is unknown)'; + +select json '[null, 1, "abc", "abd", "aBdC", "abdacb", "babc"]' @* 'lax $[*] ? 
(@ like_regex "^ab.*c")'; +select json '[null, 1, "abc", "abd", "aBdC", "abdacb", "babc"]' @* 'lax $[*] ? (@ like_regex "^ab.*c" flag "i")'; + +select json 'null' @* '$.datetime()'; +select json 'true' @* '$.datetime()'; +select json '[]' @* '$.datetime()'; +select json '[]' @* 'strict $.datetime()'; +select json '{}' @* '$.datetime()'; +select json '""' @* '$.datetime()'; + +-- Standard extension: UNIX epoch to timestamptz +select json '0' @* '$.datetime()'; +select json '0' @* '$.datetime().type()'; +select json '1490216035.5' @* '$.datetime()'; + +select json '"10-03-2017"' @* '$.datetime("dd-mm-yyyy")'; +select json '"10-03-2017"' @* '$.datetime("dd-mm-yyyy").type()'; +select json '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy")'; +select json '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy").type()'; + +select json '"10-03-2017 12:34"' @* ' $.datetime("dd-mm-yyyy HH24:MI").type()'; +select json '"10-03-2017 12:34 +05:20"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH:TZM").type()'; +select json '"12:34:56"' @* '$.datetime("HH24:MI:SS").type()'; +select json '"12:34:56 +05:20"' @* '$.datetime("HH24:MI:SS TZH:TZM").type()'; + +set time zone '+00'; + +select json '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy HH24:MI")'; +select json '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH")'; +select json '"10-03-2017 12:34 +05"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH")'; +select json '"10-03-2017 12:34 -05"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH")'; +select json '"10-03-2017 12:34 +05:20"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH:TZM")'; +select json '"10-03-2017 12:34 -05:20"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH:TZM")'; +select json '"12:34"' @* '$.datetime("HH24:MI")'; +select json '"12:34"' @* '$.datetime("HH24:MI TZH")'; +select json '"12:34 +05"' @* '$.datetime("HH24:MI TZH")'; +select json '"12:34 -05"' @* '$.datetime("HH24:MI TZH")'; +select json '"12:34 +05:20"' @* '$.datetime("HH24:MI TZH:TZM")'; +select json '"12:34 -05:20"' @* '$.datetime("HH24:MI TZH:TZM")'; + +set time zone '+10'; + +select json '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy HH24:MI")'; +select json '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH")'; +select json '"10-03-2017 12:34 +05"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH")'; +select json '"10-03-2017 12:34 -05"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH")'; +select json '"10-03-2017 12:34 +05:20"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH:TZM")'; +select json '"10-03-2017 12:34 -05:20"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH:TZM")'; +select json '"12:34"' @* '$.datetime("HH24:MI")'; +select json '"12:34"' @* '$.datetime("HH24:MI TZH")'; +select json '"12:34 +05"' @* '$.datetime("HH24:MI TZH")'; +select json '"12:34 -05"' @* '$.datetime("HH24:MI TZH")'; +select json '"12:34 +05:20"' @* '$.datetime("HH24:MI TZH:TZM")'; +select json '"12:34 -05:20"' @* '$.datetime("HH24:MI TZH:TZM")'; + +set time zone default; + +select json '"2017-03-10"' @* '$.datetime().type()'; +select json '"2017-03-10"' @* '$.datetime()'; +select json '"2017-03-10 12:34:56"' @* '$.datetime().type()'; +select json '"2017-03-10 12:34:56"' @* '$.datetime()'; +select json '"2017-03-10 12:34:56 +3"' @* '$.datetime().type()'; +select json '"2017-03-10 12:34:56 +3"' @* '$.datetime()'; +select json '"2017-03-10 12:34:56 +3:10"' @* '$.datetime().type()'; +select json '"2017-03-10 12:34:56 +3:10"' @* '$.datetime()'; +select json '"12:34:56"' @* '$.datetime().type()'; +select json '"12:34:56"' @* '$.datetime()'; +select json '"12:34:56 +3"' @* '$.datetime().type()'; +select json 
'"12:34:56 +3"' @* '$.datetime()'; +select json '"12:34:56 +3:10"' @* '$.datetime().type()'; +select json '"12:34:56 +3:10"' @* '$.datetime()'; + +set time zone '+00'; + +-- date comparison +select json '["2017-03-10", "2017-03-11", "2017-03-09", "12:34:56", "01:02:03 +04", "2017-03-10 00:00:00", "2017-03-10 12:34:56", "2017-03-10 01:02:03 +04", "2017-03-10 03:00:00 +03"]' + @* '$[*].datetime() ? (@ == "10.03.2017".datetime("dd.mm.yyyy"))'; +select json '["2017-03-10", "2017-03-11", "2017-03-09", "12:34:56", "01:02:03 +04", "2017-03-10 00:00:00", "2017-03-10 12:34:56", "2017-03-10 01:02:03 +04", "2017-03-10 03:00:00 +03"]' + @* '$[*].datetime() ? (@ >= "10.03.2017".datetime("dd.mm.yyyy"))'; +select json '["2017-03-10", "2017-03-11", "2017-03-09", "12:34:56", "01:02:03 +04", "2017-03-10 00:00:00", "2017-03-10 12:34:56", "2017-03-10 01:02:03 +04", "2017-03-10 03:00:00 +03"]' + @* '$[*].datetime() ? (@ < "10.03.2017".datetime("dd.mm.yyyy"))'; + +-- time comparison +select json '["12:34:00", "12:35:00", "12:36:00", "12:35:00 +00", "12:35:00 +01", "13:35:00 +01", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00 +01"]' + @* '$[*].datetime() ? (@ == "12:35".datetime("HH24:MI"))'; +select json '["12:34:00", "12:35:00", "12:36:00", "12:35:00 +00", "12:35:00 +01", "13:35:00 +01", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00 +01"]' + @* '$[*].datetime() ? (@ >= "12:35".datetime("HH24:MI"))'; +select json '["12:34:00", "12:35:00", "12:36:00", "12:35:00 +00", "12:35:00 +01", "13:35:00 +01", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00 +01"]' + @* '$[*].datetime() ? (@ < "12:35".datetime("HH24:MI"))'; + +-- timetz comparison +select json '["12:34:00 +01", "12:35:00 +01", "12:36:00 +01", "12:35:00 +02", "12:35:00 -02", "10:35:00", "11:35:00", "12:35:00", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00 +1"]' + @* '$[*].datetime() ? (@ == "12:35 +1".datetime("HH24:MI TZH"))'; +select json '["12:34:00 +01", "12:35:00 +01", "12:36:00 +01", "12:35:00 +02", "12:35:00 -02", "10:35:00", "11:35:00", "12:35:00", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00 +1"]' + @* '$[*].datetime() ? (@ >= "12:35 +1".datetime("HH24:MI TZH"))'; +select json '["12:34:00 +01", "12:35:00 +01", "12:36:00 +01", "12:35:00 +02", "12:35:00 -02", "10:35:00", "11:35:00", "12:35:00", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00 +1"]' + @* '$[*].datetime() ? (@ < "12:35 +1".datetime("HH24:MI TZH"))'; + +-- timestamp comparison +select json '["2017-03-10 12:34:00", "2017-03-10 12:35:00", "2017-03-10 12:36:00", "2017-03-10 12:35:00 +01", "2017-03-10 13:35:00 +01", "2017-03-10 12:35:00 -01", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56 +01"]' + @* '$[*].datetime() ? (@ == "10.03.2017 12:35".datetime("dd.mm.yyyy HH24:MI"))'; +select json '["2017-03-10 12:34:00", "2017-03-10 12:35:00", "2017-03-10 12:36:00", "2017-03-10 12:35:00 +01", "2017-03-10 13:35:00 +01", "2017-03-10 12:35:00 -01", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56 +01"]' + @* '$[*].datetime() ? (@ >= "10.03.2017 12:35".datetime("dd.mm.yyyy HH24:MI"))'; +select json '["2017-03-10 12:34:00", "2017-03-10 12:35:00", "2017-03-10 12:36:00", "2017-03-10 12:35:00 +01", "2017-03-10 13:35:00 +01", "2017-03-10 12:35:00 -01", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56 +01"]' + @* '$[*].datetime() ? 
(@ < "10.03.2017 12:35".datetime("dd.mm.yyyy HH24:MI"))'; + +-- timestamptz comparison +select json '["2017-03-10 12:34:00 +01", "2017-03-10 12:35:00 +01", "2017-03-10 12:36:00 +01", "2017-03-10 12:35:00 +02", "2017-03-10 12:35:00 -02", "2017-03-10 10:35:00", "2017-03-10 11:35:00", "2017-03-10 12:35:00", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56 +01"]' + @* '$[*].datetime() ? (@ == "10.03.2017 12:35 +1".datetime("dd.mm.yyyy HH24:MI TZH"))'; +select json '["2017-03-10 12:34:00 +01", "2017-03-10 12:35:00 +01", "2017-03-10 12:36:00 +01", "2017-03-10 12:35:00 +02", "2017-03-10 12:35:00 -02", "2017-03-10 10:35:00", "2017-03-10 11:35:00", "2017-03-10 12:35:00", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56 +01"]' + @* '$[*].datetime() ? (@ >= "10.03.2017 12:35 +1".datetime("dd.mm.yyyy HH24:MI TZH"))'; +select json '["2017-03-10 12:34:00 +01", "2017-03-10 12:35:00 +01", "2017-03-10 12:36:00 +01", "2017-03-10 12:35:00 +02", "2017-03-10 12:35:00 -02", "2017-03-10 10:35:00", "2017-03-10 11:35:00", "2017-03-10 12:35:00", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56 +01"]' + @* '$[*].datetime() ? (@ < "10.03.2017 12:35 +1".datetime("dd.mm.yyyy HH24:MI TZH"))'; + +set time zone default; + +-- jsonpath operators + +SELECT json '[{"a": 1}, {"a": 2}]' @* '$[*]'; +SELECT json '[{"a": 1}, {"a": 2}]' @* '$[*] ? (@.a > 10)'; +SELECT json '[{"a": 1}, {"a": 2}]' @* '[$[*].a]'; + +SELECT json '[{"a": 1}, {"a": 2}]' @? '$[*] ? (@.a > 1)'; +SELECT json '[{"a": 1}, {"a": 2}]' @? '$[*].a ? (@ > 2)'; + +SELECT json '[{"a": 1}, {"a": 2}]' @~ '$[*].a > 1'; +SELECT json '[{"a": 1}, {"a": 2}]' @~ '$[*].a > 2'; + +-- extension: map item method +select json '1' @* 'strict $.map(@ + 10)'; +select json '1' @* 'lax $.map(@ + 10)'; +select json '[1, 2, 3]' @* '$.map(@ + 10)'; +select json '[[1, 2], [3, 4, 5], [], [6, 7]]' @* '$.map(@.map(@ + 10))'; + +-- extension: reduce/fold item methods +select json '1' @* 'strict $.reduce($1 + $2)'; +select json '1' @* 'lax $.reduce($1 + $2)'; +select json '1' @* 'strict $.fold($1 + $2, 10)'; +select json '1' @* 'lax $.fold($1 + $2, 10)'; +select json '[1, 2, 3]' @* '$.reduce($1 + $2)'; +select json '[1, 2, 3]' @* '$.fold($1 + $2, 100)'; +select json '[]' @* '$.reduce($1 + $2)'; +select json '[]' @* '$.fold($1 + $2, 100)'; +select json '[1]' @* '$.reduce($1 + $2)'; +select json '[1, 2, 3]' @* '$.foldl([$1, $2], [])'; +select json '[1, 2, 3]' @* '$.foldr([$2, $1], [])'; +select json '[[1, 2], [3, 4, 5], [], [6, 7]]' @* '$.fold($1 + $2.fold($1 + $2, 100), 1000)'; + +-- extension: min/max item methods +select json '1' @* 'strict $.min()'; +select json '1' @* 'lax $.min()'; +select json '[]' @* '$.min()'; +select json '[]' @* '$.max()'; +select json '[null]' @* '$.min()'; +select json '[null]' @* '$.max()'; +select json '[1, 2, 3]' @* '$.min()'; +select json '[1, 2, 3]' @* '$.max()'; +select json '[2, 3, 5, null, 1, 4, null]' @* '$.min()'; +select json '[2, 3, 5, null, 1, 4, null]' @* '$.max()'; +select json '["aa", null, "a", "bbb"]' @* '$.min()'; +select json '["aa", null, "a", "bbb"]' @* '$.max()'; +select json '[1, null, "2"]' @* '$.max()'; + +-- extension: path sequences +select json '[1,2,3,4,5]' @* '10, 20, $[*], 30'; +select json '[1,2,3,4,5]' @* 'lax 10, 20, $[*].a, 30'; +select json '[1,2,3,4,5]' @* 'strict 10, 20, $[*].a, 30'; +select json '[1,2,3,4,5]' @* '-(10, 20, $[1 to 3], 30)'; +select json '[1,2,3,4,5]' @* 'lax (10, 20, $[1 to 3], 30).map(@ + 100)'; +select json '[1,2,3,4,5]' @* '$[(0, $[*], 5) ? (@ == 3)]'; +select json '[1,2,3,4,5]' @* '$[(0, $[*], 3) ? 
(@ == 3)]'; + +-- extension: array constructors +select json '[1, 2, 3]' @* '[]'; +select json '[1, 2, 3]' @* '[1, 2, $.map(@ + 100)[*], 4, 5]'; +select json '[1, 2, 3]' @* '[1, 2, $.map(@ + 100)[*], 4, 5][*]'; +select json '[1, 2, 3]' @* '[(1, (2, $.map(@ + 100)[*])), (4, 5)]'; +select json '[1, 2, 3]' @* '[[1, 2], [$.map(@ + 100)[*], 4], 5, [(1,2)?(@ > 5)]]'; +select json '[1, 2, 3]' @* 'strict [1, 2, $.map(@.a)[*], 4, 5]'; +select json '[[1, 2], [3, 4, 5], [], [6, 7]]' @* '[$[*].map(@ + 10)[*] ? (@ > 13)]'; + +-- extension: object constructors +select json '[1, 2, 3]' @* '{}'; +select json '[1, 2, 3]' @* '{a: 2 + 3, "b": [$[*], 4, 5]}'; +select json '[1, 2, 3]' @* '{a: 2 + 3, "b": [$[*], 4, 5]}.*'; +select json '[1, 2, 3]' @* '{a: 2 + 3, "b": ($[*], 4, 5)}'; +select json '[1, 2, 3]' @* '{a: 2 + 3, "b": [$.map({x: @, y: @ < 3})[*], {z: "foo"}]}'; + +-- extension: object subscripting +select json '{"a": 1}' @? '$["a"]'; +select json '{"a": 1}' @? '$["b"]'; +select json '{"a": 1}' @? 'strict $["b"]'; +select json '{"a": 1}' @? '$["b", "a"]'; + +select json '{"a": 1}' @* '$["a"]'; +select json '{"a": 1}' @* 'strict $["b"]'; +select json '{"a": 1}' @* 'lax $["b"]'; +select json '{"a": 1, "b": 2}' @* 'lax $["b", "c", "b", "a", 0 to 3]'; + +select json 'null' @* '{"a": 1}["a"]'; +select json 'null' @* '{"a": 1}["b"]'; diff --git a/src/test/regress/sql/json_sqljson.sql b/src/test/regress/sql/json_sqljson.sql new file mode 100644 index 0000000000..fc3fb2be4d --- /dev/null +++ b/src/test/regress/sql/json_sqljson.sql @@ -0,0 +1,864 @@ +-- JSON_EXISTS + +SELECT JSON_EXISTS(NULL FORMAT JSON, '$'); +SELECT JSON_EXISTS(NULL::text FORMAT JSON, '$'); +SELECT JSON_EXISTS(NULL::bytea FORMAT JSON, '$'); +SELECT JSON_EXISTS(NULL::json FORMAT JSON, '$'); +SELECT JSON_EXISTS(NULL::jsonb FORMAT JSON, '$'); +SELECT JSON_EXISTS(NULL::json, '$'); + +SELECT JSON_EXISTS('' FORMAT JSON, '$'); +SELECT JSON_EXISTS('' FORMAT JSON, '$' TRUE ON ERROR); +SELECT JSON_EXISTS('' FORMAT JSON, '$' FALSE ON ERROR); +SELECT JSON_EXISTS('' FORMAT JSON, '$' UNKNOWN ON ERROR); +SELECT JSON_EXISTS('' FORMAT JSON, '$' ERROR ON ERROR); + + +SELECT JSON_EXISTS(bytea '' FORMAT JSON, '$' ERROR ON ERROR); + +SELECT JSON_EXISTS(json '[]', '$'); +SELECT JSON_EXISTS('[]' FORMAT JSON, '$'); +SELECT JSON_EXISTS(JSON_OBJECT(RETURNING bytea FORMAT JSON) FORMAT JSON, '$'); + +SELECT JSON_EXISTS(json '1', '$'); +SELECT JSON_EXISTS(json 'null', '$'); +SELECT JSON_EXISTS(json '[]', '$'); + +SELECT JSON_EXISTS(json '1', '$.a'); +SELECT JSON_EXISTS(json '1', 'strict $.a'); +SELECT JSON_EXISTS(json '1', 'strict $.a' ERROR ON ERROR); +SELECT JSON_EXISTS(json 'null', '$.a'); +SELECT JSON_EXISTS(json '[]', '$.a'); +SELECT JSON_EXISTS(json '[1, "aaa", {"a": 1}]', 'strict $.a'); +SELECT JSON_EXISTS(json '[1, "aaa", {"a": 1}]', 'lax $.a'); +SELECT JSON_EXISTS(json '{}', '$.a'); +SELECT JSON_EXISTS(json '{"b": 1, "a": 2}', '$.a'); + +SELECT JSON_EXISTS(json '1', '$.a.b'); +SELECT JSON_EXISTS(json '{"a": {"b": 1}}', '$.a.b'); +SELECT JSON_EXISTS(json '{"a": 1, "b": 2}', '$.a.b'); + +SELECT JSON_EXISTS(json '{"a": 1, "b": 2}', '$.* ? (@ > $x)' PASSING 1 AS x); +SELECT JSON_EXISTS(json '{"a": 1, "b": 2}', '$.* ? (@ > $x)' PASSING '1' AS x); +SELECT JSON_EXISTS(json '{"a": 1, "b": 2}', '$.* ? (@ > $x && @ < $y)' PASSING 0 AS x, 2 AS y); +SELECT JSON_EXISTS(json '{"a": 1, "b": 2}', '$.* ? 
(@ > $x && @ < $y)' PASSING 0 AS x, 1 AS y); + +-- extension: boolean expressions +SELECT JSON_EXISTS(json '1', '$ > 2'); +SELECT JSON_EXISTS(json '1', '$.a > 2' ERROR ON ERROR); + +-- JSON_VALUE + +SELECT JSON_VALUE(NULL, '$'); +SELECT JSON_VALUE(NULL FORMAT JSON, '$'); +SELECT JSON_VALUE(NULL::text, '$'); +SELECT JSON_VALUE(NULL::bytea, '$'); +SELECT JSON_VALUE(NULL::json, '$'); +SELECT JSON_VALUE(NULL::jsonb FORMAT JSON, '$'); + +SELECT JSON_VALUE('' FORMAT JSON, '$'); +SELECT JSON_VALUE('' FORMAT JSON, '$' NULL ON ERROR); +SELECT JSON_VALUE('' FORMAT JSON, '$' DEFAULT '"default value"' ON ERROR); +SELECT JSON_VALUE('' FORMAT JSON, '$' ERROR ON ERROR); + +SELECT JSON_VALUE(json 'null', '$'); +SELECT JSON_VALUE(json 'null', '$' RETURNING int); + +SELECT JSON_VALUE(json 'true', '$'); +SELECT JSON_VALUE(json 'true', '$' RETURNING bool); + +SELECT JSON_VALUE(json '123', '$'); +SELECT JSON_VALUE(json '123', '$' RETURNING int) + 234; +SELECT JSON_VALUE(json '123', '$' RETURNING text); +/* jsonb bytea ??? */ +SELECT JSON_VALUE(json '123', '$' RETURNING bytea); + +SELECT JSON_VALUE(json '1.23', '$'); +SELECT JSON_VALUE(json '1.23', '$' RETURNING int); +SELECT JSON_VALUE(json '"1.23"', '$' RETURNING numeric); +SELECT JSON_VALUE(json '"1.23"', '$' RETURNING int ERROR ON ERROR); + +SELECT JSON_VALUE(json '"aaa"', '$'); +SELECT JSON_VALUE(json '"aaa"', '$' RETURNING text); +SELECT JSON_VALUE(json '"aaa"', '$' RETURNING char(5)); +SELECT JSON_VALUE(json '"aaa"', '$' RETURNING char(2)); +SELECT JSON_VALUE(json '"aaa"', '$' RETURNING json); +SELECT JSON_VALUE(json '"aaa"', '$' RETURNING jsonb); +SELECT JSON_VALUE(json '"aaa"', '$' RETURNING json ERROR ON ERROR); +SELECT JSON_VALUE(json '"aaa"', '$' RETURNING jsonb ERROR ON ERROR); +SELECT JSON_VALUE(json '"\"aaa\""', '$' RETURNING json); +SELECT JSON_VALUE(json '"\"aaa\""', '$' RETURNING jsonb); +SELECT JSON_VALUE(json '"aaa"', '$' RETURNING int); +SELECT JSON_VALUE(json '"aaa"', '$' RETURNING int ERROR ON ERROR); +SELECT JSON_VALUE(json '"aaa"', '$' RETURNING int DEFAULT 111 ON ERROR); +SELECT JSON_VALUE(json '"123"', '$' RETURNING int) + 234; + +SELECT JSON_VALUE(json '"2017-02-20"', '$' RETURNING date) + 9; + +-- Test NULL checks execution in domain types +CREATE DOMAIN sqljson_int_not_null AS int NOT NULL; +SELECT JSON_VALUE(json '1', '$.a' RETURNING sqljson_int_not_null); +SELECT JSON_VALUE(json '1', '$.a' RETURNING sqljson_int_not_null NULL ON ERROR); +SELECT JSON_VALUE(json '1', '$.a' RETURNING sqljson_int_not_null DEFAULT NULL ON ERROR); + +SELECT JSON_VALUE(json '[]', '$'); +SELECT JSON_VALUE(json '[]', '$' ERROR ON ERROR); +SELECT JSON_VALUE(json '{}', '$'); +SELECT JSON_VALUE(json '{}', '$' ERROR ON ERROR); + +SELECT JSON_VALUE(json '1', '$.a'); +SELECT JSON_VALUE(json '1', 'strict $.a' ERROR ON ERROR); +SELECT JSON_VALUE(json '1', 'strict $.a' DEFAULT 'error' ON ERROR); +SELECT JSON_VALUE(json '1', 'lax $.a' ERROR ON ERROR); +SELECT JSON_VALUE(json '1', 'lax $.a' ERROR ON EMPTY ERROR ON ERROR); +SELECT JSON_VALUE(json '1', 'strict $.a' DEFAULT 2 ON ERROR); +SELECT JSON_VALUE(json '1', 'lax $.a' DEFAULT 2 ON ERROR); +SELECT JSON_VALUE(json '1', 'lax $.a' DEFAULT '2' ON ERROR); +SELECT JSON_VALUE(json '1', 'lax $.a' NULL ON EMPTY DEFAULT '2' ON ERROR); +SELECT JSON_VALUE(json '1', 'lax $.a' DEFAULT '2' ON EMPTY DEFAULT '3' ON ERROR); +SELECT JSON_VALUE(json '1', 'lax $.a' ERROR ON EMPTY DEFAULT '3' ON ERROR); + +SELECT JSON_VALUE(json '[1,2]', '$[*]' ERROR ON ERROR); +SELECT JSON_VALUE(json '[1,2]', '$[*]' DEFAULT '0' ON ERROR); +SELECT 
JSON_VALUE(json '[" "]', '$[*]' RETURNING int ERROR ON ERROR); +SELECT JSON_VALUE(json '[" "]', '$[*]' RETURNING int DEFAULT 2 + 3 ON ERROR); +SELECT JSON_VALUE(json '["1"]', '$[*]' RETURNING int DEFAULT 2 + 3 ON ERROR); + +SELECT + x, + JSON_VALUE( + json '{"a": 1, "b": 2}', + '$.* ? (@ > $x)' PASSING x AS x + RETURNING int + DEFAULT -1 ON EMPTY + DEFAULT -2 ON ERROR + ) y +FROM + generate_series(0, 2) x; + +SELECT JSON_VALUE(json 'null', '$a' PASSING point ' (1, 2 )' AS a); +SELECT JSON_VALUE(json 'null', '$a' PASSING point ' (1, 2 )' AS a RETURNING point); + +-- JSON_QUERY + +SELECT + JSON_QUERY(js FORMAT JSON, '$'), + JSON_QUERY(js FORMAT JSON, '$' WITHOUT WRAPPER), + JSON_QUERY(js FORMAT JSON, '$' WITH CONDITIONAL WRAPPER), + JSON_QUERY(js FORMAT JSON, '$' WITH UNCONDITIONAL ARRAY WRAPPER), + JSON_QUERY(js FORMAT JSON, '$' WITH ARRAY WRAPPER) +FROM + (VALUES + ('null'), + ('12.3'), + ('true'), + ('"aaa"'), + ('[1, null, "2"]'), + ('{"a": 1, "b": [2]}') + ) foo(js); + +SELECT + JSON_QUERY(js FORMAT JSON, 'strict $[*]') AS "unspec", + JSON_QUERY(js FORMAT JSON, 'strict $[*]' WITHOUT WRAPPER) AS "without", + JSON_QUERY(js FORMAT JSON, 'strict $[*]' WITH CONDITIONAL WRAPPER) AS "with cond", + JSON_QUERY(js FORMAT JSON, 'strict $[*]' WITH UNCONDITIONAL ARRAY WRAPPER) AS "with uncond", + JSON_QUERY(js FORMAT JSON, 'strict $[*]' WITH ARRAY WRAPPER) AS "with" +FROM + (VALUES + ('1'), + ('[]'), + ('[null]'), + ('[12.3]'), + ('[true]'), + ('["aaa"]'), + ('[[1, 2, 3]]'), + ('[{"a": 1, "b": [2]}]'), + ('[1, "2", null, [3]]') + ) foo(js); + +SELECT JSON_QUERY('"aaa"' FORMAT JSON, '$' RETURNING text); +SELECT JSON_QUERY('"aaa"' FORMAT JSON, '$' RETURNING text KEEP QUOTES); +SELECT JSON_QUERY('"aaa"' FORMAT JSON, '$' RETURNING text KEEP QUOTES ON SCALAR STRING); +SELECT JSON_QUERY('"aaa"' FORMAT JSON, '$' RETURNING text OMIT QUOTES); +SELECT JSON_QUERY('"aaa"' FORMAT JSON, '$' RETURNING text OMIT QUOTES ON SCALAR STRING); +SELECT JSON_QUERY('"aaa"' FORMAT JSON, '$' OMIT QUOTES ERROR ON ERROR); +SELECT JSON_QUERY('"aaa"' FORMAT JSON, '$' RETURNING json OMIT QUOTES ERROR ON ERROR); +SELECT JSON_QUERY('"aaa"' FORMAT JSON, '$' RETURNING bytea FORMAT JSON OMIT QUOTES ERROR ON ERROR); + +-- QUOTES behavior should not be specified when WITH WRAPPER used: +-- Should fail +SELECT JSON_QUERY(json '[1]', '$' WITH WRAPPER OMIT QUOTES); +SELECT JSON_QUERY(json '[1]', '$' WITH WRAPPER KEEP QUOTES); +SELECT JSON_QUERY(json '[1]', '$' WITH CONDITIONAL WRAPPER KEEP QUOTES); +SELECT JSON_QUERY(json '[1]', '$' WITH CONDITIONAL WRAPPER OMIT QUOTES); +-- Should succeed +SELECT JSON_QUERY(json '[1]', '$' WITHOUT WRAPPER OMIT QUOTES); +SELECT JSON_QUERY(json '[1]', '$' WITHOUT WRAPPER KEEP QUOTES); + +SELECT JSON_QUERY('[]' FORMAT JSON, '$[*]'); +SELECT JSON_QUERY('[]' FORMAT JSON, '$[*]' NULL ON EMPTY); +SELECT JSON_QUERY('[]' FORMAT JSON, '$[*]' EMPTY ARRAY ON EMPTY); +SELECT JSON_QUERY('[]' FORMAT JSON, '$[*]' EMPTY OBJECT ON EMPTY); +SELECT JSON_QUERY('[]' FORMAT JSON, '$[*]' ERROR ON EMPTY); + +SELECT JSON_QUERY('[]' FORMAT JSON, '$[*]' ERROR ON EMPTY NULL ON ERROR); +SELECT JSON_QUERY('[]' FORMAT JSON, '$[*]' ERROR ON EMPTY EMPTY ARRAY ON ERROR); +SELECT JSON_QUERY('[]' FORMAT JSON, '$[*]' ERROR ON EMPTY EMPTY OBJECT ON ERROR); +SELECT JSON_QUERY('[]' FORMAT JSON, '$[*]' ERROR ON EMPTY ERROR ON ERROR); +SELECT JSON_QUERY('[]' FORMAT JSON, '$[*]' ERROR ON ERROR); + +SELECT JSON_QUERY('[1,2]' FORMAT JSON, '$[*]' ERROR ON ERROR); + +SELECT JSON_QUERY(json '[1,2]', '$' RETURNING json); +SELECT JSON_QUERY(json '[1,2]', 
'$' RETURNING json FORMAT JSON); +SELECT JSON_QUERY(json '[1,2]', '$' RETURNING jsonb); +SELECT JSON_QUERY(json '[1,2]', '$' RETURNING jsonb FORMAT JSON); +SELECT JSON_QUERY(json '[1,2]', '$' RETURNING text); +SELECT JSON_QUERY(json '[1,2]', '$' RETURNING char(10)); +SELECT JSON_QUERY(json '[1,2]', '$' RETURNING char(3)); +SELECT JSON_QUERY(json '[1,2]', '$' RETURNING text FORMAT JSON); +SELECT JSON_QUERY(json '[1,2]', '$' RETURNING bytea); +SELECT JSON_QUERY(json '[1,2]', '$' RETURNING bytea FORMAT JSON); + +SELECT JSON_QUERY(json '[1,2]', '$[*]' RETURNING bytea EMPTY OBJECT ON ERROR); +SELECT JSON_QUERY(json '[1,2]', '$[*]' RETURNING bytea FORMAT JSON EMPTY OBJECT ON ERROR); +SELECT JSON_QUERY(json '[1,2]', '$[*]' RETURNING json EMPTY OBJECT ON ERROR); +SELECT JSON_QUERY(json '[1,2]', '$[*]' RETURNING jsonb EMPTY OBJECT ON ERROR); + +SELECT + x, y, + JSON_QUERY( + json '[1,2,3,4,5,null]', + '$[*] ? (@ >= $x && @ <= $y)' + PASSING x AS x, y AS y + WITH CONDITIONAL WRAPPER + EMPTY ARRAY ON EMPTY + ) list +FROM + generate_series(0, 4) x, + generate_series(0, 4) y; + +-- Conversion to record types +CREATE TYPE sqljson_rec AS (a int, t text, js json, jb jsonb, jsa json[]); +CREATE TYPE sqljson_reca AS (reca sqljson_rec[]); + +SELECT JSON_QUERY(json '[{"a": 1, "b": "foo", "t": "aaa", "js": [1, "2", {}], "jb": {"x": [1, "2", {}]}}, {"a": 2}]', '$[0]' RETURNING sqljson_rec); +SELECT * FROM unnest((JSON_QUERY(json '{"jsa": [{"a": 1, "b": ["foo"]}, {"a": 2, "c": {}}, 123]}', '$' RETURNING sqljson_rec)).jsa); +SELECT * FROM unnest((JSON_QUERY(json '{"reca": [{"a": 1, "t": ["foo", []]}, {"a": 2, "jb": [{}, true]}]}', '$' RETURNING sqljson_reca)).reca); + +-- Conversion to array types +SELECT JSON_QUERY(json '[1,2,null,"3"]', '$[*]' RETURNING int[] WITH WRAPPER); +SELECT * FROM unnest(JSON_QUERY(json '[{"a": 1, "t": ["foo", []]}, {"a": 2, "jb": [{}, true]}]', '$' RETURNING sqljson_rec[])); + +-- Conversion to domain types +SELECT JSON_QUERY(json '{"a": 1}', '$.a' RETURNING sqljson_int_not_null); +SELECT JSON_QUERY(json '{"a": 1}', '$.b' RETURNING sqljson_int_not_null); + +-- Test constraints + +CREATE TABLE test_json_constraints ( + js text, + i int, + x jsonb DEFAULT JSON_QUERY(json '[1,2]', '$[*]' WITH WRAPPER) + CONSTRAINT test_json_constraint1 + CHECK (js IS JSON) + CONSTRAINT test_json_constraint2 + CHECK (JSON_EXISTS(js FORMAT JSON, '$.a' PASSING i + 5 AS int, i::text AS txt, array[1,2,3] as arr)) + CONSTRAINT test_json_constraint3 + CHECK (JSON_VALUE(js::json, '$.a' RETURNING int DEFAULT ('12' || i)::int ON EMPTY ERROR ON ERROR) > i) + CONSTRAINT test_json_constraint4 + CHECK (JSON_QUERY(js FORMAT JSON, '$.a' RETURNING jsonb WITH CONDITIONAL WRAPPER EMPTY OBJECT ON ERROR) < jsonb '[10]') + CONSTRAINT test_json_constraint5 + CHECK (JSON_QUERY(js FORMAT JSON, '$.a' RETURNING char(5) OMIT QUOTES EMPTY ARRAY ON EMPTY) > 'a') +); + +\d test_json_constraints + +SELECT check_clause +FROM information_schema.check_constraints +WHERE constraint_name LIKE 'test_json_constraint%'; + +SELECT adsrc FROM pg_attrdef WHERE adrelid = 'test_json_constraints'::regclass; + +INSERT INTO test_json_constraints VALUES ('', 1); +INSERT INTO test_json_constraints VALUES ('1', 1); +INSERT INTO test_json_constraints VALUES ('[]'); +INSERT INTO test_json_constraints VALUES ('{"b": 1}', 1); +INSERT INTO test_json_constraints VALUES ('{"a": 1}', 1); +INSERT INTO test_json_constraints VALUES ('{"a": 7}', 1); +INSERT INTO test_json_constraints VALUES ('{"a": 10}', 1); + +DROP TABLE test_json_constraints; + +-- JSON_TABLE + +-- 
Should fail (JSON_TABLE can be used only in FROM clause) +SELECT JSON_TABLE('[]', '$'); + +-- Should fail (no columns) +SELECT * FROM JSON_TABLE(NULL, '$' COLUMNS ()); + +-- NULL => empty table +SELECT * FROM JSON_TABLE(NULL, '$' COLUMNS (foo int)) bar; + +-- invalid json => empty table +SELECT * FROM JSON_TABLE('', '$' COLUMNS (foo int)) bar; +SELECT * FROM JSON_TABLE('' FORMAT JSON, '$' COLUMNS (foo int)) bar; + +-- invalid json => error +SELECT * FROM JSON_TABLE('' FORMAT JSON, '$' COLUMNS (foo int) ERROR ON ERROR) bar; + +-- +SELECT * FROM JSON_TABLE('123' FORMAT JSON, '$' + COLUMNS (item int PATH '$', foo int)) bar; + +SELECT * FROM JSON_TABLE(json '123', '$' + COLUMNS (item int PATH '$', foo int)) bar; + +-- JSON_TABLE: basic functionality +SELECT * +FROM + (VALUES + ('1'), + ('[]'), + ('{}'), + ('[1, 1.23, "2", "aaaaaaa", null, false, true, {"aaa": 123}, "[1,2]", "\"str\""]'), + ('err') + ) vals(js) + LEFT OUTER JOIN +-- JSON_TABLE is implicitly lateral + JSON_TABLE( + vals.js FORMAT json, 'lax $[*]' + COLUMNS ( + id FOR ORDINALITY, + id2 FOR ORDINALITY, -- allowed additional ordinality columns + "int" int PATH '$', + "text" text PATH '$', + "char(4)" char(4) PATH '$', + "bool" bool PATH '$', + "numeric" numeric PATH '$', + js json PATH '$', + jb jsonb PATH '$', + jst text FORMAT JSON PATH '$', + jsc char(4) FORMAT JSON PATH '$', + jsv varchar(4) FORMAT JSON PATH '$', + jsb jsonb FORMAT JSON PATH '$', + aaa int, -- implicit path '$."aaa"', + aaa1 int PATH '$.aaa' + ) + ) jt + ON true; + +-- JSON_TABLE: Test backward parsing + +CREATE VIEW json_table_view AS +SELECT * FROM + JSON_TABLE( + 'null' FORMAT JSON, 'lax $[*]' PASSING 1 + 2 AS a, json '"foo"' AS "b c" + COLUMNS ( + id FOR ORDINALITY, + id2 FOR ORDINALITY, -- allowed additional ordinality columns + "int" int PATH '$', + "text" text PATH '$', + "char(4)" char(4) PATH '$', + "bool" bool PATH '$', + "numeric" numeric PATH '$', + js json PATH '$', + jb jsonb PATH '$', + jst text FORMAT JSON PATH '$', + jsc char(4) FORMAT JSON PATH '$', + jsv varchar(4) FORMAT JSON PATH '$', + jsb jsonb FORMAT JSON PATH '$', + aaa int, -- implicit path '$."aaa"', + aaa1 int PATH '$.aaa', + NESTED PATH '$[1]' AS p1 COLUMNS ( + a1 int, + NESTED PATH '$[*]' AS "p1 1" COLUMNS ( + a11 text + ), + b1 text + ), + NESTED PATH '$[2]' AS p2 COLUMNS ( + NESTED PATH '$[*]' AS "p2:1" COLUMNS ( + a21 text + ), + NESTED PATH '$[*]' AS p22 COLUMNS ( + a22 text + ) + ) + ) + ); + +\sv json_table_view + +EXPLAIN (COSTS OFF, VERBOSE) SELECT * FROM json_table_view; + +-- JSON_TABLE: ON EMPTY/ON ERROR behavior +SELECT * +FROM + (VALUES ('1'), ('err'), ('"err"')) vals(js), + JSON_TABLE(vals.js FORMAT JSON, '$' COLUMNS (a int PATH '$')) jt; + +SELECT * +FROM + (VALUES ('1'), ('err'), ('"err"')) vals(js) + LEFT OUTER JOIN + JSON_TABLE(vals.js FORMAT JSON, '$' COLUMNS (a int PATH '$') ERROR ON ERROR) jt + ON true; + +SELECT * +FROM + (VALUES ('1'), ('err'), ('"err"')) vals(js) + LEFT OUTER JOIN + JSON_TABLE(vals.js FORMAT JSON, '$' COLUMNS (a int PATH '$' ERROR ON ERROR)) jt + ON true; + +SELECT * FROM JSON_TABLE('1', '$' COLUMNS (a int PATH '$.a' ERROR ON EMPTY)) jt; +SELECT * FROM JSON_TABLE('1', '$' COLUMNS (a int PATH 'strict $.a' ERROR ON EMPTY) ERROR ON ERROR) jt; +SELECT * FROM JSON_TABLE('1', '$' COLUMNS (a int PATH 'lax $.a' ERROR ON EMPTY) ERROR ON ERROR) jt; + +SELECT * FROM JSON_TABLE(json '"a"', '$' COLUMNS (a int PATH '$' DEFAULT 1 ON EMPTY DEFAULT 2 ON ERROR)) jt; +SELECT * FROM JSON_TABLE(json '"a"', '$' COLUMNS (a int PATH 'strict $.a' DEFAULT 1 ON EMPTY 
DEFAULT 2 ON ERROR)) jt; +SELECT * FROM JSON_TABLE(json '"a"', '$' COLUMNS (a int PATH 'lax $.a' DEFAULT 1 ON EMPTY DEFAULT 2 ON ERROR)) jt; + +-- JSON_TABLE: nested paths and plans + +-- Should fail (JSON_TABLE columns shall contain explicit AS path +-- specifications if explicit PLAN clause is used) +SELECT * FROM JSON_TABLE( + json '[]', '$' -- AS required here + COLUMNS ( + foo int PATH '$' + ) + PLAN DEFAULT (UNION) +) jt; + +SELECT * FROM JSON_TABLE( + json '[]', '$' AS path1 + COLUMNS ( + NESTED PATH '$' COLUMNS ( -- AS required here + foo int PATH '$' + ) + ) + PLAN DEFAULT (UNION) +) jt; + +-- Should fail (column names anf path names shall be distinct) +SELECT * FROM JSON_TABLE( + json '[]', '$' AS a + COLUMNS ( + a int + ) +) jt; + +SELECT * FROM JSON_TABLE( + json '[]', '$' AS a + COLUMNS ( + b int, + NESTED PATH '$' AS a + COLUMNS ( + c int + ) + ) +) jt; + +SELECT * FROM JSON_TABLE( + json '[]', '$' + COLUMNS ( + b int, + NESTED PATH '$' AS b + COLUMNS ( + c int + ) + ) +) jt; + +SELECT * FROM JSON_TABLE( + json '[]', '$' + COLUMNS ( + NESTED PATH '$' AS a + COLUMNS ( + b int + ), + NESTED PATH '$' + COLUMNS ( + NESTED PATH '$' AS a + COLUMNS ( + c int + ) + ) + ) +) jt; + +-- JSON_TABLE: plan validation + +SELECT * FROM JSON_TABLE( + json 'null', '$[*]' AS p0 + COLUMNS ( + NESTED PATH '$' AS p1 COLUMNS ( + NESTED PATH '$' AS p11 COLUMNS ( foo int ), + NESTED PATH '$' AS p12 COLUMNS ( bar int ) + ), + NESTED PATH '$' AS p2 COLUMNS ( + NESTED PATH '$' AS p21 COLUMNS ( baz int ) + ) + ) + PLAN (p1) +) jt; + +SELECT * FROM JSON_TABLE( + json 'null', '$[*]' AS p0 + COLUMNS ( + NESTED PATH '$' AS p1 COLUMNS ( + NESTED PATH '$' AS p11 COLUMNS ( foo int ), + NESTED PATH '$' AS p12 COLUMNS ( bar int ) + ), + NESTED PATH '$' AS p2 COLUMNS ( + NESTED PATH '$' AS p21 COLUMNS ( baz int ) + ) + ) + PLAN (p0) +) jt; + +SELECT * FROM JSON_TABLE( + json 'null', '$[*]' AS p0 + COLUMNS ( + NESTED PATH '$' AS p1 COLUMNS ( + NESTED PATH '$' AS p11 COLUMNS ( foo int ), + NESTED PATH '$' AS p12 COLUMNS ( bar int ) + ), + NESTED PATH '$' AS p2 COLUMNS ( + NESTED PATH '$' AS p21 COLUMNS ( baz int ) + ) + ) + PLAN (p0 OUTER p3) +) jt; + +SELECT * FROM JSON_TABLE( + json 'null', '$[*]' AS p0 + COLUMNS ( + NESTED PATH '$' AS p1 COLUMNS ( + NESTED PATH '$' AS p11 COLUMNS ( foo int ), + NESTED PATH '$' AS p12 COLUMNS ( bar int ) + ), + NESTED PATH '$' AS p2 COLUMNS ( + NESTED PATH '$' AS p21 COLUMNS ( baz int ) + ) + ) + PLAN (p0 OUTER (p1 CROSS p13)) +) jt; + +SELECT * FROM JSON_TABLE( + json 'null', '$[*]' AS p0 + COLUMNS ( + NESTED PATH '$' AS p1 COLUMNS ( + NESTED PATH '$' AS p11 COLUMNS ( foo int ), + NESTED PATH '$' AS p12 COLUMNS ( bar int ) + ), + NESTED PATH '$' AS p2 COLUMNS ( + NESTED PATH '$' AS p21 COLUMNS ( baz int ) + ) + ) + PLAN (p0 OUTER (p1 CROSS p2)) +) jt; + +SELECT * FROM JSON_TABLE( + json 'null', '$[*]' AS p0 + COLUMNS ( + NESTED PATH '$' AS p1 COLUMNS ( + NESTED PATH '$' AS p11 COLUMNS ( foo int ), + NESTED PATH '$' AS p12 COLUMNS ( bar int ) + ), + NESTED PATH '$' AS p2 COLUMNS ( + NESTED PATH '$' AS p21 COLUMNS ( baz int ) + ) + ) + PLAN (p0 OUTER ((p1 UNION p11) CROSS p2)) +) jt; + +SELECT * FROM JSON_TABLE( + json 'null', '$[*]' AS p0 + COLUMNS ( + NESTED PATH '$' AS p1 COLUMNS ( + NESTED PATH '$' AS p11 COLUMNS ( foo int ), + NESTED PATH '$' AS p12 COLUMNS ( bar int ) + ), + NESTED PATH '$' AS p2 COLUMNS ( + NESTED PATH '$' AS p21 COLUMNS ( baz int ) + ) + ) + PLAN (p0 OUTER ((p1 INNER p11) CROSS p2)) +) jt; + +SELECT * FROM JSON_TABLE( + json 'null', '$[*]' AS p0 + COLUMNS ( 
+ NESTED PATH '$' AS p1 COLUMNS ( + NESTED PATH '$' AS p11 COLUMNS ( foo int ), + NESTED PATH '$' AS p12 COLUMNS ( bar int ) + ), + NESTED PATH '$' AS p2 COLUMNS ( + NESTED PATH '$' AS p21 COLUMNS ( baz int ) + ) + ) + PLAN (p0 OUTER ((p1 INNER (p12 CROSS p11)) CROSS p2)) +) jt; + +SELECT * FROM JSON_TABLE( + json 'null', 'strict $[*]' AS p0 + COLUMNS ( + NESTED PATH '$' AS p1 COLUMNS ( + NESTED PATH '$' AS p11 COLUMNS ( foo int ), + NESTED PATH '$' AS p12 COLUMNS ( bar int ) + ), + NESTED PATH '$' AS p2 COLUMNS ( + NESTED PATH '$' AS p21 COLUMNS ( baz int ) + ) + ) + PLAN (p0 OUTER ((p1 INNER (p12 CROSS p11)) CROSS (p2 INNER p21))) +) jt; + +SELECT * FROM JSON_TABLE( + json 'null', 'strict $[*]' -- without root path name + COLUMNS ( + NESTED PATH '$' AS p1 COLUMNS ( + NESTED PATH '$' AS p11 COLUMNS ( foo int ), + NESTED PATH '$' AS p12 COLUMNS ( bar int ) + ), + NESTED PATH '$' AS p2 COLUMNS ( + NESTED PATH '$' AS p21 COLUMNS ( baz int ) + ) + ) + PLAN ((p1 INNER (p12 CROSS p11)) CROSS (p2 INNER p21)) +) jt; + +-- JSON_TABLE: plan execution + +CREATE TEMP TABLE json_table_test (js text); + +INSERT INTO json_table_test +VALUES ( + '[ + {"a": 1, "b": [], "c": []}, + {"a": 2, "b": [1, 2, 3], "c": [10, null, 20]}, + {"a": 3, "b": [1, 2], "c": []}, + {"x": "4", "b": [1, 2], "c": 123} + ]' +); + +-- unspecified plan (outer, union) +select + jt.* +from + json_table_test jtt, + json_table ( + jtt.js,'strict $[*]' as p + columns ( + n for ordinality, + a int path 'lax $.a' default -1 on empty, + nested path 'strict $.b[*]' as pb columns ( b int path '$' ), + nested path 'strict $.c[*]' as pc columns ( c int path '$' ) + ) + ) jt; + +-- default plan (outer, union) +select + jt.* +from + json_table_test jtt, + json_table ( + jtt.js FORMAT JSON,'strict $[*]' as p + columns ( + n for ordinality, + a int path 'lax $.a' default -1 on empty, + nested path 'strict $.b[*]' as pb columns ( b int path '$' ), + nested path 'strict $.c[*]' as pc columns ( c int path '$' ) + ) + plan default (outer, union) + ) jt; + +-- specific plan (p outer (pb union pc)) +select + jt.* +from + json_table_test jtt, + json_table ( + jtt.js FORMAT JSON,'strict $[*]' as p + columns ( + n for ordinality, + a int path 'lax $.a' default -1 on empty, + nested path 'strict $.b[*]' as pb columns ( b int path '$' ), + nested path 'strict $.c[*]' as pc columns ( c int path '$' ) + ) + plan (p outer (pb union pc)) + ) jt; + +-- specific plan (p outer (pc union pb)) +select + jt.* +from + json_table_test jtt, + json_table ( + jtt.js FORMAT JSON,'strict $[*]' as p + columns ( + n for ordinality, + a int path 'lax $.a' default -1 on empty, + nested path 'strict $.b[*]' as pb columns ( b int path '$' ), + nested path 'strict $.c[*]' as pc columns ( c int path '$' ) + ) + plan (p outer (pc union pb)) + ) jt; + +-- default plan (inner, union) +select + jt.* +from + json_table_test jtt, + json_table ( + jtt.js,'strict $[*]' as p + columns ( + n for ordinality, + a int path 'lax $.a' default -1 on empty, + nested path 'strict $.b[*]' as pb columns ( b int path '$' ), + nested path 'strict $.c[*]' as pc columns ( c int path '$' ) + ) + plan default (inner) + ) jt; + +-- specific plan (p inner (pb union pc)) +select + jt.* +from + json_table_test jtt, + json_table ( + jtt.js FORMAT JSON,'strict $[*]' as p + columns ( + n for ordinality, + a int path 'lax $.a' default -1 on empty, + nested path 'strict $.b[*]' as pb columns ( b int path '$' ), + nested path 'strict $.c[*]' as pc columns ( c int path '$' ) + ) + plan (p inner (pb union pc)) + ) jt; 
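+
+-- Note on the plan clauses used above and below: OUTER keeps a parent row even
+-- when its nested path yields no rows (the nested columns become NULL), while
+-- INNER drops such parent rows; UNION lists sibling nested paths one after
+-- another (padding the other siblings' columns with NULLs), while CROSS joins
+-- them pairwise.
+-- A minimal single-child sketch, assuming the PLAN grammar also accepts a bare
+-- "parent INNER child" join (the validation tests above exercise the analogous
+-- "p0 OUTER p3" form):
+select
+    jt.*
+from
+    json_table_test jtt,
+    json_table (
+        jtt.js FORMAT JSON, 'strict $[*]' as p
+        columns (
+            n for ordinality,
+            a int path 'lax $.a' default -1 on empty,
+            nested path 'strict $.b[*]' as pb columns ( b int path '$' )
+        )
+        plan (p inner pb)
+    ) jt;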
+ +-- default plan (inner, cross) +select + jt.* +from + json_table_test jtt, + json_table ( + jtt.js FORMAT JSON,'strict $[*]' as p + columns ( + n for ordinality, + a int path 'lax $.a' default -1 on empty, + nested path 'strict $.b[*]' as pb columns ( b int path '$' ), + nested path 'strict $.c[*]' as pc columns ( c int path '$' ) + ) + plan default (cross, inner) + ) jt; + +-- specific plan (p inner (pb cross pc)) +select + jt.* +from + json_table_test jtt, + json_table ( + jtt.js FORMAT JSON,'strict $[*]' as p + columns ( + n for ordinality, + a int path 'lax $.a' default -1 on empty, + nested path 'strict $.b[*]' as pb columns ( b int path '$' ), + nested path 'strict $.c[*]' as pc columns ( c int path '$' ) + ) + plan (p inner (pb cross pc)) + ) jt; + +-- default plan (outer, cross) +select + jt.* +from + json_table_test jtt, + json_table ( + jtt.js FORMAT JSON,'strict $[*]' as p + columns ( + n for ordinality, + a int path 'lax $.a' default -1 on empty, + nested path 'strict $.b[*]' as pb columns ( b int path '$' ), + nested path 'strict $.c[*]' as pc columns ( c int path '$' ) + ) + plan default (outer, cross) + ) jt; + +-- specific plan (p outer (pb cross pc)) +select + jt.* +from + json_table_test jtt, + json_table ( + jtt.js FORMAT JSON,'strict $[*]' as p + columns ( + n for ordinality, + a int path 'lax $.a' default -1 on empty, + nested path 'strict $.b[*]' as pb columns ( b int path '$' ), + nested path 'strict $.c[*]' as pc columns ( c int path '$' ) + ) + plan (p outer (pb cross pc)) + ) jt; + + +select + jt.*, b1 + 100 as b +from + json_table (json + '[ + {"a": 1, "b": [[1, 10], [2], [3, 30, 300]], "c": [1, null, 2]}, + {"a": 2, "b": [10, 20], "c": [1, null, 2]}, + {"x": "3", "b": [11, 22, 33, 44]} + ]', + '$[*]' as p + columns ( + n for ordinality, + a int path 'lax $.a' default -1 on error, + nested path 'strict $.b[*]' as pb columns ( + b text format json path '$', + nested path 'strict $[*]' as pb1 columns ( + b1 int path '$' + ) + ), + nested path 'strict $.c[*]' as pc columns ( + c text format json path '$', + nested path 'strict $[*]' as pc1 columns ( + c1 int path '$' + ) + ) + ) + --plan default(outer, cross) + plan(p outer ((pb inner pb1) cross (pc outer pc1))) + ) jt; + +-- Should succeed (JSON arguments are passed to root and nested paths) +SELECT * +FROM + generate_series(1, 4) x, + generate_series(1, 3) y, + JSON_TABLE(json + '[[1,2,3],[2,3,4,5],[3,4,5,6]]', + 'strict $[*] ? (@.[*] < $x)' + PASSING x AS x, y AS y + COLUMNS ( + y text FORMAT JSON PATH '$', + NESTED PATH 'strict $[*] ? (@ >= $y)' + COLUMNS ( + z int PATH '$' + ) + ) + ) jt; + +-- Should fail (JSON arguments are not passed to column paths) +SELECT * +FROM JSON_TABLE( + json '[1,2,3]', + '$[*] ? (@ < $x)' + PASSING 10 AS x + COLUMNS (y text FORMAT JSON PATH '$ ? (@ < $x)') + ) jt; diff --git a/src/test/regress/sql/jsonb.sql b/src/test/regress/sql/jsonb.sql index 8698b8d332..2709905fc7 100644 --- a/src/test/regress/sql/jsonb.sql +++ b/src/test/regress/sql/jsonb.sql @@ -727,6 +727,24 @@ SELECT count(*) FROM testjsonb WHERE j ? 'public'; SELECT count(*) FROM testjsonb WHERE j ? 
'bar'; SELECT count(*) FROM testjsonb WHERE j ?| ARRAY['public','disabled']; SELECT count(*) FROM testjsonb WHERE j ?& ARRAY['public','disabled']; +SELECT count(*) FROM testjsonb WHERE j @~ '$.wait == null'; +SELECT count(*) FROM testjsonb WHERE j @~ '"CC" == $.wait'; +SELECT count(*) FROM testjsonb WHERE j @~ '$.wait == "CC" && true == $.public'; +SELECT count(*) FROM testjsonb WHERE j @~ '$.age == 25'; +SELECT count(*) FROM testjsonb WHERE j @~ '$.age == 25.0'; +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($)'; +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.public)'; +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.bar)'; +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.public) || exists($.disabled)'; +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.public) && exists($.disabled)'; +SELECT count(*) FROM testjsonb WHERE j @? '$.wait ? (@ == null)'; +SELECT count(*) FROM testjsonb WHERE j @? '$.wait ? ("CC" == @)'; +SELECT count(*) FROM testjsonb WHERE j @? '$ ? (@.wait == "CC" && true == @.public)'; +SELECT count(*) FROM testjsonb WHERE j @? '$.age ? (@ == 25)'; +SELECT count(*) FROM testjsonb WHERE j @? '$ ? (@.age == 25.0)'; +SELECT count(*) FROM testjsonb WHERE j @? '$'; +SELECT count(*) FROM testjsonb WHERE j @? '$.public'; +SELECT count(*) FROM testjsonb WHERE j @? '$.bar'; CREATE INDEX jidx ON testjsonb USING gin (j); SET enable_seqscan = off; @@ -745,6 +763,39 @@ SELECT count(*) FROM testjsonb WHERE j ? 'bar'; SELECT count(*) FROM testjsonb WHERE j ?| ARRAY['public','disabled']; SELECT count(*) FROM testjsonb WHERE j ?& ARRAY['public','disabled']; +EXPLAIN (COSTS OFF) +SELECT count(*) FROM testjsonb WHERE j @~ '$.wait == null'; +SELECT count(*) FROM testjsonb WHERE j @~ '$.wait == null'; +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($ ? (@.wait == null))'; +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.wait ? (@ == null))'; +SELECT count(*) FROM testjsonb WHERE j @~ '"CC" == $.wait'; +SELECT count(*) FROM testjsonb WHERE j @~ '$.wait == "CC" && true == $.public'; +SELECT count(*) FROM testjsonb WHERE j @~ '$.age == 25'; +SELECT count(*) FROM testjsonb WHERE j @~ '$.age == 25.0'; +SELECT count(*) FROM testjsonb WHERE j @~ '$.array[*] == "foo"'; +SELECT count(*) FROM testjsonb WHERE j @~ '$.array[*] == "bar"'; +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($ ? (@.array[*] == "bar"))'; +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.array ? (@[*] == "bar"))'; +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.array[*] ? (@ == "bar"))'; +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($)'; +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.public)'; +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.bar)'; +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.public) || exists($.disabled)'; +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.public) && exists($.disabled)'; +EXPLAIN (COSTS OFF) +SELECT count(*) FROM testjsonb WHERE j @? '$.wait ? (@ == null)'; +SELECT count(*) FROM testjsonb WHERE j @? '$.wait ? (@ == null)'; +SELECT count(*) FROM testjsonb WHERE j @? '$.wait ? ("CC" == @)'; +SELECT count(*) FROM testjsonb WHERE j @? '$ ? (@.wait == "CC" && true == @.public)'; +SELECT count(*) FROM testjsonb WHERE j @? '$.age ? (@ == 25)'; +SELECT count(*) FROM testjsonb WHERE j @? '$ ? (@.age == 25.0)'; +SELECT count(*) FROM testjsonb WHERE j @? '$ ? (@.array[*] == "bar")'; +SELECT count(*) FROM testjsonb WHERE j @? '$.array ? (@[*] == "bar")'; +SELECT count(*) FROM testjsonb WHERE j @? '$.array[*] ? 
(@ == "bar")'; +SELECT count(*) FROM testjsonb WHERE j @? '$'; +SELECT count(*) FROM testjsonb WHERE j @? '$.public'; +SELECT count(*) FROM testjsonb WHERE j @? '$.bar'; + -- array exists - array elements should behave as keys (for GIN index scans too) CREATE INDEX jidx_array ON testjsonb USING gin((j->'array')); SELECT count(*) from testjsonb WHERE j->'array' ? 'bar'; @@ -794,6 +845,34 @@ SELECT count(*) FROM testjsonb WHERE j @> '{"age":25.0}'; -- exercise GIN_SEARCH_MODE_ALL SELECT count(*) FROM testjsonb WHERE j @> '{}'; +SELECT count(*) FROM testjsonb WHERE j @~ '$.wait == null'; +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($ ? (@.wait == null))'; +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.wait ? (@ == null))'; +SELECT count(*) FROM testjsonb WHERE j @~ '"CC" == $.wait'; +SELECT count(*) FROM testjsonb WHERE j @~ '$.wait == "CC" && true == $.public'; +SELECT count(*) FROM testjsonb WHERE j @~ '$.age == 25'; +SELECT count(*) FROM testjsonb WHERE j @~ '$.age == 25.0'; +SELECT count(*) FROM testjsonb WHERE j @~ '$.array[*] == "foo"'; +SELECT count(*) FROM testjsonb WHERE j @~ '$.array[*] == "bar"'; +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($ ? (@.array[*] == "bar"))'; +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.array ? (@[*] == "bar"))'; +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($.array[*] ? (@ == "bar"))'; +SELECT count(*) FROM testjsonb WHERE j @~ 'exists($)'; + +EXPLAIN (COSTS OFF) +SELECT count(*) FROM testjsonb WHERE j @? '$.wait ? (@ == null)'; +SELECT count(*) FROM testjsonb WHERE j @? '$.wait ? (@ == null)'; +SELECT count(*) FROM testjsonb WHERE j @? '$.wait ? ("CC" == @)'; +SELECT count(*) FROM testjsonb WHERE j @? '$ ? (@.wait == "CC" && true == @.public)'; +SELECT count(*) FROM testjsonb WHERE j @? '$.age ? (@ == 25)'; +SELECT count(*) FROM testjsonb WHERE j @? '$ ? (@.age == 25.0)'; +SELECT count(*) FROM testjsonb WHERE j @? '$ ? (@.array[*] == "bar")'; +SELECT count(*) FROM testjsonb WHERE j @? '$.array ? (@[*] == "bar")'; +SELECT count(*) FROM testjsonb WHERE j @? '$.array[*] ? (@ == "bar")'; +SELECT count(*) FROM testjsonb WHERE j @? '$'; +SELECT count(*) FROM testjsonb WHERE j @? '$.public'; +SELECT count(*) FROM testjsonb WHERE j @? '$.bar'; + RESET enable_seqscan; DROP INDEX jidx; diff --git a/src/test/regress/sql/jsonb_jsonpath.sql b/src/test/regress/sql/jsonb_jsonpath.sql new file mode 100644 index 0000000000..1aa852f217 --- /dev/null +++ b/src/test/regress/sql/jsonb_jsonpath.sql @@ -0,0 +1,454 @@ +select jsonb '{"a": 12}' @? '$.a.b'; +select jsonb '{"a": 12}' @? '$.b'; +select jsonb '{"a": {"a": 12}}' @? '$.a.a'; +select jsonb '{"a": {"a": 12}}' @? '$.*.a'; +select jsonb '{"b": {"a": 12}}' @? '$.*.a'; +select jsonb '{}' @? '$.*'; +select jsonb '{"a": 1}' @? '$.*'; +select jsonb '{"a": {"b": 1}}' @? 'lax $.**{1}'; +select jsonb '{"a": {"b": 1}}' @? 'lax $.**{2}'; +select jsonb '{"a": {"b": 1}}' @? 'lax $.**{3}'; +select jsonb '[]' @? '$.[*]'; +select jsonb '[1]' @? '$.[*]'; +select jsonb '[1]' @? '$.[1]'; +select jsonb '[1]' @? 'strict $.[1]'; +select jsonb '[1]' @? '$.[0]'; +select jsonb '[1]' @? '$.[0.3]'; +select jsonb '[1]' @? '$.[0.5]'; +select jsonb '[1]' @? '$.[0.9]'; +select jsonb '[1]' @? '$.[1.2]'; +select jsonb '[1]' @? 'strict $.[1.2]'; +select jsonb '{}' @? 'strict $.[0.3]'; +select jsonb '{}' @? 'lax $.[0.3]'; +select jsonb '{}' @? 'strict $.[1.2]'; +select jsonb '{}' @? 'lax $.[1.2]'; +select jsonb '{}' @? 'strict $.[-2 to 3]'; +select jsonb '{}' @? 
'lax $.[-2 to 3]'; +select jsonb '{"a": [1,2,3], "b": [3,4,5]}' @? '$ ? (@.a[*] > @.b[*])'; +select jsonb '{"a": [1,2,3], "b": [3,4,5]}' @? '$ ? (@.a[*] >= @.b[*])'; +select jsonb '{"a": [1,2,3], "b": [3,4,"5"]}' @? '$ ? (@.a[*] >= @.b[*])'; +select jsonb '{"a": [1,2,3], "b": [3,4,"5"]}' @? 'strict $ ? (@.a[*] >= @.b[*])'; +select jsonb '{"a": [1,2,3], "b": [3,4,null]}' @? '$ ? (@.a[*] >= @.b[*])'; +select jsonb '1' @? '$ ? ((@ == "1") is unknown)'; +select jsonb '1' @? '$ ? ((@ == 1) is unknown)'; +select jsonb '[{"a": 1}, {"a": 2}]' @? '$[0 to 1] ? (@.a > 1)'; + +select jsonb '{"a": 12, "b": {"a": 13}}' @* '$.a'; +select jsonb '{"a": 12, "b": {"a": 13}}' @* '$.b'; +select jsonb '{"a": 12, "b": {"a": 13}}' @* '$.*'; +select jsonb '{"a": 12, "b": {"a": 13}}' @* 'lax $.*.a'; +select jsonb '[12, {"a": 13}, {"b": 14}]' @* 'lax $.[*].a'; +select jsonb '[12, {"a": 13}, {"b": 14}]' @* 'lax $.[*].*'; +select jsonb '[12, {"a": 13}, {"b": 14}]' @* 'lax $.[0].a'; +select jsonb '[12, {"a": 13}, {"b": 14}]' @* 'lax $.[1].a'; +select jsonb '[12, {"a": 13}, {"b": 14}]' @* 'lax $.[2].a'; +select jsonb '[12, {"a": 13}, {"b": 14}]' @* 'lax $.[0,1].a'; +select jsonb '[12, {"a": 13}, {"b": 14}]' @* 'lax $.[0 to 10].a'; +select jsonb '[12, {"a": 13}, {"b": 14}, "ccc", true]' @* '$.[2.5 - 1 to @.size() - 2]'; +select jsonb '1' @* 'lax $[0]'; +select jsonb '1' @* 'lax $[*]'; +select jsonb '{}' @* 'lax $[0]'; +select jsonb '[1]' @* 'lax $[0]'; +select jsonb '[1]' @* 'lax $[*]'; +select jsonb '[1,2,3]' @* 'lax $[*]'; +select jsonb '[]' @* '$[last]'; +select jsonb '[]' @* 'strict $[last]'; +select jsonb '[1]' @* '$[last]'; +select jsonb '{}' @* 'lax $[last]'; +select jsonb '[1,2,3]' @* '$[last]'; +select jsonb '[1,2,3]' @* '$[last - 1]'; +select jsonb '[1,2,3]' @* '$[last ? (@.type() == "number")]'; +select jsonb '[1,2,3]' @* '$[last ? (@.type() == "string")]'; + +select * from jsonpath_query(jsonb '{"a": 10}', '$'); +select * from jsonpath_query(jsonb '{"a": 10}', '$ ? (.a < $value)'); +select * from jsonpath_query(jsonb '{"a": 10}', '$ ? (.a < $value)', '{"value" : 13}'); +select * from jsonpath_query(jsonb '{"a": 10}', '$ ? (.a < $value)', '{"value" : 8}'); +select * from jsonpath_query(jsonb '{"a": 10}', '$.a ? (@ < $value)', '{"value" : 13}'); +select * from jsonpath_query(jsonb '[10,11,12,13,14,15]', '$.[*] ? (@ < $value)', '{"value" : 13}'); +select * from jsonpath_query(jsonb '[10,11,12,13,14,15]', '$.[0,1] ? (@ < $value)', '{"value" : 13}'); +select * from jsonpath_query(jsonb '[10,11,12,13,14,15]', '$.[0 to 2] ? (@ < $value)', '{"value" : 15}'); +select * from jsonpath_query(jsonb '[1,"1",2,"2",null]', '$.[*] ? (@ == "1")'); +select * from jsonpath_query(jsonb '[1,"1",2,"2",null]', '$.[*] ? (@ == $value)', '{"value" : "1"}'); +select * from jsonpath_query(jsonb '[1, "2", null]', '$[*] ? (@ != null)'); +select * from jsonpath_query(jsonb '[1, "2", null]', '$[*] ? (@ == null)'); + +select jsonb '{"a": {"b": 1}}' @* 'lax $.**'; +select jsonb '{"a": {"b": 1}}' @* 'lax $.**{1}'; +select jsonb '{"a": {"b": 1}}' @* 'lax $.**{1,}'; +select jsonb '{"a": {"b": 1}}' @* 'lax $.**{2}'; +select jsonb '{"a": {"b": 1}}' @* 'lax $.**{2,}'; +select jsonb '{"a": {"b": 1}}' @* 'lax $.**{3,}'; +select jsonb '{"a": {"b": 1}}' @* 'lax $.**.b ? (@ > 0)'; +select jsonb '{"a": {"b": 1}}' @* 'lax $.**{0}.b ? (@ > 0)'; +select jsonb '{"a": {"b": 1}}' @* 'lax $.**{1}.b ? (@ > 0)'; +select jsonb '{"a": {"b": 1}}' @* 'lax $.**{0,}.b ? (@ > 0)'; +select jsonb '{"a": {"b": 1}}' @* 'lax $.**{1,}.b ? 
(@ > 0)'; +select jsonb '{"a": {"b": 1}}' @* 'lax $.**{1,2}.b ? (@ > 0)'; +select jsonb '{"a": {"c": {"b": 1}}}' @* 'lax $.**.b ? (@ > 0)'; +select jsonb '{"a": {"c": {"b": 1}}}' @* 'lax $.**{0}.b ? (@ > 0)'; +select jsonb '{"a": {"c": {"b": 1}}}' @* 'lax $.**{1}.b ? (@ > 0)'; +select jsonb '{"a": {"c": {"b": 1}}}' @* 'lax $.**{0,}.b ? (@ > 0)'; +select jsonb '{"a": {"c": {"b": 1}}}' @* 'lax $.**{1,}.b ? (@ > 0)'; +select jsonb '{"a": {"c": {"b": 1}}}' @* 'lax $.**{1,2}.b ? (@ > 0)'; +select jsonb '{"a": {"c": {"b": 1}}}' @* 'lax $.**{2,3}.b ? (@ > 0)'; + +select jsonb '{"a": {"b": 1}}' @? '$.**.b ? ( @ > 0)'; +select jsonb '{"a": {"b": 1}}' @? '$.**{0}.b ? ( @ > 0)'; +select jsonb '{"a": {"b": 1}}' @? '$.**{1}.b ? ( @ > 0)'; +select jsonb '{"a": {"b": 1}}' @? '$.**{0,}.b ? ( @ > 0)'; +select jsonb '{"a": {"b": 1}}' @? '$.**{1,}.b ? ( @ > 0)'; +select jsonb '{"a": {"b": 1}}' @? '$.**{1,2}.b ? ( @ > 0)'; +select jsonb '{"a": {"c": {"b": 1}}}' @? '$.**.b ? ( @ > 0)'; +select jsonb '{"a": {"c": {"b": 1}}}' @? '$.**{0}.b ? ( @ > 0)'; +select jsonb '{"a": {"c": {"b": 1}}}' @? '$.**{1}.b ? ( @ > 0)'; +select jsonb '{"a": {"c": {"b": 1}}}' @? '$.**{0,}.b ? ( @ > 0)'; +select jsonb '{"a": {"c": {"b": 1}}}' @? '$.**{1,}.b ? ( @ > 0)'; +select jsonb '{"a": {"c": {"b": 1}}}' @? '$.**{1,2}.b ? ( @ > 0)'; +select jsonb '{"a": {"c": {"b": 1}}}' @? '$.**{2,3}.b ? ( @ > 0)'; + +select jsonb '{"g": {"x": 2}}' @* '$.g ? (exists (@.x))'; +select jsonb '{"g": {"x": 2}}' @* '$.g ? (exists (@.y))'; +select jsonb '{"g": {"x": 2}}' @* '$.g ? (exists (@.x ? (@ >= 2) ))'; + +--test ternary logic +select + x, y, + jsonpath_query( + jsonb '[true, false, null]', + '$[*] ? (@ == true && ($x == true && $y == true) || + @ == false && !($x == true && $y == true) || + @ == null && ($x == true && $y == true) is unknown)', + jsonb_build_object('x', x, 'y', y) + ) as "x && y" +from + (values (jsonb 'true'), ('false'), ('"null"')) x(x), + (values (jsonb 'true'), ('false'), ('"null"')) y(y); + +select + x, y, + jsonpath_query( + jsonb '[true, false, null]', + '$[*] ? (@ == true && ($x == true || $y == true) || + @ == false && !($x == true || $y == true) || + @ == null && ($x == true || $y == true) is unknown)', + jsonb_build_object('x', x, 'y', y) + ) as "x || y" +from + (values (jsonb 'true'), ('false'), ('"null"')) x(x), + (values (jsonb 'true'), ('false'), ('"null"')) y(y); + +select jsonb '{"a": 1, "b":1}' @? '$ ? (.a == .b)'; +select jsonb '{"c": {"a": 1, "b":1}}' @? '$ ? (.a == .b)'; +select jsonb '{"c": {"a": 1, "b":1}}' @? '$.c ? (.a == .b)'; +select jsonb '{"c": {"a": 1, "b":1}}' @? '$.c ? ($.c.a == .b)'; +select jsonb '{"c": {"a": 1, "b":1}}' @? '$.* ? (.a == .b)'; +select jsonb '{"a": 1, "b":1}' @? '$.** ? (.a == .b)'; +select jsonb '{"c": {"a": 1, "b":1}}' @? '$.** ? (.a == .b)'; + +select jsonb '{"c": {"a": 2, "b":1}}' @* '$.** ? (.a == 1 + 1)'; +select jsonb '{"c": {"a": 2, "b":1}}' @* '$.** ? (.a == (1 + 1))'; +select jsonb '{"c": {"a": 2, "b":1}}' @* '$.** ? (.a == .b + 1)'; +select jsonb '{"c": {"a": 2, "b":1}}' @* '$.** ? (.a == (.b + 1))'; +select jsonb '{"c": {"a": -1, "b":1}}' @? '$.** ? (.a == - 1)'; +select jsonb '{"c": {"a": -1, "b":1}}' @? '$.** ? (.a == -1)'; +select jsonb '{"c": {"a": -1, "b":1}}' @? '$.** ? (.a == -.b)'; +select jsonb '{"c": {"a": -1, "b":1}}' @? '$.** ? (.a == - .b)'; +select jsonb '{"c": {"a": 0, "b":1}}' @? '$.** ? (.a == 1 - .b)'; +select jsonb '{"c": {"a": 2, "b":1}}' @? '$.** ? (.a == 1 - - .b)'; +select jsonb '{"c": {"a": 0, "b":1}}' @? '$.** ? 
(.a == 1 - +.b)'; +select jsonb '[1,2,3]' @? '$ ? (+@[*] > +2)'; +select jsonb '[1,2,3]' @? '$ ? (+@[*] > +3)'; +select jsonb '[1,2,3]' @? '$ ? (-@[*] < -2)'; +select jsonb '[1,2,3]' @? '$ ? (-@[*] < -3)'; +select jsonb '1' @? '$ ? ($ > 0)'; + +-- unwrapping of operator arguments in lax mode +select jsonb '{"a": [2]}' @* 'lax $.a * 3'; +select jsonb '{"a": [2]}' @* 'lax $.a + 3'; +select jsonb '{"a": [2, 3, 4]}' @* 'lax -$.a'; +-- should fail +select jsonb '{"a": [1, 2]}' @* 'lax $.a * 3'; + +-- extension: boolean expressions +select jsonb '2' @* '$ > 1'; +select jsonb '2' @* '$ <= 1'; +select jsonb '2' @* '$ == "2"'; + +select jsonb '2' @~ '$ > 1'; +select jsonb '2' @~ '$ <= 1'; +select jsonb '2' @~ '$ == "2"'; +select jsonb '2' @~ '1'; +select jsonb '{}' @~ '$'; +select jsonb '[]' @~ '$'; +select jsonb '[1,2,3]' @~ '$[*]'; +select jsonb '[]' @~ '$[*]'; +select jsonpath_predicate(jsonb '[[1, true], [2, false]]', 'strict $[*] ? (@[0] > $x) [1]', '{"x": 1}'); +select jsonpath_predicate(jsonb '[[1, true], [2, false]]', 'strict $[*] ? (@[0] < $x) [1]', '{"x": 2}'); + +select jsonb '[null,1,true,"a",[],{}]' @* '$.type()'; +select jsonb '[null,1,true,"a",[],{}]' @* 'lax $.type()'; +select jsonb '[null,1,true,"a",[],{}]' @* '$[*].type()'; +select jsonb 'null' @* 'null.type()'; +select jsonb 'null' @* 'true.type()'; +select jsonb 'null' @* '123.type()'; +select jsonb 'null' @* '"123".type()'; +select jsonb 'null' @* 'aaa.type()'; + +select jsonb '{"a": 2}' @* '($.a - 5).abs() + 10'; +select jsonb '{"a": 2.5}' @* '-($.a * $.a).floor() + 10'; +select jsonb '[1, 2, 3]' @* '($[*] > 2) ? (@ == true)'; +select jsonb '[1, 2, 3]' @* '($[*] > 3).type()'; +select jsonb '[1, 2, 3]' @* '($[*].a > 3).type()'; +select jsonb '[1, 2, 3]' @* 'strict ($[*].a > 3).type()'; + +select jsonb '[1,null,true,"11",[],[1],[1,2,3],{},{"a":1,"b":2}]' @* 'strict $[*].size()'; +select jsonb '[1,null,true,"11",[],[1],[1,2,3],{},{"a":1,"b":2}]' @* 'lax $[*].size()'; + +select jsonb '[0, 1, -2, -3.4, 5.6]' @* '$[*].abs()'; +select jsonb '[0, 1, -2, -3.4, 5.6]' @* '$[*].floor()'; +select jsonb '[0, 1, -2, -3.4, 5.6]' @* '$[*].ceiling()'; +select jsonb '[0, 1, -2, -3.4, 5.6]' @* '$[*].ceiling().abs()'; +select jsonb '[0, 1, -2, -3.4, 5.6]' @* '$[*].ceiling().abs().type()'; + +select jsonb '[{},1]' @* '$[*].keyvalue()'; +select jsonb '{}' @* '$.keyvalue()'; +select jsonb '{"a": 1, "b": [1, 2], "c": {"a": "bbb"}}' @* '$.keyvalue()'; +select jsonb '[{"a": 1, "b": [1, 2]}, {"c": {"a": "bbb"}}]' @* '$[*].keyvalue()'; +select jsonb '[{"a": 1, "b": [1, 2]}, {"c": {"a": "bbb"}}]' @* 'strict $.keyvalue()'; +select jsonb '[{"a": 1, "b": [1, 2]}, {"c": {"a": "bbb"}}]' @* 'lax $.keyvalue()'; + +select jsonb 'null' @* '$.double()'; +select jsonb 'true' @* '$.double()'; +select jsonb '[]' @* '$.double()'; +select jsonb '[]' @* 'strict $.double()'; +select jsonb '{}' @* '$.double()'; +select jsonb '1.23' @* '$.double()'; +select jsonb '"1.23"' @* '$.double()'; +select jsonb '"1.23aaa"' @* '$.double()'; + +select jsonb '["", "a", "abc", "abcabc"]' @* '$[*] ? (@ starts with "abc")'; +select jsonb '["", "a", "abc", "abcabc"]' @* 'strict $ ? (@[*] starts with "abc")'; +select jsonb '["", "a", "abd", "abdabc"]' @* 'strict $ ? (@[*] starts with "abc")'; +select jsonb '["abc", "abcabc", null, 1]' @* 'strict $ ? (@[*] starts with "abc")'; +select jsonb '["abc", "abcabc", null, 1]' @* 'strict $ ? ((@[*] starts with "abc") is unknown)'; +select jsonb '[[null, 1, "abc", "abcabc"]]' @* 'lax $ ? 
(@[*] starts with "abc")'; +select jsonb '[[null, 1, "abd", "abdabc"]]' @* 'lax $ ? ((@[*] starts with "abc") is unknown)'; +select jsonb '[null, 1, "abd", "abdabc"]' @* 'lax $[*] ? ((@ starts with "abc") is unknown)'; + +select jsonb '[null, 1, "abc", "abd", "aBdC", "abdacb", "babc"]' @* 'lax $[*] ? (@ like_regex "^ab.*c")'; +select jsonb '[null, 1, "abc", "abd", "aBdC", "abdacb", "babc"]' @* 'lax $[*] ? (@ like_regex "^ab.*c" flag "i")'; + +select jsonb 'null' @* '$.datetime()'; +select jsonb 'true' @* '$.datetime()'; +select jsonb '[]' @* '$.datetime()'; +select jsonb '[]' @* 'strict $.datetime()'; +select jsonb '{}' @* '$.datetime()'; +select jsonb '""' @* '$.datetime()'; + +-- Standard extension: UNIX epoch to timestamptz +select jsonb '0' @* '$.datetime()'; +select jsonb '0' @* '$.datetime().type()'; +select jsonb '1490216035.5' @* '$.datetime()'; + +select jsonb '"10-03-2017"' @* '$.datetime("dd-mm-yyyy")'; +select jsonb '"10-03-2017"' @* '$.datetime("dd-mm-yyyy").type()'; +select jsonb '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy")'; +select jsonb '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy").type()'; + +select jsonb '"10-03-2017 12:34"' @* ' $.datetime("dd-mm-yyyy HH24:MI").type()'; +select jsonb '"10-03-2017 12:34 +05:20"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH:TZM").type()'; +select jsonb '"12:34:56"' @* '$.datetime("HH24:MI:SS").type()'; +select jsonb '"12:34:56 +05:20"' @* '$.datetime("HH24:MI:SS TZH:TZM").type()'; + +set time zone '+00'; + +select jsonb '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy HH24:MI")'; +select jsonb '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH")'; +select jsonb '"10-03-2017 12:34 +05"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH")'; +select jsonb '"10-03-2017 12:34 -05"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH")'; +select jsonb '"10-03-2017 12:34 +05:20"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH:TZM")'; +select jsonb '"10-03-2017 12:34 -05:20"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH:TZM")'; +select jsonb '"12:34"' @* '$.datetime("HH24:MI")'; +select jsonb '"12:34"' @* '$.datetime("HH24:MI TZH")'; +select jsonb '"12:34 +05"' @* '$.datetime("HH24:MI TZH")'; +select jsonb '"12:34 -05"' @* '$.datetime("HH24:MI TZH")'; +select jsonb '"12:34 +05:20"' @* '$.datetime("HH24:MI TZH:TZM")'; +select jsonb '"12:34 -05:20"' @* '$.datetime("HH24:MI TZH:TZM")'; + +set time zone '+10'; + +select jsonb '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy HH24:MI")'; +select jsonb '"10-03-2017 12:34"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH")'; +select jsonb '"10-03-2017 12:34 +05"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH")'; +select jsonb '"10-03-2017 12:34 -05"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH")'; +select jsonb '"10-03-2017 12:34 +05:20"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH:TZM")'; +select jsonb '"10-03-2017 12:34 -05:20"' @* '$.datetime("dd-mm-yyyy HH24:MI TZH:TZM")'; +select jsonb '"12:34"' @* '$.datetime("HH24:MI")'; +select jsonb '"12:34"' @* '$.datetime("HH24:MI TZH")'; +select jsonb '"12:34 +05"' @* '$.datetime("HH24:MI TZH")'; +select jsonb '"12:34 -05"' @* '$.datetime("HH24:MI TZH")'; +select jsonb '"12:34 +05:20"' @* '$.datetime("HH24:MI TZH:TZM")'; +select jsonb '"12:34 -05:20"' @* '$.datetime("HH24:MI TZH:TZM")'; + +set time zone default; + +select jsonb '"2017-03-10"' @* '$.datetime().type()'; +select jsonb '"2017-03-10"' @* '$.datetime()'; +select jsonb '"2017-03-10 12:34:56"' @* '$.datetime().type()'; +select jsonb '"2017-03-10 12:34:56"' @* '$.datetime()'; +select jsonb '"2017-03-10 12:34:56 +3"' @* '$.datetime().type()'; +select 
jsonb '"2017-03-10 12:34:56 +3"' @* '$.datetime()'; +select jsonb '"2017-03-10 12:34:56 +3:10"' @* '$.datetime().type()'; +select jsonb '"2017-03-10 12:34:56 +3:10"' @* '$.datetime()'; +select jsonb '"12:34:56"' @* '$.datetime().type()'; +select jsonb '"12:34:56"' @* '$.datetime()'; +select jsonb '"12:34:56 +3"' @* '$.datetime().type()'; +select jsonb '"12:34:56 +3"' @* '$.datetime()'; +select jsonb '"12:34:56 +3:10"' @* '$.datetime().type()'; +select jsonb '"12:34:56 +3:10"' @* '$.datetime()'; + +set time zone '+00'; + +-- date comparison +select jsonb + '["2017-03-10", "2017-03-11", "2017-03-09", "12:34:56", "01:02:03 +04", "2017-03-10 00:00:00", "2017-03-10 12:34:56", "2017-03-10 01:02:03 +04", "2017-03-10 03:00:00 +03"]' @* + '$[*].datetime() ? (@ == "10.03.2017".datetime("dd.mm.yyyy"))'; +select jsonb + '["2017-03-10", "2017-03-11", "2017-03-09", "12:34:56", "01:02:03 +04", "2017-03-10 00:00:00", "2017-03-10 12:34:56", "2017-03-10 01:02:03 +04", "2017-03-10 03:00:00 +03"]' @* + '$[*].datetime() ? (@ >= "10.03.2017".datetime("dd.mm.yyyy"))'; +select jsonb + '["2017-03-10", "2017-03-11", "2017-03-09", "12:34:56", "01:02:03 +04", "2017-03-10 00:00:00", "2017-03-10 12:34:56", "2017-03-10 01:02:03 +04", "2017-03-10 03:00:00 +03"]' @* + '$[*].datetime() ? (@ < "10.03.2017".datetime("dd.mm.yyyy"))'; + +-- time comparison +select jsonb + '["12:34:00", "12:35:00", "12:36:00", "12:35:00 +00", "12:35:00 +01", "13:35:00 +01", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00 +01"]' @* + '$[*].datetime() ? (@ == "12:35".datetime("HH24:MI"))'; +select jsonb + '["12:34:00", "12:35:00", "12:36:00", "12:35:00 +00", "12:35:00 +01", "13:35:00 +01", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00 +01"]' @* + '$[*].datetime() ? (@ >= "12:35".datetime("HH24:MI"))'; +select jsonb + '["12:34:00", "12:35:00", "12:36:00", "12:35:00 +00", "12:35:00 +01", "13:35:00 +01", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00 +01"]' @* + '$[*].datetime() ? (@ < "12:35".datetime("HH24:MI"))'; + +-- timetz comparison +select jsonb + '["12:34:00 +01", "12:35:00 +01", "12:36:00 +01", "12:35:00 +02", "12:35:00 -02", "10:35:00", "11:35:00", "12:35:00", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00 +1"]' @* + '$[*].datetime() ? (@ == "12:35 +1".datetime("HH24:MI TZH"))'; +select jsonb + '["12:34:00 +01", "12:35:00 +01", "12:36:00 +01", "12:35:00 +02", "12:35:00 -02", "10:35:00", "11:35:00", "12:35:00", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00 +1"]' @* + '$[*].datetime() ? (@ >= "12:35 +1".datetime("HH24:MI TZH"))'; +select jsonb + '["12:34:00 +01", "12:35:00 +01", "12:36:00 +01", "12:35:00 +02", "12:35:00 -02", "10:35:00", "11:35:00", "12:35:00", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00 +1"]' @* + '$[*].datetime() ? (@ < "12:35 +1".datetime("HH24:MI TZH"))'; + +-- timestamp comparison +select jsonb + '["2017-03-10 12:34:00", "2017-03-10 12:35:00", "2017-03-10 12:36:00", "2017-03-10 12:35:00 +01", "2017-03-10 13:35:00 +01", "2017-03-10 12:35:00 -01", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56 +01"]' @* + '$[*].datetime() ? (@ == "10.03.2017 12:35".datetime("dd.mm.yyyy HH24:MI"))'; +select jsonb + '["2017-03-10 12:34:00", "2017-03-10 12:35:00", "2017-03-10 12:36:00", "2017-03-10 12:35:00 +01", "2017-03-10 13:35:00 +01", "2017-03-10 12:35:00 -01", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56 +01"]' @* + '$[*].datetime() ? 
(@ >= "10.03.2017 12:35".datetime("dd.mm.yyyy HH24:MI"))'; +select jsonb + '["2017-03-10 12:34:00", "2017-03-10 12:35:00", "2017-03-10 12:36:00", "2017-03-10 12:35:00 +01", "2017-03-10 13:35:00 +01", "2017-03-10 12:35:00 -01", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56 +01"]' @* + '$[*].datetime() ? (@ < "10.03.2017 12:35".datetime("dd.mm.yyyy HH24:MI"))'; + +-- timestamptz comparison +select jsonb + '["2017-03-10 12:34:00 +01", "2017-03-10 12:35:00 +01", "2017-03-10 12:36:00 +01", "2017-03-10 12:35:00 +02", "2017-03-10 12:35:00 -02", "2017-03-10 10:35:00", "2017-03-10 11:35:00", "2017-03-10 12:35:00", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56 +01"]' @* + '$[*].datetime() ? (@ == "10.03.2017 12:35 +1".datetime("dd.mm.yyyy HH24:MI TZH"))'; +select jsonb + '["2017-03-10 12:34:00 +01", "2017-03-10 12:35:00 +01", "2017-03-10 12:36:00 +01", "2017-03-10 12:35:00 +02", "2017-03-10 12:35:00 -02", "2017-03-10 10:35:00", "2017-03-10 11:35:00", "2017-03-10 12:35:00", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56 +01"]' @* + '$[*].datetime() ? (@ >= "10.03.2017 12:35 +1".datetime("dd.mm.yyyy HH24:MI TZH"))'; +select jsonb + '["2017-03-10 12:34:00 +01", "2017-03-10 12:35:00 +01", "2017-03-10 12:36:00 +01", "2017-03-10 12:35:00 +02", "2017-03-10 12:35:00 -02", "2017-03-10 10:35:00", "2017-03-10 11:35:00", "2017-03-10 12:35:00", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56 +01"]' @* + '$[*].datetime() ? (@ < "10.03.2017 12:35 +1".datetime("dd.mm.yyyy HH24:MI TZH"))'; + +set time zone default; + +-- jsonpath operators + +SELECT jsonb '[{"a": 1}, {"a": 2}]' @* '$[*]'; +SELECT jsonb '[{"a": 1}, {"a": 2}]' @* '$[*] ? (@.a > 10)'; +SELECT jsonb '[{"a": 1}, {"a": 2}]' @* '[$[*].a]'; + +SELECT jsonb '[{"a": 1}, {"a": 2}]' @? '$[*].a ? (@ > 1)'; +SELECT jsonb '[{"a": 1}, {"a": 2}]' @? '$[*] ? 
(@.a > 2)'; + +SELECT jsonb '[{"a": 1}, {"a": 2}]' @~ '$[*].a > 1'; +SELECT jsonb '[{"a": 1}, {"a": 2}]' @~ '$[*].a > 2'; + +-- extension: map item method +select jsonb '1' @* 'strict $.map(@ + 10)'; +select jsonb '1' @* 'lax $.map(@ + 10)'; +select jsonb '[1, 2, 3]' @* '$.map(@ + 10)'; +select jsonb '[[1, 2], [3, 4, 5], [], [6, 7]]' @* '$.map(@.map(@ + 10))'; + +-- extension: reduce/fold item methods +select jsonb '1' @* 'strict $.reduce($1 + $2)'; +select jsonb '1' @* 'lax $.reduce($1 + $2)'; +select jsonb '1' @* 'strict $.fold($1 + $2, 10)'; +select jsonb '1' @* 'lax $.fold($1 + $2, 10)'; +select jsonb '[1, 2, 3]' @* '$.reduce($1 + $2)'; +select jsonb '[1, 2, 3]' @* '$.fold($1 + $2, 100)'; +select jsonb '[]' @* '$.reduce($1 + $2)'; +select jsonb '[]' @* '$.fold($1 + $2, 100)'; +select jsonb '[1]' @* '$.reduce($1 + $2)'; +select jsonb '[1, 2, 3]' @* '$.foldl([$1, $2], [])'; +select jsonb '[1, 2, 3]' @* '$.foldr([$2, $1], [])'; +select jsonb '[[1, 2], [3, 4, 5], [], [6, 7]]' @* '$.fold($1 + $2.fold($1 + $2, 100), 1000)'; + +-- extension: min/max item methods +select jsonb '1' @* 'strict $.min()'; +select jsonb '1' @* 'lax $.min()'; +select jsonb '[]' @* '$.min()'; +select jsonb '[]' @* '$.max()'; +select jsonb '[null]' @* '$.min()'; +select jsonb '[null]' @* '$.max()'; +select jsonb '[1, 2, 3]' @* '$.min()'; +select jsonb '[1, 2, 3]' @* '$.max()'; +select jsonb '[2, 3, 5, null, 1, 4, null]' @* '$.min()'; +select jsonb '[2, 3, 5, null, 1, 4, null]' @* '$.max()'; +select jsonb '["aa", null, "a", "bbb"]' @* '$.min()'; +select jsonb '["aa", null, "a", "bbb"]' @* '$.max()'; +select jsonb '[1, null, "2"]' @* '$.max()'; + +-- extension: path sequences +select jsonb '[1,2,3,4,5]' @* '10, 20, $[*], 30'; +select jsonb '[1,2,3,4,5]' @* 'lax 10, 20, $[*].a, 30'; +select jsonb '[1,2,3,4,5]' @* 'strict 10, 20, $[*].a, 30'; +select jsonb '[1,2,3,4,5]' @* '-(10, 20, $[1 to 3], 30)'; +select jsonb '[1,2,3,4,5]' @* 'lax (10, 20, $[1 to 3], 30).map(@ + 100)'; +select jsonb '[1,2,3,4,5]' @* '$[(0, $[*], 5) ? (@ == 3)]'; +select jsonb '[1,2,3,4,5]' @* '$[(0, $[*], 3) ? (@ == 3)]'; + +-- extension: array constructors +select jsonb '[1, 2, 3]' @* '[]'; +select jsonb '[1, 2, 3]' @* '[1, 2, $.map(@ + 100)[*], 4, 5]'; +select jsonb '[1, 2, 3]' @* '[1, 2, $.map(@ + 100)[*], 4, 5][*]'; +select jsonb '[1, 2, 3]' @* '[(1, (2, $.map(@ + 100)[*])), (4, 5)]'; +select jsonb '[1, 2, 3]' @* '[[1, 2], [$.map(@ + 100)[*], 4], 5, [(1,2)?(@ > 5)]]'; +select jsonb '[1, 2, 3]' @* 'strict [1, 2, $.map(@.a)[*], 4, 5]'; +select jsonb '[[1, 2], [3, 4, 5], [], [6, 7]]' @* '[$[*].map(@ + 10)[*] ? (@ > 13)]'; + +-- extension: object constructors +select jsonb '[1, 2, 3]' @* '{}'; +select jsonb '[1, 2, 3]' @* '{a: 2 + 3, "b": [$[*], 4, 5]}'; +select jsonb '[1, 2, 3]' @* '{a: 2 + 3, "b": [$[*], 4, 5]}.*'; +select jsonb '[1, 2, 3]' @* '{a: 2 + 3, "b": ($[*], 4, 5)}'; +select jsonb '[1, 2, 3]' @* '{a: 2 + 3, "b": [$.map({x: @, y: @ < 3})[*], {z: "foo"}]}'; + +-- extension: object subscripting +select jsonb '{"a": 1}' @? '$["a"]'; +select jsonb '{"a": 1}' @? '$["b"]'; +select jsonb '{"a": 1}' @? 'strict $["b"]'; +select jsonb '{"a": 1}' @? 
'$["b", "a"]'; + +select jsonb '{"a": 1}' @* '$["a"]'; +select jsonb '{"a": 1}' @* 'strict $["b"]'; +select jsonb '{"a": 1}' @* 'lax $["b"]'; +select jsonb '{"a": 1, "b": 2}' @* 'lax $["b", "c", "b", "a", 0 to 3]'; + +select jsonb 'null' @* '{"a": 1}["a"]'; +select jsonb 'null' @* '{"a": 1}["b"]'; diff --git a/src/test/regress/sql/jsonb_sqljson.sql b/src/test/regress/sql/jsonb_sqljson.sql new file mode 100644 index 0000000000..87036d4f70 --- /dev/null +++ b/src/test/regress/sql/jsonb_sqljson.sql @@ -0,0 +1,828 @@ +-- JSON_EXISTS + +SELECT JSON_EXISTS(NULL::jsonb, '$'); + +SELECT JSON_EXISTS(jsonb '[]', '$'); +SELECT JSON_EXISTS(JSON_OBJECT(RETURNING jsonb), '$'); + +SELECT JSON_EXISTS(jsonb '1', '$'); +SELECT JSON_EXISTS(jsonb 'null', '$'); +SELECT JSON_EXISTS(jsonb '[]', '$'); + +SELECT JSON_EXISTS(jsonb '1', '$.a'); +SELECT JSON_EXISTS(jsonb '1', 'strict $.a'); +SELECT JSON_EXISTS(jsonb '1', 'strict $.a' ERROR ON ERROR); +SELECT JSON_EXISTS(jsonb 'null', '$.a'); +SELECT JSON_EXISTS(jsonb '[]', '$.a'); +SELECT JSON_EXISTS(jsonb '[1, "aaa", {"a": 1}]', 'strict $.a'); +SELECT JSON_EXISTS(jsonb '[1, "aaa", {"a": 1}]', 'lax $.a'); +SELECT JSON_EXISTS(jsonb '{}', '$.a'); +SELECT JSON_EXISTS(jsonb '{"b": 1, "a": 2}', '$.a'); + +SELECT JSON_EXISTS(jsonb '1', '$.a.b'); +SELECT JSON_EXISTS(jsonb '{"a": {"b": 1}}', '$.a.b'); +SELECT JSON_EXISTS(jsonb '{"a": 1, "b": 2}', '$.a.b'); + +SELECT JSON_EXISTS(jsonb '{"a": 1, "b": 2}', '$.* ? (@ > $x)' PASSING 1 AS x); +SELECT JSON_EXISTS(jsonb '{"a": 1, "b": 2}', '$.* ? (@ > $x)' PASSING '1' AS x); +SELECT JSON_EXISTS(jsonb '{"a": 1, "b": 2}', '$.* ? (@ > $x && @ < $y)' PASSING 0 AS x, 2 AS y); +SELECT JSON_EXISTS(jsonb '{"a": 1, "b": 2}', '$.* ? (@ > $x && @ < $y)' PASSING 0 AS x, 1 AS y); + +-- extension: boolean expressions +SELECT JSON_EXISTS(jsonb '1', '$ > 2'); +SELECT JSON_EXISTS(jsonb '1', '$.a > 2' ERROR ON ERROR); + +-- JSON_VALUE + +SELECT JSON_VALUE(NULL::jsonb, '$'); + +SELECT JSON_VALUE(jsonb 'null', '$'); +SELECT JSON_VALUE(jsonb 'null', '$' RETURNING int); + +SELECT JSON_VALUE(jsonb 'true', '$'); +SELECT JSON_VALUE(jsonb 'true', '$' RETURNING bool); + +SELECT JSON_VALUE(jsonb '123', '$'); +SELECT JSON_VALUE(jsonb '123', '$' RETURNING int) + 234; +SELECT JSON_VALUE(jsonb '123', '$' RETURNING text); +/* jsonb bytea ??? 
*/ +SELECT JSON_VALUE(jsonb '123', '$' RETURNING bytea); + +SELECT JSON_VALUE(jsonb '1.23', '$'); +SELECT JSON_VALUE(jsonb '1.23', '$' RETURNING int); +SELECT JSON_VALUE(jsonb '"1.23"', '$' RETURNING numeric); +SELECT JSON_VALUE(jsonb '"1.23"', '$' RETURNING int ERROR ON ERROR); + +SELECT JSON_VALUE(jsonb '"aaa"', '$'); +SELECT JSON_VALUE(jsonb '"aaa"', '$' RETURNING text); +SELECT JSON_VALUE(jsonb '"aaa"', '$' RETURNING char(5)); +SELECT JSON_VALUE(jsonb '"aaa"', '$' RETURNING char(2)); +SELECT JSON_VALUE(jsonb '"aaa"', '$' RETURNING json); +SELECT JSON_VALUE(jsonb '"aaa"', '$' RETURNING jsonb); +SELECT JSON_VALUE(jsonb '"aaa"', '$' RETURNING json ERROR ON ERROR); +SELECT JSON_VALUE(jsonb '"aaa"', '$' RETURNING jsonb ERROR ON ERROR); +SELECT JSON_VALUE(jsonb '"\"aaa\""', '$' RETURNING json); +SELECT JSON_VALUE(jsonb '"\"aaa\""', '$' RETURNING jsonb); +SELECT JSON_VALUE(jsonb '"aaa"', '$' RETURNING int); +SELECT JSON_VALUE(jsonb '"aaa"', '$' RETURNING int ERROR ON ERROR); +SELECT JSON_VALUE(jsonb '"aaa"', '$' RETURNING int DEFAULT 111 ON ERROR); +SELECT JSON_VALUE(jsonb '"123"', '$' RETURNING int) + 234; + +SELECT JSON_VALUE(jsonb '"2017-02-20"', '$' RETURNING date) + 9; + +-- Test NULL checks execution in domain types +CREATE DOMAIN sqljsonb_int_not_null AS int NOT NULL; +SELECT JSON_VALUE(jsonb '1', '$.a' RETURNING sqljsonb_int_not_null); +SELECT JSON_VALUE(jsonb '1', '$.a' RETURNING sqljsonb_int_not_null NULL ON ERROR); +SELECT JSON_VALUE(jsonb '1', '$.a' RETURNING sqljsonb_int_not_null DEFAULT NULL ON ERROR); + +SELECT JSON_VALUE(jsonb '[]', '$'); +SELECT JSON_VALUE(jsonb '[]', '$' ERROR ON ERROR); +SELECT JSON_VALUE(jsonb '{}', '$'); +SELECT JSON_VALUE(jsonb '{}', '$' ERROR ON ERROR); + +SELECT JSON_VALUE(jsonb '1', '$.a'); +SELECT JSON_VALUE(jsonb '1', 'strict $.a' ERROR ON ERROR); +SELECT JSON_VALUE(jsonb '1', 'strict $.a' DEFAULT 'error' ON ERROR); +SELECT JSON_VALUE(jsonb '1', 'lax $.a' ERROR ON ERROR); +SELECT JSON_VALUE(jsonb '1', 'lax $.a' ERROR ON EMPTY ERROR ON ERROR); +SELECT JSON_VALUE(jsonb '1', 'strict $.a' DEFAULT 2 ON ERROR); +SELECT JSON_VALUE(jsonb '1', 'lax $.a' DEFAULT 2 ON ERROR); +SELECT JSON_VALUE(jsonb '1', 'lax $.a' DEFAULT '2' ON ERROR); +SELECT JSON_VALUE(jsonb '1', 'lax $.a' NULL ON EMPTY DEFAULT '2' ON ERROR); +SELECT JSON_VALUE(jsonb '1', 'lax $.a' DEFAULT '2' ON EMPTY DEFAULT '3' ON ERROR); +SELECT JSON_VALUE(jsonb '1', 'lax $.a' ERROR ON EMPTY DEFAULT '3' ON ERROR); + +SELECT JSON_VALUE(jsonb '[1,2]', '$[*]' ERROR ON ERROR); +SELECT JSON_VALUE(jsonb '[1,2]', '$[*]' DEFAULT '0' ON ERROR); +SELECT JSON_VALUE(jsonb '[" "]', '$[*]' RETURNING int ERROR ON ERROR); +SELECT JSON_VALUE(jsonb '[" "]', '$[*]' RETURNING int DEFAULT 2 + 3 ON ERROR); +SELECT JSON_VALUE(jsonb '["1"]', '$[*]' RETURNING int DEFAULT 2 + 3 ON ERROR); + +SELECT + x, + JSON_VALUE( + jsonb '{"a": 1, "b": 2}', + '$.* ? 
(@ > $x)' PASSING x AS x + RETURNING int + DEFAULT -1 ON EMPTY + DEFAULT -2 ON ERROR + ) y +FROM + generate_series(0, 2) x; + +SELECT JSON_VALUE(jsonb 'null', '$a' PASSING point ' (1, 2 )' AS a); +SELECT JSON_VALUE(jsonb 'null', '$a' PASSING point ' (1, 2 )' AS a RETURNING point); + +-- JSON_QUERY + +SELECT + JSON_QUERY(js, '$'), + JSON_QUERY(js, '$' WITHOUT WRAPPER), + JSON_QUERY(js, '$' WITH CONDITIONAL WRAPPER), + JSON_QUERY(js, '$' WITH UNCONDITIONAL ARRAY WRAPPER), + JSON_QUERY(js, '$' WITH ARRAY WRAPPER) +FROM + (VALUES + (jsonb 'null'), + ('12.3'), + ('true'), + ('"aaa"'), + ('[1, null, "2"]'), + ('{"a": 1, "b": [2]}') + ) foo(js); + +SELECT + JSON_QUERY(js, 'strict $[*]') AS "unspec", + JSON_QUERY(js, 'strict $[*]' WITHOUT WRAPPER) AS "without", + JSON_QUERY(js, 'strict $[*]' WITH CONDITIONAL WRAPPER) AS "with cond", + JSON_QUERY(js, 'strict $[*]' WITH UNCONDITIONAL ARRAY WRAPPER) AS "with uncond", + JSON_QUERY(js, 'strict $[*]' WITH ARRAY WRAPPER) AS "with" +FROM + (VALUES + (jsonb '1'), + ('[]'), + ('[null]'), + ('[12.3]'), + ('[true]'), + ('["aaa"]'), + ('[[1, 2, 3]]'), + ('[{"a": 1, "b": [2]}]'), + ('[1, "2", null, [3]]') + ) foo(js); + +SELECT JSON_QUERY(jsonb '"aaa"', '$' RETURNING text); +SELECT JSON_QUERY(jsonb '"aaa"', '$' RETURNING text KEEP QUOTES); +SELECT JSON_QUERY(jsonb '"aaa"', '$' RETURNING text KEEP QUOTES ON SCALAR STRING); +SELECT JSON_QUERY(jsonb '"aaa"', '$' RETURNING text OMIT QUOTES); +SELECT JSON_QUERY(jsonb '"aaa"', '$' RETURNING text OMIT QUOTES ON SCALAR STRING); +SELECT JSON_QUERY(jsonb '"aaa"', '$' OMIT QUOTES ERROR ON ERROR); +SELECT JSON_QUERY(jsonb '"aaa"', '$' RETURNING json OMIT QUOTES ERROR ON ERROR); +SELECT JSON_QUERY(jsonb '"aaa"', '$' RETURNING bytea FORMAT JSON OMIT QUOTES ERROR ON ERROR); + +-- QUOTES behavior should not be specified when WITH WRAPPER used: +-- Should fail +SELECT JSON_QUERY(jsonb '[1]', '$' WITH WRAPPER OMIT QUOTES); +SELECT JSON_QUERY(jsonb '[1]', '$' WITH WRAPPER KEEP QUOTES); +SELECT JSON_QUERY(jsonb '[1]', '$' WITH CONDITIONAL WRAPPER KEEP QUOTES); +SELECT JSON_QUERY(jsonb '[1]', '$' WITH CONDITIONAL WRAPPER OMIT QUOTES); +-- Should succeed +SELECT JSON_QUERY(jsonb '[1]', '$' WITHOUT WRAPPER OMIT QUOTES); +SELECT JSON_QUERY(jsonb '[1]', '$' WITHOUT WRAPPER KEEP QUOTES); + +SELECT JSON_QUERY(jsonb '[]', '$[*]'); +SELECT JSON_QUERY(jsonb '[]', '$[*]' NULL ON EMPTY); +SELECT JSON_QUERY(jsonb '[]', '$[*]' EMPTY ARRAY ON EMPTY); +SELECT JSON_QUERY(jsonb '[]', '$[*]' EMPTY OBJECT ON EMPTY); +SELECT JSON_QUERY(jsonb '[]', '$[*]' ERROR ON EMPTY); + +SELECT JSON_QUERY(jsonb '[]', '$[*]' ERROR ON EMPTY NULL ON ERROR); +SELECT JSON_QUERY(jsonb '[]', '$[*]' ERROR ON EMPTY EMPTY ARRAY ON ERROR); +SELECT JSON_QUERY(jsonb '[]', '$[*]' ERROR ON EMPTY EMPTY OBJECT ON ERROR); +SELECT JSON_QUERY(jsonb '[]', '$[*]' ERROR ON EMPTY ERROR ON ERROR); +SELECT JSON_QUERY(jsonb '[]', '$[*]' ERROR ON ERROR); + +SELECT JSON_QUERY(jsonb '[1,2]', '$[*]' ERROR ON ERROR); + +SELECT JSON_QUERY(jsonb '[1,2]', '$' RETURNING json); +SELECT JSON_QUERY(jsonb '[1,2]', '$' RETURNING json FORMAT JSON); +SELECT JSON_QUERY(jsonb '[1,2]', '$' RETURNING jsonb); +SELECT JSON_QUERY(jsonb '[1,2]', '$' RETURNING jsonb FORMAT JSON); +SELECT JSON_QUERY(jsonb '[1,2]', '$' RETURNING text); +SELECT JSON_QUERY(jsonb '[1,2]', '$' RETURNING char(10)); +SELECT JSON_QUERY(jsonb '[1,2]', '$' RETURNING char(3)); +SELECT JSON_QUERY(jsonb '[1,2]', '$' RETURNING text FORMAT JSON); +SELECT JSON_QUERY(jsonb '[1,2]', '$' RETURNING bytea); +SELECT JSON_QUERY(jsonb '[1,2]', '$' RETURNING 
bytea FORMAT JSON); + +SELECT JSON_QUERY(jsonb '[1,2]', '$[*]' RETURNING bytea EMPTY OBJECT ON ERROR); +SELECT JSON_QUERY(jsonb '[1,2]', '$[*]' RETURNING bytea FORMAT JSON EMPTY OBJECT ON ERROR); +SELECT JSON_QUERY(jsonb '[1,2]', '$[*]' RETURNING json EMPTY OBJECT ON ERROR); +SELECT JSON_QUERY(jsonb '[1,2]', '$[*]' RETURNING jsonb EMPTY OBJECT ON ERROR); + +SELECT + x, y, + JSON_QUERY( + jsonb '[1,2,3,4,5,null]', + '$[*] ? (@ >= $x && @ <= $y)' + PASSING x AS x, y AS y + WITH CONDITIONAL WRAPPER + EMPTY ARRAY ON EMPTY + ) list +FROM + generate_series(0, 4) x, + generate_series(0, 4) y; + +-- Conversion to record types +CREATE TYPE sqljsonb_rec AS (a int, t text, js json, jb jsonb, jsa json[]); +CREATE TYPE sqljsonb_reca AS (reca sqljsonb_rec[]); + +SELECT JSON_QUERY(jsonb '[{"a": 1, "b": "foo", "t": "aaa", "js": [1, "2", {}], "jb": {"x": [1, "2", {}]}}, {"a": 2}]', '$[0]' RETURNING sqljsonb_rec); +SELECT * FROM unnest((JSON_QUERY(jsonb '{"jsa": [{"a": 1, "b": ["foo"]}, {"a": 2, "c": {}}, 123]}', '$' RETURNING sqljsonb_rec)).jsa); +SELECT * FROM unnest((JSON_QUERY(jsonb '{"reca": [{"a": 1, "t": ["foo", []]}, {"a": 2, "jb": [{}, true]}]}', '$' RETURNING sqljsonb_reca)).reca); + +-- Conversion to array types +SELECT JSON_QUERY(jsonb '[1,2,null,"3"]', '$[*]' RETURNING int[] WITH WRAPPER); +SELECT * FROM unnest(JSON_QUERY(jsonb '[{"a": 1, "t": ["foo", []]}, {"a": 2, "jb": [{}, true]}]', '$' RETURNING sqljsonb_rec[])); + +-- Conversion to domain types +SELECT JSON_QUERY(jsonb '{"a": 1}', '$.a' RETURNING sqljsonb_int_not_null); +SELECT JSON_QUERY(jsonb '{"a": 1}', '$.b' RETURNING sqljsonb_int_not_null); + +-- Test constraints + +CREATE TABLE test_jsonb_constraints ( + js text, + i int, + x jsonb DEFAULT JSON_QUERY(jsonb '[1,2]', '$[*]' WITH WRAPPER) + CONSTRAINT test_jsonb_constraint1 + CHECK (js IS JSON) + CONSTRAINT test_jsonb_constraint2 + CHECK (JSON_EXISTS(js::jsonb, '$.a' PASSING i + 5 AS int, i::text AS txt, array[1,2,3] as arr)) + CONSTRAINT test_jsonb_constraint3 + CHECK (JSON_VALUE(js::jsonb, '$.a' RETURNING int DEFAULT ('12' || i)::int ON EMPTY ERROR ON ERROR) > i) + CONSTRAINT test_jsonb_constraint4 + CHECK (JSON_QUERY(js::jsonb, '$.a' WITH CONDITIONAL WRAPPER EMPTY OBJECT ON ERROR) < jsonb '[10]') + CONSTRAINT test_jsonb_constraint5 + CHECK (JSON_QUERY(js::jsonb, '$.a' RETURNING char(5) OMIT QUOTES EMPTY ARRAY ON EMPTY) > 'a') +); + +\d test_jsonb_constraints + +SELECT check_clause +FROM information_schema.check_constraints +WHERE constraint_name LIKE 'test_jsonb_constraint%'; + +SELECT adsrc FROM pg_attrdef WHERE adrelid = 'test_jsonb_constraints'::regclass; + +INSERT INTO test_jsonb_constraints VALUES ('', 1); +INSERT INTO test_jsonb_constraints VALUES ('1', 1); +INSERT INTO test_jsonb_constraints VALUES ('[]'); +INSERT INTO test_jsonb_constraints VALUES ('{"b": 1}', 1); +INSERT INTO test_jsonb_constraints VALUES ('{"a": 1}', 1); +INSERT INTO test_jsonb_constraints VALUES ('{"a": 7}', 1); +INSERT INTO test_jsonb_constraints VALUES ('{"a": 10}', 1); + +DROP TABLE test_jsonb_constraints; + +-- JSON_TABLE + +-- Should fail (JSON_TABLE can be used only in FROM clause) +SELECT JSON_TABLE('[]', '$'); + +-- Should fail (no columns) +SELECT * FROM JSON_TABLE(NULL, '$' COLUMNS ()); + +-- NULL => empty table +SELECT * FROM JSON_TABLE(NULL::jsonb, '$' COLUMNS (foo int)) bar; + +-- +SELECT * FROM JSON_TABLE(jsonb '123', '$' + COLUMNS (item int PATH '$', foo int)) bar; + +-- JSON_TABLE: basic functionality +SELECT * +FROM + (VALUES + ('1'), + ('[]'), + ('{}'), + ('[1, 1.23, "2", "aaaaaaa", 
null, false, true, {"aaa": 123}, "[1,2]", "\"str\""]') + ) vals(js) + LEFT OUTER JOIN +-- JSON_TABLE is implicitly lateral + JSON_TABLE( + vals.js::jsonb, 'lax $[*]' + COLUMNS ( + id FOR ORDINALITY, + id2 FOR ORDINALITY, -- allowed additional ordinality columns + "int" int PATH '$', + "text" text PATH '$', + "char(4)" char(4) PATH '$', + "bool" bool PATH '$', + "numeric" numeric PATH '$', + js json PATH '$', + jb jsonb PATH '$', + jst text FORMAT JSON PATH '$', + jsc char(4) FORMAT JSON PATH '$', + jsv varchar(4) FORMAT JSON PATH '$', + jsb jsonb FORMAT JSON PATH '$', + aaa int, -- implicit path '$."aaa"', + aaa1 int PATH '$.aaa' + ) + ) jt + ON true; + +-- JSON_TABLE: Test backward parsing + +CREATE VIEW jsonb_table_view AS +SELECT * FROM + JSON_TABLE( + jsonb 'null', 'lax $[*]' PASSING 1 + 2 AS a, json '"foo"' AS "b c" + COLUMNS ( + id FOR ORDINALITY, + id2 FOR ORDINALITY, -- allowed additional ordinality columns + "int" int PATH '$', + "text" text PATH '$', + "char(4)" char(4) PATH '$', + "bool" bool PATH '$', + "numeric" numeric PATH '$', + js json PATH '$', + jb jsonb PATH '$', + jst text FORMAT JSON PATH '$', + jsc char(4) FORMAT JSON PATH '$', + jsv varchar(4) FORMAT JSON PATH '$', + jsb jsonb FORMAT JSON PATH '$', + aaa int, -- implicit path '$."aaa"', + aaa1 int PATH '$.aaa', + NESTED PATH '$[1]' AS p1 COLUMNS ( + a1 int, + NESTED PATH '$[*]' AS "p1 1" COLUMNS ( + a11 text + ), + b1 text + ), + NESTED PATH '$[2]' AS p2 COLUMNS ( + NESTED PATH '$[*]' AS "p2:1" COLUMNS ( + a21 text + ), + NESTED PATH '$[*]' AS p22 COLUMNS ( + a22 text + ) + ) + ) + ); + +\sv jsonb_table_view + +EXPLAIN (COSTS OFF, VERBOSE) SELECT * FROM jsonb_table_view; + +-- JSON_TABLE: ON EMPTY/ON ERROR behavior +SELECT * +FROM + (VALUES ('1'), ('"err"')) vals(js), + JSON_TABLE(vals.js::jsonb, '$' COLUMNS (a int PATH '$')) jt; + +SELECT * +FROM + (VALUES ('1'), ('"err"')) vals(js) + LEFT OUTER JOIN + JSON_TABLE(vals.js::jsonb, '$' COLUMNS (a int PATH '$') ERROR ON ERROR) jt + ON true; + +SELECT * +FROM + (VALUES ('1'), ('"err"')) vals(js) + LEFT OUTER JOIN + JSON_TABLE(vals.js::jsonb, '$' COLUMNS (a int PATH '$' ERROR ON ERROR)) jt + ON true; + +SELECT * FROM JSON_TABLE(jsonb '1', '$' COLUMNS (a int PATH '$.a' ERROR ON EMPTY)) jt; +SELECT * FROM JSON_TABLE(jsonb '1', '$' COLUMNS (a int PATH 'strict $.a' ERROR ON EMPTY) ERROR ON ERROR) jt; +SELECT * FROM JSON_TABLE(jsonb '1', '$' COLUMNS (a int PATH 'lax $.a' ERROR ON EMPTY) ERROR ON ERROR) jt; + +SELECT * FROM JSON_TABLE(jsonb '"a"', '$' COLUMNS (a int PATH '$' DEFAULT 1 ON EMPTY DEFAULT 2 ON ERROR)) jt; +SELECT * FROM JSON_TABLE(jsonb '"a"', '$' COLUMNS (a int PATH 'strict $.a' DEFAULT 1 ON EMPTY DEFAULT 2 ON ERROR)) jt; +SELECT * FROM JSON_TABLE(jsonb '"a"', '$' COLUMNS (a int PATH 'lax $.a' DEFAULT 1 ON EMPTY DEFAULT 2 ON ERROR)) jt; + +-- JSON_TABLE: nested paths and plans + +-- Should fail (JSON_TABLE columns shall contain explicit AS path +-- specifications if explicit PLAN clause is used) +SELECT * FROM JSON_TABLE( + jsonb '[]', '$' -- AS required here + COLUMNS ( + foo int PATH '$' + ) + PLAN DEFAULT (UNION) +) jt; + +SELECT * FROM JSON_TABLE( + jsonb '[]', '$' AS path1 + COLUMNS ( + NESTED PATH '$' COLUMNS ( -- AS required here + foo int PATH '$' + ) + ) + PLAN DEFAULT (UNION) +) jt; + +-- Should fail (column names and path names shall be distinct) +SELECT * FROM JSON_TABLE( + jsonb '[]', '$' AS a + COLUMNS ( + a int + ) +) jt; + +SELECT * FROM JSON_TABLE( + jsonb '[]', '$' AS a + COLUMNS ( + b int, + NESTED PATH '$' AS a + COLUMNS ( + c int + ) + ) +) 
jt; + +SELECT * FROM JSON_TABLE( + jsonb '[]', '$' + COLUMNS ( + b int, + NESTED PATH '$' AS b + COLUMNS ( + c int + ) + ) +) jt; + +SELECT * FROM JSON_TABLE( + jsonb '[]', '$' + COLUMNS ( + NESTED PATH '$' AS a + COLUMNS ( + b int + ), + NESTED PATH '$' + COLUMNS ( + NESTED PATH '$' AS a + COLUMNS ( + c int + ) + ) + ) +) jt; + +-- JSON_TABLE: plan validation + +SELECT * FROM JSON_TABLE( + jsonb 'null', '$[*]' AS p0 + COLUMNS ( + NESTED PATH '$' AS p1 COLUMNS ( + NESTED PATH '$' AS p11 COLUMNS ( foo int ), + NESTED PATH '$' AS p12 COLUMNS ( bar int ) + ), + NESTED PATH '$' AS p2 COLUMNS ( + NESTED PATH '$' AS p21 COLUMNS ( baz int ) + ) + ) + PLAN (p1) +) jt; + +SELECT * FROM JSON_TABLE( + jsonb 'null', '$[*]' AS p0 + COLUMNS ( + NESTED PATH '$' AS p1 COLUMNS ( + NESTED PATH '$' AS p11 COLUMNS ( foo int ), + NESTED PATH '$' AS p12 COLUMNS ( bar int ) + ), + NESTED PATH '$' AS p2 COLUMNS ( + NESTED PATH '$' AS p21 COLUMNS ( baz int ) + ) + ) + PLAN (p0) +) jt; + +SELECT * FROM JSON_TABLE( + jsonb 'null', '$[*]' AS p0 + COLUMNS ( + NESTED PATH '$' AS p1 COLUMNS ( + NESTED PATH '$' AS p11 COLUMNS ( foo int ), + NESTED PATH '$' AS p12 COLUMNS ( bar int ) + ), + NESTED PATH '$' AS p2 COLUMNS ( + NESTED PATH '$' AS p21 COLUMNS ( baz int ) + ) + ) + PLAN (p0 OUTER p3) +) jt; + +SELECT * FROM JSON_TABLE( + jsonb 'null', '$[*]' AS p0 + COLUMNS ( + NESTED PATH '$' AS p1 COLUMNS ( + NESTED PATH '$' AS p11 COLUMNS ( foo int ), + NESTED PATH '$' AS p12 COLUMNS ( bar int ) + ), + NESTED PATH '$' AS p2 COLUMNS ( + NESTED PATH '$' AS p21 COLUMNS ( baz int ) + ) + ) + PLAN (p0 OUTER (p1 CROSS p13)) +) jt; + +SELECT * FROM JSON_TABLE( + jsonb 'null', '$[*]' AS p0 + COLUMNS ( + NESTED PATH '$' AS p1 COLUMNS ( + NESTED PATH '$' AS p11 COLUMNS ( foo int ), + NESTED PATH '$' AS p12 COLUMNS ( bar int ) + ), + NESTED PATH '$' AS p2 COLUMNS ( + NESTED PATH '$' AS p21 COLUMNS ( baz int ) + ) + ) + PLAN (p0 OUTER (p1 CROSS p2)) +) jt; + +SELECT * FROM JSON_TABLE( + jsonb 'null', '$[*]' AS p0 + COLUMNS ( + NESTED PATH '$' AS p1 COLUMNS ( + NESTED PATH '$' AS p11 COLUMNS ( foo int ), + NESTED PATH '$' AS p12 COLUMNS ( bar int ) + ), + NESTED PATH '$' AS p2 COLUMNS ( + NESTED PATH '$' AS p21 COLUMNS ( baz int ) + ) + ) + PLAN (p0 OUTER ((p1 UNION p11) CROSS p2)) +) jt; + +SELECT * FROM JSON_TABLE( + jsonb 'null', '$[*]' AS p0 + COLUMNS ( + NESTED PATH '$' AS p1 COLUMNS ( + NESTED PATH '$' AS p11 COLUMNS ( foo int ), + NESTED PATH '$' AS p12 COLUMNS ( bar int ) + ), + NESTED PATH '$' AS p2 COLUMNS ( + NESTED PATH '$' AS p21 COLUMNS ( baz int ) + ) + ) + PLAN (p0 OUTER ((p1 INNER p11) CROSS p2)) +) jt; + +SELECT * FROM JSON_TABLE( + jsonb 'null', '$[*]' AS p0 + COLUMNS ( + NESTED PATH '$' AS p1 COLUMNS ( + NESTED PATH '$' AS p11 COLUMNS ( foo int ), + NESTED PATH '$' AS p12 COLUMNS ( bar int ) + ), + NESTED PATH '$' AS p2 COLUMNS ( + NESTED PATH '$' AS p21 COLUMNS ( baz int ) + ) + ) + PLAN (p0 OUTER ((p1 INNER (p12 CROSS p11)) CROSS p2)) +) jt; + +SELECT * FROM JSON_TABLE( + jsonb 'null', 'strict $[*]' AS p0 + COLUMNS ( + NESTED PATH '$' AS p1 COLUMNS ( + NESTED PATH '$' AS p11 COLUMNS ( foo int ), + NESTED PATH '$' AS p12 COLUMNS ( bar int ) + ), + NESTED PATH '$' AS p2 COLUMNS ( + NESTED PATH '$' AS p21 COLUMNS ( baz int ) + ) + ) + PLAN (p0 OUTER ((p1 INNER (p12 CROSS p11)) CROSS (p2 INNER p21))) +) jt; + +SELECT * FROM JSON_TABLE( + jsonb 'null', 'strict $[*]' -- without root path name + COLUMNS ( + NESTED PATH '$' AS p1 COLUMNS ( + NESTED PATH '$' AS p11 COLUMNS ( foo int ), + NESTED PATH '$' AS p12 COLUMNS ( bar 
int ) + ), + NESTED PATH '$' AS p2 COLUMNS ( + NESTED PATH '$' AS p21 COLUMNS ( baz int ) + ) + ) + PLAN ((p1 INNER (p12 CROSS p11)) CROSS (p2 INNER p21)) +) jt; + +-- JSON_TABLE: plan execution + +CREATE TEMP TABLE jsonb_table_test (js jsonb); + +INSERT INTO jsonb_table_test +VALUES ( + '[ + {"a": 1, "b": [], "c": []}, + {"a": 2, "b": [1, 2, 3], "c": [10, null, 20]}, + {"a": 3, "b": [1, 2], "c": []}, + {"x": "4", "b": [1, 2], "c": 123} + ]' +); + +-- unspecified plan (outer, union) +select + jt.* +from + jsonb_table_test jtt, + json_table ( + jtt.js,'strict $[*]' as p + columns ( + n for ordinality, + a int path 'lax $.a' default -1 on empty, + nested path 'strict $.b[*]' as pb columns ( b int path '$' ), + nested path 'strict $.c[*]' as pc columns ( c int path '$' ) + ) + ) jt; + +-- default plan (outer, union) +select + jt.* +from + jsonb_table_test jtt, + json_table ( + jtt.js,'strict $[*]' as p + columns ( + n for ordinality, + a int path 'lax $.a' default -1 on empty, + nested path 'strict $.b[*]' as pb columns ( b int path '$' ), + nested path 'strict $.c[*]' as pc columns ( c int path '$' ) + ) + plan default (outer, union) + ) jt; + +-- specific plan (p outer (pb union pc)) +select + jt.* +from + jsonb_table_test jtt, + json_table ( + jtt.js,'strict $[*]' as p + columns ( + n for ordinality, + a int path 'lax $.a' default -1 on empty, + nested path 'strict $.b[*]' as pb columns ( b int path '$' ), + nested path 'strict $.c[*]' as pc columns ( c int path '$' ) + ) + plan (p outer (pb union pc)) + ) jt; + +-- specific plan (p outer (pc union pb)) +select + jt.* +from + jsonb_table_test jtt, + json_table ( + jtt.js,'strict $[*]' as p + columns ( + n for ordinality, + a int path 'lax $.a' default -1 on empty, + nested path 'strict $.b[*]' as pb columns ( b int path '$' ), + nested path 'strict $.c[*]' as pc columns ( c int path '$' ) + ) + plan (p outer (pc union pb)) + ) jt; + +-- default plan (inner, union) +select + jt.* +from + jsonb_table_test jtt, + json_table ( + jtt.js,'strict $[*]' as p + columns ( + n for ordinality, + a int path 'lax $.a' default -1 on empty, + nested path 'strict $.b[*]' as pb columns ( b int path '$' ), + nested path 'strict $.c[*]' as pc columns ( c int path '$' ) + ) + plan default (inner) + ) jt; + +-- specific plan (p inner (pb union pc)) +select + jt.* +from + jsonb_table_test jtt, + json_table ( + jtt.js,'strict $[*]' as p + columns ( + n for ordinality, + a int path 'lax $.a' default -1 on empty, + nested path 'strict $.b[*]' as pb columns ( b int path '$' ), + nested path 'strict $.c[*]' as pc columns ( c int path '$' ) + ) + plan (p inner (pb union pc)) + ) jt; + +-- default plan (inner, cross) +select + jt.* +from + jsonb_table_test jtt, + json_table ( + jtt.js,'strict $[*]' as p + columns ( + n for ordinality, + a int path 'lax $.a' default -1 on empty, + nested path 'strict $.b[*]' as pb columns ( b int path '$' ), + nested path 'strict $.c[*]' as pc columns ( c int path '$' ) + ) + plan default (cross, inner) + ) jt; + +-- specific plan (p inner (pb cross pc)) +select + jt.* +from + jsonb_table_test jtt, + json_table ( + jtt.js,'strict $[*]' as p + columns ( + n for ordinality, + a int path 'lax $.a' default -1 on empty, + nested path 'strict $.b[*]' as pb columns ( b int path '$' ), + nested path 'strict $.c[*]' as pc columns ( c int path '$' ) + ) + plan (p inner (pb cross pc)) + ) jt; + +-- default plan (outer, cross) +select + jt.* +from + jsonb_table_test jtt, + json_table ( + jtt.js,'strict $[*]' as p + columns ( + n for ordinality, + a 
int path 'lax $.a' default -1 on empty, + nested path 'strict $.b[*]' as pb columns ( b int path '$' ), + nested path 'strict $.c[*]' as pc columns ( c int path '$' ) + ) + plan default (outer, cross) + ) jt; + +-- specific plan (p outer (pb cross pc)) +select + jt.* +from + jsonb_table_test jtt, + json_table ( + jtt.js,'strict $[*]' as p + columns ( + n for ordinality, + a int path 'lax $.a' default -1 on empty, + nested path 'strict $.b[*]' as pb columns ( b int path '$' ), + nested path 'strict $.c[*]' as pc columns ( c int path '$' ) + ) + plan (p outer (pb cross pc)) + ) jt; + + +select + jt.*, b1 + 100 as b +from + json_table (jsonb + '[ + {"a": 1, "b": [[1, 10], [2], [3, 30, 300]], "c": [1, null, 2]}, + {"a": 2, "b": [10, 20], "c": [1, null, 2]}, + {"x": "3", "b": [11, 22, 33, 44]} + ]', + '$[*]' as p + columns ( + n for ordinality, + a int path 'lax $.a' default -1 on error, + nested path 'strict $.b[*]' as pb columns ( + b text format json path '$', + nested path 'strict $[*]' as pb1 columns ( + b1 int path '$' + ) + ), + nested path 'strict $.c[*]' as pc columns ( + c text format json path '$', + nested path 'strict $[*]' as pc1 columns ( + c1 int path '$' + ) + ) + ) + --plan default(outer, cross) + plan(p outer ((pb inner pb1) cross (pc outer pc1))) + ) jt; + +-- Should succeed (JSON arguments are passed to root and nested paths) +SELECT * +FROM + generate_series(1, 4) x, + generate_series(1, 3) y, + JSON_TABLE(jsonb + '[[1,2,3],[2,3,4,5],[3,4,5,6]]', + 'strict $[*] ? (@.[*] < $x)' + PASSING x AS x, y AS y + COLUMNS ( + y text FORMAT JSON PATH '$', + NESTED PATH 'strict $[*] ? (@ >= $y)' + COLUMNS ( + z int PATH '$' + ) + ) + ) jt; + +-- Should fail (JSON arguments are not passed to column paths) +SELECT * +FROM JSON_TABLE( + jsonb '[1,2,3]', + '$[*] ? (@ < $x)' + PASSING 10 AS x + COLUMNS (y text FORMAT JSON PATH '$ ? (@ < $x)') + ) jt; diff --git a/src/test/regress/sql/jsonpath.sql b/src/test/regress/sql/jsonpath.sql new file mode 100644 index 0000000000..5406107151 --- /dev/null +++ b/src/test/regress/sql/jsonpath.sql @@ -0,0 +1,162 @@ +--jsonpath io + +select ''::jsonpath; +select '$'::jsonpath; +select 'strict $'::jsonpath; +select 'lax $'::jsonpath; +select '$.a'::jsonpath; +select '$.a.v'::jsonpath; +select '$.a.*'::jsonpath; +select '$.*.[*]'::jsonpath; +select '$.*[*]'::jsonpath; +select '$.a.[*]'::jsonpath; +select '$.a[*]'::jsonpath; +select '$.a.[*][*]'::jsonpath; +select '$.a.[*].[*]'::jsonpath; +select '$.a[*][*]'::jsonpath; +select '$.a[*].[*]'::jsonpath; +select '$[*]'::jsonpath; +select '$[0]'::jsonpath; +select '$[*][0]'::jsonpath; +select '$[*].a'::jsonpath; +select '$[*][0].a.b'::jsonpath; +select '$.a.**.b'::jsonpath; +select '$.a.**{2}.b'::jsonpath; +select '$.a.**{2,2}.b'::jsonpath; +select '$.a.**{2,5}.b'::jsonpath; +select '$.a.**{,5}.b'::jsonpath; +select '$.a.**{5,}.b'::jsonpath; +select '$+1'::jsonpath; +select '$-1'::jsonpath; +select '$--+1'::jsonpath; +select '$.a/+-1'::jsonpath; + +select '$.g ? ($.a == 1)'::jsonpath; +select '$.g ? (@ == 1)'::jsonpath; +select '$.g ? (a == 1)'::jsonpath; +select '$.g ? (.a == 1)'::jsonpath; +select '$.g ? (@.a == 1)'::jsonpath; +select '$.g ? (@.a == 1 || a == 4)'::jsonpath; +select '$.g ? (@.a == 1 && a == 4)'::jsonpath; +select '$.g ? (@.a == 1 || a == 4 && b == 7)'::jsonpath; +select '$.g ? (@.a == 1 || !(a == 4) && b == 7)'::jsonpath; +select '$.g ? (@.a == 1 || !(x >= 123 || a == 4) && b == 7)'::jsonpath; +select '$.g ? (.x >= @[*]?(@.a > "abc"))'::jsonpath; +select '$.g ? 
((x >= 123 || a == 4) is unknown)'::jsonpath; +select '$.g ? (exists (.x))'::jsonpath; +select '$.g ? (exists (@.x ? (@ == 14)))'::jsonpath; +select '$.g ? (exists (.x ? (@ == 14)))'::jsonpath; +select '$.g ? ((x >= 123 || a == 4) && exists (.x ? (@ == 14)))'::jsonpath; +select '$.g ? (+x >= +-(+a + 2))'::jsonpath; + +select '$a'::jsonpath; +select '$a.b'::jsonpath; +select '$a[*]'::jsonpath; +select '$.g ? (zip == $zip)'::jsonpath; +select '$.a.[1,2, 3 to 16]'::jsonpath; +select '$.a[1,2, 3 to 16]'::jsonpath; +select '$.a[$a + 1, ($b[*]) to -(@[0] * 2)]'::jsonpath; +select '$.a[$.a.size() - 3]'::jsonpath; +select 'last'::jsonpath; +select '"last"'::jsonpath; +select '$.last'::jsonpath; +select '$ ? (last > 0)'::jsonpath; +select '$[last]'::jsonpath; +select '$[@ ? (last > 0)]'::jsonpath; + +select 'null.type()'::jsonpath; +select '1.type()'::jsonpath; +select '"aaa".type()'::jsonpath; +select 'aaa.type()'::jsonpath; +select 'true.type()'::jsonpath; +select '$.datetime()'::jsonpath; +select '$.datetime("datetime template")'::jsonpath; +select '$.reduce($1 + $2 + @[1])'::jsonpath; +select '$.fold($1 + $2 + @[1], 2 + 3)'::jsonpath; +select '$.min().abs() + 5'::jsonpath; +select '$.max().floor()'::jsonpath; + +select '$ ? (@ starts with "abc")'::jsonpath; +select '$ ? (@ starts with $var)'::jsonpath; + +select '$ ? (@ like_regex "pattern")'::jsonpath; +select '$ ? (@ like_regex "pattern" flag "")'::jsonpath; +select '$ ? (@ like_regex "pattern" flag "i")'::jsonpath; +select '$ ? (@ like_regex "pattern" flag "is")'::jsonpath; +select '$ ? (@ like_regex "pattern" flag "isim")'::jsonpath; +select '$ ? (@ like_regex "pattern" flag "xsms")'::jsonpath; +select '$ ? (@ like_regex "pattern" flag "a")'::jsonpath; + +select '$ < 1'::jsonpath; +select '($ < 1) || $.a.b <= $x'::jsonpath; +select '@ + 1'::jsonpath; + +select '($).a.b'::jsonpath; +select '($.a.b).c.d'::jsonpath; +select '($.a.b + -$.x.y).c.d'::jsonpath; +select '(-+$.a.b).c.d'::jsonpath; +select '1 + ($.a.b + 2).c.d'::jsonpath; +select '1 + ($.a.b > 2).c.d'::jsonpath; + +select '1, 2 + 3, $.a[*] + 5'::jsonpath; +select '(1, 2, $.a)'::jsonpath; +select '(1, 2, $.a).a[*]'::jsonpath; +select '(1, 2, $.a) == 5'::jsonpath; +select '$[(1, 2, $.a) to (3, 4)]'::jsonpath; +select '$[(1, (2, $.a)), 3, (4, 5)]'::jsonpath; + +select '[]'::jsonpath; +select '[[1, 2], ([(3, 4, 5), 6], []), $.a[*]]'::jsonpath; + +select '{}'::jsonpath; +select '{a: 1 + 2}'::jsonpath; +select '{a: 1 + 2, b : (1,2), c: [$[*],4,5], d: { "e e e": "f f f" }}'::jsonpath; + +select '$ ? (a < 1)'::jsonpath; +select '$ ? (a < -1)'::jsonpath; +select '$ ? (a < +1)'::jsonpath; +select '$ ? (a < .1)'::jsonpath; +select '$ ? (a < -.1)'::jsonpath; +select '$ ? (a < +.1)'::jsonpath; +select '$ ? (a < 0.1)'::jsonpath; +select '$ ? (a < -0.1)'::jsonpath; +select '$ ? (a < +0.1)'::jsonpath; +select '$ ? (a < 10.1)'::jsonpath; +select '$ ? (a < -10.1)'::jsonpath; +select '$ ? (a < +10.1)'::jsonpath; +select '$ ? (a < 1e1)'::jsonpath; +select '$ ? (a < -1e1)'::jsonpath; +select '$ ? (a < +1e1)'::jsonpath; +select '$ ? (a < .1e1)'::jsonpath; +select '$ ? (a < -.1e1)'::jsonpath; +select '$ ? (a < +.1e1)'::jsonpath; +select '$ ? (a < 0.1e1)'::jsonpath; +select '$ ? (a < -0.1e1)'::jsonpath; +select '$ ? (a < +0.1e1)'::jsonpath; +select '$ ? (a < 10.1e1)'::jsonpath; +select '$ ? (a < -10.1e1)'::jsonpath; +select '$ ? (a < +10.1e1)'::jsonpath; +select '$ ? (a < 1e-1)'::jsonpath; +select '$ ? (a < -1e-1)'::jsonpath; +select '$ ? (a < +1e-1)'::jsonpath; +select '$ ? 
(a < .1e-1)'::jsonpath; +select '$ ? (a < -.1e-1)'::jsonpath; +select '$ ? (a < +.1e-1)'::jsonpath; +select '$ ? (a < 0.1e-1)'::jsonpath; +select '$ ? (a < -0.1e-1)'::jsonpath; +select '$ ? (a < +0.1e-1)'::jsonpath; +select '$ ? (a < 10.1e-1)'::jsonpath; +select '$ ? (a < -10.1e-1)'::jsonpath; +select '$ ? (a < +10.1e-1)'::jsonpath; +select '$ ? (a < 1e+1)'::jsonpath; +select '$ ? (a < -1e+1)'::jsonpath; +select '$ ? (a < +1e+1)'::jsonpath; +select '$ ? (a < .1e+1)'::jsonpath; +select '$ ? (a < -.1e+1)'::jsonpath; +select '$ ? (a < +.1e+1)'::jsonpath; +select '$ ? (a < 0.1e+1)'::jsonpath; +select '$ ? (a < -0.1e+1)'::jsonpath; +select '$ ? (a < +0.1e+1)'::jsonpath; +select '$ ? (a < 10.1e+1)'::jsonpath; +select '$ ? (a < -10.1e+1)'::jsonpath; +select '$ ? (a < +10.1e+1)'::jsonpath; diff --git a/src/test/regress/sql/sqljson.sql b/src/test/regress/sql/sqljson.sql new file mode 100644 index 0000000000..cbf3d6ca65 --- /dev/null +++ b/src/test/regress/sql/sqljson.sql @@ -0,0 +1,300 @@ +-- JSON_OBJECT() +SELECT JSON_OBJECT(); +SELECT JSON_OBJECT(RETURNING json); +SELECT JSON_OBJECT(RETURNING json FORMAT JSON); +SELECT JSON_OBJECT(RETURNING jsonb); +SELECT JSON_OBJECT(RETURNING jsonb FORMAT JSON); +SELECT JSON_OBJECT(RETURNING text); +SELECT JSON_OBJECT(RETURNING text FORMAT JSON); +SELECT JSON_OBJECT(RETURNING text FORMAT JSON ENCODING UTF8); +SELECT JSON_OBJECT(RETURNING text FORMAT JSON ENCODING INVALID_ENCODING); +SELECT JSON_OBJECT(RETURNING bytea); +SELECT JSON_OBJECT(RETURNING bytea FORMAT JSON); +SELECT JSON_OBJECT(RETURNING bytea FORMAT JSON ENCODING UTF8); +SELECT JSON_OBJECT(RETURNING bytea FORMAT JSON ENCODING UTF16); +SELECT JSON_OBJECT(RETURNING bytea FORMAT JSON ENCODING UTF32); + +SELECT JSON_OBJECT('foo': NULL::int FORMAT JSON); +SELECT JSON_OBJECT('foo': NULL::int FORMAT JSON ENCODING UTF8); +SELECT JSON_OBJECT('foo': NULL::json FORMAT JSON); +SELECT JSON_OBJECT('foo': NULL::json FORMAT JSON ENCODING UTF8); +SELECT JSON_OBJECT('foo': NULL::jsonb FORMAT JSON); +SELECT JSON_OBJECT('foo': NULL::jsonb FORMAT JSON ENCODING UTF8); + +SELECT JSON_OBJECT(NULL: 1); +SELECT JSON_OBJECT('a': 2 + 3); +SELECT JSON_OBJECT('a' VALUE 2 + 3); +--SELECT JSON_OBJECT(KEY 'a' VALUE 2 + 3); +SELECT JSON_OBJECT('a' || 2: 1); +SELECT JSON_OBJECT(('a' || 2) VALUE 1); +--SELECT JSON_OBJECT('a' || 2 VALUE 1); +--SELECT JSON_OBJECT(KEY 'a' || 2 VALUE 1); +SELECT JSON_OBJECT('a': 2::text); +SELECT JSON_OBJECT('a' VALUE 2::text); +--SELECT JSON_OBJECT(KEY 'a' VALUE 2::text); +SELECT JSON_OBJECT(1::text: 2); +SELECT JSON_OBJECT((1::text) VALUE 2); +--SELECT JSON_OBJECT(1::text VALUE 2); +--SELECT JSON_OBJECT(KEY 1::text VALUE 2); +SELECT JSON_OBJECT(json '[1]': 123); +SELECT JSON_OBJECT(ARRAY[1,2,3]: 'aaa'); + +SELECT JSON_OBJECT( + 'a': '123', + 1.23: 123, + 'c': json '[ 1,true,{ } ]', + 'd': jsonb '{ "x" : 123.45 }' +); + +SELECT JSON_OBJECT( + 'a': '123', + 1.23: 123, + 'c': json '[ 1,true,{ } ]', + 'd': jsonb '{ "x" : 123.45 }' + RETURNING jsonb +); + +/* +SELECT JSON_OBJECT( + 'a': '123', + KEY 1.23 VALUE 123, + 'c' VALUE json '[1, true, {}]' +); +*/ + +SELECT JSON_OBJECT('a': '123', 'b': JSON_OBJECT('a': 111, 'b': 'aaa')); +SELECT JSON_OBJECT('a': '123', 'b': JSON_OBJECT('a': 111, 'b': 'aaa' RETURNING jsonb)); + +SELECT JSON_OBJECT('a': JSON_OBJECT('b': 1 RETURNING text)); +SELECT JSON_OBJECT('a': JSON_OBJECT('b': 1 RETURNING text) FORMAT JSON); +SELECT JSON_OBJECT('a': JSON_OBJECT('b': 1 RETURNING bytea)); +SELECT JSON_OBJECT('a': JSON_OBJECT('b': 1 RETURNING bytea) FORMAT JSON); + +SELECT 
JSON_OBJECT('a': '1', 'b': NULL, 'c': 2); +SELECT JSON_OBJECT('a': '1', 'b': NULL, 'c': 2 NULL ON NULL); +SELECT JSON_OBJECT('a': '1', 'b': NULL, 'c': 2 ABSENT ON NULL); + +SELECT JSON_OBJECT(1: 1, '1': NULL WITH UNIQUE); +SELECT JSON_OBJECT(1: 1, '1': NULL ABSENT ON NULL WITH UNIQUE); +SELECT JSON_OBJECT(1: 1, '1': NULL NULL ON NULL WITH UNIQUE RETURNING jsonb); +SELECT JSON_OBJECT(1: 1, '1': NULL ABSENT ON NULL WITH UNIQUE RETURNING jsonb); + +SELECT JSON_OBJECT(1: 1, '2': NULL, '1': 1 NULL ON NULL WITH UNIQUE); +SELECT JSON_OBJECT(1: 1, '2': NULL, '1': 1 ABSENT ON NULL WITH UNIQUE); +SELECT JSON_OBJECT(1: 1, '2': NULL, '1': 1 ABSENT ON NULL WITHOUT UNIQUE); +SELECT JSON_OBJECT(1: 1, '2': NULL, '1': 1 ABSENT ON NULL WITH UNIQUE RETURNING jsonb); +SELECT JSON_OBJECT(1: 1, '2': NULL, '1': 1 ABSENT ON NULL WITHOUT UNIQUE RETURNING jsonb); +SELECT JSON_OBJECT(1: 1, '2': NULL, '3': 1, 4: NULL, '5': 'a' ABSENT ON NULL WITH UNIQUE RETURNING jsonb); + + +-- JSON_ARRAY() +SELECT JSON_ARRAY(); +SELECT JSON_ARRAY(RETURNING json); +SELECT JSON_ARRAY(RETURNING json FORMAT JSON); +SELECT JSON_ARRAY(RETURNING jsonb); +SELECT JSON_ARRAY(RETURNING jsonb FORMAT JSON); +SELECT JSON_ARRAY(RETURNING text); +SELECT JSON_ARRAY(RETURNING text FORMAT JSON); +SELECT JSON_ARRAY(RETURNING text FORMAT JSON ENCODING UTF8); +SELECT JSON_ARRAY(RETURNING text FORMAT JSON ENCODING INVALID_ENCODING); +SELECT JSON_ARRAY(RETURNING bytea); +SELECT JSON_ARRAY(RETURNING bytea FORMAT JSON); +SELECT JSON_ARRAY(RETURNING bytea FORMAT JSON ENCODING UTF8); +SELECT JSON_ARRAY(RETURNING bytea FORMAT JSON ENCODING UTF16); +SELECT JSON_ARRAY(RETURNING bytea FORMAT JSON ENCODING UTF32); + +SELECT JSON_ARRAY('aaa', 111, true, array[1,2,3], NULL, json '{"a": [1]}', jsonb '["a",3]'); + +SELECT JSON_ARRAY('a', NULL, 'b' NULL ON NULL); +SELECT JSON_ARRAY('a', NULL, 'b' ABSENT ON NULL); +SELECT JSON_ARRAY(NULL, NULL, 'b' ABSENT ON NULL); +SELECT JSON_ARRAY('a', NULL, 'b' NULL ON NULL RETURNING jsonb); +SELECT JSON_ARRAY('a', NULL, 'b' ABSENT ON NULL RETURNING jsonb); +SELECT JSON_ARRAY(NULL, NULL, 'b' ABSENT ON NULL RETURNING jsonb); + +SELECT JSON_ARRAY(JSON_ARRAY('{ "a" : 123 }' RETURNING text)); +SELECT JSON_ARRAY(JSON_ARRAY('{ "a" : 123 }' FORMAT JSON RETURNING text)); +SELECT JSON_ARRAY(JSON_ARRAY('{ "a" : 123 }' FORMAT JSON RETURNING text) FORMAT JSON); + +SELECT JSON_ARRAY(SELECT i FROM (VALUES (1), (2), (NULL), (4)) foo(i)); +SELECT JSON_ARRAY(SELECT i FROM (VALUES (NULL::int[]), ('{1,2}'), (NULL), (NULL), ('{3,4}'), (NULL)) foo(i)); +SELECT JSON_ARRAY(SELECT i FROM (VALUES (NULL::int[]), ('{1,2}'), (NULL), (NULL), ('{3,4}'), (NULL)) foo(i) RETURNING jsonb); +--SELECT JSON_ARRAY(SELECT i FROM (VALUES (NULL::int[]), ('{1,2}'), (NULL), (NULL), ('{3,4}'), (NULL)) foo(i) NULL ON NULL); +--SELECT JSON_ARRAY(SELECT i FROM (VALUES (NULL::int[]), ('{1,2}'), (NULL), (NULL), ('{3,4}'), (NULL)) foo(i) NULL ON NULL RETURNING jsonb); +SELECT JSON_ARRAY(SELECT i FROM (VALUES (3), (1), (NULL), (2)) foo(i) ORDER BY i); +-- Should fail +SELECT JSON_ARRAY(SELECT FROM (VALUES (1)) foo(i)); +SELECT JSON_ARRAY(SELECT i, i FROM (VALUES (1)) foo(i)); +SELECT JSON_ARRAY(SELECT * FROM (VALUES (1, 2)) foo(i, j)); + +-- JSON_ARRAYAGG() +SELECT JSON_ARRAYAGG(i) IS NULL, + JSON_ARRAYAGG(i RETURNING jsonb) IS NULL +FROM generate_series(1, 0) i; + +SELECT JSON_ARRAYAGG(i), + JSON_ARRAYAGG(i RETURNING jsonb) +FROM generate_series(1, 5) i; + +SELECT JSON_ARRAYAGG(i ORDER BY i DESC) +FROM generate_series(1, 5) i; + +SELECT JSON_ARRAYAGG(i::text::json) +FROM 
generate_series(1, 5) i; + +SELECT JSON_ARRAYAGG(JSON_ARRAY(i, i + 1 RETURNING text) FORMAT JSON) +FROM generate_series(1, 5) i; + +SELECT JSON_ARRAYAGG(NULL), + JSON_ARRAYAGG(NULL RETURNING jsonb) +FROM generate_series(1, 5); + +SELECT JSON_ARRAYAGG(NULL NULL ON NULL), + JSON_ARRAYAGG(NULL NULL ON NULL RETURNING jsonb) +FROM generate_series(1, 5); + +SELECT + JSON_ARRAYAGG(bar), + JSON_ARRAYAGG(bar RETURNING jsonb), + JSON_ARRAYAGG(bar ABSENT ON NULL), + JSON_ARRAYAGG(bar ABSENT ON NULL RETURNING jsonb), + JSON_ARRAYAGG(bar NULL ON NULL), + JSON_ARRAYAGG(bar NULL ON NULL RETURNING jsonb), + JSON_ARRAYAGG(foo), + JSON_ARRAYAGG(foo RETURNING jsonb), + JSON_ARRAYAGG(foo ORDER BY bar) FILTER (WHERE bar > 2), + JSON_ARRAYAGG(foo ORDER BY bar RETURNING jsonb) FILTER (WHERE bar > 2) +FROM + (VALUES (NULL), (3), (1), (NULL), (NULL), (5), (2), (4), (NULL)) foo(bar); + +SELECT + bar, JSON_ARRAYAGG(bar) FILTER (WHERE bar > 2) OVER (PARTITION BY foo.bar % 2) +FROM + (VALUES (NULL), (3), (1), (NULL), (NULL), (5), (2), (4), (NULL), (5), (4)) foo(bar); + +-- JSON_OBJECTAGG() +SELECT JSON_OBJECTAGG('key': 1) IS NULL, + JSON_OBJECTAGG('key': 1 RETURNING jsonb) IS NULL +WHERE FALSE; + +SELECT JSON_OBJECTAGG(NULL: 1); + +SELECT JSON_OBJECTAGG(NULL: 1 RETURNING jsonb); + +SELECT + JSON_OBJECTAGG(i: i), +-- JSON_OBJECTAGG(i VALUE i), +-- JSON_OBJECTAGG(KEY i VALUE i), + JSON_OBJECTAGG(i: i RETURNING jsonb) +FROM + generate_series(1, 5) i; + +SELECT + JSON_OBJECTAGG(k: v), + JSON_OBJECTAGG(k: v NULL ON NULL), + JSON_OBJECTAGG(k: v ABSENT ON NULL), + JSON_OBJECTAGG(k: v RETURNING jsonb), + JSON_OBJECTAGG(k: v NULL ON NULL RETURNING jsonb), + JSON_OBJECTAGG(k: v ABSENT ON NULL RETURNING jsonb) +FROM + (VALUES (1, 1), (1, NULL), (2, NULL), (3, 3)) foo(k, v); + +SELECT JSON_OBJECTAGG(k: v WITH UNIQUE KEYS) +FROM (VALUES (1, 1), (1, NULL), (2, 2)) foo(k, v); + +SELECT JSON_OBJECTAGG(k: v ABSENT ON NULL WITH UNIQUE KEYS) +FROM (VALUES (1, 1), (1, NULL), (2, 2)) foo(k, v); + +SELECT JSON_OBJECTAGG(k: v ABSENT ON NULL WITH UNIQUE KEYS) +FROM (VALUES (1, 1), (0, NULL), (3, NULL), (2, 2), (4, NULL)) foo(k, v); + +SELECT JSON_OBJECTAGG(k: v WITH UNIQUE KEYS RETURNING jsonb) +FROM (VALUES (1, 1), (1, NULL), (2, 2)) foo(k, v); + +SELECT JSON_OBJECTAGG(k: v ABSENT ON NULL WITH UNIQUE KEYS RETURNING jsonb) +FROM (VALUES (1, 1), (1, NULL), (2, 2)) foo(k, v); + +-- IS JSON predicate +SELECT NULL IS JSON; +SELECT NULL IS NOT JSON; +SELECT NULL::json IS JSON; +SELECT NULL::jsonb IS JSON; +SELECT NULL::text IS JSON; +SELECT NULL::bytea IS JSON; +SELECT NULL::int IS JSON; + +SELECT '' IS JSON; + +SELECT bytea '\x00' IS JSON; + +CREATE TABLE test_is_json (js text); + +INSERT INTO test_is_json VALUES + (NULL), + (''), + ('123'), + ('"aaa "'), + ('true'), + ('null'), + ('[]'), + ('[1, "2", {}]'), + ('{}'), + ('{ "a": 1, "b": null }'), + ('{ "a": 1, "a": null }'), + ('{ "a": 1, "b": [{ "a": 1 }, { "a": 2 }] }'), + ('{ "a": 1, "b": [{ "a": 1, "b": 0, "a": 2 }] }'), + ('aaa'), + ('{a:1}'), + ('["a",]'); + +SELECT + js, + js IS JSON "IS JSON", + js IS NOT JSON "IS NOT JSON", + js IS JSON VALUE "IS VALUE", + js IS JSON OBJECT "IS OBJECT", + js IS JSON ARRAY "IS ARRAY", + js IS JSON SCALAR "IS SCALAR", + js IS JSON WITHOUT UNIQUE KEYS "WITHOUT UNIQUE", + js IS JSON WITH UNIQUE KEYS "WITH UNIQUE" +FROM + test_is_json; + +SELECT + js, + js IS JSON "IS JSON", + js IS NOT JSON "IS NOT JSON", + js IS JSON VALUE "IS VALUE", + js IS JSON OBJECT "IS OBJECT", + js IS JSON ARRAY "IS ARRAY", + js IS JSON SCALAR "IS SCALAR", + js IS JSON WITHOUT 
UNIQUE KEYS "WITHOUT UNIQUE", + js IS JSON WITH UNIQUE KEYS "WITH UNIQUE" +FROM + (SELECT js::json FROM test_is_json WHERE js IS JSON) foo(js); + +SELECT + js0, + js IS JSON "IS JSON", + js IS NOT JSON "IS NOT JSON", + js IS JSON VALUE "IS VALUE", + js IS JSON OBJECT "IS OBJECT", + js IS JSON ARRAY "IS ARRAY", + js IS JSON SCALAR "IS SCALAR", + js IS JSON WITHOUT UNIQUE KEYS "WITHOUT UNIQUE", + js IS JSON WITH UNIQUE KEYS "WITH UNIQUE" +FROM + (SELECT js, js::bytea FROM test_is_json WHERE js IS JSON) foo(js0, js); + +SELECT + js, + js IS JSON "IS JSON", + js IS NOT JSON "IS NOT JSON", + js IS JSON VALUE "IS VALUE", + js IS JSON OBJECT "IS OBJECT", + js IS JSON ARRAY "IS ARRAY", + js IS JSON SCALAR "IS SCALAR", + js IS JSON WITHOUT UNIQUE KEYS "WITHOUT UNIQUE", + js IS JSON WITH UNIQUE KEYS "WITH UNIQUE" +FROM + (SELECT js::jsonb FROM test_is_json WHERE js IS JSON) foo(js);