From ecf980b67baf3367c4ad332b5091404c7e775474 Mon Sep 17 00:00:00 2001 From: gentle_hu Date: Thu, 2 Sep 2021 20:28:19 +0800 Subject: [PATCH] codestyle --- src/common/backend/utils/adt/json.cpp | 521 ++++-------- src/common/backend/utils/adt/jsonb.cpp | 82 +- src/common/backend/utils/adt/jsonb_gin.cpp | 139 ++-- src/common/backend/utils/adt/jsonb_op.cpp | 121 ++- src/common/backend/utils/adt/jsonb_util.cpp | 310 +++---- src/common/backend/utils/adt/jsonfuncs.cpp | 844 +++++++------------- src/common/backend/utils/adt/numeric.cpp | 51 +- src/gausskernel/optimizer/util/pgxcship.cpp | 4 +- 8 files changed, 690 insertions(+), 1382 deletions(-) diff --git a/src/common/backend/utils/adt/json.cpp b/src/common/backend/utils/adt/json.cpp index 1ed9b4cae..3feec1f9e 100644 --- a/src/common/backend/utils/adt/json.cpp +++ b/src/common/backend/utils/adt/json.cpp @@ -3,7 +3,7 @@ * json.c * JSON data type support. * - * Portions Copyright (c) 2020 Huawei Technologies Co.,Ltd. + * Portions Copyright (c) 2021 Huawei Technologies Co.,Ltd. * Portions Copyright (c) 1996-2013, PostgreSQL Global Development Group * Portions Copyright (c) 1994, Regents of the University of California * @@ -39,17 +39,17 @@ * mechanism, but is passed explicitly to the error reporting routine * for better diagnostics. */ -typedef enum /* contexts of JSON parser */ +typedef enum /* contexts of JSON parser */ { JSON_PARSE_VALUE, /* expecting a value */ - JSON_PARSE_STRING, /* expecting a string (for a field name) */ - JSON_PARSE_ARRAY_START, /* saw '[', expecting value or ']' */ - JSON_PARSE_ARRAY_NEXT, /* saw array element, expecting ',' or ']' */ - JSON_PARSE_OBJECT_START, /* saw '{', expecting label or '}' */ - JSON_PARSE_OBJECT_LABEL, /* saw object label, expecting ':' */ - JSON_PARSE_OBJECT_NEXT, /* saw object value, expecting ',' or '}' */ - JSON_PARSE_OBJECT_COMMA, /* saw object ',', expecting next label */ - JSON_PARSE_END /* saw the end of a document, expect nothing */ + JSON_PARSE_STRING, /* expecting a string (for a field name) */ + JSON_PARSE_ARRAY_START, /* saw '[', expecting value or ']' */ + JSON_PARSE_ARRAY_NEXT, /* saw array element, expecting ',' or ']' */ + JSON_PARSE_OBJECT_START, /* saw '{', expecting label or '}' */ + JSON_PARSE_OBJECT_LABEL, /* saw object label, expecting ':' */ + JSON_PARSE_OBJECT_NEXT, /* saw object value, expecting ',' or '}' */ + JSON_PARSE_OBJECT_COMMA, /* saw object ',', expecting next label */ + JSON_PARSE_END /* saw the end of a document, expect nothing */ } JsonParseContext; static inline void json_lex(JsonLexContext *lex); @@ -64,18 +64,13 @@ static void report_parse_error(JsonParseContext ctx, JsonLexContext *lex); static void report_invalid_token(JsonLexContext *lex); static int report_json_context(JsonLexContext *lex); static char *extract_mb_char(char *s); -static void composite_to_json(Datum composite, StringInfo result, - bool use_line_feeds); -static void array_dim_to_json(StringInfo result, int dim, int ndims, int *dims, - Datum *vals, bool *nulls, int *valcount, - TYPCATEGORY tcategory, Oid typoutputfunc, - bool use_line_feeds); -static void array_to_json_internal(Datum array, StringInfo result, - bool use_line_feeds); -static void datum_to_json(Datum val, bool is_null, StringInfo result, - TYPCATEGORY tcategory, Oid typoutputfunc, bool key_scalar); -static void add_json(Datum val, bool is_null, StringInfo result, - Oid val_type, bool key_scalar); +static void composite_to_json(Datum composite, StringInfo result, bool use_line_feeds); +static void array_dim_to_json(StringInfo 
result, int dim, int ndims, int *dims, Datum *vals, bool *nulls, + int *valcount, TYPCATEGORY tcategory, Oid typoutputfunc, bool use_line_feeds); +static void array_to_json_internal(Datum array, StringInfo result, bool use_line_feeds); +static void datum_to_json(Datum val, bool is_null, StringInfo result, TYPCATEGORY tcategory, Oid typoutputfunc, + bool key_scalar); +static void add_json(Datum val, bool is_null, StringInfo result, Oid val_type, bool key_scalar); /* the null action object used for pure validation */ static JsonSemAction nullSemAction = @@ -85,14 +80,12 @@ static JsonSemAction nullSemAction = }; /* Recursive Descent parser support routines */ - /* * lex_peek * * what is the current look_ahead token? */ -static inline JsonTokenType -lex_peek(JsonLexContext *lex) +static inline JsonTokenType lex_peek(JsonLexContext *lex) { return lex->token_type; } @@ -106,8 +99,7 @@ lex_peek(JsonLexContext *lex) * * returns true if the token matched, false otherwise. */ -static inline bool -lex_accept(JsonLexContext *lex, JsonTokenType token, char **lexeme) +static inline bool lex_accept(JsonLexContext *lex, JsonTokenType token, char **lexeme) { if (lex->token_type == token) { if (lexeme != NULL) { @@ -137,8 +129,7 @@ lex_accept(JsonLexContext *lex, JsonTokenType token, char **lexeme) * move the lexer to the next token if the current look_ahead token matches * the parameter token. Otherwise, report an error. */ -static inline void -lex_expect(JsonParseContext ctx, JsonLexContext *lex, JsonTokenType token) +static inline void lex_expect(JsonParseContext ctx, JsonLexContext *lex, JsonTokenType token) { if (!lex_accept(lex, token, NULL)) report_parse_error(ctx, lex);; @@ -163,8 +154,7 @@ lex_expect(JsonParseContext ctx, JsonLexContext *lex, JsonTokenType token) /* * Input. */ -Datum -json_in(PG_FUNCTION_ARGS) +Datum json_in(PG_FUNCTION_ARGS) { char* json = PG_GETARG_CSTRING(0); text *result = cstring_to_text(json); @@ -181,8 +171,7 @@ json_in(PG_FUNCTION_ARGS) /* * Output. */ -Datum -json_out(PG_FUNCTION_ARGS) +Datum json_out(PG_FUNCTION_ARGS) { /* we needn't detoast because text_to_cstring will handle that */ Datum txt = PG_GETARG_DATUM(0); @@ -193,8 +182,7 @@ json_out(PG_FUNCTION_ARGS) /* * Binary send. */ -Datum -json_send(PG_FUNCTION_ARGS) +Datum json_send(PG_FUNCTION_ARGS) { text *t = PG_GETARG_TEXT_PP(0); StringInfoData buf; @@ -207,8 +195,7 @@ json_send(PG_FUNCTION_ARGS) /* * Binary receive. */ -Datum -json_recv(PG_FUNCTION_ARGS) +Datum json_recv(PG_FUNCTION_ARGS) { StringInfo buf = (StringInfo) PG_GETARG_POINTER(0); char *str = NULL; @@ -236,16 +223,12 @@ json_recv(PG_FUNCTION_ARGS) * If you already have the json as a text* value, use the first of these * functions, otherwise use makeJsonLexContextCstringLen(). 
*/ -JsonLexContext * -makeJsonLexContext(text *json, bool need_escapes) +JsonLexContext *makeJsonLexContext(text *json, bool need_escapes) { - return makeJsonLexContextCstringLen(VARDATA(json), - VARSIZE(json) - VARHDRSZ, - need_escapes); + return makeJsonLexContextCstringLen(VARDATA(json), VARSIZE(json) - VARHDRSZ, need_escapes); } -JsonLexContext * -makeJsonLexContextCstringLen(char *json, int len, bool need_escapes) +JsonLexContext *makeJsonLexContextCstringLen(char *json, int len, bool need_escapes) { JsonLexContext *lex = (JsonLexContext*)palloc0(sizeof(JsonLexContext)); @@ -268,14 +251,12 @@ makeJsonLexContextCstringLen(char *json, int len, bool need_escapes) * action routines to be called at appropriate spots during parsing, and a * pointer to a state object to be passed to those routines. */ -void -pg_parse_json(JsonLexContext *lex, JsonSemAction *sem) +void pg_parse_json(JsonLexContext *lex, JsonSemAction *sem) { JsonTokenType tok; /* get the initial token */ json_lex(lex); - tok = lex_peek(lex); /* parse by recursive descent */ @@ -288,24 +269,22 @@ pg_parse_json(JsonLexContext *lex, JsonSemAction *sem) parse_array(lex, sem); break; default: - parse_scalar(lex, sem); /* json can be a bare scalar */ + parse_scalar(lex, sem); /* json can be a bare scalar */ } lex_expect(JSON_PARSE_END, lex, JSON_TOKEN_END); - } /* - * Recursive Descent parse routines. There is one for each structural - * element in a json document: - * - scalar (string, number, true, false, null) - * - array ( [ ] ) - * - array element - * - object ( { } ) - * - object field + * Recursive Descent parse routines. There is one for each structural + * element in a json document: + * - scalar (string, number, true, false, null) + * - array ( [ ] ) + * - array element + * - object ( { } ) + * - object field */ -static inline void -parse_scalar(JsonLexContext *lex, JsonSemAction *sem) +static inline void parse_scalar(JsonLexContext *lex, JsonSemAction *sem) { char *val = NULL; json_scalar_action sfunc = sem->scalar; @@ -313,10 +292,8 @@ parse_scalar(JsonLexContext *lex, JsonSemAction *sem) JsonTokenType tok = lex_peek(lex); valaddr = sfunc == NULL ? 
NULL : &val; - /* a scalar must be a string, a number, true, false, or null */ - switch (tok) - { + switch (tok) { case JSON_TOKEN_TRUE: lex_accept(lex, JSON_TOKEN_TRUE, valaddr); break; @@ -341,14 +318,12 @@ parse_scalar(JsonLexContext *lex, JsonSemAction *sem) } } -static void -parse_object_field(JsonLexContext *lex, JsonSemAction *sem) +static void parse_object_field(JsonLexContext *lex, JsonSemAction *sem) { /* * an object field is "fieldname" : value where value can be a scalar, * object or array */ - char *fname = NULL; /* keep compiler quiet */ json_ofield_action ostart = sem->object_field_start; json_ofield_action oend = sem->object_field_end; @@ -359,22 +334,18 @@ parse_object_field(JsonLexContext *lex, JsonSemAction *sem) if (ostart != NULL || oend != NULL) { fnameaddr = &fname; } - if (!lex_accept(lex, JSON_TOKEN_STRING, fnameaddr)) { report_parse_error(JSON_PARSE_STRING, lex); } lex_expect(JSON_PARSE_OBJECT_LABEL, lex, JSON_TOKEN_COLON); - tok = lex_peek(lex); isnull = tok == JSON_TOKEN_NULL; if (ostart != NULL) { (*ostart) (sem->semstate, fname, isnull); } - - switch (tok) - { + switch (tok) { case JSON_TOKEN_OBJECT_START: parse_object(lex, sem); break; @@ -388,14 +359,12 @@ parse_object_field(JsonLexContext *lex, JsonSemAction *sem) if (oend != NULL) { (*oend) (sem->semstate, fname, isnull); } - if (fname != NULL) { pfree(fname); } } -static void -parse_object(JsonLexContext *lex, JsonSemAction *sem) +static void parse_object(JsonLexContext *lex, JsonSemAction *sem) { /* * an object is a possibly empty sequence of object fields, separated by @@ -409,7 +378,6 @@ parse_object(JsonLexContext *lex, JsonSemAction *sem) (*ostart) (sem->semstate); } - /* * Data inside an object is at a higher nesting level than the object * itself. Note that we increment this after we call the semantic routine @@ -422,8 +390,7 @@ parse_object(JsonLexContext *lex, JsonSemAction *sem) lex_expect(JSON_PARSE_OBJECT_START, lex, JSON_TOKEN_OBJECT_START); tok = lex_peek(lex); - switch (tok) - { + switch (tok) { case JSON_TOKEN_STRING: parse_object_field(lex, sem); while (lex_accept(lex, JSON_TOKEN_COMMA, NULL)) @@ -437,7 +404,6 @@ parse_object(JsonLexContext *lex, JsonSemAction *sem) } lex_expect(JSON_PARSE_OBJECT_NEXT, lex, JSON_TOKEN_OBJECT_END); - lex->lex_level--; if (oend != NULL) { @@ -445,15 +411,12 @@ parse_object(JsonLexContext *lex, JsonSemAction *sem) } } -static void -parse_array_element(JsonLexContext *lex, JsonSemAction *sem) +static void parse_array_element(JsonLexContext *lex, JsonSemAction *sem) { json_aelem_action astart = sem->array_element_start; json_aelem_action aend = sem->array_element_end; JsonTokenType tok = lex_peek(lex); - bool isnull; - isnull = tok == JSON_TOKEN_NULL; if (astart != NULL) { @@ -461,8 +424,7 @@ parse_array_element(JsonLexContext *lex, JsonSemAction *sem) } /* an array element is any object, array or scalar */ - switch (tok) - { + switch (tok) { case JSON_TOKEN_OBJECT_START: parse_object(lex, sem); break; @@ -478,8 +440,7 @@ parse_array_element(JsonLexContext *lex, JsonSemAction *sem) } } -static void -parse_array(JsonLexContext *lex, JsonSemAction *sem) +static void parse_array(JsonLexContext *lex, JsonSemAction *sem) { /* * an array is a possibly empty sequence of array elements, separated by @@ -499,18 +460,13 @@ parse_array(JsonLexContext *lex, JsonSemAction *sem) * array end. 
*/ lex->lex_level++; - lex_expect(JSON_PARSE_ARRAY_START, lex, JSON_TOKEN_ARRAY_START); if (lex_peek(lex) != JSON_TOKEN_ARRAY_END) { - parse_array_element(lex, sem); - while (lex_accept(lex, JSON_TOKEN_COMMA, NULL)) parse_array_element(lex, sem); } - lex_expect(JSON_PARSE_ARRAY_NEXT, lex, JSON_TOKEN_ARRAY_END); - lex->lex_level--; if (aend != NULL) { @@ -521,11 +477,10 @@ parse_array(JsonLexContext *lex, JsonSemAction *sem) /* * Lex one token from the input stream. */ -static inline void -json_lex(JsonLexContext *lex) +static inline void json_lex(JsonLexContext *lex) { char *s = NULL; - int len; + int len; /* Skip leading whitespace. */ s = lex->token_terminator; @@ -604,60 +559,59 @@ json_lex(JsonLexContext *lex) lex->token_type = JSON_TOKEN_NUMBER; break; default: { - char *p = NULL; + char *p = NULL; - /* - * We're not dealing with a string, number, legal - * punctuation mark, or end of string. The only legal - * tokens we might find here are true, false, and null, - * but for error reporting purposes we scan until we see a - * non-alphanumeric character. That way, we can report - * the whole word as an unexpected token, rather than just - * some unintuitive prefix thereof. - */ - for (p = s; p - s < lex->input_length - len && JSON_ALPHANUMERIC_CHAR(*p); p++) - /* skip */ ; + /* + * We're not dealing with a string, number, legal + * punctuation mark, or end of string. The only legal + * tokens we might find here are true, false, and null, + * but for error reporting purposes we scan until we see a + * non-alphanumeric character. That way, we can report + * the whole word as an unexpected token, rather than just + * some unintuitive prefix thereof. + */ + for (p = s; p - s < lex->input_length - len && JSON_ALPHANUMERIC_CHAR(*p); p++) + ; /* skip */ - /* - * We got some sort of unexpected punctuation or an - * otherwise unexpected character, so just complain about - * that one character. - */ - if (p == s) { - lex->prev_token_terminator = lex->token_terminator; - lex->token_terminator = s + 1; - report_invalid_token(lex); - } - - /* - * We've got a real alphanumeric token here. If it - * happens to be true, false, or null, all is well. If - * not, error out. - */ + /* + * We got some sort of unexpected punctuation or an + * otherwise unexpected character, so just complain about + * that one character. + */ + if (p == s) { lex->prev_token_terminator = lex->token_terminator; - lex->token_terminator = p; - if (p - s == 4) { - if (memcmp(s, "true", 4) == 0) - lex->token_type = JSON_TOKEN_TRUE; - else if (memcmp(s, "null", 4) == 0) - lex->token_type = JSON_TOKEN_NULL; - else - report_invalid_token(lex); - } else if (p - s == 5 && memcmp(s, "false", 5) == 0) { - lex->token_type = JSON_TOKEN_FALSE; - } else { - report_invalid_token(lex); - } + lex->token_terminator = s + 1; + report_invalid_token(lex); } - } /* end of switch */ + + /* + * We've got a real alphanumeric token here. If it + * happens to be true, false, or null, all is well. If + * not, error out. + */ + lex->prev_token_terminator = lex->token_terminator; + lex->token_terminator = p; + if (p - s == 4) { + if (memcmp(s, "true", 4) == 0) + lex->token_type = JSON_TOKEN_TRUE; + else if (memcmp(s, "null", 4) == 0) + lex->token_type = JSON_TOKEN_NULL; + else + report_invalid_token(lex); + } else if (p - s == 5 && memcmp(s, "false", 5) == 0) { + lex->token_type = JSON_TOKEN_FALSE; + } else { + report_invalid_token(lex); + } + } + } /* end of switch */ } } /* * The next token in the input stream is known to be a string; lex it. 
*/ -static inline void -json_lex_string(JsonLexContext *lex) +static inline void json_lex_string(JsonLexContext *lex) { char *s = NULL; int len; @@ -686,8 +640,7 @@ json_lex_string(JsonLexContext *lex) ereport(ERROR, (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION), errmsg("invalid input syntax for type json"), - errdetail("Character with value 0x%02x must be escaped.", - (unsigned char) *s), + errdetail("Character with value 0x%02x must be escaped.", (unsigned char) *s), report_json_context(lex))); } else if (*s == '\\') { /* OK, we have an escape character. */ @@ -728,25 +681,24 @@ json_lex_string(JsonLexContext *lex) if (ch >= 0xd800 && ch <= 0xdbff) { if (hi_surrogate != -1) { ereport(ERROR, - (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION), - errmsg("invalid input syntax for type json"), - errdetail("Unicode high surrogate must not follow a high surrogate."), - report_json_context(lex))); + (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION), + errmsg("invalid input syntax for type json"), + errdetail("Unicode high surrogate must not follow a high surrogate."), + report_json_context(lex))); } hi_surrogate = (ch & 0x3ff) << 10; continue; } else if (ch >= 0xdc00 && ch <= 0xdfff) { if (hi_surrogate == -1) { ereport(ERROR, - (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION), - errmsg("invalid input syntax for type json"), - errdetail("Unicode low surrogate must follow a high surrogate."), - report_json_context(lex))); + (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION), + errmsg("invalid input syntax for type json"), + errdetail("Unicode low surrogate must follow a high surrogate."), + report_json_context(lex))); } ch = 0x10000 + hi_surrogate + (ch & 0x3ff); hi_surrogate = -1; } - if (hi_surrogate != -1) { ereport(ERROR, (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION), @@ -754,7 +706,6 @@ json_lex_string(JsonLexContext *lex) errdetail("Unicode low surrogate must follow a high surrogate."), report_json_context(lex))); } - /* * For UTF8, replace the escape sequence by the actual * utf8 character in lex->strval. Do this also for other @@ -763,7 +714,6 @@ json_lex_string(JsonLexContext *lex) * \u0000, since that would result in an impermissible nul * byte. */ - if (ch == 0) { appendStringInfoString(lex->strval, "\\u0000"); } else if (GetDatabaseEncoding() == PG_UTF8) { @@ -784,7 +734,6 @@ json_lex_string(JsonLexContext *lex) errdetail("Unicode escape values cannot be used for code point values above 007F when the server encoding is not UTF8."), report_json_context(lex))); } - } } else if (lex->strval != NULL) { if (hi_surrogate != -1) { @@ -842,7 +791,6 @@ json_lex_string(JsonLexContext *lex) extract_mb_char(s)), report_json_context(lex))); } - } else if (lex->strval != NULL) { if (hi_surrogate != -1) { ereport(ERROR, @@ -851,7 +799,6 @@ json_lex_string(JsonLexContext *lex) errdetail("Unicode low surrogate must follow a high surrogate."), report_json_context(lex))); } - appendStringInfoChar(lex->strval, *s); } else { int step = pg_mblen(s) - 1; @@ -899,8 +846,7 @@ json_lex_string(JsonLexContext *lex) * *------------------------------------------------------------------------- */ -static inline void -json_lex_number(JsonLexContext *lex, char *s, bool *num_err) +static inline void json_lex_number(JsonLexContext *lex, char *s, bool *num_err) { bool error = false; char *p = NULL; @@ -909,7 +855,6 @@ json_lex_number(JsonLexContext *lex, char *s, bool *num_err) len = s - lex->input; /* Part (1): leading sign indicator. */ /* Caller already did this for us; so do nothing. */ - /* Part (2): parse main digit string. 
*/ if (*s == '0') { s++; @@ -963,7 +908,6 @@ json_lex_number(JsonLexContext *lex, char *s, bool *num_err) for (p = s; len < lex->input_length && JSON_ALPHANUMERIC_CHAR(*p); p++, len++) { error = true; } - if (num_err != NULL) { /* let the caller handle the error */ *num_err = error; @@ -981,8 +925,7 @@ json_lex_number(JsonLexContext *lex, char *s, bool *num_err) * * lex->token_start and lex->token_terminator must identify the current token. */ -static void -report_parse_error(JsonParseContext ctx, JsonLexContext *lex) +static void report_parse_error(JsonParseContext ctx, JsonLexContext *lex) { char *token = NULL; int toklen; @@ -1010,8 +953,7 @@ report_parse_error(JsonParseContext ctx, JsonLexContext *lex) ereport(ERROR, (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION), errmsg("invalid input syntax for type json"), - errdetail("Expected end of input, but found \"%s\".", - token), + errdetail("Expected end of input, but found \"%s\".", token), report_json_context(lex))); } else { switch (ctx) { @@ -1019,64 +961,56 @@ report_parse_error(JsonParseContext ctx, JsonLexContext *lex) ereport(ERROR, (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION), errmsg("invalid input syntax for type json"), - errdetail("Expected JSON value, but found \"%s\".", - token), + errdetail("Expected JSON value, but found \"%s\".", token), report_json_context(lex))); break; case JSON_PARSE_STRING: ereport(ERROR, (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION), errmsg("invalid input syntax for type json"), - errdetail("Expected string, but found \"%s\".", - token), + errdetail("Expected string, but found \"%s\".", token), report_json_context(lex))); break; case JSON_PARSE_ARRAY_START: ereport(ERROR, (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION), errmsg("invalid input syntax for type json"), - errdetail("Expected array element or \"]\", but found \"%s\".", - token), + errdetail("Expected array element or \"]\", but found \"%s\".", token), report_json_context(lex))); break; case JSON_PARSE_ARRAY_NEXT: ereport(ERROR, (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION), errmsg("invalid input syntax for type json"), - errdetail("Expected \",\" or \"]\", but found \"%s\".", - token), + errdetail("Expected \",\" or \"]\", but found \"%s\".", token), report_json_context(lex))); break; case JSON_PARSE_OBJECT_START: ereport(ERROR, (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION), errmsg("invalid input syntax for type json"), - errdetail("Expected string or \"}\", but found \"%s\".", - token), + errdetail("Expected string or \"}\", but found \"%s\".", token), report_json_context(lex))); break; case JSON_PARSE_OBJECT_LABEL: ereport(ERROR, (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION), errmsg("invalid input syntax for type json"), - errdetail("Expected \":\", but found \"%s\".", - token), + errdetail("Expected \":\", but found \"%s\".", token), report_json_context(lex))); break; case JSON_PARSE_OBJECT_NEXT: ereport(ERROR, (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION), errmsg("invalid input syntax for type json"), - errdetail("Expected \",\" or \"}\", but found \"%s\".", - token), + errdetail("Expected \",\" or \"}\", but found \"%s\".", token), report_json_context(lex))); break; case JSON_PARSE_OBJECT_COMMA: ereport(ERROR, (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION), errmsg("invalid input syntax for type json"), - errdetail("Expected string, but found \"%s\".", - token), + errdetail("Expected string, but found \"%s\".", token), report_json_context(lex))); break; default: @@ -1090,8 +1024,7 @@ report_parse_error(JsonParseContext ctx, JsonLexContext 
*lex) * * lex->token_start and lex->token_terminator must identify the token. */ -static void -report_invalid_token(JsonLexContext *lex) +static void report_invalid_token(JsonLexContext *lex) { char *token = NULL; int toklen; @@ -1121,8 +1054,7 @@ report_invalid_token(JsonLexContext *lex) * The return value isn't meaningful, but we make it non-void so that this * can be invoked inside ereport(). */ -static int -report_json_context(JsonLexContext *lex) +static int report_json_context(JsonLexContext *lex) { const char *context_start = NULL; const char *context_end = NULL; @@ -1180,17 +1112,17 @@ report_json_context(JsonLexContext *lex) * suffixing "..." if not ending at end of line. */ prefix = (context_start > line_start) ? "..." : ""; - suffix = (lex->token_type != JSON_TOKEN_END && context_end - lex->input < lex->input_length && *context_end != '\n' && *context_end != '\r') ? "..." : ""; + suffix = (lex->token_type != JSON_TOKEN_END && + context_end - lex->input < lex->input_length && + *context_end != '\n' && *context_end != '\r') ? "..." : ""; - return errcontext("JSON data, line %d: %s%s%s", - line_number, prefix, ctxt, suffix); + return errcontext("JSON data, line %d: %s%s%s", line_number, prefix, ctxt, suffix); } /* * Extract a single, possibly multi-byte char from the input string. */ -static char * -extract_mb_char(char *s) +static char *extract_mb_char(char *s) { char *res = NULL; int len; @@ -1211,9 +1143,8 @@ extract_mb_char(char *s) * Hand off a non-scalar datum to composite_to_json or array_to_json_internal * as appropriate. */ -static void -datum_to_json(Datum val, bool is_null, StringInfo result, - TYPCATEGORY tcategory, Oid typoutputfunc, bool key_scalar) +static void datum_to_json(Datum val, bool is_null, StringInfo result, + TYPCATEGORY tcategory, Oid typoutputfunc, bool key_scalar) { char *outputstr = NULL; text *jsontext = NULL; @@ -1292,18 +1223,14 @@ datum_to_json(Datum val, bool is_null, StringInfo result, * If it's the innermost dimension, output the values, otherwise call * ourselves recursively to process the next dimension. */ -static void -array_dim_to_json(StringInfo result, int dim, int ndims, int *dims, Datum *vals, - bool *nulls, int *valcount, TYPCATEGORY tcategory, - Oid typoutputfunc, bool use_line_feeds) +static void array_dim_to_json(StringInfo result, int dim, int ndims, int *dims, Datum *vals, + bool *nulls, int *valcount, TYPCATEGORY tcategory, Oid typoutputfunc, bool use_line_feeds) { int i; const char *sep = NULL; Assert(dim < ndims); - sep = use_line_feeds ? ",\n " : ","; - appendStringInfoChar(result, '['); for (i = 1; i <= dims[dim]; i++) { @@ -1324,15 +1251,13 @@ array_dim_to_json(StringInfo result, int dim, int ndims, int *dims, Datum *vals, valcount, tcategory, typoutputfunc, false); } } - appendStringInfoChar(result, ']'); } /* * Turn an array into JSON. 
*/ -static void -array_to_json_internal(Datum array, StringInfo result, bool use_line_feeds) +static void array_to_json_internal(Datum array, StringInfo result, bool use_line_feeds) { ArrayType *v = DatumGetArrayTypeP(array); Oid element_type = ARR_ELEMTYPE(v); @@ -1367,9 +1292,7 @@ array_to_json_internal(Datum array, StringInfo result, bool use_line_feeds) HeapTuple tuple; Form_pg_cast castForm; - tuple = SearchSysCache2(CASTSOURCETARGET, - ObjectIdGetDatum(element_type), - ObjectIdGetDatum(JSONOID)); + tuple = SearchSysCache2(CASTSOURCETARGET, ObjectIdGetDatum(element_type), ObjectIdGetDatum(JSONOID)); if (HeapTupleIsValid(tuple)) { castForm = (Form_pg_cast) GETSTRUCT(tuple); @@ -1381,9 +1304,7 @@ array_to_json_internal(Datum array, StringInfo result, bool use_line_feeds) } } - deconstruct_array(v, element_type, typlen, typbyval, - typalign, &elements, &nulls, - &nitems); + deconstruct_array(v, element_type, typlen, typbyval, typalign, &elements, &nulls, &nitems); if (castfunc != InvalidOid) { tcategory = TYPCATEGORY_JSON_CAST; @@ -1395,8 +1316,7 @@ array_to_json_internal(Datum array, StringInfo result, bool use_line_feeds) tcategory = TypeCategory(element_type); } - array_dim_to_json(result, 0, ndim, dim, elements, nulls, &count, tcategory, - typoutputfunc, use_line_feeds); + array_dim_to_json(result, 0, ndim, dim, elements, nulls, &count, tcategory, typoutputfunc, use_line_feeds); pfree(elements); pfree(nulls); @@ -1405,8 +1325,7 @@ array_to_json_internal(Datum array, StringInfo result, bool use_line_feeds) /* * Turn a composite / record into JSON. */ -static void -composite_to_json(Datum composite, StringInfo result, bool use_line_feeds) +static void composite_to_json(Datum composite, StringInfo result, bool use_line_feeds) { HeapTupleHeader td; Oid tupType; @@ -1419,7 +1338,6 @@ composite_to_json(Datum composite, StringInfo result, bool use_line_feeds) const char *sep = NULL; sep = use_line_feeds ? 
",\n " : ","; - td = DatumGetHeapTupleHeader(composite); /* Extract rowtype info and find a tupdesc */ @@ -1431,7 +1349,6 @@ composite_to_json(Datum composite, StringInfo result, bool use_line_feeds) tmptup.t_len = HeapTupleHeaderGetDatumLength(td); tmptup.t_data = td; tuple = &tmptup; - appendStringInfoChar(result, '{'); for (i = 0; i < tupdesc->natts; i++) { @@ -1446,7 +1363,6 @@ composite_to_json(Datum composite, StringInfo result, bool use_line_feeds) if (tupdesc->attrs[i]->attisdropped) { continue; } - if (needsep) { appendStringInfoString(result, sep); } @@ -1455,18 +1371,13 @@ composite_to_json(Datum composite, StringInfo result, bool use_line_feeds) attname = NameStr(tupdesc->attrs[i]->attname); escape_json(result, attname); appendStringInfoChar(result, ':'); - val = heap_getattr(tuple, i + 1, tupdesc, &isnull); - - getTypeOutputInfo(tupdesc->attrs[i]->atttypid, - &typoutput, &typisvarlena); + getTypeOutputInfo(tupdesc->attrs[i]->atttypid, &typoutput, &typisvarlena); if (tupdesc->attrs[i]->atttypid > FirstNormalObjectId) { HeapTuple cast_tuple; Form_pg_cast castForm; - - cast_tuple = SearchSysCache2(CASTSOURCETARGET, - ObjectIdGetDatum(tupdesc->attrs[i]->atttypid), + cast_tuple = SearchSysCache2(CASTSOURCETARGET, ObjectIdGetDatum(tupdesc->attrs[i]->atttypid), ObjectIdGetDatum(JSONOID)); if (HeapTupleIsValid(cast_tuple)) { castForm = (Form_pg_cast) GETSTRUCT(cast_tuple); @@ -1474,7 +1385,6 @@ composite_to_json(Datum composite, StringInfo result, bool use_line_feeds) if (castForm->castmethod == COERCION_METHOD_FUNCTION) { castfunc = typoutput = castForm->castfunc; } - ReleaseSysCache(cast_tuple); } } @@ -1485,16 +1395,13 @@ composite_to_json(Datum composite, StringInfo result, bool use_line_feeds) tcategory = TYPCATEGORY_ARRAY; } else if (tupdesc->attrs[i]->atttypid == RECORDOID) { tcategory = TYPCATEGORY_COMPOSITE; - } else if (tupdesc->attrs[i]->atttypid == JSONOID || - tupdesc->attrs[i]->atttypid == JSONBOID) { + } else if (tupdesc->attrs[i]->atttypid == JSONOID || tupdesc->attrs[i]->atttypid == JSONBOID) { tcategory = TYPCATEGORY_JSON; } else { tcategory = TypeCategory(tupdesc->attrs[i]->atttypid); } - datum_to_json(val, isnull, result, tcategory, typoutput, false); } - appendStringInfoChar(result, '}'); ReleaseTupleDesc(tupdesc); } @@ -1503,8 +1410,7 @@ composite_to_json(Datum composite, StringInfo result, bool use_line_feeds) * append Json for orig_val to result. If it's a field key, make sure it's * of an acceptable type and is quoted. 
*/ -static void -add_json(Datum val, bool is_null, StringInfo result, Oid val_type, bool key_scalar) +static void add_json(Datum val, bool is_null, StringInfo result, Oid val_type, bool key_scalar) { TYPCATEGORY tcategory; Oid typoutput; @@ -1517,27 +1423,20 @@ add_json(Datum val, bool is_null, StringInfo result, Oid val_type, bool key_scal errmsg("could not determine input data type"))); } - getTypeOutputInfo(val_type, &typoutput, &typisvarlena); - if (val_type > FirstNormalObjectId) { HeapTuple tuple; Form_pg_cast castForm; - tuple = SearchSysCache2(CASTSOURCETARGET, - ObjectIdGetDatum(val_type), - ObjectIdGetDatum(JSONOID)); + tuple = SearchSysCache2(CASTSOURCETARGET, ObjectIdGetDatum(val_type), ObjectIdGetDatum(JSONOID)); if (HeapTupleIsValid(tuple)) { castForm = (Form_pg_cast) GETSTRUCT(tuple); - if (castForm->castmethod == COERCION_METHOD_FUNCTION) { castfunc = typoutput = castForm->castfunc; } - ReleaseSysCache(tuple); } } - if (castfunc != InvalidOid) { tcategory = TYPCATEGORY_JSON_CAST; } else if (val_type == RECORDARRAYOID) { @@ -1557,7 +1456,7 @@ add_json(Datum val, bool is_null, StringInfo result, Oid val_type, bool key_scal tcategory == TYPCATEGORY_JSON_CAST)) { ereport(ERROR, (errcode(ERRCODE_INVALID_PARAMETER_VALUE), - errmsg("key value must be scalar, not array, composite or json"))); + errmsg("key value must be scalar, not array, composite or json"))); } datum_to_json(val, is_null, result, tcategory, typoutput, key_scalar); @@ -1566,14 +1465,12 @@ add_json(Datum val, bool is_null, StringInfo result, Oid val_type, bool key_scal /* * SQL function array_to_json(row) */ -extern Datum -array_to_json(PG_FUNCTION_ARGS) +extern Datum array_to_json(PG_FUNCTION_ARGS) { Datum array = PG_GETARG_DATUM(0); StringInfo result; result = makeStringInfo(); - array_to_json_internal(array, result, false); PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len)); @@ -1582,58 +1479,48 @@ array_to_json(PG_FUNCTION_ARGS) /* * SQL function array_to_json(row, prettybool) */ -extern Datum -array_to_json_pretty(PG_FUNCTION_ARGS) +extern Datum array_to_json_pretty(PG_FUNCTION_ARGS) { Datum array = PG_GETARG_DATUM(0); bool use_line_feeds = PG_GETARG_BOOL(1); StringInfo result; result = makeStringInfo(); - array_to_json_internal(array, result, use_line_feeds); - PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len)); } /* * SQL function row_to_json(row) */ -extern Datum -row_to_json(PG_FUNCTION_ARGS) +extern Datum row_to_json(PG_FUNCTION_ARGS) { Datum array = PG_GETARG_DATUM(0); StringInfo result; result = makeStringInfo(); - composite_to_json(array, result, false); - PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len)); } /* * SQL function row_to_json(row, prettybool) */ -extern Datum -row_to_json_pretty(PG_FUNCTION_ARGS) +extern Datum row_to_json_pretty(PG_FUNCTION_ARGS) { Datum array = PG_GETARG_DATUM(0); bool use_line_feeds = PG_GETARG_BOOL(1); StringInfo result; result = makeStringInfo(); - composite_to_json(array, result, use_line_feeds); - PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len)); } /* * SQL function to_json(anyvalue) */ -Datum -to_json(PG_FUNCTION_ARGS) +Datum to_json(PG_FUNCTION_ARGS) { Datum val = PG_GETARG_DATUM(0); Oid val_type = get_fn_expr_argtype(fcinfo->flinfo, 0); @@ -1650,23 +1537,16 @@ to_json(PG_FUNCTION_ARGS) } result = makeStringInfo(); - getTypeOutputInfo(val_type, &typoutput, &typisvarlena); - if (val_type > FirstNormalObjectId) { HeapTuple tuple; Form_pg_cast castForm; - - tuple = 
SearchSysCache2(CASTSOURCETARGET, - ObjectIdGetDatum(val_type), - ObjectIdGetDatum(JSONOID)); + tuple = SearchSysCache2(CASTSOURCETARGET, ObjectIdGetDatum(val_type), ObjectIdGetDatum(JSONOID)); if (HeapTupleIsValid(tuple)) { castForm = (Form_pg_cast) GETSTRUCT(tuple); - if (castForm->castmethod == COERCION_METHOD_FUNCTION) { castfunc = typoutput = castForm->castfunc; } - ReleaseSysCache(tuple); } } @@ -1682,21 +1562,18 @@ to_json(PG_FUNCTION_ARGS) } else { tcategory = TypeCategory(val_type); } - datum_to_json(val, false, result, tcategory, typoutput, false); - PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len)); } /* * json_agg transition function */ -Datum -json_agg_transfn(PG_FUNCTION_ARGS) +Datum json_agg_transfn(PG_FUNCTION_ARGS) { Oid val_type = get_fn_expr_argtype(fcinfo->flinfo, 1); MemoryContext aggcontext, - oldcontext; + oldcontext; StringInfo state; Datum val; TYPCATEGORY tcategory; @@ -1740,23 +1617,17 @@ json_agg_transfn(PG_FUNCTION_ARGS) } val = PG_GETARG_DATUM(1); - getTypeOutputInfo(val_type, &typoutput, &typisvarlena); - if (val_type > FirstNormalObjectId) { HeapTuple tuple; Form_pg_cast castForm; - tuple = SearchSysCache2(CASTSOURCETARGET, - ObjectIdGetDatum(val_type), - ObjectIdGetDatum(JSONOID)); + tuple = SearchSysCache2(CASTSOURCETARGET, ObjectIdGetDatum(val_type), ObjectIdGetDatum(JSONOID)); if (HeapTupleIsValid(tuple)) { castForm = (Form_pg_cast) GETSTRUCT(tuple); - if (castForm->castmethod == COERCION_METHOD_FUNCTION) { castfunc = typoutput = castForm->castfunc; } - ReleaseSysCache(tuple); } } @@ -1776,9 +1647,7 @@ json_agg_transfn(PG_FUNCTION_ARGS) (tcategory == TYPCATEGORY_ARRAY || tcategory == TYPCATEGORY_COMPOSITE)) { appendStringInfoString(state, "\n "); } - datum_to_json(val, false, state, tcategory, typoutput, false); - /* * The transition type for array_agg() is declared to be "internal", which * is a pass-by-value type the same size as a pointer. So we can safely @@ -1790,22 +1659,17 @@ json_agg_transfn(PG_FUNCTION_ARGS) /* * json_agg final function */ -Datum -json_agg_finalfn(PG_FUNCTION_ARGS) +Datum json_agg_finalfn(PG_FUNCTION_ARGS) { StringInfo state; /* cannot be called directly because of internal-type argument */ Assert(AggCheckCallContext(fcinfo, NULL)); - state = PG_ARGISNULL(0) ? NULL : (StringInfo) PG_GETARG_POINTER(0); - if (state == NULL) { PG_RETURN_NULL(); } - appendStringInfoChar(state, ']'); - PG_RETURN_TEXT_P(cstring_to_text_with_len(state->data, state->len)); } @@ -1814,8 +1678,7 @@ json_agg_finalfn(PG_FUNCTION_ARGS) * * aggregate two input columns as a single json value. */ -Datum -json_object_agg_transfn(PG_FUNCTION_ARGS) +Datum json_object_agg_transfn(PG_FUNCTION_ARGS) { Oid val_type; MemoryContext aggcontext, oldcontext; @@ -1837,7 +1700,6 @@ json_object_agg_transfn(PG_FUNCTION_ARGS) oldcontext = MemoryContextSwitchTo(aggcontext); state = makeStringInfo(); MemoryContextSwitchTo(oldcontext); - appendStringInfoString(state, "{ "); } else { state = (StringInfo) PG_GETARG_POINTER(0); @@ -1849,9 +1711,7 @@ json_object_agg_transfn(PG_FUNCTION_ARGS) (errcode(ERRCODE_INVALID_PARAMETER_VALUE), errmsg("field name must not be null"))); } - val_type = get_fn_expr_argtype(fcinfo->flinfo, 1); - /* * turn a constant (more or less literal) value that's of unknown type * into text. Unknowns come in as a cstring pointer. 
@@ -1869,9 +1729,7 @@ json_object_agg_transfn(PG_FUNCTION_ARGS) errmsg("arg 1: could not determine data type"))); } add_json(arg, false, state, val_type, true); - appendStringInfoString(state, " : "); - val_type = get_fn_expr_argtype(fcinfo->flinfo, 2); /* see comments above */ if (val_type == UNKNOWNOID && get_fn_expr_arg_stable(fcinfo->flinfo, 2)) { @@ -1899,30 +1757,25 @@ json_object_agg_transfn(PG_FUNCTION_ARGS) * json_object_agg final function. * */ -Datum -json_object_agg_finalfn(PG_FUNCTION_ARGS) +Datum json_object_agg_finalfn(PG_FUNCTION_ARGS) { StringInfo state; /* cannot be called directly because of internal-type argument */ Assert(AggCheckCallContext(fcinfo, NULL)); - state = PG_ARGISNULL(0) ? NULL : (StringInfo) PG_GETARG_POINTER(0); - if (state == NULL) { PG_RETURN_TEXT_P(cstring_to_text("{}")); } appendStringInfoString(state, " }"); - PG_RETURN_TEXT_P(cstring_to_text_with_len(state->data, state->len)); } /* * SQL function json_build_object(variadic "any") */ -Datum -json_build_object(PG_FUNCTION_ARGS) +Datum json_build_object(PG_FUNCTION_ARGS) { int nargs = PG_NARGS(); int i; @@ -1931,7 +1784,6 @@ json_build_object(PG_FUNCTION_ARGS) StringInfo result; Oid val_type; - if (nargs % 2 != 0) { ereport(ERROR, (errcode(ERRCODE_INVALID_PARAMETER_VALUE), @@ -1939,20 +1791,15 @@ json_build_object(PG_FUNCTION_ARGS) } result = makeStringInfo(); - appendStringInfoChar(result, '{'); - for (i = 0; i < nargs; i += 2) { - /* process key */ - if (PG_ARGISNULL(i)) { ereport(ERROR, (errcode(ERRCODE_INVALID_PARAMETER_VALUE), errmsg("arg %d: key cannot be null", i + 1))); } val_type = get_fn_expr_argtype(fcinfo->flinfo, i); - /* * turn a constant (more or less literal) value that's of unknown type * into text. Unknowns come in as a cstring pointer. @@ -1975,11 +1822,8 @@ json_build_object(PG_FUNCTION_ARGS) appendStringInfoString(result, sep); sep = ", "; add_json(arg, false, result, val_type, true); - appendStringInfoString(result, " : "); - /* process value */ - val_type = get_fn_expr_argtype(fcinfo->flinfo, i + 1); /* see comments above */ if (val_type == UNKNOWNOID && get_fn_expr_arg_stable(fcinfo->flinfo, i + 1)) { @@ -1998,10 +1842,8 @@ json_build_object(PG_FUNCTION_ARGS) errmsg("arg %d: could not determine data type", i + 2))); } add_json(arg, PG_ARGISNULL(i + 1), result, val_type, false); - } appendStringInfoChar(result, '}'); - PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len)); } @@ -2009,8 +1851,7 @@ json_build_object(PG_FUNCTION_ARGS) /* * degenerate case of json_build_object where it gets 0 arguments. */ -Datum -json_build_object_noargs(PG_FUNCTION_ARGS) +Datum json_build_object_noargs(PG_FUNCTION_ARGS) { PG_RETURN_TEXT_P(cstring_to_text_with_len("{}", 2)); } @@ -2018,8 +1859,7 @@ json_build_object_noargs(PG_FUNCTION_ARGS) /* * SQL function json_build_array(variadic "any") */ -Datum -json_build_array(PG_FUNCTION_ARGS) +Datum json_build_array(PG_FUNCTION_ARGS) { int nargs = PG_NARGS(); int i; @@ -2028,11 +1868,8 @@ json_build_array(PG_FUNCTION_ARGS) StringInfo result; Oid val_type; - result = makeStringInfo(); - appendStringInfoChar(result, '['); - for (i = 0; i < nargs; i++) { val_type = get_fn_expr_argtype(fcinfo->flinfo, i); arg = PG_GETARG_DATUM(i + 1); @@ -2057,16 +1894,13 @@ json_build_array(PG_FUNCTION_ARGS) add_json(arg, PG_ARGISNULL(i), result, val_type, false); } appendStringInfoChar(result, ']'); - PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len)); - } /* * degenerate case of json_build_array where it gets 0 arguments. 
*/ -Datum -json_build_array_noargs(PG_FUNCTION_ARGS) +Datum json_build_array_noargs(PG_FUNCTION_ARGS) { PG_RETURN_TEXT_P(cstring_to_text_with_len("[]", 2)); } @@ -2078,53 +1912,43 @@ json_build_array_noargs(PG_FUNCTION_ARGS) * for a json object. * */ -Datum -json_object(PG_FUNCTION_ARGS) +Datum json_object(PG_FUNCTION_ARGS) { ArrayType *in_array = PG_GETARG_ARRAYTYPE_P(0); int ndims = ARR_NDIM(in_array); StringInfoData result; Datum *in_datums = NULL; - bool *in_nulls = NULL; - int in_count, count, i; - text *rval = NULL; - char *v = NULL; + bool *in_nulls = NULL; + int in_count, count, i; + text *rval = NULL; + char *v = NULL; switch (ndims) { case 0: PG_RETURN_DATUM(CStringGetTextDatum("{}")); break; - case 1: if ((ARR_DIMS(in_array)[0]) % 2) ereport(ERROR, (errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR), errmsg("array must have even number of elements"))); break; - case 2: if ((ARR_DIMS(in_array)[1]) != 2) ereport(ERROR, (errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR), errmsg("array must have two columns"))); break; - default: ereport(ERROR, (errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR), errmsg("wrong number of array subscripts"))); } - deconstruct_array(in_array, - TEXTOID, -1, false, 'i', - &in_datums, &in_nulls, &in_count); - + deconstruct_array(in_array, TEXTOID, -1, false, 'i', &in_datums, &in_nulls, &in_count); count = in_count / 2; - initStringInfo(&result); - appendStringInfoChar(&result, '{'); - for (i = 0; i < count; ++i) { if (in_nulls[i * 2]) { ereport(ERROR, @@ -2154,15 +1978,12 @@ json_object(PG_FUNCTION_ARGS) } appendStringInfoChar(&result, '}'); - pfree(in_datums); pfree(in_nulls); rval = cstring_to_text_with_len(result.data, result.len); pfree(result.data); - PG_RETURN_TEXT_P(rval); - } /* @@ -2171,8 +1992,7 @@ json_object(PG_FUNCTION_ARGS) * take separate name and value arrays of text to construct a json object * pairwise. 
*/ -Datum -json_object_two_arg(PG_FUNCTION_ARGS) +Datum json_object_two_arg(PG_FUNCTION_ARGS) { ArrayType *key_array = PG_GETARG_ARRAYTYPE_P(0); ArrayType *val_array = PG_GETARG_ARRAYTYPE_P(1); @@ -2181,11 +2001,11 @@ json_object_two_arg(PG_FUNCTION_ARGS) StringInfoData result; Datum *key_datums = NULL; Datum *val_datums = NULL; - bool *key_nulls = NULL; - bool *val_nulls = NULL; - int key_count, val_count, i; - text *rval = NULL; - char *v = NULL; + bool *key_nulls = NULL; + bool *val_nulls = NULL; + int key_count, val_count, i; + text *rval = NULL; + char *v = NULL; if (nkdims > 1 || nkdims != nvdims) { ereport(ERROR, @@ -2197,14 +2017,8 @@ json_object_two_arg(PG_FUNCTION_ARGS) PG_RETURN_DATUM(CStringGetTextDatum("{}")); } - deconstruct_array(key_array, - TEXTOID, -1, false, 'i', - &key_datums, &key_nulls, &key_count); - - deconstruct_array(val_array, - TEXTOID, -1, false, 'i', - &val_datums, &val_nulls, &val_count); - + deconstruct_array(key_array, TEXTOID, -1, false, 'i', &key_datums, &key_nulls, &key_count); + deconstruct_array(val_array, TEXTOID, -1, false, 'i', &val_datums, &val_nulls, &val_count); if (key_count != val_count) { ereport(ERROR, (errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR), @@ -2212,9 +2026,7 @@ json_object_two_arg(PG_FUNCTION_ARGS) } initStringInfo(&result); - appendStringInfoChar(&result, '{'); - for (i = 0; i < key_count; ++i) { if (key_nulls[i]) { ereport(ERROR, @@ -2243,27 +2055,23 @@ json_object_two_arg(PG_FUNCTION_ARGS) pfree(v); } } - appendStringInfoChar(&result, '}'); pfree(key_datums); pfree(key_nulls); pfree(val_datums); pfree(val_nulls); - rval = cstring_to_text_with_len(result.data, result.len); pfree(result.data); PG_RETURN_TEXT_P(rval); - } /* * Produce a JSON string literal, properly escaping characters in the text. */ -void -escape_json(StringInfo buf, const char *str) +void escape_json(StringInfo buf, const char *str) { const char *p = NULL; int charlen = 0; @@ -2321,8 +2129,7 @@ escape_json(StringInfo buf, const char *str) * initial token should never be JSON_TOKEN_OBJECT_END, JSON_TOKEN_ARRAY_END, * JSON_TOKEN_COLON, JSON_TOKEN_COMMA, or JSON_TOKEN_END. */ -Datum -json_typeof(PG_FUNCTION_ARGS) +Datum json_typeof(PG_FUNCTION_ARGS) { text *json = NULL; diff --git a/src/common/backend/utils/adt/jsonb.cpp b/src/common/backend/utils/adt/jsonb.cpp index f1f6b4dac..983c47aac 100644 --- a/src/common/backend/utils/adt/jsonb.cpp +++ b/src/common/backend/utils/adt/jsonb.cpp @@ -1,15 +1,15 @@ -/*------------------------------------------------------------------------- +/* ------------------------------------------------------------------------- * - * jsonb.c + * jsonb.cpp * I/O routines for jsonb type * - * Portions Copyright (c) 2020 Huawei Technologies Co.,Ltd. + * Portions Copyright (c) 2021 Huawei Technologies Co.,Ltd. 
* Copyright (c) 2014, PostgreSQL Global Development Group * * IDENTIFICATION - * src/backend/utils/adt/jsonb.c + * src/common/backend/utils/adt/jsonb.cpp * - *------------------------------------------------------------------------- + * ------------------------------------------------------------------------- */ #include "postgres.h" @@ -19,8 +19,7 @@ #include "utils/jsonapi.h" #include "utils/jsonb.h" -typedef struct JsonbInState -{ +typedef struct JsonbInState { JsonbParseState *parseState; JsonbValue *res; } JsonbInState; @@ -32,17 +31,16 @@ static void jsonb_in_object_end(void *pstate); static void jsonb_in_array_start(void *pstate); static void jsonb_in_array_end(void *pstate); static void jsonb_in_object_field_start(void *pstate, char *fname, bool isnull); -static void jsonb_put_escaped_value(StringInfo out, JsonbValue * scalarVal); +static void jsonb_put_escaped_value(StringInfo out, JsonbValue *scalarVal); static void jsonb_in_scalar(void *pstate, char *token, JsonTokenType tokentype); char *JsonbToCString(StringInfo out, char *in, int estimated_len); /* * jsonb type input function */ -Datum -jsonb_in(PG_FUNCTION_ARGS) +Datum jsonb_in(PG_FUNCTION_ARGS) { - char *json = PG_GETARG_CSTRING(0); + char *json = PG_GETARG_CSTRING(0); json = json == NULL ? pstrdup("") : json; return jsonb_from_cstring(json, strlen(json)); @@ -56,8 +54,7 @@ jsonb_in(PG_FUNCTION_ARGS) * can change the binary format sent in future if necessary. For now, * only version 1 is supported. */ -Datum -jsonb_recv(PG_FUNCTION_ARGS) +Datum jsonb_recv(PG_FUNCTION_ARGS) { StringInfo buf = (StringInfo) PG_GETARG_POINTER(0); int version = pq_getmsgint(buf, 1); @@ -75,8 +72,7 @@ jsonb_recv(PG_FUNCTION_ARGS) /* * jsonb type output function */ -Datum -jsonb_out(PG_FUNCTION_ARGS) +Datum jsonb_out(PG_FUNCTION_ARGS) { Jsonb *jb = PG_GETARG_JSONB(0); char *out = NULL; @@ -91,8 +87,7 @@ jsonb_out(PG_FUNCTION_ARGS) * * Just send jsonb as a version number, then a string of text */ -Datum -jsonb_send(PG_FUNCTION_ARGS) +Datum jsonb_send(PG_FUNCTION_ARGS) { Jsonb *jb = PG_GETARG_JSONB(0); StringInfoData buf; @@ -116,8 +111,7 @@ jsonb_send(PG_FUNCTION_ARGS) * This function is here because the analog json function is in json.c, since * it uses the json parser internals not exposed elsewhere. */ -Datum -jsonb_typeof(PG_FUNCTION_ARGS) +Datum jsonb_typeof(PG_FUNCTION_ARGS) { Jsonb *in = PG_GETARG_JSONB(0); JsonbIterator *it = NULL; @@ -130,9 +124,7 @@ jsonb_typeof(PG_FUNCTION_ARGS) result = "array"; } else { Assert(JB_ROOT_IS_SCALAR(in)); - it = JsonbIteratorInit(VARDATA_ANY(in)); - /* * A root scalar is stored as an array of one element, so we get the * array and then its first (and only) member. @@ -168,8 +160,7 @@ jsonb_typeof(PG_FUNCTION_ARGS) * * Uses the json parser (with hooks) to construct a jsonb. 
*/ -static inline Datum -jsonb_from_cstring(char *json, int len) +static inline Datum jsonb_from_cstring(char *json, int len) { JsonLexContext *lex = NULL; JsonbInState state; @@ -182,7 +173,6 @@ jsonb_from_cstring(char *json, int len) lex = makeJsonLexContextCstringLen(json, len, true); sem.semstate = (void *) &state; - sem.object_start = jsonb_in_object_start; sem.array_start = jsonb_in_array_start; sem.object_end = jsonb_in_object_end; @@ -196,8 +186,7 @@ jsonb_from_cstring(char *json, int len) PG_RETURN_POINTER(JsonbValueToJsonb(state.res)); } -static size_t -checkStringLen(size_t len) +static size_t checkStringLen(size_t len) { if (len > JENTRY_POSMASK) { ereport(ERROR, @@ -210,40 +199,31 @@ checkStringLen(size_t len) return len; } -static void -jsonb_in_object_start(void *pstate) +static void jsonb_in_object_start(void *pstate) { JsonbInState *_state = (JsonbInState *) pstate; - _state->res = pushJsonbValue(&_state->parseState, WJB_BEGIN_OBJECT, NULL); } -static void -jsonb_in_object_end(void *pstate) +static void jsonb_in_object_end(void *pstate) { JsonbInState *_state = (JsonbInState *) pstate; - _state->res = pushJsonbValue(&_state->parseState, WJB_END_OBJECT, NULL); } -static void -jsonb_in_array_start(void *pstate) +static void jsonb_in_array_start(void *pstate) { JsonbInState *_state = (JsonbInState *) pstate; - _state->res = pushJsonbValue(&_state->parseState, WJB_BEGIN_ARRAY, NULL); } -static void -jsonb_in_array_end(void *pstate) +static void jsonb_in_array_end(void *pstate) { JsonbInState *_state = (JsonbInState *) pstate; - _state->res = pushJsonbValue(&_state->parseState, WJB_END_ARRAY, NULL); } -static void -jsonb_in_object_field_start(void *pstate, char *fname, bool isnull) +static void jsonb_in_object_field_start(void *pstate, char *fname, bool isnull) { JsonbInState *_state = (JsonbInState *) pstate; JsonbValue v; @@ -257,8 +237,7 @@ jsonb_in_object_field_start(void *pstate, char *fname, bool isnull) _state->res = pushJsonbValue(&_state->parseState, WJB_KEY, &v); } -static void -jsonb_put_escaped_value(StringInfo out, JsonbValue * scalarVal) +static void jsonb_put_escaped_value(StringInfo out, JsonbValue * scalarVal) { switch (scalarVal->type) { case jbvNull: @@ -286,16 +265,13 @@ jsonb_put_escaped_value(StringInfo out, JsonbValue * scalarVal) /* * For jsonb we always want the de-escaped value - that's what's in token */ -static void -jsonb_in_scalar(void *pstate, char *token, JsonTokenType tokentype) +static void jsonb_in_scalar(void *pstate, char *token, JsonTokenType tokentype) { JsonbInState *_state = (JsonbInState *) pstate; JsonbValue v; v.estSize = sizeof(JEntry); - switch (tokentype) { - case JSON_TOKEN_STRING: Assert (token != NULL); v.type = jbvString; @@ -311,7 +287,7 @@ jsonb_in_scalar(void *pstate, char *token, JsonTokenType tokentype) Assert (token != NULL); v.type = jbvNumeric; v.numeric = DatumGetNumeric(DirectFunctionCall3(numeric_in, CStringGetDatum(token), 0, -1)); - v.estSize += VARSIZE_ANY(v.numeric) + sizeof(JEntry) /* alignment */ ; + v.estSize += VARSIZE_ANY(v.numeric) + sizeof(JEntry); /* alignment */ break; case JSON_TOKEN_TRUE: v.type = jbvBool; @@ -343,7 +319,6 @@ jsonb_in_scalar(void *pstate, char *token, JsonTokenType tokentype) _state->res = pushJsonbValue(&_state->parseState, WJB_END_ARRAY, NULL); } else { JsonbValue *o = &_state->parseState->contVal; - switch (o->type) { case jbvArray: _state->res = pushJsonbValue(&_state->parseState, WJB_ELEM, &v); @@ -368,8 +343,7 @@ jsonb_in_scalar(void *pstate, char *token, JsonTokenType tokentype) 
* caller wants access to the len attribute without having to call strlen, e.g. * if they are converting it to a text* object. */ -char * -JsonbToCString(StringInfo out, JsonbSuperHeader in, int estimated_len) +char *JsonbToCString(StringInfo out, JsonbSuperHeader in, int estimated_len) { bool first = true; JsonbIterator *it = NULL; @@ -382,11 +356,9 @@ JsonbToCString(StringInfo out, JsonbSuperHeader in, int estimated_len) out = makeStringInfo(); enlargeStringInfo(out, (estimated_len >= 0) ? estimated_len : 64); - it = JsonbIteratorInit(in); - while (redo_switch || - ((type = JsonbIteratorNext(&it, &v, false)) != WJB_DONE)) { + while (redo_switch || ((type = JsonbIteratorNext(&it, &v, false)) != WJB_DONE)) { redo_switch = false; switch (type) { case WJB_BEGIN_ARRAY: @@ -421,7 +393,6 @@ JsonbToCString(StringInfo out, JsonbSuperHeader in, int estimated_len) jsonb_put_escaped_value(out, &v); } else { Assert(type == WJB_BEGIN_OBJECT || type == WJB_BEGIN_ARRAY); - /* * We need to rerun the current switch() since we need to * output the object which we just got from the iterator @@ -435,7 +406,6 @@ JsonbToCString(StringInfo out, JsonbSuperHeader in, int estimated_len) appendBinaryStringInfo(out, ", ", 2); else first = false; - jsonb_put_escaped_value(out, &v); break; case WJB_END_ARRAY: @@ -453,8 +423,6 @@ JsonbToCString(StringInfo out, JsonbSuperHeader in, int estimated_len) elog(ERROR, "unknown flag of jsonb iterator"); } } - Assert(level == 0); - return out->data; } diff --git a/src/common/backend/utils/adt/jsonb_gin.cpp b/src/common/backend/utils/adt/jsonb_gin.cpp index 0c146c5de..c69de8ea0 100644 --- a/src/common/backend/utils/adt/jsonb_gin.cpp +++ b/src/common/backend/utils/adt/jsonb_gin.cpp @@ -1,19 +1,18 @@ -/*------------------------------------------------------------------------- +/* ------------------------------------------------------------------------- * - * jsonb_gin.c + * jsonb_gin.cpp * GIN support functions for jsonb * - * Portions Copyright (c) 2020 Huawei Technologies Co.,Ltd. + * Portions Copyright (c) 2021 Huawei Technologies Co.,Ltd. 
* Copyright (c) 2014, PostgreSQL Global Development Group * * * IDENTIFICATION - * src/backend/utils/adt/jsonb_gin.c + * src/common/backend/utils/adt/jsonb_gin.cpp * - *------------------------------------------------------------------------- + * ------------------------------------------------------------------------- */ #include "postgres.h" - #include "access/gin.h" #include "access/skey.h" #include "catalog/pg_collation.h" @@ -21,22 +20,20 @@ #include "utils/builtins.h" #include "utils/jsonb.h" -typedef struct PathHashStack -{ +typedef struct PathHashStack { uint32 hash; struct PathHashStack *parent; } PathHashStack; static text *make_text_key(const char *str, int len, char flag); -static text *make_scalar_key(const JsonbValue * scalarVal, char flag); +static text *make_scalar_key(const JsonbValue *scalarVal, char flag); /* * * jsonb_ops GIN opclass support functions * */ -Datum -gin_compare_jsonb(PG_FUNCTION_ARGS) +Datum gin_compare_jsonb(PG_FUNCTION_ARGS) { text *arg1 = PG_GETARG_TEXT_PP(0); text *arg2 = PG_GETARG_TEXT_PP(1); @@ -48,21 +45,18 @@ gin_compare_jsonb(PG_FUNCTION_ARGS) a1p = VARDATA_ANY(arg1); a2p = VARDATA_ANY(arg2); - len1 = VARSIZE_ANY_EXHDR(arg1); len2 = VARSIZE_ANY_EXHDR(arg2); /* Compare text as bttextcmp does, but always using C collation */ result = varstr_cmp(a1p, len1, a2p, len2, C_COLLATION_OID); - PG_FREE_IF_COPY(arg1, 0); PG_FREE_IF_COPY(arg2, 1); PG_RETURN_INT32(result); } -Datum -gin_extract_jsonb(PG_FUNCTION_ARGS) +Datum gin_extract_jsonb(PG_FUNCTION_ARGS) { Jsonb *jb = (Jsonb *) PG_GETARG_JSONB(0); int32 *nentries = (int32 *) PG_GETARG_POINTER(1); @@ -79,9 +73,7 @@ gin_extract_jsonb(PG_FUNCTION_ARGS) } entries = (Datum *) palloc(sizeof(Datum) * total); - it = JsonbIteratorInit(VARDATA(jb)); - while ((r = JsonbIteratorNext(&it, &v, false)) != WJB_DONE) { if (i >= total) { total *= 2; @@ -141,12 +133,10 @@ gin_extract_jsonb(PG_FUNCTION_ARGS) } *nentries = i; - PG_RETURN_POINTER(entries); } -Datum -gin_extract_jsonb_query(PG_FUNCTION_ARGS) +Datum gin_extract_jsonb_query(PG_FUNCTION_ARGS) { int32 *nentries = (int32 *) PG_GETARG_POINTER(1); StrategyNumber strategy = PG_GETARG_UINT16(2); @@ -156,9 +146,7 @@ gin_extract_jsonb_query(PG_FUNCTION_ARGS) if (strategy == JsonbContainsStrategyNumber) { /* Query is a jsonb, so just apply gin_extract_jsonb... 
*/ entries = (Datum *) - DatumGetPointer(DirectFunctionCall2(gin_extract_jsonb, - PG_GETARG_DATUM(0), - PointerGetDatum(nentries))); + DatumGetPointer(DirectFunctionCall2(gin_extract_jsonb, PG_GETARG_DATUM(0), PointerGetDatum(nentries))); /* ...although "contains {}" requires a full index scan */ if (entries == NULL) { *searchMode = GIN_SEARCH_MODE_ALL; @@ -169,11 +157,9 @@ gin_extract_jsonb_query(PG_FUNCTION_ARGS) *nentries = 1; entries = (Datum *) palloc(sizeof(Datum)); - item = make_text_key(VARDATA_ANY(query), VARSIZE_ANY_EXHDR(query), - JKEYELEM); + item = make_text_key(VARDATA_ANY(query), VARSIZE_ANY_EXHDR(query), JKEYELEM); entries[0] = PointerGetDatum(item); - } else if (strategy == JsonbExistsAnyStrategyNumber || - strategy == JsonbExistsAllStrategyNumber) { + } else if (strategy == JsonbExistsAnyStrategyNumber || strategy == JsonbExistsAllStrategyNumber) { ArrayType *query = PG_GETARG_ARRAYTYPE_P(0); Datum *key_datums = NULL; bool *key_nulls = NULL; @@ -181,11 +167,7 @@ gin_extract_jsonb_query(PG_FUNCTION_ARGS) int i, j; text *item = NULL; - - deconstruct_array(query, - TEXTOID, -1, false, 'i', - &key_datums, &key_nulls, &key_count); - + deconstruct_array(query, TEXTOID, -1, false, 'i', &key_datums, &key_nulls, &key_count); entries = (Datum *) palloc(sizeof(Datum) * key_count); for (i = 0, j = 0; i < key_count; ++i) { @@ -193,9 +175,7 @@ gin_extract_jsonb_query(PG_FUNCTION_ARGS) if (key_nulls[i]) { continue; } - item = make_text_key(VARDATA(key_datums[i]), - VARSIZE(key_datums[i]) - VARHDRSZ, - JKEYELEM); + item = make_text_key(VARDATA(key_datums[i]), VARSIZE(key_datums[i]) - VARHDRSZ, JKEYELEM); entries[j++] = PointerGetDatum(item); } @@ -212,16 +192,13 @@ gin_extract_jsonb_query(PG_FUNCTION_ARGS) PG_RETURN_POINTER(entries); } -Datum -gin_consistent_jsonb(PG_FUNCTION_ARGS) +Datum gin_consistent_jsonb(PG_FUNCTION_ARGS) { bool *check = (bool *) PG_GETARG_POINTER(0); StrategyNumber strategy = PG_GETARG_UINT16(1); - - /* Jsonb *query = PG_GETARG_JSONB(2); */ + /* example: Jsonb *query = PG_GETARG_JSONB(2); */ int32 nkeys = PG_GETARG_INT32(3); - - /* Pointer *extra_data = (Pointer *) PG_GETARG_POINTER(4); */ + /* example: Pointer *extra_data = (Pointer *) PG_GETARG_POINTER(4); */ bool *recheck = (bool *) PG_GETARG_POINTER(5); bool res = true; int32 i; @@ -267,21 +244,16 @@ gin_consistent_jsonb(PG_FUNCTION_ARGS) PG_RETURN_BOOL(res); } -Datum -gin_triconsistent_jsonb(PG_FUNCTION_ARGS) +Datum gin_triconsistent_jsonb(PG_FUNCTION_ARGS) { GinLogicValue *check = (GinLogicValue *) PG_GETARG_POINTER(0); - StrategyNumber strategy = PG_GETARG_UINT16(1); - /* Jsonb *query = PG_GETARG_JSONB(2); */ - int32 nkeys = PG_GETARG_INT32(3); - /* Pointer *extra_data = (Pointer *) PG_GETARG_POINTER(4); */ - GinLogicValue res = GIN_TRUE; - + StrategyNumber strategy = PG_GETARG_UINT16(1); + int32 nkeys = PG_GETARG_INT32(3); + GinLogicValue res = GIN_TRUE; int32 i; if (strategy == JsonbContainsStrategyNumber) { bool has_maybe = false; - /* * All extracted keys must be present. 
Combination of GIN_MAYBE and * GIN_TRUE gives GIN_MAYBE result because then all keys may be @@ -307,8 +279,7 @@ gin_triconsistent_jsonb(PG_FUNCTION_ARGS) if (!has_maybe && res == GIN_TRUE) { res = GIN_MAYBE; } - } else if (strategy == JsonbExistsStrategyNumber || - strategy == JsonbExistsAnyStrategyNumber) { + } else if (strategy == JsonbExistsStrategyNumber || strategy == JsonbExistsAnyStrategyNumber) { /* Existence of key guaranteed in default search mode */ res = GIN_FALSE; for (i = 0; i < nkeys; i++) { @@ -339,21 +310,16 @@ gin_triconsistent_jsonb(PG_FUNCTION_ARGS) } /* - * * jsonb_hash_ops GIN opclass support functions - * */ -Datum -gin_consistent_jsonb_hash(PG_FUNCTION_ARGS) +Datum gin_consistent_jsonb_hash(PG_FUNCTION_ARGS) { - bool *check = (bool *) PG_GETARG_POINTER(0); + bool *check = (bool *) PG_GETARG_POINTER(0); StrategyNumber strategy = PG_GETARG_UINT16(1); - /* Jsonb *query = PG_GETARG_JSONB(2); */ - int32 nkeys = PG_GETARG_INT32(3); - /* Pointer *extra_data = (Pointer *) PG_GETARG_POINTER(4); */ - bool *recheck = (bool *) PG_GETARG_POINTER(5); - bool res = true; - int32 i; + int32 nkeys = PG_GETARG_INT32(3); + bool *recheck = (bool *) PG_GETARG_POINTER(5); + bool res = true; + int32 i; if (strategy != JsonbContainsStrategyNumber) { elog(ERROR, "unrecognized strategy number: %d", strategy); @@ -379,17 +345,14 @@ gin_consistent_jsonb_hash(PG_FUNCTION_ARGS) PG_RETURN_BOOL(res); } -Datum -gin_triconsistent_jsonb_hash(PG_FUNCTION_ARGS) +Datum gin_triconsistent_jsonb_hash(PG_FUNCTION_ARGS) { GinLogicValue *check = (GinLogicValue *) PG_GETARG_POINTER(0); - StrategyNumber strategy = PG_GETARG_UINT16(1); - /* Jsonb *query = PG_GETARG_JSONB(2); */ - int32 nkeys = PG_GETARG_INT32(3); - /* Pointer *extra_data = (Pointer *) PG_GETARG_POINTER(4); */ - GinLogicValue res = GIN_TRUE; - int32 i; - bool has_maybe = false; + StrategyNumber strategy = PG_GETARG_UINT16(1); + int32 nkeys = PG_GETARG_INT32(3); + GinLogicValue res = GIN_TRUE; + int32 i; + bool has_maybe = false; if (strategy != JsonbContainsStrategyNumber) { elog(ERROR, "unrecognized strategy number: %d", strategy); @@ -425,8 +388,7 @@ gin_triconsistent_jsonb_hash(PG_FUNCTION_ARGS) PG_RETURN_GIN_LOGIC_VALUE(res); } -Datum -gin_extract_jsonb_hash(PG_FUNCTION_ARGS) +Datum gin_extract_jsonb_hash(PG_FUNCTION_ARGS) { Jsonb *jb = PG_GETARG_JSONB(0); int32 *nentries = (int32 *) PG_GETARG_POINTER(1); @@ -445,16 +407,13 @@ gin_extract_jsonb_hash(PG_FUNCTION_ARGS) } entries = (Datum *) palloc(sizeof(Datum) * total); - it = JsonbIteratorInit(VARDATA(jb)); - tail.parent = NULL; tail.hash = 0; stack = &tail; while ((r = JsonbIteratorNext(&it, &v, false)) != WJB_DONE) { PathHashStack *tmp = NULL; - if (i >= total) { total *= 2; entries = (Datum *) repalloc(entries, sizeof(Datum) * total); @@ -516,12 +475,10 @@ gin_extract_jsonb_hash(PG_FUNCTION_ARGS) } *nentries = i; - PG_RETURN_POINTER(entries); } -Datum -gin_extract_jsonb_query_hash(PG_FUNCTION_ARGS) +Datum gin_extract_jsonb_query_hash(PG_FUNCTION_ARGS) { int32 *nentries = (int32 *) PG_GETARG_POINTER(1); StrategyNumber strategy = PG_GETARG_UINT16(2); @@ -550,29 +507,26 @@ gin_extract_jsonb_query_hash(PG_FUNCTION_ARGS) * Build a text value from a cstring and flag suitable for storage as a key * value */ -static text * -make_text_key(const char *str, int len, char flag) +static text *make_text_key(const char *str, int len, char flag) { - text *item = NULL; + text *item = NULL; - item = (text *) palloc(VARHDRSZ + len + 1); + item = (text *)palloc(VARHDRSZ + len + 1); SET_VARSIZE(item, VARHDRSZ + 
len + 1); *VARDATA(item) = flag; - - memcpy(VARDATA(item) + 1, str, len); - + errno_t rc = memcpy_s(VARDATA(item) + 1, len, str, len); + securec_check(rc, "\0", "\0"); return item; } /* * Create a textual representation of a jsonbValue for GIN storage. */ -static text * -make_scalar_key(const JsonbValue * scalarVal, char flag) +static text *make_scalar_key(const JsonbValue *scalarVal, char flag) { - text *item = NULL; - char *cstr = NULL; + text *item = NULL; + char *cstr = NULL; switch (scalarVal->type) { case jbvNull: @@ -596,8 +550,7 @@ make_scalar_key(const JsonbValue * scalarVal, char flag) pfree(cstr); break; case jbvString: - item = make_text_key(scalarVal->string.val, scalarVal->string.len, - flag); + item = make_text_key(scalarVal->string.val, scalarVal->string.len, flag); break; default: elog(ERROR, "invalid jsonb scalar type"); diff --git a/src/common/backend/utils/adt/jsonb_op.cpp b/src/common/backend/utils/adt/jsonb_op.cpp index d386420c5..952b136d5 100644 --- a/src/common/backend/utils/adt/jsonb_op.cpp +++ b/src/common/backend/utils/adt/jsonb_op.cpp @@ -1,24 +1,23 @@ -/*------------------------------------------------------------------------- +/* ------------------------------------------------------------------------- * - * jsonb_op.c + * jsonb_op.cpp * Special operators for jsonb only, used by various index access methods * - * Portions Copyright (c) 2020 Huawei Technologies Co.,Ltd. + * Portions Copyright (c) 2021 Huawei Technologies Co.,Ltd. * Copyright (c) 2014, PostgreSQL Global Development Group * * * IDENTIFICATION - * src/backend/utils/adt/jsonb_op.c + * src/common/backend/utils/adt/jsonb_op.cpp * - *------------------------------------------------------------------------- + * ------------------------------------------------------------------------- */ #include "postgres.h" #include "miscadmin.h" #include "utils/jsonb.h" -Datum -jsonb_exists(PG_FUNCTION_ARGS) +Datum jsonb_exists(PG_FUNCTION_ARGS) { Jsonb *jb = PG_GETARG_JSONB(0); text *key = PG_GETARG_TEXT_PP(1); @@ -35,22 +34,18 @@ jsonb_exists(PG_FUNCTION_ARGS) kval.string.val = VARDATA_ANY(key); kval.string.len = VARSIZE_ANY_EXHDR(key); - v = findJsonbValueFromSuperHeader(VARDATA(jb), - JB_FOBJECT | JB_FARRAY, - NULL, - &kval); + v = findJsonbValueFromSuperHeader(VARDATA(jb), JB_FOBJECT | JB_FARRAY, NULL, &kval); PG_RETURN_BOOL(v != NULL); } -Datum -jsonb_exists_any(PG_FUNCTION_ARGS) +Datum jsonb_exists_any(PG_FUNCTION_ARGS) { Jsonb *jb = PG_GETARG_JSONB(0); ArrayType *keys = PG_GETARG_ARRAYTYPE_P(1); JsonbValue *arrKey = arrayToJsonbSortedArray(keys); - uint32 *plowbound = NULL, - lowbound = 0; + uint32 *plowbound = NULL; + uint32 lowbound = 0; int i; if (arrKey == NULL || arrKey->object.nPairs == 0) { @@ -68,10 +63,8 @@ jsonb_exists_any(PG_FUNCTION_ARGS) * the lower bound of the last search. */ for (i = 0; i < arrKey->array.nElems; i++) { - if (findJsonbValueFromSuperHeader(VARDATA(jb), - JB_FOBJECT | JB_FARRAY, - plowbound, - arrKey->array.elems + i) != NULL) { + if (findJsonbValueFromSuperHeader(VARDATA(jb), JB_FOBJECT | JB_FARRAY, + plowbound, arrKey->array.elems + i) != NULL) { PG_RETURN_BOOL(true); } } @@ -79,8 +72,7 @@ jsonb_exists_any(PG_FUNCTION_ARGS) PG_RETURN_BOOL(false); } -Datum -jsonb_exists_all(PG_FUNCTION_ARGS) +Datum jsonb_exists_all(PG_FUNCTION_ARGS) { Jsonb *jb = PG_GETARG_JSONB(0); ArrayType *keys = PG_GETARG_ARRAYTYPE_P(1); @@ -104,10 +96,8 @@ jsonb_exists_all(PG_FUNCTION_ARGS) * the lower bound of the last search. 
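Returning to make_text_key at the top of this hunk: the memcpy_s/securec_check pairing it now uses is the secure-copy pattern applied throughout this patch, with the destination capacity passed explicitly and the return code always checked. A minimal sketch of the same pattern, using illustrative locals (dst, src and len are not names from the patch):

    /* copy len bytes into a palloc'd buffer known to hold len + 1 bytes */
    char *dst = (char *)palloc(len + 1);
    errno_t rc = memcpy_s(dst, len + 1, src, len);  /* dest, destMax, src, count */
    securec_check(rc, "\0", "\0");                  /* a non-EOK return is treated as an error */
    dst[len] = '\0';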
*/ for (i = 0; i < arrKey->array.nElems; i++) { - if (findJsonbValueFromSuperHeader(VARDATA(jb), - JB_FOBJECT | JB_FARRAY, - plowbound, - arrKey->array.elems + i) == NULL) { + if (findJsonbValueFromSuperHeader(VARDATA(jb), JB_FOBJECT | JB_FARRAY, + plowbound, arrKey->array.elems + i) == NULL) { PG_RETURN_BOOL(false); } } @@ -115,11 +105,10 @@ jsonb_exists_all(PG_FUNCTION_ARGS) PG_RETURN_BOOL(true); } -Datum -jsonb_contains(PG_FUNCTION_ARGS) +Datum jsonb_contains(PG_FUNCTION_ARGS) { - Jsonb *val = PG_GETARG_JSONB(0); - Jsonb *tmpl = PG_GETARG_JSONB(1); + Jsonb *val = PG_GETARG_JSONB(0); + Jsonb *tmpl = PG_GETARG_JSONB(1); JsonbIterator *it1 = NULL; JsonbIterator *it2 = NULL; @@ -134,12 +123,11 @@ jsonb_contains(PG_FUNCTION_ARGS) PG_RETURN_BOOL(JsonbDeepContains(&it1, &it2)); } -Datum -jsonb_contained(PG_FUNCTION_ARGS) +Datum jsonb_contained(PG_FUNCTION_ARGS) { /* Commutator of "contains" */ - Jsonb *tmpl = PG_GETARG_JSONB(0); - Jsonb *val = PG_GETARG_JSONB(1); + Jsonb *tmpl = PG_GETARG_JSONB(0); + Jsonb *val = PG_GETARG_JSONB(1); JsonbIterator *it1 = NULL; JsonbIterator *it2 = NULL; @@ -154,12 +142,11 @@ jsonb_contained(PG_FUNCTION_ARGS) PG_RETURN_BOOL(JsonbDeepContains(&it1, &it2)); } -Datum -jsonb_ne(PG_FUNCTION_ARGS) +Datum jsonb_ne(PG_FUNCTION_ARGS) { - Jsonb *jba = PG_GETARG_JSONB(0); - Jsonb *jbb = PG_GETARG_JSONB(1); - bool res; + Jsonb *jba = PG_GETARG_JSONB(0); + Jsonb *jbb = PG_GETARG_JSONB(1); + bool res = false; res = (compareJsonbSuperHeaderValue(VARDATA(jba), VARDATA(jbb)) != 0); @@ -171,12 +158,11 @@ jsonb_ne(PG_FUNCTION_ARGS) /* * B-Tree operator class operators, support function */ -Datum -jsonb_lt(PG_FUNCTION_ARGS) +Datum jsonb_lt(PG_FUNCTION_ARGS) { - Jsonb *jba = PG_GETARG_JSONB(0); - Jsonb *jbb = PG_GETARG_JSONB(1); - bool res; + Jsonb *jba = PG_GETARG_JSONB(0); + Jsonb *jbb = PG_GETARG_JSONB(1); + bool res = false; res = (compareJsonbSuperHeaderValue(VARDATA(jba), VARDATA(jbb)) < 0); @@ -185,12 +171,11 @@ jsonb_lt(PG_FUNCTION_ARGS) PG_RETURN_BOOL(res); } -Datum -jsonb_gt(PG_FUNCTION_ARGS) +Datum jsonb_gt(PG_FUNCTION_ARGS) { - Jsonb *jba = PG_GETARG_JSONB(0); - Jsonb *jbb = PG_GETARG_JSONB(1); - bool res; + Jsonb *jba = PG_GETARG_JSONB(0); + Jsonb *jbb = PG_GETARG_JSONB(1); + bool res = false; res = (compareJsonbSuperHeaderValue(VARDATA(jba), VARDATA(jbb)) > 0); @@ -199,12 +184,11 @@ jsonb_gt(PG_FUNCTION_ARGS) PG_RETURN_BOOL(res); } -Datum -jsonb_le(PG_FUNCTION_ARGS) +Datum jsonb_le(PG_FUNCTION_ARGS) { - Jsonb *jba = PG_GETARG_JSONB(0); - Jsonb *jbb = PG_GETARG_JSONB(1); - bool res; + Jsonb *jba = PG_GETARG_JSONB(0); + Jsonb *jbb = PG_GETARG_JSONB(1); + bool res = false; res = (compareJsonbSuperHeaderValue(VARDATA(jba), VARDATA(jbb)) <= 0); @@ -213,13 +197,11 @@ jsonb_le(PG_FUNCTION_ARGS) PG_RETURN_BOOL(res); } -Datum -jsonb_ge(PG_FUNCTION_ARGS) +Datum jsonb_ge(PG_FUNCTION_ARGS) { - - Jsonb *jba = PG_GETARG_JSONB(0); - Jsonb *jbb = PG_GETARG_JSONB(1); - bool res; + Jsonb *jba = PG_GETARG_JSONB(0); + Jsonb *jbb = PG_GETARG_JSONB(1); + bool res = false; res = (compareJsonbSuperHeaderValue(VARDATA(jba), VARDATA(jbb)) >= 0); @@ -228,12 +210,11 @@ jsonb_ge(PG_FUNCTION_ARGS) PG_RETURN_BOOL(res); } -Datum -jsonb_eq(PG_FUNCTION_ARGS) +Datum jsonb_eq(PG_FUNCTION_ARGS) { - Jsonb *jba = PG_GETARG_JSONB(0); - Jsonb *jbb = PG_GETARG_JSONB(1); - bool res; + Jsonb *jba = PG_GETARG_JSONB(0); + Jsonb *jbb = PG_GETARG_JSONB(1); + bool res = false; res = (compareJsonbSuperHeaderValue(VARDATA(jba), VARDATA(jbb)) == 0); @@ -242,12 +223,11 @@ jsonb_eq(PG_FUNCTION_ARGS) PG_RETURN_BOOL(res); } 
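All of the jsonb B-Tree operator functions above share one shape: fetch both detoasted arguments, delegate to compareJsonbSuperHeaderValue on their superheaders, release any detoasted copies, and return the result. Distilled into a minimal sketch of the three-way jsonb_cmp form (res carries the comparison result, so it is an int initialized to 0):

    Jsonb *jba = PG_GETARG_JSONB(0);
    Jsonb *jbb = PG_GETARG_JSONB(1);
    int res = 0;

    res = compareJsonbSuperHeaderValue(VARDATA(jba), VARDATA(jbb));

    PG_FREE_IF_COPY(jba, 0);   /* free only if detoasting made a copy */
    PG_FREE_IF_COPY(jbb, 1);
    PG_RETURN_INT32(res);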
-Datum -jsonb_cmp(PG_FUNCTION_ARGS) +Datum jsonb_cmp(PG_FUNCTION_ARGS) { - Jsonb *jba = PG_GETARG_JSONB(0); - Jsonb *jbb = PG_GETARG_JSONB(1); - int res; + Jsonb *jba = PG_GETARG_JSONB(0); + Jsonb *jbb = PG_GETARG_JSONB(1); + int res = false; res = compareJsonbSuperHeaderValue(VARDATA(jba), VARDATA(jbb)); @@ -259,8 +239,7 @@ jsonb_cmp(PG_FUNCTION_ARGS) /* * Hash operator class jsonb hashing function */ -Datum -jsonb_hash(PG_FUNCTION_ARGS) +Datum jsonb_hash(PG_FUNCTION_ARGS) { Jsonb *jb = PG_GETARG_JSONB(0); JsonbIterator *it = NULL; diff --git a/src/common/backend/utils/adt/jsonb_util.cpp b/src/common/backend/utils/adt/jsonb_util.cpp index 7e6e2fec9..042a6103a 100644 --- a/src/common/backend/utils/adt/jsonb_util.cpp +++ b/src/common/backend/utils/adt/jsonb_util.cpp @@ -3,7 +3,7 @@ * jsonb_util.c * Utilities for jsonb datatype * - * Portions Copyright (c) 2020 Huawei Technologies Co.,Ltd. + * Portions Copyright (c) 2021 Huawei Technologies Co.,Ltd. * Copyright (c) 2014, PostgreSQL Global Development Group * * @@ -35,8 +35,7 @@ * object. */ #define JSONB_MAX_ELEMS (Min(MaxAllocSize / sizeof(JsonbValue), JENTRY_POSMASK)) -#define JSONB_MAX_PAIRS (Min(MaxAllocSize / sizeof(JsonbPair), \ - JENTRY_POSMASK)) +#define JSONB_MAX_PAIRS (Min(MaxAllocSize / sizeof(JsonbPair), JENTRY_POSMASK)) /* * State used while converting an arbitrary JsonbValue into a Jsonb value @@ -44,8 +43,7 @@ * * ConvertLevel: Bookkeeping around particular level when converting. */ -typedef struct convertLevel -{ +typedef struct convertLevel { uint32 i; /* Iterates once per element, or once per pair */ uint32 *header; /* Pointer to current container header */ JEntry *meta; /* This level's metadata */ @@ -55,44 +53,39 @@ typedef struct convertLevel /* * convertState: Overall bookkeeping state for conversion */ -typedef struct convertState -{ +typedef struct convertState { /* Preallocated buffer in which to form varlena/Jsonb value */ Jsonb *buffer; /* Pointer into buffer */ char *ptr; /* State for */ - convertLevel *allState, /* Overall state array */ - *contPtr; /* Cur container pointer (in allState) */ + convertLevel *allState; /* Overall state array */ + convertLevel *contPtr; /* Cur container pointer (in allState) */ /* Current size of buffer containing allState array */ Size levelSz; } convertState; -static int compareJsonbScalarValue(JsonbValue * a, JsonbValue * b); -static int lexicalCompareJsonbStringValue(const void *a, const void *b); -static Size convertJsonb(JsonbValue * val, Jsonb* buffer); -static inline short addPaddingInt(convertState * cstate); -static void walkJsonbValueConversion(JsonbValue * val, convertState * cstate, - uint32 nestlevel); -static void putJsonbValueConversion(convertState * cstate, JsonbValue * val, - uint32 flags, uint32 level); -static void putScalarConversion(convertState * cstate, JsonbValue * scalarVal, - uint32 level, uint32 i); -static void iteratorFromContainerBuf(JsonbIterator * it, char *buffer); -static bool formIterIsContainer(JsonbIterator ** it, JsonbValue * val, - JEntry * ent, bool skipNested); -static JsonbIterator *freeAndGetParent(JsonbIterator * it); -static JsonbParseState *pushState(JsonbParseState ** pstate); -static void appendKey(JsonbParseState * pstate, JsonbValue * scalarVal); -static void appendValue(JsonbParseState * pstate, JsonbValue * scalarVal); -static void appendElement(JsonbParseState * pstate, JsonbValue * scalarVal); -static int lengthCompareJsonbStringValue(const void *a, const void *b, void *arg); -static int lengthCompareJsonbPair(const void *a, 
const void *b, void *arg); -static void uniqueifyJsonbObject(JsonbValue * object); -static void uniqueifyJsonbArray(JsonbValue * array); +static int compareJsonbScalarValue(JsonbValue* a, JsonbValue* b); +static int lexicalCompareJsonbStringValue(const void* a, const void* b); +static Size convertJsonb(JsonbValue* val, Jsonb* buffer); +static inline short addPaddingInt(convertState* cstate); +static void walkJsonbValueConversion(JsonbValue* val, convertState* cstate, uint32 nestlevel); +static void putJsonbValueConversion(convertState* cstate, JsonbValue* val, uint32 flags, uint32 level); +static void putScalarConversion(convertState* cstate, JsonbValue* scalarVal, uint32 level, uint32 i); +static void iteratorFromContainerBuf(JsonbIterator* it, char* buffer); +static bool formIterIsContainer(JsonbIterator** it, JsonbValue* val, JEntry* ent, bool skipNested); +static JsonbIterator* freeAndGetParent(JsonbIterator* it); +static JsonbParseState* pushState(JsonbParseState** pstate); +static void appendKey(JsonbParseState* pstate, JsonbValue* scalarVal); +static void appendValue(JsonbParseState* pstate, JsonbValue* scalarVal); +static void appendElement(JsonbParseState* pstate, JsonbValue* scalarVal); +static int lengthCompareJsonbStringValue(const void* a, const void* b, void* binequal); +static int lengthCompareJsonbPair(const void* a, const void* b, void* binequal); +static void uniqueifyJsonbObject(JsonbValue* object); +static void uniqueifyJsonbArray(JsonbValue* array); /* * Turn an in-memory JsonbValue into a Jsonb for on-disk storage. @@ -107,11 +100,10 @@ static void uniqueifyJsonbArray(JsonbValue * array); * values, or simple containers of scalar values, where it would be * inconvenient to deal with a great amount of other state. */ -Jsonb * -JsonbValueToJsonb(JsonbValue * val) +Jsonb* JsonbValueToJsonb(JsonbValue *val) { - Jsonb *out = NULL; - Size sz; + Jsonb *out = NULL; + Size sz; if (IsAJsonbScalar(val)) { /* Scalar value */ @@ -131,7 +123,7 @@ JsonbValueToJsonb(JsonbValue * val) sz = convertJsonb(res, out); Assert(sz <= (uint)res->estSize); SET_VARSIZE(out, sz + VARHDRSZ); - } else if (val->type == jbvObject || val->type == jbvArray){ + } else if (val->type == jbvObject || val->type == jbvArray) { out = (Jsonb*)palloc(VARHDRSZ + val->estSize); sz = convertJsonb(val, out); Assert(sz <= (uint)val->estSize); @@ -140,7 +132,7 @@ JsonbValueToJsonb(JsonbValue * val) Assert(val->type == jbvBinary); out = (Jsonb*)palloc(VARHDRSZ + val->binary.len); SET_VARSIZE(out, VARHDRSZ + val->binary.len); - + errno_t rc = memcpy_s(VARDATA(out), VARHDRSZ + val->binary.len, val->binary.data, val->binary.len); securec_check(rc, "\0", "\0"); } @@ -158,8 +150,7 @@ JsonbValueToJsonb(JsonbValue * val) * called from B-Tree support function 1, we're careful about not leaking * memory here. */ -int -compareJsonbSuperHeaderValue(JsonbSuperHeader a, JsonbSuperHeader b) +int compareJsonbSuperHeaderValue(JsonbSuperHeader a, JsonbSuperHeader b) { JsonbIterator *ita = NULL; JsonbIterator *itb = NULL; @@ -190,13 +181,13 @@ compareJsonbSuperHeaderValue(JsonbSuperHeader a, JsonbSuperHeader b) break; } if (ra == WJB_END_ARRAY || ra == WJB_END_OBJECT) { - /* - * There is no array or object to compare at this stage of - * processing. jbvArray/jbvObject values are compared - * initially, at the WJB_BEGIN_ARRAY and WJB_BEGIN_OBJECT - * tokens. - */ - continue; + /* + * There is no array or object to compare at this stage of + * processing. 
jbvArray/jbvObject values are compared + * initially, at the WJB_BEGIN_ARRAY and WJB_BEGIN_OBJECT + * tokens. + */ + continue; } if (va.type == vb.type) { @@ -215,7 +206,7 @@ compareJsonbSuperHeaderValue(JsonbSuperHeader a, JsonbSuperHeader b) * special case here though, since we still want the * general type-based comparisons to apply, and as far * as we're concerned a pseudo array is just a scalar. - */ + */ if (va.array.rawScalar != vb.array.rawScalar) { res = (va.array.rawScalar) ? -1 : 1; } else if (va.array.nElems != vb.array.nElems) { @@ -311,23 +302,21 @@ compareJsonbSuperHeaderValue(JsonbSuperHeader a, JsonbSuperHeader b) * presumably anyone exploiting this is only interested in matching Object keys * with a String. lowbound is given in units of pairs, not underlying values. */ -JsonbValue * -findJsonbValueFromSuperHeader(JsonbSuperHeader sheader, uint32 flags, - uint32 *lowbound, JsonbValue * key) +JsonbValue *findJsonbValueFromSuperHeader(JsonbSuperHeader sheader, uint32 flags, uint32 *lowbound, JsonbValue *key) { - uint32 superheader = *(uint32 *) sheader; - JEntry *array = (JEntry *) (sheader + sizeof(uint32)); - uint count = (superheader & JB_CMASK); - JsonbValue *result = (JsonbValue*)palloc(sizeof(JsonbValue)); + uint32 superheader = *(uint32 *)sheader; + JEntry *array = (JEntry *)(sheader + sizeof(uint32)); + uint count = (superheader & JB_CMASK); + JsonbValue *result = (JsonbValue*)palloc(sizeof(JsonbValue)); Assert((flags & ~(JB_FARRAY | JB_FOBJECT)) == 0); if (flags & JB_FARRAY & superheader) { - char *data = (char *) (array + (superheader & JB_CMASK)); - uint i; + char *data = (char *)(array + (superheader & JB_CMASK)); + uint i; for (i = 0; i < count; i++) { - JEntry *e = array + i; + JEntry *e = array + i; if (JBE_ISNULL(*e) && key->type == jbvNull) { result->type = jbvNull; @@ -446,8 +435,7 @@ findJsonbValueFromSuperHeader(JsonbSuperHeader sheader, uint32 flags, * * Returns palloc()'d copy of value. */ -JsonbValue * -getIthJsonbValueFromSuperHeader(JsonbSuperHeader sheader, uint32 i) +JsonbValue *getIthJsonbValueFromSuperHeader(JsonbSuperHeader sheader, uint32 i) { uint32 superheader = *(uint32 *) sheader; JsonbValue *result = NULL; @@ -509,8 +497,7 @@ getIthJsonbValueFromSuperHeader(JsonbSuperHeader sheader, uint32 i) * JsonbValue. There is one exception -- WJB_BEGIN_ARRAY callers may pass a * "raw scalar" pseudo array to append that. 
*/ -JsonbValue * -pushJsonbValue(JsonbParseState ** pstate, int seq, JsonbValue * scalarVal) +JsonbValue *pushJsonbValue(JsonbParseState **pstate, int seq, JsonbValue *scalarVal) { JsonbValue *result = NULL; @@ -531,8 +518,7 @@ pushJsonbValue(JsonbParseState ** pstate, int seq, JsonbValue * scalarVal) } else { (*pstate)->size = 4; } - (*pstate)->contVal.array.elems = (JsonbValue*)palloc(sizeof(JsonbValue) * - (*pstate)->size); + (*pstate)->contVal.array.elems = (JsonbValue*)palloc(sizeof(JsonbValue) * (*pstate)->size); break; case WJB_BEGIN_OBJECT: Assert(!scalarVal); @@ -542,21 +528,18 @@ pushJsonbValue(JsonbParseState ** pstate, int seq, JsonbValue * scalarVal) (*pstate)->contVal.estSize = 3 * sizeof(JEntry); (*pstate)->contVal.object.nPairs = 0; (*pstate)->size = 4; - (*pstate)->contVal.object.pairs = (JsonbPair*)palloc(sizeof(JsonbPair) * - (*pstate)->size); + (*pstate)->contVal.object.pairs = (JsonbPair*)palloc(sizeof(JsonbPair) * (*pstate)->size); break; case WJB_KEY: Assert(scalarVal->type == jbvString); appendKey(*pstate, scalarVal); break; case WJB_VALUE: - Assert(IsAJsonbScalar(scalarVal) || - scalarVal->type == jbvBinary); + Assert(IsAJsonbScalar(scalarVal) || scalarVal->type == jbvBinary); appendValue(*pstate, scalarVal); break; case WJB_ELEM: - Assert(IsAJsonbScalar(scalarVal) || - scalarVal->type == jbvBinary); + Assert(IsAJsonbScalar(scalarVal) || scalarVal->type == jbvBinary); appendElement(*pstate, scalarVal); break; case WJB_END_OBJECT: @@ -597,8 +580,7 @@ pushJsonbValue(JsonbParseState ** pstate, int seq, JsonbValue * scalarVal) * * See JsonbIteratorNext() for notes on memory management. */ -JsonbIterator * -JsonbIteratorInit(JsonbSuperHeader sheader) +JsonbIterator *JsonbIteratorInit(JsonbSuperHeader sheader) { JsonbIterator *it = (JsonbIterator*)palloc(sizeof(JsonbIterator)); @@ -636,8 +618,7 @@ JsonbIteratorInit(JsonbSuperHeader sheader) * or Object element/pair buffers, since their element/pair pointers are * garbage. */ -int -JsonbIteratorNext(JsonbIterator ** it, JsonbValue * val, bool skipNested) +int JsonbIteratorNext(JsonbIterator **it, JsonbValue *val, bool skipNested) { JsonbIterState state; @@ -675,8 +656,7 @@ JsonbIteratorNext(JsonbIterator ** it, JsonbValue * val, bool skipNested) */ *it = freeAndGetParent(*it); return WJB_END_ARRAY; - } else if (formIterIsContainer(it, val, &(*it)->meta[(*it)->i++], - skipNested)) { + } else if (formIterIsContainer(it, val, &(*it)->meta[(*it)->i++], skipNested)) { /* * New child iterator acquired within formIterIsContainer. * Recurse into container. Don't directly return jbvBinary @@ -736,8 +716,7 @@ JsonbIteratorNext(JsonbIterator ** it, JsonbValue * val, bool skipNested) * child iterator. If it is, don't bother !skipNested callers with * dealing with the jbvBinary representation. */ - if (formIterIsContainer(it, val, &(*it)->meta[((*it)->i++) * 2 + 1], - skipNested)) { + if (formIterIsContainer(it, val, &(*it)->meta[((*it)->i++) * 2 + 1], skipNested)) { return JsonbIteratorNext(it, val, skipNested); } else { return WJB_VALUE; @@ -761,8 +740,7 @@ JsonbIteratorNext(JsonbIterator ** it, JsonbValue * val, bool skipNested) * "val" is lhs Jsonb, and mContained is rhs Jsonb when called from top level. * We determine if mContained is contained within val. 
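The iterator pair defined above is consumed with the same idiom throughout these files: initialize on the superheader, then loop until WJB_DONE, switching on the returned WJB_* token while the JsonbValue passed in describes the current key, element, or value. The skeleton of that loop, reduced to the parts every caller repeats:

    JsonbIterator *it = JsonbIteratorInit(VARDATA(jb));
    JsonbValue v;
    int r;

    while ((r = JsonbIteratorNext(&it, &v, false)) != WJB_DONE) {
        /* r is one of WJB_BEGIN_OBJECT/ARRAY, WJB_KEY, WJB_VALUE, WJB_ELEM, WJB_END_* */
        if (r == WJB_KEY) {
            /* v.string.val / v.string.len hold the key text */
        }
    }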
*/ -bool -JsonbDeepContains(JsonbIterator ** val, JsonbIterator ** mContained) +bool JsonbDeepContains(JsonbIterator **val, JsonbIterator **mContained) { uint32 rval, rcont; @@ -810,10 +788,7 @@ JsonbDeepContains(JsonbIterator ** val, JsonbIterator ** mContained) Assert(rcont == WJB_KEY); /* First, find value by key... */ - lhsVal = findJsonbValueFromSuperHeader((*val)->buffer, - JB_FOBJECT, - NULL, - &vcontained); + lhsVal = findJsonbValueFromSuperHeader((*val)->buffer, JB_FOBJECT, NULL, &vcontained); if (!lhsVal) { return false; @@ -907,10 +882,7 @@ JsonbDeepContains(JsonbIterator ** val, JsonbIterator ** mContained) Assert(rcont == WJB_ELEM); if (IsAJsonbScalar(&vcontained)) { - if (!findJsonbValueFromSuperHeader((*val)->buffer, - JB_FARRAY, - NULL, - &vcontained)) { + if (!findJsonbValueFromSuperHeader((*val)->buffer, JB_FARRAY, NULL, &vcontained)) { return false; } } else { @@ -927,7 +899,7 @@ JsonbDeepContains(JsonbIterator ** val, JsonbIterator ** mContained) lhsConts = (JsonbValue*)palloc(sizeof(JsonbValue) * nLhsElems); for (i = 0; i < nLhsElems; i++) { - /* Store all lhs elements in temp array*/ + /* Store all lhs elements in temp array */ rcont = JsonbIteratorNext(val, &vval, true); Assert(rcont == WJB_ELEM); @@ -991,8 +963,7 @@ JsonbDeepContains(JsonbIterator ** val, JsonbIterator ** mContained) * in the array, so we enforce that the number of strings cannot exceed * JSONB_MAX_PAIRS. */ -JsonbValue * -arrayToJsonbSortedArray(ArrayType *array) +JsonbValue *arrayToJsonbSortedArray(ArrayType *array) { Datum *key_datums = NULL; bool *key_nulls = NULL; @@ -1002,8 +973,7 @@ arrayToJsonbSortedArray(ArrayType *array) j; /* Extract data for sorting */ - deconstruct_array(array, TEXTOID, -1, false, 'i', &key_datums, &key_nulls, - &elem_count); + deconstruct_array(array, TEXTOID, -1, false, 'i', &key_datums, &key_nulls, &elem_count); if (elem_count == 0) { return NULL; @@ -1049,8 +1019,7 @@ arrayToJsonbSortedArray(ArrayType *array) * Some callers may wish to independently XOR in JB_FOBJECT and JB_FARRAY * flags. */ -void -JsonbHashScalarValue(const JsonbValue * scalarVal, uint32 * hash) +void JsonbHashScalarValue(const JsonbValue * scalarVal, uint32 * hash) { int tmp; @@ -1065,14 +1034,12 @@ JsonbHashScalarValue(const JsonbValue * scalarVal, uint32 * hash) *hash ^= 0x01; return; case jbvString: - tmp = hash_any((unsigned char *) scalarVal->string.val, - scalarVal->string.len); + tmp = hash_any((unsigned char *) scalarVal->string.val, scalarVal->string.len); *hash ^= tmp; return; case jbvNumeric: /* Must be unaffected by trailing zeroes */ - tmp = DatumGetInt32(DirectFunctionCall1(hash_numeric, - NumericGetDatum(scalarVal->numeric))); + tmp = DatumGetInt32(DirectFunctionCall1(hash_numeric, NumericGetDatum(scalarVal->numeric))); *hash ^= tmp; return; case jbvBool: @@ -1090,8 +1057,7 @@ JsonbHashScalarValue(const JsonbValue * scalarVal, uint32 * hash) * never be used against Strings for anything other than searching for values * within a single jsonb. */ -static int -compareJsonbScalarValue(JsonbValue * aScalar, JsonbValue * bScalar) +static int compareJsonbScalarValue(JsonbValue * aScalar, JsonbValue * bScalar) { if (aScalar->type == bScalar->type) { switch (aScalar->type) { @@ -1123,8 +1089,7 @@ compareJsonbScalarValue(JsonbValue * aScalar, JsonbValue * bScalar) * Sorts strings lexically, using the default database collation. Used by * B-Tree operators, where a lexical sort order is generally expected. 
*/ -static int -lexicalCompareJsonbStringValue(const void *a, const void *b) +static int lexicalCompareJsonbStringValue(const void *a, const void *b) { const JsonbValue *va = (const JsonbValue *) a; const JsonbValue *vb = (const JsonbValue *) b; @@ -1132,16 +1097,14 @@ lexicalCompareJsonbStringValue(const void *a, const void *b) Assert(va->type == jbvString); Assert(vb->type == jbvString); - return varstr_cmp(va->string.val, va->string.len, vb->string.val, - vb->string.len, DEFAULT_COLLATION_OID); + return varstr_cmp(va->string.val, va->string.len, vb->string.val, vb->string.len, DEFAULT_COLLATION_OID); } /* * Given a JsonbValue, convert to Jsonb and store in preallocated Jsonb buffer * sufficiently large to fit the value */ -static Size -convertJsonb(JsonbValue * val, Jsonb *buffer) +static Size convertJsonb(JsonbValue * val, Jsonb *buffer) { convertState state; Size len; @@ -1171,12 +1134,9 @@ convertJsonb(JsonbValue * val, Jsonb *buffer) * the top level calls putJsonbValueConversion once per sequential processing * token (in a manner similar to generic iteration). */ -static void -walkJsonbValueConversion(JsonbValue * val, convertState * cstate, - uint32 nestlevel) +static void walkJsonbValueConversion(JsonbValue * val, convertState * cstate, uint32 nestlevel) { int i; - check_stack_depth(); if (!val) @@ -1184,40 +1144,29 @@ walkJsonbValueConversion(JsonbValue * val, convertState * cstate, switch (val->type) { case jbvArray: - putJsonbValueConversion(cstate, val, WJB_BEGIN_ARRAY, nestlevel); for (i = 0; i < val->array.nElems; i++) { if (IsAJsonbScalar(&val->array.elems[i]) || val->array.elems[i].type == jbvBinary) { - putJsonbValueConversion(cstate, val->array.elems + i, - WJB_ELEM, nestlevel); + putJsonbValueConversion(cstate, val->array.elems + i, WJB_ELEM, nestlevel); } else { - walkJsonbValueConversion(val->array.elems + i, cstate, - nestlevel + 1); + walkJsonbValueConversion(val->array.elems + i, cstate, nestlevel + 1); } } putJsonbValueConversion(cstate, val, WJB_END_ARRAY, nestlevel); - break; case jbvObject: - putJsonbValueConversion(cstate, val, WJB_BEGIN_OBJECT, nestlevel); for (i = 0; i < val->object.nPairs; i++) { - putJsonbValueConversion(cstate, &val->object.pairs[i].key, - WJB_KEY, nestlevel); - + putJsonbValueConversion(cstate, &val->object.pairs[i].key, WJB_KEY, nestlevel); if (IsAJsonbScalar(&val->object.pairs[i].value) || val->object.pairs[i].value.type == jbvBinary) { - putJsonbValueConversion(cstate, - &val->object.pairs[i].value, - WJB_VALUE, nestlevel); + putJsonbValueConversion(cstate, &val->object.pairs[i].value, WJB_VALUE, nestlevel); } else { - walkJsonbValueConversion(&val->object.pairs[i].value, - cstate, nestlevel + 1); + walkJsonbValueConversion(&val->object.pairs[i].value, cstate, nestlevel + 1); } } putJsonbValueConversion(cstate, val, WJB_END_OBJECT, nestlevel); - break; default: elog(ERROR, "unknown type of jsonb container"); @@ -1228,13 +1177,11 @@ walkJsonbValueConversion(JsonbValue * val, convertState * cstate, * walkJsonbValueConversion() worker. Add padding sufficient to int-align our * access to conversion buffer. 
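The padding added here is simply the distance from the current write offset to the next int boundary: with offset = ptr - VARDATA(buffer), padlen = INTALIGN(offset) - offset. For example, on a platform where int is 4-byte aligned, an offset of 13 rounds up to 16 and three '\0' bytes are written, while an already-aligned offset yields padlen = 0.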
*/ -static inline -short addPaddingInt(convertState * cstate) +static inline short addPaddingInt(convertState *cstate) { short padlen, p; - padlen = INTALIGN(cstate->ptr - VARDATA(cstate->buffer)) - - (cstate->ptr - VARDATA(cstate->buffer)); + padlen = INTALIGN(cstate->ptr - VARDATA(cstate->buffer)) - (cstate->ptr - VARDATA(cstate->buffer)); for (p = padlen; p > 0; p--) { *cstate->ptr = '\0'; @@ -1257,14 +1204,11 @@ short addPaddingInt(convertState * cstate) * rather, the function is called as required for the start of an Object/Array, * and the end (i.e. there is one call per sequential processing WJB_* token). */ -static void -putJsonbValueConversion(convertState * cstate, JsonbValue * val, uint32 flags, - uint32 level) +static void putJsonbValueConversion(convertState *cstate, JsonbValue *val, uint32 flags, uint32 level) { if (level == cstate->levelSz) { cstate->levelSz *= 2; - cstate->allState = (convertLevel*)repalloc(cstate->allState, - sizeof(convertLevel) * cstate->levelSz); + cstate->allState = (convertLevel*)repalloc(cstate->allState, sizeof(convertLevel) * cstate->levelSz); } cstate->contPtr = cstate->allState + level; @@ -1303,7 +1247,6 @@ putJsonbValueConversion(convertState * cstate, JsonbValue * val, uint32 flags, cstate->contPtr->i++; } else if (flags & WJB_KEY) { Assert(val->type == jbvString); - putScalarConversion(cstate, val, level, cstate->contPtr->i * 2); } else if (flags & WJB_VALUE) { putScalarConversion(cstate, val, level, cstate->contPtr->i * 2 + 1); @@ -1321,27 +1264,23 @@ putJsonbValueConversion(convertState * cstate, JsonbValue * val, uint32 flags, } len = cstate->ptr - (char *) cstate->contPtr->begin; - prevPtr = cstate->contPtr - 1; if (*prevPtr->header & JB_FARRAY) { i = prevPtr->i; - prevPtr->meta[i].header = JENTRY_ISNEST; if (i == 0) { prevPtr->meta[0].header |= JENTRY_ISFIRST | len; } else { - prevPtr->meta[i].header |= - (prevPtr->meta[i - 1].header & JENTRY_POSMASK) + len; + prevPtr->meta[i].header |= (prevPtr->meta[i - 1].header & JENTRY_POSMASK) + len; } } else if (*prevPtr->header & JB_FOBJECT) { i = 2 * prevPtr->i + 1; /* Value, not key */ prevPtr->meta[i].header = JENTRY_ISNEST; - prevPtr->meta[i].header |= - (prevPtr->meta[i - 1].header & JENTRY_POSMASK) + len; + prevPtr->meta[i].header |= (prevPtr->meta[i - 1].header & JENTRY_POSMASK) + len; } else { elog(ERROR, "invalid jsonb container type"); } @@ -1361,14 +1300,12 @@ putJsonbValueConversion(convertState * cstate, JsonbValue * val, uint32 flags, * walkJsonbValueConversion()). It handles the details with regard to Jentry * metadata peculiar to each scalar type. */ -static void -putScalarConversion(convertState * cstate, JsonbValue * scalarVal, uint32 level, - uint32 i) +static void putScalarConversion(convertState *cstate, JsonbValue *scalarVal, uint32 level, uint32 i) { - int strlen; - int numlen; - short padlen; - errno_t rc = 0; + int strlen; + int numlen; + short padlen; + errno_t rc = 0; cstate->contPtr = cstate->allState + level; @@ -1388,7 +1325,7 @@ putScalarConversion(convertState * cstate, JsonbValue * scalarVal, uint32 level, } break; case jbvString: - strlen = scalarVal->string.len > 0 ? scalarVal->string.len : 1; + strlen = scalarVal->string.len > 0 ? 
scalarVal->string.len : 1; rc = memcpy_s(cstate->ptr, strlen, scalarVal->string.val, strlen); securec_check(rc, "\0", "\0"); cstate->ptr += scalarVal->string.len; @@ -1397,8 +1334,7 @@ putScalarConversion(convertState * cstate, JsonbValue * scalarVal, uint32 level, cstate->contPtr->meta[0].header |= scalarVal->string.len; } else { cstate->contPtr->meta[i].header |= - (cstate->contPtr->meta[i - 1].header & JENTRY_POSMASK) + - scalarVal->string.len; + (cstate->contPtr->meta[i - 1].header & JENTRY_POSMASK) + scalarVal->string.len; } break; case jbvNumeric: @@ -1414,17 +1350,14 @@ putScalarConversion(convertState * cstate, JsonbValue * scalarVal, uint32 level, cstate->contPtr->meta[0].header |= padlen + numlen; } else { cstate->contPtr->meta[i].header |= - (cstate->contPtr->meta[i - 1].header & JENTRY_POSMASK) - + padlen + numlen; + (cstate->contPtr->meta[i - 1].header & JENTRY_POSMASK) + padlen + numlen; } break; case jbvBool: - cstate->contPtr->meta[i].header |= (scalarVal->boolean) ? - JENTRY_ISTRUE : JENTRY_ISFALSE; + cstate->contPtr->meta[i].header |= (scalarVal->boolean) ? JENTRY_ISTRUE : JENTRY_ISFALSE; if (i > 0) { - cstate->contPtr->meta[i].header |= - cstate->contPtr->meta[i - 1].header & JENTRY_POSMASK; + cstate->contPtr->meta[i].header |= cstate->contPtr->meta[i - 1].header & JENTRY_POSMASK; } break; default: @@ -1436,8 +1369,7 @@ putScalarConversion(convertState * cstate, JsonbValue * scalarVal, uint32 level, * Given superheader pointer into buffer, initialize iterator. Must be a * container type. */ -static void -iteratorFromContainerBuf(JsonbIterator * it, JsonbSuperHeader sheader) +static void iteratorFromContainerBuf(JsonbIterator *it, JsonbSuperHeader sheader) { uint32 superheader = *(uint32 *) sheader; @@ -1462,8 +1394,7 @@ iteratorFromContainerBuf(JsonbIterator * it, JsonbSuperHeader sheader) * Offset reflects that nElems indicates JsonbPairs in an object. * Each key and each value contain Jentry metadata just the same. */ - it->dataProper = - (char *) it->meta + it->nElems * sizeof(JEntry) * 2; + it->dataProper = (char *) it->meta + it->nElems * sizeof(JEntry) * 2; break; default: elog(ERROR, "unknown type of jsonb container"); @@ -1488,40 +1419,33 @@ iteratorFromContainerBuf(JsonbIterator * it, JsonbSuperHeader sheader) * to do this. The point is that our JsonbValues scalars can be passed around * anywhere). 
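As the accumulation in putScalarConversion above suggests, the low JENTRY_POSMASK bits of each JEntry record the cumulative end offset of the data region up to and including that entry, with the first entry holding only its own length and carrying JENTRY_ISFIRST. For instance, three consecutive string values of lengths 5, 7 and 3 are recorded as end positions 5, 12 and 15, so the second value's start offset and length fall out as 5 and 12 - 5 = 7; nulls and booleans contribute a length of 0, and numerics contribute their alignment padding plus their varlena size. This is what the JBE_OFF and JBE_LEN lookups in formIterIsContainer below effectively undo when the iterator walks a container.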
*/ -static bool -formIterIsContainer(JsonbIterator ** it, JsonbValue * val, JEntry * ent, - bool skipNested) +static bool formIterIsContainer(JsonbIterator **it, JsonbValue *val, JEntry *ent, bool skipNested) { if (JBE_ISNULL(*ent)) { val->type = jbvNull; val->estSize = sizeof(JEntry); - return false; } else if (JBE_ISSTRING(*ent)) { val->type = jbvString; val->string.val = (*it)->dataProper + JBE_OFF(*ent); val->string.len = JBE_LEN(*ent); val->estSize = sizeof(JEntry) + val->string.len; - return false; } else if (JBE_ISNUMERIC(*ent)) { val->type = jbvNumeric; val->numeric = (Numeric) ((*it)->dataProper + INTALIGN(JBE_OFF(*ent))); val->estSize = 2 * sizeof(JEntry) + VARSIZE_ANY(val->numeric); - return false; } else if (JBE_ISBOOL(*ent)) { val->type = jbvBool; val->boolean = JBE_ISBOOL_TRUE(*ent) != 0; val->estSize = sizeof(JEntry); - return false; } else if (skipNested) { val->type = jbvBinary; val->binary.data = (*it)->dataProper + INTALIGN(JBE_OFF(*ent)); val->binary.len = JBE_LEN(*ent) - (INTALIGN(JBE_OFF(*ent)) - JBE_OFF(*ent)); val->estSize = val->binary.len + 2 * sizeof(JEntry); - return false; } else { /* @@ -1531,13 +1455,9 @@ formIterIsContainer(JsonbIterator ** it, JsonbValue * val, JEntry * ent, * Get child iterator. */ JsonbIterator *child = (JsonbIterator*)palloc(sizeof(JsonbIterator)); - - iteratorFromContainerBuf(child, - (*it)->dataProper + INTALIGN(JBE_OFF(*ent))); - + iteratorFromContainerBuf(child, (*it)->dataProper + INTALIGN(JBE_OFF(*ent))); child->parent = *it; *it = child; - return true; } } @@ -1546,8 +1466,7 @@ formIterIsContainer(JsonbIterator ** it, JsonbValue * val, JEntry * ent, * JsonbIteratorNext() worker: Return parent, while freeing memory for current * iterator */ -static JsonbIterator * -freeAndGetParent(JsonbIterator * it) +static JsonbIterator *freeAndGetParent(JsonbIterator *it) { JsonbIterator *v = it->parent; @@ -1558,8 +1477,7 @@ freeAndGetParent(JsonbIterator * it) /* * pushJsonbValue() worker: Iteration-like forming of Jsonb */ -static JsonbParseState * -pushState(JsonbParseState ** pstate) +static JsonbParseState *pushState(JsonbParseState **pstate) { JsonbParseState *ns = (JsonbParseState*)palloc(sizeof(JsonbParseState)); @@ -1570,8 +1488,7 @@ pushState(JsonbParseState ** pstate) /* * pushJsonbValue() worker: Append a pair key to state when generating a Jsonb */ -static void -appendKey(JsonbParseState * pstate, JsonbValue * string) +static void appendKey(JsonbParseState *pstate, JsonbValue *string) { JsonbValue *object = &pstate->contVal; @@ -1587,8 +1504,7 @@ appendKey(JsonbParseState * pstate, JsonbValue * string) if ((uint)object->object.nPairs >= pstate->size) { pstate->size *= 2; - object->object.pairs = (JsonbPair*)repalloc(object->object.pairs, - sizeof(JsonbPair) * pstate->size); + object->object.pairs = (JsonbPair*)repalloc(object->object.pairs, sizeof(JsonbPair) * pstate->size); } object->object.pairs[object->object.nPairs].key = *string; @@ -1601,8 +1517,7 @@ appendKey(JsonbParseState * pstate, JsonbValue * string) * pushJsonbValue() worker: Append a pair value to state when generating a * Jsonb */ -static void -appendValue(JsonbParseState * pstate, JsonbValue * scalarVal) +static void appendValue(JsonbParseState *pstate, JsonbValue *scalarVal) { JsonbValue *object = &pstate->contVal; @@ -1615,8 +1530,7 @@ appendValue(JsonbParseState * pstate, JsonbValue * scalarVal) /* * pushJsonbValue() worker: Append an element to state when generating a Jsonb */ -static void -appendElement(JsonbParseState * pstate, JsonbValue * scalarVal) +static 
void appendElement(JsonbParseState *pstate, JsonbValue *scalarVal) { JsonbValue *array = &pstate->contVal; @@ -1631,8 +1545,7 @@ appendElement(JsonbParseState * pstate, JsonbValue * scalarVal) if ((uint)array->array.nElems >= pstate->size) { pstate->size *= 2; - array->array.elems = (JsonbValue*)repalloc(array->array.elems, - sizeof(JsonbValue) * pstate->size); + array->array.elems = (JsonbValue*)repalloc(array->array.elems, sizeof(JsonbValue) * pstate->size); } array->array.elems[array->array.nElems++] = *scalarVal; @@ -1655,8 +1568,7 @@ appendElement(JsonbParseState * pstate, JsonbValue * scalarVal) * to true iff a and b have full binary equality, since some callers have an * interest in whether the two values are equal or merely equivalent. */ -static int -lengthCompareJsonbStringValue(const void *a, const void *b, void *binequal) +static int lengthCompareJsonbStringValue(const void *a, const void *b, void *binequal) { const JsonbValue *va = (const JsonbValue *) a; const JsonbValue *vb = (const JsonbValue *) b; @@ -1688,12 +1600,11 @@ lengthCompareJsonbStringValue(const void *a, const void *b, void *binequal) * * Pairs with equals keys are ordered such that the order field is respected. */ -static int -lengthCompareJsonbPair(const void *a, const void *b, void *binequal) +static int lengthCompareJsonbPair(const void *a, const void *b, void *binequal) { const JsonbPair *pa = (const JsonbPair *) a; const JsonbPair *pb = (const JsonbPair *) b; - int res; + int res; res = lengthCompareJsonbStringValue(&pa->key, &pb->key, binequal); @@ -1711,16 +1622,14 @@ lengthCompareJsonbPair(const void *a, const void *b, void *binequal) /* * Sort and unique-ify pairs in JsonbValue object */ -static void -uniqueifyJsonbObject(JsonbValue * object) +static void uniqueifyJsonbObject(JsonbValue * object) { - bool hasNonUniq = false; + bool hasNonUniq = false; Assert(object->type == jbvObject); if (object->object.nPairs > 1) { - qsort_arg(object->object.pairs, object->object.nPairs, sizeof(JsonbPair), - lengthCompareJsonbPair, &hasNonUniq); + qsort_arg(object->object.pairs, object->object.nPairs, sizeof(JsonbPair), lengthCompareJsonbPair, &hasNonUniq); } if (hasNonUniq) { @@ -1750,8 +1659,7 @@ uniqueifyJsonbObject(JsonbValue * object) * * Sorting uses internal ordering. */ -static void -uniqueifyJsonbArray(JsonbValue * array) +static void uniqueifyJsonbArray(JsonbValue *array) { bool hasNonUniq = false; diff --git a/src/common/backend/utils/adt/jsonfuncs.cpp b/src/common/backend/utils/adt/jsonfuncs.cpp index bb1c83f9f..925d0c4bd 100644 --- a/src/common/backend/utils/adt/jsonfuncs.cpp +++ b/src/common/backend/utils/adt/jsonfuncs.cpp @@ -1,16 +1,16 @@ -/*------------------------------------------------------------------------- +/* ------------------------------------------------------------------------- * * jsonfuncs.c * Functions to process JSON data types. * - * Portions Copyright (c) 2020 Huawei Technologies Co.,Ltd. + * Portions Copyright (c) 2021 Huawei Technologies Co.,Ltd. 
* Portions Copyright (c) 1996-2014, PostgreSQL Global Development Group * Portions Copyright (c) 1994, Regents of the University of California * * IDENTIFICATION * src/backend/utils/adt/jsonfuncs.c * - *------------------------------------------------------------------------- + * ------------------------------------------------------------------------- */ #include "postgres.h" @@ -51,8 +51,7 @@ static void get_scalar(void *state, char *token, JsonTokenType tokentype); /* common worker function for json getter functions */ static inline Datum get_path_all(FunctionCallInfo fcinfo, bool as_text); static inline text *get_worker(text *json, char *field, int elem_index, - char **tpath, int *ipath, int npath, - bool normalize_results); + char **tpath, int *ipath, int npath, bool normalize_results); static inline Datum get_jsonb_path_all(FunctionCallInfo fcinfo, bool as_text); /* semantic action functions for json_array_length */ @@ -84,8 +83,7 @@ static void elements_scalar(void *state, char *token, JsonTokenType tokentype); static HTAB *get_json_object_as_hash(text *json, char *funcname, bool use_json_as_text); /* common worker for populate_record and to_record */ -static inline Datum populate_record_worker(FunctionCallInfo fcinfo, - bool have_record_arg); +static inline Datum populate_record_worker(FunctionCallInfo fcinfo, bool have_record_arg); /* semantic action functions for get_json_object_as_hash */ static void hash_object_field_start(void *state, char *fname, bool isnull); @@ -103,25 +101,19 @@ static void populate_recordset_array_start(void *state); static void populate_recordset_array_element_start(void *state, bool isnull); /* worker function for populate_recordset and to_recordset */ -static inline Datum populate_recordset_worker(FunctionCallInfo fcinfo, - bool have_record_arg); +static inline Datum populate_recordset_worker(FunctionCallInfo fcinfo, bool have_record_arg); /* Worker that takes care of common setup for us */ -static JsonbValue *findJsonbValueFromSuperHeaderLen(JsonbSuperHeader sheader, - uint32 flags, - char *key, - uint32 keylen); +static JsonbValue *findJsonbValueFromSuperHeaderLen(JsonbSuperHeader sheader, uint32 flags, char *key, uint32 keylen); /* search type classification for json_get* functions */ -typedef enum -{ +typedef enum { JSON_SEARCH_OBJECT = 1, JSON_SEARCH_ARRAY, JSON_SEARCH_PATH } JsonSearch; /* state for json_object_keys */ -typedef struct OkeysState -{ +typedef struct OkeysState { JsonLexContext *lex; char **result; int result_size; @@ -130,8 +122,7 @@ typedef struct OkeysState } OkeysState; /* state for json_get* functions */ -typedef struct GetState -{ +typedef struct GetState { JsonLexContext *lex; JsonSearch search_type; int search_index; @@ -151,52 +142,47 @@ typedef struct GetState } GetState; /* state for json_array_length */ -typedef struct AlenState -{ +typedef struct AlenState { JsonLexContext *lex; - int count; + int count; } AlenState; /* state for json_each */ -typedef struct EachState -{ - JsonLexContext *lex; +typedef struct EachState { + JsonLexContext *lex; Tuplestorestate *tuple_store; - TupleDesc ret_tdesc; - MemoryContext tmp_cxt; - char *result_start; - bool normalize_results; - bool next_scalar; - char *normalized_scalar; + TupleDesc ret_tdesc; + MemoryContext tmp_cxt; + char *result_start; + bool normalize_results; + bool next_scalar; + char *normalized_scalar; } EachState; /* state for json_array_elements */ -typedef struct ElementsState -{ - JsonLexContext *lex; +typedef struct ElementsState { + JsonLexContext *lex; 
Tuplestorestate *tuple_store; - TupleDesc ret_tdesc; - MemoryContext tmp_cxt; - char *result_start; - bool normalize_results; - bool next_scalar; - char *normalized_scalar; + TupleDesc ret_tdesc; + MemoryContext tmp_cxt; + char *result_start; + bool normalize_results; + bool next_scalar; + char *normalized_scalar; } ElementsState; /* state for get_json_object_as_hash */ -typedef struct JhashState -{ +typedef struct JhashState { JsonLexContext *lex; - HTAB *hash; - char *saved_scalar; - char *save_json_start; - bool use_json_as_text; - char *function_name; + HTAB *hash; + char *saved_scalar; + char *save_json_start; + bool use_json_as_text; + char *function_name; } JHashState; /* used to build the hashtable */ -typedef struct JsonHashEntry -{ +typedef struct JsonHashEntry { char fname[NAMEDATALEN]; char *val; char *json; @@ -204,40 +190,36 @@ typedef struct JsonHashEntry } JsonHashEntry; /* these two are stolen from hstore / record_out, used in populate_record* */ -typedef struct ColumnIOData -{ +typedef struct ColumnIOData { Oid column_type; Oid typiofunc; Oid typioparam; FmgrInfo proc; } ColumnIOData; -typedef struct RecordIOData -{ - Oid record_type; - int32 record_typmod; - int ncolumns; +typedef struct RecordIOData { + Oid record_type; + int32 record_typmod; + int ncolumns; ColumnIOData columns[1]; /* VARIABLE LENGTH ARRAY */ } RecordIOData; /* state for populate_recordset */ -typedef struct PopulateRecordsetState -{ - JsonLexContext *lex; - HTAB *json_hash; - char *saved_scalar; - char *save_json_start; - bool use_json_as_text; +typedef struct PopulateRecordsetState { + JsonLexContext *lex; + HTAB *json_hash; + char *saved_scalar; + char *save_json_start; + bool use_json_as_text; Tuplestorestate *tuple_store; - TupleDesc ret_tdesc; - HeapTupleHeader rec; - RecordIOData *my_extra; - MemoryContext fn_mcxt; /* used to stash IO funcs */ + TupleDesc ret_tdesc; + HeapTupleHeader rec; + RecordIOData *my_extra; + MemoryContext fn_mcxt; /* used to stash IO funcs */ } PopulateRecordsetState; /* Turn a jsonb object into a record */ -static void make_row_from_rec_and_jsonb(Jsonb * element, - PopulateRecordsetState *state); +static void make_row_from_rec_and_jsonb(Jsonb *element, PopulateRecordsetState *state); /* * SQL function json_object_keys @@ -251,17 +233,16 @@ static void make_row_from_rec_and_jsonb(Jsonb * element, * limited in size to NAMEDATALEN and the number of keys is unlikely to * be so huge that it has major memory implications. 
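jsonb_object_keys and json_object_keys below both materialize the full key list in per-query state on the first call, then hand keys back one per call through the standard set-returning-function protocol. Stripped of the JSON-specific parts, that protocol is the usual funcapi skeleton (a generic sketch, not a copy of the functions that follow):

    FuncCallContext *funcctx = NULL;

    if (SRF_IS_FIRSTCALL()) {
        funcctx = SRF_FIRSTCALL_INIT();
        MemoryContext oldcontext = MemoryContextSwitchTo(funcctx->multi_call_memory_ctx);
        /* build the full result set and stash it in funcctx->user_fctx */
        MemoryContextSwitchTo(oldcontext);
    }

    funcctx = SRF_PERCALL_SETUP();
    /* while results remain in user_fctx: SRF_RETURN_NEXT(funcctx, next_datum); */
    SRF_RETURN_DONE(funcctx);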
*/ -Datum -jsonb_object_keys(PG_FUNCTION_ARGS) +Datum jsonb_object_keys(PG_FUNCTION_ARGS) { FuncCallContext *funcctx = NULL; - OkeysState *state = NULL; + OkeysState *state = NULL; int i; if (SRF_IS_FIRSTCALL()) { MemoryContext oldcontext; - Jsonb *jb = PG_GETARG_JSONB(0); - bool skipNested = false; + Jsonb *jb = PG_GETARG_JSONB(0); + bool skipNested = false; JsonbIterator *it = NULL; JsonbValue v; int r; @@ -292,8 +273,7 @@ jsonb_object_keys(PG_FUNCTION_ARGS) skipNested = true; if (r == WJB_KEY) { - char *cstr = NULL; - + char *cstr = NULL; cstr = (char *)palloc(v.string.len + 1 * sizeof(char)); errno_t rc = memcpy_s(cstr, v.string.len + 1 * sizeof(char), v.string.val, v.string.len); securec_check(rc, "\0", "\0"); @@ -302,18 +282,14 @@ jsonb_object_keys(PG_FUNCTION_ARGS) } } - MemoryContextSwitchTo(oldcontext); funcctx->user_fctx = (void *) state; - } funcctx = SRF_PERCALL_SETUP(); state = (OkeysState *) funcctx->user_fctx; - if (state->sent_count < state->result_count) { char *nxt = state->result[state->sent_count++]; - SRF_RETURN_NEXT(funcctx, CStringGetTextDatum(nxt)); } @@ -327,18 +303,16 @@ jsonb_object_keys(PG_FUNCTION_ARGS) SRF_RETURN_DONE(funcctx); } - -Datum -json_object_keys(PG_FUNCTION_ARGS) +Datum json_object_keys(PG_FUNCTION_ARGS) { FuncCallContext *funcctx = NULL; OkeysState *state = NULL; int i; if (SRF_IS_FIRSTCALL()) { - text *json = PG_GETARG_TEXT_P(0); + text *json = PG_GETARG_TEXT_P(0); JsonLexContext *lex = makeJsonLexContext(json, true); - JsonSemAction *sem = NULL; + JsonSemAction *sem = NULL; MemoryContext oldcontext; @@ -359,10 +333,8 @@ json_object_keys(PG_FUNCTION_ARGS) sem->scalar = okeys_scalar; sem->object_field_start = okeys_object_field_start; /* remainder are all NULL, courtesy of palloc0 above */ - pg_parse_json(lex, sem); /* keys are now in state->result */ - pfree(lex->strval->data); pfree(lex->strval); pfree(lex); @@ -370,15 +342,12 @@ json_object_keys(PG_FUNCTION_ARGS) MemoryContextSwitchTo(oldcontext); funcctx->user_fctx = (void *) state; - } funcctx = SRF_PERCALL_SETUP(); state = (OkeysState *) funcctx->user_fctx; - if (state->sent_count < state->result_count) { char *nxt = state->result[state->sent_count++]; - SRF_RETURN_NEXT(funcctx, CStringGetTextDatum(nxt)); } @@ -392,8 +361,7 @@ json_object_keys(PG_FUNCTION_ARGS) SRF_RETURN_DONE(funcctx); } -static void -okeys_object_field_start(void *state, char *fname, bool isnull) +static void okeys_object_field_start(void *state, char *fname, bool isnull) { OkeysState *_state = (OkeysState *) state; @@ -405,16 +373,14 @@ okeys_object_field_start(void *state, char *fname, bool isnull) /* enlarge result array if necessary */ if (_state->result_count >= _state->result_size) { _state->result_size *= 2; - _state->result = - (char **)repalloc(_state->result, sizeof(char *) * _state->result_size); + _state->result = (char **)repalloc(_state->result, sizeof(char *) * _state->result_size); } /* save a copy of the field name */ _state->result[_state->result_count++] = pstrdup(fname); } -static void -okeys_array_start(void *state) +static void okeys_array_start(void *state) { OkeysState *_state = (OkeysState *) state; @@ -426,8 +392,7 @@ okeys_array_start(void *state) } } -static void -okeys_scalar(void *state, char *token, JsonTokenType tokentype) +static void okeys_scalar(void *state, char *token, JsonTokenType tokentype) { OkeysState *_state = (OkeysState *) state; @@ -444,10 +409,7 @@ okeys_scalar(void *state, char *token, JsonTokenType tokentype) * these implement the -> ->> #> and #>> operators * and the 
json{b?}_extract_path*(json, text, ...) functions */ - - -Datum -json_object_field(PG_FUNCTION_ARGS) +Datum json_object_field(PG_FUNCTION_ARGS) { text *json = PG_GETARG_TEXT_P(0); text *result = NULL; @@ -463,8 +425,7 @@ json_object_field(PG_FUNCTION_ARGS) } } -Datum -jsonb_object_field(PG_FUNCTION_ARGS) +Datum jsonb_object_field(PG_FUNCTION_ARGS) { Jsonb *jb = PG_GETARG_JSONB(0); char *key = text_to_cstring(PG_GETARG_TEXT_P(1)); @@ -485,9 +446,7 @@ jsonb_object_field(PG_FUNCTION_ARGS) } Assert(JB_ROOT_IS_OBJECT(jb)); - it = JsonbIteratorInit(VARDATA_ANY(jb)); - while ((r = JsonbIteratorNext(&it, &v, skipNested)) != WJB_DONE) { skipNested = true; @@ -502,12 +461,10 @@ jsonb_object_field(PG_FUNCTION_ARGS) } } } - PG_RETURN_NULL(); } -Datum -json_object_field_text(PG_FUNCTION_ARGS) +Datum json_object_field_text(PG_FUNCTION_ARGS) { text *json = PG_GETARG_TEXT_P(0); text *result = NULL; @@ -516,15 +473,14 @@ json_object_field_text(PG_FUNCTION_ARGS) result = get_worker(json, fnamestr, -1, NULL, NULL, -1, true); - if (result != NULL) { + if (result != NULL) { PG_RETURN_TEXT_P(result); } else { PG_RETURN_NULL(); } } -Datum -jsonb_object_field_text(PG_FUNCTION_ARGS) +Datum jsonb_object_field_text(PG_FUNCTION_ARGS) { Jsonb *jb = PG_GETARG_JSONB(0); char *key = text_to_cstring(PG_GETARG_TEXT_P(1)); @@ -545,9 +501,7 @@ jsonb_object_field_text(PG_FUNCTION_ARGS) } Assert(JB_ROOT_IS_OBJECT(jb)); - it = JsonbIteratorInit(VARDATA_ANY(jb)); - while ((r = JsonbIteratorNext(&it, &v, skipNested)) != WJB_DONE) { skipNested = true; @@ -572,7 +526,6 @@ jsonb_object_field_text(PG_FUNCTION_ARGS) } else { StringInfo jtext = makeStringInfo(); Jsonb *tjb = JsonbValueToJsonb(&v); - (void) JsonbToCString(jtext, VARDATA(tjb), -1); result = cstring_to_text_with_len(jtext->data, jtext->len); } @@ -584,12 +537,11 @@ jsonb_object_field_text(PG_FUNCTION_ARGS) PG_RETURN_NULL(); } -Datum -json_array_element(PG_FUNCTION_ARGS) +Datum json_array_element(PG_FUNCTION_ARGS) { - text *json = PG_GETARG_TEXT_P(0); - text *result = NULL; - int element = PG_GETARG_INT32(1); + text *json = PG_GETARG_TEXT_P(0); + text *result = NULL; + int element = PG_GETARG_INT32(1); result = get_worker(json, NULL, element, NULL, NULL, -1, false); @@ -600,8 +552,7 @@ json_array_element(PG_FUNCTION_ARGS) } } -Datum -jsonb_array_element(PG_FUNCTION_ARGS) +Datum jsonb_array_element(PG_FUNCTION_ARGS) { Jsonb *jb = PG_GETARG_JSONB(0); int element = PG_GETARG_INT32(1); @@ -638,8 +589,7 @@ jsonb_array_element(PG_FUNCTION_ARGS) PG_RETURN_NULL(); } -Datum -json_array_element_text(PG_FUNCTION_ARGS) +Datum json_array_element_text(PG_FUNCTION_ARGS) { text *json = PG_GETARG_TEXT_P(0); text *result = NULL; @@ -654,8 +604,7 @@ json_array_element_text(PG_FUNCTION_ARGS) } } -Datum -jsonb_array_element_text(PG_FUNCTION_ARGS) +Datum jsonb_array_element_text(PG_FUNCTION_ARGS) { Jsonb *jb = PG_GETARG_JSONB(0); int element = PG_GETARG_INT32(1); @@ -665,7 +614,6 @@ jsonb_array_element_text(PG_FUNCTION_ARGS) bool skipNested = false; int element_number = 0; - if (JB_ROOT_IS_SCALAR(jb)) { ereport(ERROR, (errcode(ERRCODE_INVALID_PARAMETER_VALUE), @@ -677,12 +625,9 @@ jsonb_array_element_text(PG_FUNCTION_ARGS) } Assert(JB_ROOT_IS_ARRAY(jb)); - it = JsonbIteratorInit(VARDATA_ANY(jb)); - while ((r = JsonbIteratorNext(&it, &v, skipNested)) != WJB_DONE) { skipNested = true; - if (r == WJB_ELEM) { if (element_number++ == element) { /* @@ -690,7 +635,6 @@ jsonb_array_element_text(PG_FUNCTION_ARGS) * otherwise just return the text */ text *result = NULL; - if (v.type == jbvString) { 
result = cstring_to_text_with_len(v.string.val, v.string.len); } else if (v.type == jbvNull) { @@ -698,7 +642,6 @@ jsonb_array_element_text(PG_FUNCTION_ARGS) } else { StringInfo jtext = makeStringInfo(); Jsonb *tjb = JsonbValueToJsonb(&v); - (void) JsonbToCString(jtext, VARDATA(tjb), -1); result = cstring_to_text_with_len(jtext->data, jtext->len); } @@ -710,14 +653,12 @@ jsonb_array_element_text(PG_FUNCTION_ARGS) PG_RETURN_NULL(); } -Datum -json_extract_path(PG_FUNCTION_ARGS) +Datum json_extract_path(PG_FUNCTION_ARGS) { return get_path_all(fcinfo, false); } -Datum -json_extract_path_text(PG_FUNCTION_ARGS) +Datum json_extract_path_text(PG_FUNCTION_ARGS) { return get_path_all(fcinfo, true); } @@ -725,8 +666,7 @@ json_extract_path_text(PG_FUNCTION_ARGS) /* * common routine for extract_path functions */ -static inline Datum -get_path_all(FunctionCallInfo fcinfo, bool as_text) +static inline Datum get_path_all(FunctionCallInfo fcinfo, bool as_text) { text *json = NULL; ArrayType *path = PG_GETARG_ARRAYTYPE_P(1); @@ -748,7 +688,6 @@ get_path_all(FunctionCallInfo fcinfo, bool as_text) errmsg("cannot call function with null path elements"))); } - deconstruct_array(path, TEXTOID, -1, false, 'i', &pathtext, &pathnulls, &npath); /* * If the array is empty, return NULL; this is dubious but it's what 9.3 @@ -760,14 +699,12 @@ get_path_all(FunctionCallInfo fcinfo, bool as_text) tpath = (char **)palloc(npath * sizeof(char *)); ipath = (int *)palloc(npath * sizeof(int)); - for (i = 0; i < npath; i++) { tpath[i] = TextDatumGetCString(pathtext[i]); if (*tpath[i] == '\0') { - ereport( - ERROR, + ereport(ERROR, (errcode(ERRCODE_INVALID_PARAMETER_VALUE), - errmsg("cannot call function with empty path elements"))); + errmsg("cannot call function with empty path elements"))); } /* @@ -783,7 +720,6 @@ get_path_all(FunctionCallInfo fcinfo, bool as_text) } } - result = get_worker(json, NULL, -1, tpath, ipath, npath, as_text); if (result != NULL) { @@ -799,18 +735,12 @@ get_path_all(FunctionCallInfo fcinfo, bool as_text) * * common worker for all the json getter functions */ -static inline text * -get_worker(text *json, - char *field, - int elem_index, - char **tpath, - int *ipath, - int npath, - bool normalize_results) +static inline text *get_worker(text *json, char *field, int elem_index, char **tpath, + int *ipath, int npath, bool normalize_results) { - GetState *state = NULL; + GetState *state = NULL; JsonLexContext *lex = makeJsonLexContext(json, true); - JsonSemAction *sem = NULL; + JsonSemAction *sem = NULL; /* only allowed to use one of these */ Assert(elem_index < 0 || (tpath == NULL && ipath == NULL && field == NULL)); @@ -836,14 +766,12 @@ get_worker(text *json, state->pathok[0] = true; state->array_level_index = (int *)palloc(sizeof(int) * npath); state->path_level_index = ipath; - } else { /* single integer argument */ state->search_type = JSON_SEARCH_ARRAY; state->search_index = elem_index; state->array_index = -1; } - sem->semstate = (void *) state; /* @@ -861,16 +789,13 @@ get_worker(text *json, sem->array_element_start = get_array_element_start; sem->array_element_end = get_array_element_end; } - pg_parse_json(lex, sem); - return state->tresult; } -static void -get_object_start(void *state) +static void get_object_start(void *state) { - GetState *_state = (GetState *) state; + GetState *_state = (GetState *) state; /* json structure check */ if (_state->lex->lex_level == 0 && _state->search_type == JSON_SEARCH_ARRAY) { @@ -880,8 +805,7 @@ get_object_start(void *state) } } -static void 
-get_object_field_start(void *state, char *fname, bool isnull) +static void get_object_field_start(void *state, char *fname, bool isnull) { GetState *_state = (GetState *) state; bool get_next = false; @@ -889,18 +813,15 @@ get_object_field_start(void *state, char *fname, bool isnull) if (lex_level == 1 && _state->search_type == JSON_SEARCH_OBJECT && strcmp(fname, _state->search_term) == 0) { - _state->tresult = NULL; _state->result_start = NULL; get_next = true; } else if (_state->search_type == JSON_SEARCH_PATH && - lex_level <= _state->npath && - _state->pathok[_state->lex->lex_level - 1] && - strcmp(fname, _state->path[lex_level - 1]) == 0) { + lex_level <= _state->npath && + _state->pathok[_state->lex->lex_level - 1] && + strcmp(fname, _state->path[lex_level - 1]) == 0) { /* path search, path so far is ok, and we have a match */ - /* this object overrides any previous matching object */ - _state->tresult = NULL; _state->result_start = NULL; @@ -916,8 +837,7 @@ get_object_field_start(void *state, char *fname, bool isnull) } if (get_next) { - if (_state->normalize_results && - _state->lex->token_type == JSON_TOKEN_STRING) { + if (_state->normalize_results && _state->lex->token_type == JSON_TOKEN_STRING) { /* for as_text variants, tell get_scalar to set it for us */ _state->next_scalar = true; } else { @@ -927,22 +847,19 @@ get_object_field_start(void *state, char *fname, bool isnull) } } -static void -get_object_field_end(void *state, char *fname, bool isnull) +static void get_object_field_end(void *state, char *fname, bool isnull) { GetState *_state = (GetState *) state; bool get_last = false; int lex_level = _state->lex->lex_level; - /* same tests as in get_object_field_start, mutatis mutandis */ - if (lex_level == 1 && _state->search_type == JSON_SEARCH_OBJECT && - strcmp(fname, _state->search_term) == 0) { + if (lex_level == 1 && _state->search_type == JSON_SEARCH_OBJECT && strcmp(fname, _state->search_term) == 0) { get_last = true; } else if (_state->search_type == JSON_SEARCH_PATH && - lex_level <= _state->npath && - _state->pathok[lex_level - 1] && - strcmp(fname, _state->path[lex_level - 1]) == 0) { + lex_level <= _state->npath && + _state->pathok[lex_level - 1] && + strcmp(fname, _state->path[lex_level - 1]) == 0) { /* done with this field so reset pathok */ if (lex_level < _state->npath) { _state->pathok[lex_level] = false; @@ -960,7 +877,7 @@ get_object_field_end(void *state, char *fname, bool isnull) * start up to the end of the previous token (the lexer is by now * ahead of us on whatever came after what we're interested in). */ - int len = _state->lex->prev_token_terminator - _state->result_start; + int len = _state->lex->prev_token_terminator - _state->result_start; if (isnull && _state->normalize_results) { _state->tresult = (text *) NULL; @@ -976,8 +893,7 @@ get_object_field_end(void *state, char *fname, bool isnull) */ } -static void -get_array_start(void *state) +static void get_array_start(void *state) { GetState *_state = (GetState *) state; int lex_level = _state->lex->lex_level; @@ -993,14 +909,12 @@ get_array_start(void *state) * initialize array count for this nesting level Note: the lex_level seen * by array_start is one less than that seen by the elements of the array. 
*/ - if (_state->search_type == JSON_SEARCH_PATH && - lex_level < _state->npath) { + if (_state->search_type == JSON_SEARCH_PATH && lex_level < _state->npath) { _state->array_level_index[lex_level] = -1; } } -static void -get_array_element_start(void *state, bool isnull) +static void get_array_element_start(void *state, bool isnull) { GetState *_state = (GetState *) state; bool get_next = false; @@ -1013,8 +927,8 @@ get_array_element_start(void *state, bool isnull) get_next = true; } } else if (_state->search_type == JSON_SEARCH_PATH && - lex_level <= _state->npath && - _state->pathok[lex_level - 1]) { + lex_level <= _state->npath && + _state->pathok[lex_level - 1]) { /* * path search, path so far is ok * @@ -1023,9 +937,7 @@ get_array_element_start(void *state, bool isnull) * * then check if we have a match. */ - - if (++_state->array_level_index[lex_level - 1] == - _state->path_level_index[lex_level - 1]) { + if (++_state->array_level_index[lex_level - 1] == _state->path_level_index[lex_level - 1]) { if (lex_level == _state->npath) { /* match and at end of path, so get value */ get_next = true; @@ -1048,23 +960,21 @@ get_array_element_start(void *state, bool isnull) } } -static void -get_array_element_end(void *state, bool isnull) +static void get_array_element_end(void *state, bool isnull) { GetState *_state = (GetState *) state; bool get_last = false; int lex_level = _state->lex->lex_level; /* same logic as in get_object_end, modified for arrays */ - if (lex_level == 1 && _state->search_type == JSON_SEARCH_ARRAY && _state->array_index == _state->search_index) { get_last = true; } else if (_state->search_type == JSON_SEARCH_PATH && - lex_level <= _state->npath && - _state->pathok[lex_level - 1] && - _state->array_level_index[lex_level - 1] == - _state->path_level_index[lex_level - 1]) { + lex_level <= _state->npath && + _state->pathok[lex_level - 1] && + _state->array_level_index[lex_level - 1] == + _state->path_level_index[lex_level - 1]) { /* done with this element so reset pathok */ if (lex_level < _state->npath) { _state->pathok[lex_level] = false; @@ -1075,7 +985,7 @@ get_array_element_end(void *state, bool isnull) } } if (get_last && _state->result_start != NULL) { - int len = _state->lex->prev_token_terminator - _state->result_start; + int len = _state->lex->prev_token_terminator - _state->result_start; if (isnull && _state->normalize_results) { _state->tresult = (text *) NULL; @@ -1085,8 +995,7 @@ get_array_element_end(void *state, bool isnull) } } -static void -get_scalar(void *state, char *token, JsonTokenType tokentype) +static void get_scalar(void *state, char *token, JsonTokenType tokentype) { GetState *_state = (GetState *) state; @@ -1104,20 +1013,17 @@ get_scalar(void *state, char *token, JsonTokenType tokentype) } -Datum -jsonb_extract_path(PG_FUNCTION_ARGS) +Datum jsonb_extract_path(PG_FUNCTION_ARGS) { return get_jsonb_path_all(fcinfo, false); } -Datum -jsonb_extract_path_text(PG_FUNCTION_ARGS) +Datum jsonb_extract_path_text(PG_FUNCTION_ARGS) { return get_jsonb_path_all(fcinfo, true); } -static inline Datum -get_jsonb_path_all(FunctionCallInfo fcinfo, bool as_text) +static inline Datum get_jsonb_path_all(FunctionCallInfo fcinfo, bool as_text) { Jsonb *jb = PG_GETARG_JSONB(0); ArrayType *path = PG_GETARG_ARRAYTYPE_P(1); @@ -1156,9 +1062,7 @@ get_jsonb_path_all(FunctionCallInfo fcinfo, bool as_text) for (i = 0; i < npath; i++) { if (have_object) { - jbvp = findJsonbValueFromSuperHeaderLen(superHeader, - JB_FOBJECT, - VARDATA_ANY(pathtext[i]), + jbvp = 
findJsonbValueFromSuperHeaderLen(superHeader, JB_FOBJECT, VARDATA_ANY(pathtext[i]), VARSIZE_ANY_EXHDR(pathtext[i])); } else if (have_array) { long lindex; @@ -1212,9 +1116,7 @@ get_jsonb_path_all(FunctionCallInfo fcinfo, bool as_text) res = JsonbValueToJsonb(jbvp); if (as_text) { - PG_RETURN_TEXT_P(cstring_to_text(JsonbToCString(NULL, - VARDATA(res), - VARSIZE(res)))); + PG_RETURN_TEXT_P(cstring_to_text(JsonbToCString(NULL, VARDATA(res), VARSIZE(res)))); } else { /* not text mode - just hand back the jsonb */ PG_RETURN_JSONB(res); @@ -1224,8 +1126,7 @@ get_jsonb_path_all(FunctionCallInfo fcinfo, bool as_text) /* * SQL function json_array_length(json) -> int */ -Datum -json_array_length(PG_FUNCTION_ARGS) +Datum json_array_length(PG_FUNCTION_ARGS) { text *json = NULL; @@ -1235,13 +1136,9 @@ json_array_length(PG_FUNCTION_ARGS) json = PG_GETARG_TEXT_P(0); lex = makeJsonLexContext(json, false); - state = (AlenState *)palloc0(sizeof(AlenState)); + state = (AlenState *)palloc0(sizeof(AlenState)); sem = (JsonSemAction *)palloc0(sizeof(JsonSemAction)); - /* palloc0 does this for us */ -#if 0 - state->count = 0; -#endif state->lex = lex; sem->semstate = (void *) state; @@ -1254,10 +1151,9 @@ json_array_length(PG_FUNCTION_ARGS) PG_RETURN_INT32(state->count); } -Datum -jsonb_array_length(PG_FUNCTION_ARGS) +Datum jsonb_array_length(PG_FUNCTION_ARGS) { - Jsonb *jb = PG_GETARG_JSONB(0); + Jsonb *jb = PG_GETARG_JSONB(0); if (JB_ROOT_IS_SCALAR(jb)) { ereport(ERROR, @@ -1277,10 +1173,9 @@ jsonb_array_length(PG_FUNCTION_ARGS) * a scalar or an object). */ -static void -alen_object_start(void *state) +static void alen_object_start(void *state) { - AlenState *_state = (AlenState *) state; + AlenState *_state = (AlenState *) state; /* json structure check */ if (_state->lex->lex_level == 0) { @@ -1290,10 +1185,9 @@ alen_object_start(void *state) } } -static void -alen_scalar(void *state, char *token, JsonTokenType tokentype) +static void alen_scalar(void *state, char *token, JsonTokenType tokentype) { - AlenState *_state = (AlenState *) state; + AlenState *_state = (AlenState *) state; /* json structure check */ if (_state->lex->lex_level == 0) { @@ -1303,8 +1197,7 @@ alen_scalar(void *state, char *token, JsonTokenType tokentype) } } -static void -alen_array_element_start(void *state, bool isnull) +static void alen_array_element_start(void *state, bool isnull) { AlenState *_state = (AlenState *) state; @@ -1324,32 +1217,27 @@ alen_array_element_start(void *state, bool isnull) * The construction of tuples is done using a temporary memory context * that is cleared out after each tuple is built. 
*/ -Datum -json_each(PG_FUNCTION_ARGS) +Datum json_each(PG_FUNCTION_ARGS) { return each_worker(fcinfo, false); } -Datum -jsonb_each(PG_FUNCTION_ARGS) +Datum jsonb_each(PG_FUNCTION_ARGS) { return each_worker_jsonb(fcinfo, false); } -Datum -json_each_text(PG_FUNCTION_ARGS) +Datum json_each_text(PG_FUNCTION_ARGS) { return each_worker(fcinfo, true); } -Datum -jsonb_each_text(PG_FUNCTION_ARGS) +Datum jsonb_each_text(PG_FUNCTION_ARGS) { return each_worker_jsonb(fcinfo, true); } -static inline Datum -each_worker_jsonb(FunctionCallInfo fcinfo, bool as_text) +static inline Datum each_worker_jsonb(FunctionCallInfo fcinfo, bool as_text) { Jsonb *jb = PG_GETARG_JSONB(0); ReturnSetInfo *rsi = NULL; @@ -1395,8 +1283,7 @@ each_worker_jsonb(FunctionCallInfo fcinfo, bool as_text) ret_tdesc = CreateTupleDescCopy(tupdesc); BlessTupleDesc(ret_tdesc); tuple_store = - tuplestore_begin_heap(rsi->allowedModes & SFRM_Materialize_Random, - false, u_sess->attr.attr_memory.work_mem); + tuplestore_begin_heap(rsi->allowedModes & SFRM_Materialize_Random, false, u_sess->attr.attr_memory.work_mem); MemoryContextSwitchTo(old_cxt); @@ -1406,12 +1293,9 @@ each_worker_jsonb(FunctionCallInfo fcinfo, bool as_text) ALLOCSET_DEFAULT_INITSIZE, ALLOCSET_DEFAULT_MAXSIZE); - it = JsonbIteratorInit(VARDATA_ANY(jb)); - while ((r = JsonbIteratorNext(&it, &v, skipNested)) != WJB_DONE) { skipNested = true; - if (r == WJB_KEY) { text *key = NULL; HeapTuple tuple; @@ -1420,7 +1304,6 @@ each_worker_jsonb(FunctionCallInfo fcinfo, bool as_text) /* Use the tmp context so we can clean up after each tuple is done */ old_cxt = MemoryContextSwitchTo(tmp_cxt); - key = cstring_to_text_with_len(v.string.val, v.string.len); /* @@ -1428,16 +1311,14 @@ each_worker_jsonb(FunctionCallInfo fcinfo, bool as_text) * matter what shape it is. 
*/ r = JsonbIteratorNext(&it, &v, skipNested); - values[0] = PointerGetDatum(key); - if (as_text) { if (v.type == jbvNull) { /* a json null is an sql null in text mode */ nulls[1] = true; values[1] = (Datum) NULL; } else { - text *sv = NULL; + text *sv = NULL; if (v.type == jbvString) { /* In text mode, scalar strings should be dequoted */ @@ -1446,22 +1327,17 @@ each_worker_jsonb(FunctionCallInfo fcinfo, bool as_text) /* Turn anything else into a json string */ StringInfo jtext = makeStringInfo(); Jsonb *jb = JsonbValueToJsonb(&v); - (void) JsonbToCString(jtext, VARDATA(jb), 2 * v.estSize); sv = cstring_to_text_with_len(jtext->data, jtext->len); } - values[1] = PointerGetDatum(sv); } } else { /* Not in text mode, just return the Jsonb */ Jsonb *val = JsonbValueToJsonb(&v); - values[1] = PointerGetDatum(val); } - tuple = heap_form_tuple(ret_tdesc, values, nulls); - tuplestore_puttuple(tuple_store, tuple); /* clean up and switch back */ @@ -1469,33 +1345,27 @@ each_worker_jsonb(FunctionCallInfo fcinfo, bool as_text) MemoryContextReset(tmp_cxt); } } - MemoryContextDelete(tmp_cxt); - rsi->setResult = tuple_store; rsi->setDesc = ret_tdesc; - PG_RETURN_NULL(); } -static inline Datum -each_worker(FunctionCallInfo fcinfo, bool as_text) +static inline Datum each_worker(FunctionCallInfo fcinfo, bool as_text) { - text *json = NULL; + text *json = NULL; JsonLexContext *lex = NULL; - JsonSemAction *sem = NULL; - ReturnSetInfo *rsi = NULL; - MemoryContext old_cxt; - TupleDesc tupdesc; - EachState *state = NULL; + JsonSemAction *sem = NULL; + ReturnSetInfo *rsi = NULL; + MemoryContext old_cxt; + TupleDesc tupdesc; + EachState *state = NULL; json = PG_GETARG_TEXT_P(0); - lex = makeJsonLexContext(json, true); state = (EachState *)palloc0(sizeof(EachState)); sem = (JsonSemAction *)palloc0(sizeof(JsonSemAction)); - rsi = (ReturnSetInfo *) fcinfo->resultinfo; if (!rsi || !IsA(rsi, ReturnSetInfo) || @@ -1507,20 +1377,15 @@ each_worker(FunctionCallInfo fcinfo, bool as_text) "cannot accept a set"))); } - rsi->returnMode = SFRM_Materialize; - (void) get_call_result_type(fcinfo, NULL, &tupdesc); /* make these in a sufficiently long-lived memory context */ old_cxt = MemoryContextSwitchTo(rsi->econtext->ecxt_per_query_memory); - state->ret_tdesc = CreateTupleDescCopy(tupdesc); BlessTupleDesc(state->ret_tdesc); state->tuple_store = - tuplestore_begin_heap(rsi->allowedModes & SFRM_Materialize_Random, - false, u_sess->attr.attr_memory.work_mem); - + tuplestore_begin_heap(rsi->allowedModes & SFRM_Materialize_Random, false, u_sess->attr.attr_memory.work_mem); MemoryContextSwitchTo(old_cxt); sem->semstate = (void *) state; @@ -1528,30 +1393,22 @@ each_worker(FunctionCallInfo fcinfo, bool as_text) sem->scalar = each_scalar; sem->object_field_start = each_object_field_start; sem->object_field_end = each_object_field_end; - state->normalize_results = as_text; state->next_scalar = false; - state->lex = lex; state->tmp_cxt = AllocSetContextCreate(CurrentMemoryContext, "json_each temporary cxt", ALLOCSET_DEFAULT_MINSIZE, ALLOCSET_DEFAULT_INITSIZE, ALLOCSET_DEFAULT_MAXSIZE); - pg_parse_json(lex, sem); - MemoryContextDelete(state->tmp_cxt); - rsi->setResult = state->tuple_store; rsi->setDesc = state->ret_tdesc; - PG_RETURN_NULL(); } - -static void -each_object_field_start(void *state, char *fname, bool isnull) +static void each_object_field_start(void *state, char *fname, bool isnull) { EachState *_state = (EachState *) state; @@ -1570,8 +1427,7 @@ each_object_field_start(void *state, char *fname, bool isnull) } } -static void 
-each_object_field_end(void *state, char *fname, bool isnull) +static void each_object_field_end(void *state, char *fname, bool isnull) { EachState *_state = (EachState *) state; MemoryContext old_cxt; @@ -1603,9 +1459,7 @@ each_object_field_end(void *state, char *fname, bool isnull) values[1] = PointerGetDatum(val); } - tuple = heap_form_tuple(_state->ret_tdesc, values, nulls); - tuplestore_puttuple(_state->tuple_store, tuple); /* clean up and switch back */ @@ -1613,8 +1467,7 @@ each_object_field_end(void *state, char *fname, bool isnull) MemoryContextReset(_state->tmp_cxt); } -static void -each_array_start(void *state) +static void each_array_start(void *state) { EachState *_state = (EachState *) state; @@ -1626,8 +1479,7 @@ each_array_start(void *state) } } -static void -each_scalar(void *state, char *token, JsonTokenType tokentype) +static void each_scalar(void *state, char *token, JsonTokenType tokentype) { EachState *_state = (EachState *) state; @@ -1651,33 +1503,29 @@ each_scalar(void *state, char *token, JsonTokenType tokentype) * * a lot of this processing is similar to the json_each* functions */ - -Datum -jsonb_array_elements(PG_FUNCTION_ARGS) +Datum jsonb_array_elements(PG_FUNCTION_ARGS) { return elements_worker_jsonb(fcinfo, false); } -Datum -jsonb_array_elements_text(PG_FUNCTION_ARGS) +Datum jsonb_array_elements_text(PG_FUNCTION_ARGS) { return elements_worker_jsonb(fcinfo, true); } -static inline Datum -elements_worker_jsonb(FunctionCallInfo fcinfo, bool as_text) +static inline Datum elements_worker_jsonb(FunctionCallInfo fcinfo, bool as_text) { - Jsonb *jb = PG_GETARG_JSONB(0); - ReturnSetInfo *rsi = NULL; + Jsonb *jb = PG_GETARG_JSONB(0); + ReturnSetInfo *rsi = NULL; Tuplestorestate *tuple_store = NULL; - TupleDesc tupdesc; - TupleDesc ret_tdesc; - MemoryContext old_cxt, - tmp_cxt; - bool skipNested = false; - JsonbIterator *it = NULL; - JsonbValue v; - int r; + TupleDesc tupdesc; + TupleDesc ret_tdesc; + MemoryContext old_cxt, + tmp_cxt; + bool skipNested = false; + JsonbIterator *it = NULL; + JsonbValue v; + int r; if (JB_ROOT_IS_SCALAR(jb)) { ereport(ERROR, @@ -1690,7 +1538,6 @@ elements_worker_jsonb(FunctionCallInfo fcinfo, bool as_text) } rsi = (ReturnSetInfo *) fcinfo->resultinfo; - if (!rsi || !IsA(rsi, ReturnSetInfo) || (rsi->allowedModes & SFRM_Materialize) == 0 || rsi->expectedDesc == NULL) { @@ -1700,20 +1547,14 @@ elements_worker_jsonb(FunctionCallInfo fcinfo, bool as_text) "cannot accept a set"))); } - rsi->returnMode = SFRM_Materialize; - /* it's a simple type, so don't use get_call_result_type() */ tupdesc = rsi->expectedDesc; - old_cxt = MemoryContextSwitchTo(rsi->econtext->ecxt_per_query_memory); - ret_tdesc = CreateTupleDescCopy(tupdesc); BlessTupleDesc(ret_tdesc); tuple_store = - tuplestore_begin_heap(rsi->allowedModes & SFRM_Materialize_Random, - false, u_sess->attr.attr_memory.work_mem); - + tuplestore_begin_heap(rsi->allowedModes & SFRM_Materialize_Random, false, u_sess->attr.attr_memory.work_mem); MemoryContextSwitchTo(old_cxt); tmp_cxt = AllocSetContextCreate(CurrentMemoryContext, @@ -1721,33 +1562,25 @@ elements_worker_jsonb(FunctionCallInfo fcinfo, bool as_text) ALLOCSET_DEFAULT_MINSIZE, ALLOCSET_DEFAULT_INITSIZE, ALLOCSET_DEFAULT_MAXSIZE); - - it = JsonbIteratorInit(VARDATA_ANY(jb)); - while ((r = JsonbIteratorNext(&it, &v, skipNested)) != WJB_DONE) { skipNested = true; - if (r == WJB_ELEM) { HeapTuple tuple; Datum values[1]; bool nulls[1] = {false}; - /* use the tmp context so we can clean up after each tuple is done */ old_cxt = 
MemoryContextSwitchTo(tmp_cxt); - if (!as_text) { Jsonb *val = JsonbValueToJsonb(&v); - values[0] = PointerGetDatum(val); - } else{ + } else { if (v.type == jbvNull) { /* a json null is an sql null in text mode */ nulls[0] = true; values[0] = (Datum) NULL; } else { text *sv = NULL; - if (v.type == jbvString) { /* in text mode scalar strings should be dequoted */ sv = cstring_to_text_with_len(v.string.val, v.string.len); @@ -1755,17 +1588,14 @@ elements_worker_jsonb(FunctionCallInfo fcinfo, bool as_text) /* turn anything else into a json string */ StringInfo jtext = makeStringInfo(); Jsonb *jb = JsonbValueToJsonb(&v); - (void) JsonbToCString(jtext, VARDATA(jb), 2 * v.estSize); sv = cstring_to_text_with_len(jtext->data, jtext->len); } - values[0] = PointerGetDatum(sv); } } tuple = heap_form_tuple(ret_tdesc, values, nulls); - tuplestore_puttuple(tuple_store, tuple); /* clean up and switch back */ @@ -1773,7 +1603,6 @@ elements_worker_jsonb(FunctionCallInfo fcinfo, bool as_text) MemoryContextReset(tmp_cxt); } } - MemoryContextDelete(tmp_cxt); rsi->setResult = tuple_store; @@ -1782,20 +1611,17 @@ elements_worker_jsonb(FunctionCallInfo fcinfo, bool as_text) PG_RETURN_NULL(); } -Datum -json_array_elements(PG_FUNCTION_ARGS) +Datum json_array_elements(PG_FUNCTION_ARGS) { return elements_worker(fcinfo, false); } -Datum -json_array_elements_text(PG_FUNCTION_ARGS) +Datum json_array_elements_text(PG_FUNCTION_ARGS) { return elements_worker(fcinfo, true); } -static inline Datum -elements_worker(FunctionCallInfo fcinfo, bool as_text) +static inline Datum elements_worker(FunctionCallInfo fcinfo, bool as_text) { text *json = PG_GETARG_TEXT_P(0); @@ -1809,7 +1635,6 @@ elements_worker(FunctionCallInfo fcinfo, bool as_text) state = (ElementsState *)palloc0(sizeof(ElementsState)); sem = (JsonSemAction *)palloc0(sizeof(JsonSemAction)); - rsi = (ReturnSetInfo *) fcinfo->resultinfo; if (!rsi || !IsA(rsi, ReturnSetInfo) || @@ -1820,7 +1645,6 @@ elements_worker(FunctionCallInfo fcinfo, bool as_text) errmsg("set-valued function called in context that " "cannot accept a set"))); } - rsi->returnMode = SFRM_Materialize; /* it's a simple type, so don't use get_call_result_type() */ @@ -1828,13 +1652,11 @@ elements_worker(FunctionCallInfo fcinfo, bool as_text) /* make these in a sufficiently long-lived memory context */ old_cxt = MemoryContextSwitchTo(rsi->econtext->ecxt_per_query_memory); - state->ret_tdesc = CreateTupleDescCopy(tupdesc); BlessTupleDesc(state->ret_tdesc); state->tuple_store = tuplestore_begin_heap(rsi->allowedModes & SFRM_Materialize_Random, false, u_sess->attr.attr_memory.work_mem); - MemoryContextSwitchTo(old_cxt); sem->semstate = (void *) state; @@ -1842,29 +1664,23 @@ elements_worker(FunctionCallInfo fcinfo, bool as_text) sem->scalar = elements_scalar; sem->array_element_start = elements_array_element_start; sem->array_element_end = elements_array_element_end; - state->normalize_results = as_text; state->next_scalar = false; - state->lex = lex; state->tmp_cxt = AllocSetContextCreate(CurrentMemoryContext, - "json_array_elements temporary cxt", + "json_array_elements temporary cxt", ALLOCSET_DEFAULT_MINSIZE, ALLOCSET_DEFAULT_INITSIZE, ALLOCSET_DEFAULT_MAXSIZE); - pg_parse_json(lex, sem); - MemoryContextDelete(state->tmp_cxt); rsi->setResult = state->tuple_store; rsi->setDesc = state->ret_tdesc; - PG_RETURN_NULL(); } -static void -elements_array_element_start(void *state, bool isnull) +static void elements_array_element_start(void *state, bool isnull) { ElementsState *_state = (ElementsState *) state; 
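(Illustrative aside, not part of the patch.) The set-returning workers touched above -- each_worker(), each_worker_jsonb(), elements_worker() and elements_worker_jsonb() -- all follow the same materialize convention: require SFRM_Materialize, build a blessed copy of the tuple descriptor in the per-query memory context, append one heap tuple per result row to a tuplestore, and hand the store back through ReturnSetInfo. A minimal sketch of that skeleton follows; the function name json_srf_sketch and its single text column are hypothetical, only calls already visible in the hunks above are used, and standard backend headers (postgres.h, funcapi.h) are assumed.

/* Sketch only -- condenses the materialize pattern used by each_worker()/elements_worker(). */
Datum json_srf_sketch(PG_FUNCTION_ARGS)
{
    ReturnSetInfo *rsi = (ReturnSetInfo *) fcinfo->resultinfo;
    Tuplestorestate *tuple_store = NULL;
    TupleDesc ret_tdesc;
    MemoryContext old_cxt;
    Datum values[1];
    bool nulls[1] = {false};
    HeapTuple tuple;

    if (!rsi || !IsA(rsi, ReturnSetInfo) ||
        (rsi->allowedModes & SFRM_Materialize) == 0 || rsi->expectedDesc == NULL) {
        ereport(ERROR,
                (errcode(ERRCODE_FEATURE_NOT_SUPPORTED),
                 errmsg("set-valued function called in context that cannot accept a set")));
    }
    rsi->returnMode = SFRM_Materialize;

    /* long-lived output objects are built in the per-query context */
    old_cxt = MemoryContextSwitchTo(rsi->econtext->ecxt_per_query_memory);
    ret_tdesc = CreateTupleDescCopy(rsi->expectedDesc);
    BlessTupleDesc(ret_tdesc);
    tuple_store = tuplestore_begin_heap(rsi->allowedModes & SFRM_Materialize_Random, false,
                                        u_sess->attr.attr_memory.work_mem);
    MemoryContextSwitchTo(old_cxt);

    /* a single literal row stands in for the per-key/per-element loop of the real workers */
    values[0] = PointerGetDatum(cstring_to_text("example"));
    tuple = heap_form_tuple(ret_tdesc, values, nulls);
    tuplestore_puttuple(tuple_store, tuple);

    rsi->setResult = tuple_store;
    rsi->setDesc = ret_tdesc;
    PG_RETURN_NULL();
}

In the real workers the literal row is replaced by a loop driven either by pg_parse_json() callbacks (json variants) or by JsonbIteratorNext() results (jsonb variants), with each tuple formed inside a temporary memory context that is reset after tuplestore_puttuple().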
@@ -1883,8 +1699,7 @@ elements_array_element_start(void *state, bool isnull) } } -static void -elements_array_element_end(void *state, bool isnull) +static void elements_array_element_end(void *state, bool isnull) { ElementsState *_state = (ElementsState *) state; MemoryContext old_cxt; @@ -1914,9 +1729,7 @@ elements_array_element_end(void *state, bool isnull) values[0] = PointerGetDatum(val); } - tuple = heap_form_tuple(_state->ret_tdesc, values, nulls); - tuplestore_puttuple(_state->tuple_store, tuple); /* clean up and switch back */ @@ -1924,8 +1737,7 @@ elements_array_element_end(void *state, bool isnull) MemoryContextReset(_state->tmp_cxt); } -static void -elements_object_start(void *state) +static void elements_object_start(void *state) { ElementsState *_state = (ElementsState *) state; @@ -1937,8 +1749,7 @@ elements_object_start(void *state) } } -static void -elements_scalar(void *state, char *token, JsonTokenType tokentype) +static void elements_scalar(void *state, char *token, JsonTokenType tokentype) { ElementsState *_state = (ElementsState *) state; @@ -1967,26 +1778,22 @@ elements_scalar(void *state, char *token, JsonTokenType tokentype) * field in the record is then looked up by name. For jsonb * we fetch the values direct from the object. */ -Datum -jsonb_populate_record(PG_FUNCTION_ARGS) +Datum jsonb_populate_record(PG_FUNCTION_ARGS) { return populate_record_worker(fcinfo, true); } -Datum -json_populate_record(PG_FUNCTION_ARGS) +Datum json_populate_record(PG_FUNCTION_ARGS) { return populate_record_worker(fcinfo, true); } -Datum -json_to_record(PG_FUNCTION_ARGS) +Datum json_to_record(PG_FUNCTION_ARGS) { return populate_record_worker(fcinfo, false); } -static inline Datum -populate_record_worker(FunctionCallInfo fcinfo, bool have_record_arg) +static inline Datum populate_record_worker(FunctionCallInfo fcinfo, bool have_record_arg) { Oid argtype; Oid jtype = get_fn_expr_argtype(fcinfo->flinfo, have_record_arg ? 1 : 0); @@ -2009,8 +1816,7 @@ populate_record_worker(FunctionCallInfo fcinfo, bool have_record_arg) Assert(jtype == JSONOID || jtype == JSONBOID); - use_json_as_text = PG_ARGISNULL(have_record_arg ? 2 : 1) ? false : - PG_GETARG_BOOL(have_record_arg ? 2 : 1); + use_json_as_text = PG_ARGISNULL(have_record_arg ? 2 : 1) ? false : PG_GETARG_BOOL(have_record_arg ? 2 : 1); if (have_record_arg) { argtype = get_fn_expr_argtype(fcinfo->flinfo, 0); @@ -2018,14 +1824,14 @@ populate_record_worker(FunctionCallInfo fcinfo, bool have_record_arg) if (!type_is_rowtype(argtype)) { ereport(ERROR, (errcode(ERRCODE_DATATYPE_MISMATCH), - errmsg("first argument of json%s_populate_record must be a row type", jtype == JSONBOID ? "b" : ""))); + errmsg("first argument of json%s_populate_record must be a row type", + jtype == JSONBOID ? "b" : ""))); } if (PG_ARGISNULL(0)) { if (PG_ARGISNULL(1)) { PG_RETURN_NULL(); } - /* * have no tuple to look at, so the only source of type info is * the argtype. The lookup_rowtype_tupdesc call below will error @@ -2035,25 +1841,19 @@ populate_record_worker(FunctionCallInfo fcinfo, bool have_record_arg) tupTypmod = -1; } else { rec = PG_GETARG_HEAPTUPLEHEADER(0); - if (PG_ARGISNULL(1)) { PG_RETURN_POINTER(rec); } - /* Extract type info from the tuple itself */ tupType = HeapTupleHeaderGetTypeId(rec); tupTypmod = HeapTupleHeaderGetTypMod(rec); } - tupdesc = lookup_rowtype_tupdesc(tupType, tupTypmod); } else { /* json{b}_to_record case */ - use_json_as_text = PG_ARGISNULL(1) ? 
false : PG_GETARG_BOOL(1); - if (PG_ARGISNULL(0)) { PG_RETURN_NULL(); } - if (get_call_result_type(fcinfo, NULL, &tupdesc) != TYPEFUNC_COMPOSITE) { ereport(ERROR, (errcode(ERRCODE_FEATURE_NOT_SUPPORTED), @@ -2067,9 +1867,7 @@ populate_record_worker(FunctionCallInfo fcinfo, bool have_record_arg) if (jtype == JSONOID) { /* just get the text */ json = PG_GETARG_TEXT_P(have_record_arg ? 1 : 0); - json_hash = get_json_object_as_hash(json, "json_populate_record", use_json_as_text); - /* * if the input json is empty, we can only skip the rest if we were * passed in a non-null record, since otherwise there may be issues @@ -2078,18 +1876,14 @@ populate_record_worker(FunctionCallInfo fcinfo, bool have_record_arg) if (hash_get_num_entries(json_hash) == 0 && rec) { PG_RETURN_POINTER(rec); } - } else { jb = PG_GETARG_JSONB(have_record_arg ? 1 : 0); - /* same logic as for json */ if (!have_record_arg && rec) { PG_RETURN_POINTER(rec); } } - ncolumns = tupdesc->natts; - if (rec) { /* Build a temporary HeapTuple control structure */ tuple.t_len = HeapTupleHeaderGetDatumLength(rec); @@ -2117,7 +1911,7 @@ populate_record_worker(FunctionCallInfo fcinfo, bool have_record_arg) if (have_record_arg && (my_extra->record_type != tupType || my_extra->record_typmod != tupTypmod)) { rc = memset_s(my_extra, sizeof(RecordIOData) - sizeof(ColumnIOData) + ncolumns * sizeof(ColumnIOData), - 0, sizeof(RecordIOData) - sizeof(ColumnIOData) + ncolumns * sizeof(ColumnIOData)); + 0, sizeof(RecordIOData) - sizeof(ColumnIOData) + ncolumns * sizeof(ColumnIOData)); securec_check(rc, "\0", "\0"); my_extra->record_type = tupType; my_extra->record_typmod = tupTypmod; @@ -2151,7 +1945,6 @@ populate_record_worker(FunctionCallInfo fcinfo, bool have_record_arg) } if (jtype == JSONOID) { - rc = memset_s(fname, NAMEDATALEN, 0, NAMEDATALEN); securec_check(rc, "\0", "\0"); rc = strncpy_s(fname, NAMEDATALEN, NameStr(tupdesc->attrs[i]->attname), NAMEDATALEN - 1); @@ -2159,9 +1952,7 @@ populate_record_worker(FunctionCallInfo fcinfo, bool have_record_arg) hashentry = (JsonHashEntry *)hash_search(json_hash, fname, HASH_FIND, NULL); } else { char *key = NameStr(tupdesc->attrs[i]->attname); - - v = findJsonbValueFromSuperHeaderLen(VARDATA(jb), JB_FOBJECT, key, - strlen(key)); + v = findJsonbValueFromSuperHeaderLen(VARDATA(jb), JB_FOBJECT, key, strlen(key)); } /* @@ -2180,12 +1971,9 @@ populate_record_worker(FunctionCallInfo fcinfo, bool have_record_arg) /* * Prepare to convert the column value from text */ - if (column_info->column_type != column_type){ - getTypeInputInfo(column_type, - &column_info->typiofunc, - &column_info->typioparam); - fmgr_info_cxt(column_info->typiofunc, &column_info->proc, - fcinfo->flinfo->fn_mcxt); + if (column_info->column_type != column_type) { + getTypeInputInfo(column_type, &column_info->typiofunc, &column_info->typioparam); + fmgr_info_cxt(column_info->typiofunc, &column_info->proc, fcinfo->flinfo->fn_mcxt); column_info->column_type = column_type; } if ((jtype == JSONOID && (hashentry == NULL || hashentry->isnull)) || @@ -2194,8 +1982,7 @@ populate_record_worker(FunctionCallInfo fcinfo, bool have_record_arg) * need InputFunctionCall to happen even for nulls, so that domain * checks are done */ - values[i] = InputFunctionCall(&column_info->proc, NULL, - column_info->typioparam, + values[i] = InputFunctionCall(&column_info->proc, NULL, column_info->typioparam, tupdesc->attrs[i]->atttypmod); nulls[i] = true; } else { @@ -2210,8 +1997,7 @@ populate_record_worker(FunctionCallInfo fcinfo, bool have_record_arg) } else if 
(v->type == jbvBool) { s = pnstrdup((v->boolean) ? "t" : "f", 1); } else if (v->type == jbvNumeric) { - s = DatumGetCString(DirectFunctionCall1(numeric_out, - PointerGetDatum(v->numeric))); + s = DatumGetCString(DirectFunctionCall1(numeric_out, PointerGetDatum(v->numeric))); } else if (!use_json_as_text) { ereport(ERROR, (errcode(ERRCODE_INVALID_PARAMETER_VALUE), @@ -2224,16 +2010,13 @@ populate_record_worker(FunctionCallInfo fcinfo, bool have_record_arg) } values[i] = InputFunctionCall(&column_info->proc, s, - column_info->typioparam, - tupdesc->attrs[i]->atttypmod); + column_info->typioparam, tupdesc->attrs[i]->atttypmod); nulls[i] = false; } } rettuple = heap_form_tuple(tupdesc, values, nulls); - ReleaseTupleDesc(tupdesc); - PG_RETURN_DATUM(HeapTupleGetDatum(rettuple)); } @@ -2248,24 +2031,20 @@ populate_record_worker(FunctionCallInfo fcinfo, bool have_record_arg) * funcname argument allows caller to pass in its name for use in * error messages. */ -static HTAB * -get_json_object_as_hash(text *json, char *funcname, bool use_json_as_text) +static HTAB *get_json_object_as_hash(text *json, char *funcname, bool use_json_as_text) { - HASHCTL ctl; - HTAB *tab = NULL; - JHashState *state = NULL; + HASHCTL ctl; + HTAB *tab = NULL; + JHashState *state = NULL; JsonLexContext *lex = makeJsonLexContext(json, true); - JsonSemAction *sem = NULL; + JsonSemAction *sem = NULL; errno_t rc = memset_s(&ctl, sizeof(ctl), 0, sizeof(ctl)); securec_check(rc, "\0", "\0"); ctl.keysize = NAMEDATALEN; ctl.entrysize = sizeof(JsonHashEntry); ctl.hcxt = CurrentMemoryContext; - tab = hash_create("json object hashtable", - 100, - &ctl, - HASH_ELEM | HASH_CONTEXT); + tab = hash_create("json object hashtable", 100, &ctl, HASH_ELEM | HASH_CONTEXT); state = (JHashState *)palloc0(sizeof(JHashState)); sem = (JsonSemAction *)palloc0(sizeof(JsonSemAction)); @@ -2286,8 +2065,7 @@ get_json_object_as_hash(text *json, char *funcname, bool use_json_as_text) return tab; } -static void -hash_object_field_start(void *state, char *fname, bool isnull) +static void hash_object_field_start(void *state, char *fname, bool isnull) { JHashState *_state = (JHashState *) state; @@ -2295,8 +2073,7 @@ hash_object_field_start(void *state, char *fname, bool isnull) return; } - if (_state->lex->token_type == JSON_TOKEN_ARRAY_START || - _state->lex->token_type == JSON_TOKEN_OBJECT_START) { + if (_state->lex->token_type == JSON_TOKEN_ARRAY_START || _state->lex->token_type == JSON_TOKEN_OBJECT_START) { if (!_state->use_json_as_text) { ereport(ERROR, (errcode(ERRCODE_INVALID_PARAMETER_VALUE), @@ -2310,13 +2087,12 @@ hash_object_field_start(void *state, char *fname, bool isnull) } } -static void -hash_object_field_end(void *state, char *fname, bool isnull) +static void hash_object_field_end(void *state, char *fname, bool isnull) { - JHashState *_state = (JHashState *) state; + JHashState *_state = (JHashState *) state; JsonHashEntry *hashentry = NULL; - bool found; - char name[NAMEDATALEN]; + bool found; + char name[NAMEDATALEN]; /* * ignore field names >= NAMEDATALEN - they can't match a record field @@ -2340,8 +2116,8 @@ hash_object_field_end(void *state, char *fname, bool isnull) hashentry->isnull = isnull; if (_state->save_json_start != NULL) { - int len = _state->lex->prev_token_terminator - _state->save_json_start; - char *val = (char *)palloc((len + 1) * sizeof(char)); + int len = _state->lex->prev_token_terminator - _state->save_json_start; + char *val = (char *)palloc((len + 1) * sizeof(char)); rc = memcpy_s(val, (len + 1) * sizeof(char), 
_state->save_json_start, len); securec_check(rc, "\0", "\0"); @@ -2353,27 +2129,25 @@ hash_object_field_end(void *state, char *fname, bool isnull) } } -static void -hash_array_start(void *state) +static void hash_array_start(void *state) { JHashState *_state = (JHashState *) state; if (_state->lex->lex_level == 0) { ereport(ERROR, (errcode(ERRCODE_INVALID_PARAMETER_VALUE), - errmsg("cannot call %s on an array", _state->function_name))); + errmsg("cannot call %s on an array", _state->function_name))); } } -static void -hash_scalar(void *state, char *token, JsonTokenType tokentype) +static void hash_scalar(void *state, char *token, JsonTokenType tokentype) { JHashState *_state = (JHashState *) state; if (_state->lex->lex_level == 0) { ereport(ERROR, (errcode(ERRCODE_INVALID_PARAMETER_VALUE), - errmsg("cannot call %s on a scalar", _state->function_name))); + errmsg("cannot call %s on a scalar", _state->function_name))); } if (_state->lex->lex_level == 1) { @@ -2392,30 +2166,27 @@ hash_scalar(void *state, char *token, JsonTokenType tokentype) * is pushed down into the semantic action handlers so it's done * per object in the array. */ -Datum -jsonb_populate_recordset(PG_FUNCTION_ARGS) +Datum jsonb_populate_recordset(PG_FUNCTION_ARGS) { return populate_recordset_worker(fcinfo, true); } -static void -make_row_from_rec_and_jsonb(Jsonb * element, PopulateRecordsetState *state) +static void make_row_from_rec_and_jsonb(Jsonb *element, PopulateRecordsetState *state) { - Datum *values = NULL; - bool *nulls = NULL; - int i; + Datum *values = NULL; + bool *nulls = NULL; + int i; RecordIOData *my_extra = state->my_extra; - int ncolumns = my_extra->ncolumns; - TupleDesc tupdesc = state->ret_tdesc; + int ncolumns = my_extra->ncolumns; + TupleDesc tupdesc = state->ret_tdesc; HeapTupleHeader rec = state->rec; - HeapTuple rettuple; + HeapTuple rettuple; values = (Datum *) palloc(ncolumns * sizeof(Datum)); nulls = (bool *) palloc(ncolumns * sizeof(bool)); if (state->rec) { HeapTupleData tuple; - /* Build a temporary HeapTuple control structure */ tuple.t_len = HeapTupleHeaderGetDatumLength(state->rec); ItemPointerSetInvalid(&(tuple.t_self)); @@ -2433,20 +2204,17 @@ make_row_from_rec_and_jsonb(Jsonb * element, PopulateRecordsetState *state) for (i = 0; i < ncolumns; ++i) { ColumnIOData *column_info = &my_extra->columns[i]; - Oid column_type = tupdesc->attrs[i]->atttypid; - JsonbValue *v = NULL; - char *key = NULL; + Oid column_type = tupdesc->attrs[i]->atttypid; + JsonbValue *v = NULL; + char *key = NULL; /* Ignore dropped columns in datatype */ if (tupdesc->attrs[i]->attisdropped) { nulls[i] = true; continue; } - key = NameStr(tupdesc->attrs[i]->attname); - - v = findJsonbValueFromSuperHeaderLen(VARDATA(element), JB_FOBJECT, - key, strlen(key)); + v = findJsonbValueFromSuperHeaderLen(VARDATA(element), JB_FOBJECT, key, strlen(key)); /* * We can't just skip here if the key wasn't found since we might have @@ -2464,11 +2232,8 @@ make_row_from_rec_and_jsonb(Jsonb * element, PopulateRecordsetState *state) * Prepare to convert the column value from text */ if (column_info->column_type != column_type) { - getTypeInputInfo(column_type, - &column_info->typiofunc, - &column_info->typioparam); - fmgr_info_cxt(column_info->typiofunc, &column_info->proc, - state->fn_mcxt); + getTypeInputInfo(column_type, &column_info->typiofunc, &column_info->typioparam); + fmgr_info_cxt(column_info->typiofunc, &column_info->proc, state->fn_mcxt); column_info->column_type = column_type; } if (v == NULL || v->type == jbvNull) { @@ -2476,20 
+2241,18 @@ make_row_from_rec_and_jsonb(Jsonb * element, PopulateRecordsetState *state) * Need InputFunctionCall to happen even for nulls, so that domain * checks are done */ - values[i] = InputFunctionCall(&column_info->proc, NULL, - column_info->typioparam, + values[i] = InputFunctionCall(&column_info->proc, NULL, column_info->typioparam, tupdesc->attrs[i]->atttypmod); nulls[i] = true; } else { - char *s = NULL; + char *s = NULL; if (v->type == jbvString) { s = pnstrdup(v->string.val, v->string.len); } else if (v->type == jbvBool) { s = pnstrdup((v->boolean) ? "t" : "f", 1); } else if (v->type == jbvNumeric) { - s = DatumGetCString(DirectFunctionCall1(numeric_out, - PointerGetDatum(v->numeric))); + s = DatumGetCString(DirectFunctionCall1(numeric_out, PointerGetDatum(v->numeric))); } else if (!state->use_json_as_text) { ereport(ERROR, (errcode(ERRCODE_INVALID_PARAMETER_VALUE), @@ -2500,26 +2263,21 @@ make_row_from_rec_and_jsonb(Jsonb * element, PopulateRecordsetState *state) elog(ERROR, "invalid jsonb type"); } - values[i] = InputFunctionCall(&column_info->proc, s, - column_info->typioparam, - tupdesc->attrs[i]->atttypmod); + values[i] = InputFunctionCall(&column_info->proc, s, column_info->typioparam, tupdesc->attrs[i]->atttypmod); nulls[i] = false; } } rettuple = heap_form_tuple(tupdesc, values, nulls); - tuplestore_puttuple(state->tuple_store, rettuple); } -Datum -json_populate_recordset(PG_FUNCTION_ARGS) +Datum json_populate_recordset(PG_FUNCTION_ARGS) { return populate_recordset_worker(fcinfo, true); } -Datum -json_to_recordset(PG_FUNCTION_ARGS) +Datum json_to_recordset(PG_FUNCTION_ARGS) { return populate_recordset_worker(fcinfo, false); } @@ -2527,27 +2285,24 @@ json_to_recordset(PG_FUNCTION_ARGS) /* * common worker for json_populate_recordset() and json_to_recordset() */ -static inline Datum -populate_recordset_worker(FunctionCallInfo fcinfo, bool have_record_arg) +static inline Datum populate_recordset_worker(FunctionCallInfo fcinfo, bool have_record_arg) { - Oid argtype; - Oid jtype = get_fn_expr_argtype(fcinfo->flinfo, have_record_arg ? 1 : 0); - bool use_json_as_text; + Oid argtype; + Oid jtype = get_fn_expr_argtype(fcinfo->flinfo, have_record_arg ? 1 : 0); + bool use_json_as_text; ReturnSetInfo *rsi = NULL; - MemoryContext old_cxt; - Oid tupType; - int32 tupTypmod; + MemoryContext old_cxt; + Oid tupType; + int32 tupTypmod; HeapTupleHeader rec; - TupleDesc tupdesc; - RecordIOData *my_extra = NULL; - int ncolumns; + TupleDesc tupdesc; + RecordIOData *my_extra = NULL; + int ncolumns; PopulateRecordsetState *state = NULL; if (have_record_arg) { argtype = get_fn_expr_argtype(fcinfo->flinfo, 0); - use_json_as_text = PG_ARGISNULL(2) ? false : PG_GETARG_BOOL(2); - if (!type_is_rowtype(argtype)) { ereport(ERROR, (errcode(ERRCODE_DATATYPE_MISMATCH), @@ -2555,12 +2310,10 @@ populate_recordset_worker(FunctionCallInfo fcinfo, bool have_record_arg) } } else { argtype = InvalidOid; - use_json_as_text = PG_ARGISNULL(1) ? 
false : PG_GETARG_BOOL(1); } rsi = (ReturnSetInfo *) fcinfo->resultinfo; - if (!rsi || !IsA(rsi, ReturnSetInfo) || (rsi->allowedModes & SFRM_Materialize) == 0 || rsi->expectedDesc == NULL) { @@ -2569,10 +2322,7 @@ populate_recordset_worker(FunctionCallInfo fcinfo, bool have_record_arg) errmsg("set-valued function called in context that " "cannot accept a set"))); } - - rsi->returnMode = SFRM_Materialize; - /* * get the tupdesc from the result set info - it must be a record type * because we already checked that arg1 is a record type, or we're in a @@ -2590,7 +2340,6 @@ populate_recordset_worker(FunctionCallInfo fcinfo, bool have_record_arg) if (PG_ARGISNULL(1)) { PG_RETURN_NULL(); } - if (PG_ARGISNULL(0)) { rec = NULL; } else { @@ -2600,10 +2349,8 @@ populate_recordset_worker(FunctionCallInfo fcinfo, bool have_record_arg) if (PG_ARGISNULL(1)) { PG_RETURN_NULL(); } - rec = NULL; } - tupType = tupdesc->tdtypeid; tupTypmod = tupdesc->tdtypmod; ncolumns = tupdesc->natts; @@ -2613,20 +2360,15 @@ populate_recordset_worker(FunctionCallInfo fcinfo, bool have_record_arg) * calls, assuming the record type doesn't change underneath us. */ my_extra = (RecordIOData *) fcinfo->flinfo->fn_extra; - if (my_extra == NULL || - my_extra->ncolumns != ncolumns) { - fcinfo->flinfo->fn_extra = - MemoryContextAlloc(fcinfo->flinfo->fn_mcxt, - sizeof(RecordIOData) - sizeof(ColumnIOData) - + ncolumns * sizeof(ColumnIOData)); + if (my_extra == NULL || my_extra->ncolumns != ncolumns) { + fcinfo->flinfo->fn_extra = MemoryContextAlloc(fcinfo->flinfo->fn_mcxt, + sizeof(RecordIOData) - sizeof(ColumnIOData) + ncolumns * sizeof(ColumnIOData)); my_extra = (RecordIOData *) fcinfo->flinfo->fn_extra; my_extra->record_type = InvalidOid; my_extra->record_typmod = 0; } - if (my_extra->record_type != tupType || - my_extra->record_typmod != tupTypmod) { - + if (my_extra->record_type != tupType || my_extra->record_typmod != tupTypmod) { errno_t rc = memset_s(my_extra, sizeof(RecordIOData) - sizeof(ColumnIOData) + ncolumns * sizeof(ColumnIOData), 0, sizeof(RecordIOData) - sizeof(ColumnIOData) + ncolumns * sizeof(ColumnIOData)); securec_check(rc, "\0", "\0"); @@ -2634,15 +2376,12 @@ populate_recordset_worker(FunctionCallInfo fcinfo, bool have_record_arg) my_extra->record_typmod = tupTypmod; my_extra->ncolumns = ncolumns; } - state = (PopulateRecordsetState *)palloc0(sizeof(PopulateRecordsetState)); - /* make these in a sufficiently long-lived memory context */ old_cxt = MemoryContextSwitchTo(rsi->econtext->ecxt_per_query_memory); state->ret_tdesc = CreateTupleDescCopy(tupdesc); BlessTupleDesc(state->ret_tdesc); - state->tuple_store = tuplestore_begin_heap(rsi->allowedModes & - SFRM_Materialize_Random, + state->tuple_store = tuplestore_begin_heap(rsi->allowedModes & SFRM_Materialize_Random, false, u_sess->attr.attr_memory.work_mem); MemoryContextSwitchTo(old_cxt); @@ -2652,14 +2391,12 @@ populate_recordset_worker(FunctionCallInfo fcinfo, bool have_record_arg) state->fn_mcxt = fcinfo->flinfo->fn_mcxt; if (jtype == JSONOID) { - text *json = PG_GETARG_TEXT_P(have_record_arg ? 1 : 0); + text *json = PG_GETARG_TEXT_P(have_record_arg ? 
1 : 0); JsonLexContext *lex = NULL; - JsonSemAction *sem = NULL; + JsonSemAction *sem = NULL; sem = (JsonSemAction *)palloc0(sizeof(JsonSemAction)); - lex = makeJsonLexContext(json, true); - sem->semstate = (void *) state; sem->array_start = populate_recordset_array_start; sem->array_element_start = populate_recordset_array_element_start; @@ -2668,11 +2405,9 @@ populate_recordset_worker(FunctionCallInfo fcinfo, bool have_record_arg) sem->object_field_end = populate_recordset_object_field_end; sem->object_start = populate_recordset_object_start; sem->object_end = populate_recordset_object_end; - state->lex = lex; pg_parse_json(lex, sem); - } else { Jsonb *jb = NULL; JsonbIterator *it = NULL; @@ -2682,21 +2417,16 @@ populate_recordset_worker(FunctionCallInfo fcinfo, bool have_record_arg) Assert(jtype == JSONBOID); jb = PG_GETARG_JSONB(have_record_arg ? 1 : 0); - if (JB_ROOT_IS_SCALAR(jb) || !JB_ROOT_IS_ARRAY(jb)) { ereport(ERROR, (errcode(ERRCODE_INVALID_PARAMETER_VALUE), - errmsg("cannot call jsonb_populate_recordset on non-array"))); + errmsg("cannot call jsonb_populate_recordset on non-array"))); } - it = JsonbIteratorInit(VARDATA_ANY(jb)); - while ((r = JsonbIteratorNext(&it, &v, skipNested)) != WJB_DONE) { skipNested = true; - if (r == WJB_ELEM) { Jsonb *element = JsonbValueToJsonb(&v); - if (!JB_ROOT_IS_OBJECT(element)) { ereport(ERROR, (errcode(ERRCODE_INVALID_PARAMETER_VALUE), @@ -2706,20 +2436,16 @@ populate_recordset_worker(FunctionCallInfo fcinfo, bool have_record_arg) } } } - rsi->setResult = state->tuple_store; rsi->setDesc = state->ret_tdesc; - PG_RETURN_NULL(); - } -static void -populate_recordset_object_start(void *state) +static void populate_recordset_object_start(void *state) { PopulateRecordsetState *_state = (PopulateRecordsetState *) state; - int lex_level = _state->lex->lex_level; - HASHCTL ctl; + int lex_level = _state->lex->lex_level; + HASHCTL ctl; if (lex_level == 0) { ereport(ERROR, @@ -2737,44 +2463,36 @@ populate_recordset_object_start(void *state) ctl.keysize = NAMEDATALEN; ctl.entrysize = sizeof(JsonHashEntry); ctl.hcxt = CurrentMemoryContext; - _state->json_hash = hash_create("json object hashtable", - 100, - &ctl, - HASH_ELEM | HASH_CONTEXT); + _state->json_hash = hash_create("json object hashtable", 100, &ctl, HASH_ELEM | HASH_CONTEXT); } -static void -populate_recordset_object_end(void *state) +static void populate_recordset_object_end(void *state) { PopulateRecordsetState *_state = (PopulateRecordsetState *) state; - HTAB *json_hash = _state->json_hash; - Datum *values = NULL; - bool *nulls = NULL; - char fname[NAMEDATALEN]; - int i; - RecordIOData *my_extra = _state->my_extra; - int ncolumns = my_extra->ncolumns; - TupleDesc tupdesc = _state->ret_tdesc; - JsonHashEntry *hashentry = NULL; - HeapTupleHeader rec = _state->rec; - HeapTuple rettuple; + HTAB *json_hash = _state->json_hash; + Datum *values = NULL; + bool *nulls = NULL; + char fname[NAMEDATALEN]; + int i; + RecordIOData *my_extra = _state->my_extra; + int ncolumns = my_extra->ncolumns; + TupleDesc tupdesc = _state->ret_tdesc; + JsonHashEntry *hashentry = NULL; + HeapTupleHeader rec = _state->rec; + HeapTuple rettuple; if (_state->lex->lex_level > 1) { return; } - values = (Datum *) palloc(ncolumns * sizeof(Datum)); nulls = (bool *) palloc(ncolumns * sizeof(bool)); - if (_state->rec) { HeapTupleData tuple; - /* Build a temporary HeapTuple control structure */ tuple.t_len = HeapTupleHeaderGetDatumLength(_state->rec); ItemPointerSetInvalid(&(tuple.t_self)); tuple.t_tableOid = InvalidOid; 
tuple.t_data = _state->rec; - /* Break down the tuple into fields */ heap_deform_tuple(&tuple, tupdesc, values, nulls); } else { @@ -2786,8 +2504,8 @@ populate_recordset_object_end(void *state) for (i = 0; i < ncolumns; ++i) { ColumnIOData *column_info = &my_extra->columns[i]; - Oid column_type = tupdesc->attrs[i]->atttypid; - char *value = NULL; + Oid column_type = tupdesc->attrs[i]->atttypid; + char *value = NULL; /* Ignore dropped columns in datatype */ if (tupdesc->attrs[i]->attisdropped) { @@ -2817,11 +2535,8 @@ populate_recordset_object_end(void *state) * Prepare to convert the column value from text */ if (column_info->column_type != column_type) { - getTypeInputInfo(column_type, - &column_info->typiofunc, - &column_info->typioparam); - fmgr_info_cxt(column_info->typiofunc, &column_info->proc, - _state->fn_mcxt); + getTypeInputInfo(column_type, &column_info->typiofunc, &column_info->typioparam); + fmgr_info_cxt(column_info->typiofunc, &column_info->proc, _state->fn_mcxt); column_info->column_type = column_type; } if (hashentry == NULL || hashentry->isnull) { @@ -2829,29 +2544,23 @@ populate_recordset_object_end(void *state) * need InputFunctionCall to happen even for nulls, so that domain * checks are done */ - values[i] = InputFunctionCall(&column_info->proc, NULL, - column_info->typioparam, + values[i] = InputFunctionCall(&column_info->proc, NULL, column_info->typioparam, tupdesc->attrs[i]->atttypmod); nulls[i] = true; } else { value = hashentry->val; - - values[i] = InputFunctionCall(&column_info->proc, value, - column_info->typioparam, + values[i] = InputFunctionCall(&column_info->proc, value, column_info->typioparam, tupdesc->attrs[i]->atttypmod); nulls[i] = false; } } rettuple = heap_form_tuple(tupdesc, values, nulls); - tuplestore_puttuple(_state->tuple_store, rettuple); - hash_destroy(json_hash); } -static void -populate_recordset_array_element_start(void *state, bool isnull) +static void populate_recordset_array_element_start(void *state, bool isnull) { PopulateRecordsetState *_state = (PopulateRecordsetState *) state; @@ -2859,24 +2568,21 @@ populate_recordset_array_element_start(void *state, bool isnull) _state->lex->token_type != JSON_TOKEN_OBJECT_START) { ereport(ERROR, (errcode(ERRCODE_INVALID_PARAMETER_VALUE), - errmsg("must call json_populate_recordset on an array of objects"))); + errmsg("must call json_populate_recordset on an array of objects"))); } } -static void -populate_recordset_array_start(void *state) +static void populate_recordset_array_start(void *state) { PopulateRecordsetState *_state = (PopulateRecordsetState *) state; - if (_state->lex->lex_level != 0 && !_state->use_json_as_text) { ereport(ERROR, (errcode(ERRCODE_INVALID_PARAMETER_VALUE), - errmsg("cannot call json_populate_recordset with nested arrays"))); + errmsg("cannot call json_populate_recordset with nested arrays"))); } } -static void -populate_recordset_scalar(void *state, char *token, JsonTokenType tokentype) +static void populate_recordset_scalar(void *state, char *token, JsonTokenType tokentype) { PopulateRecordsetState *_state = (PopulateRecordsetState *) state; @@ -2891,8 +2597,7 @@ populate_recordset_scalar(void *state, char *token, JsonTokenType tokentype) } } -static void -populate_recordset_object_field_start(void *state, char *fname, bool isnull) +static void populate_recordset_object_field_start(void *state, char *fname, bool isnull) { PopulateRecordsetState *_state = (PopulateRecordsetState *) state; @@ -2900,8 +2605,7 @@ populate_recordset_object_field_start(void *state, char 
*fname, bool isnull) return; } - if (_state->lex->token_type == JSON_TOKEN_ARRAY_START || - _state->lex->token_type == JSON_TOKEN_OBJECT_START) { + if (_state->lex->token_type == JSON_TOKEN_ARRAY_START || _state->lex->token_type == JSON_TOKEN_OBJECT_START) { if (!_state->use_json_as_text) { ereport(ERROR, (errcode(ERRCODE_INVALID_PARAMETER_VALUE), @@ -2913,11 +2617,10 @@ populate_recordset_object_field_start(void *state, char *fname, bool isnull) } } -static void -populate_recordset_object_field_end(void *state, char *fname, bool isnull) +static void populate_recordset_object_field_end(void *state, char *fname, bool isnull) { PopulateRecordsetState *_state = (PopulateRecordsetState *) state; - JsonHashEntry *hashentry = NULL; + JsonHashEntry *hashentry = NULL; bool found; char name[NAMEDATALEN]; @@ -2935,17 +2638,14 @@ populate_recordset_object_field_end(void *state, char *fname, bool isnull) securec_check(rc, "\0", "\0"); hashentry = (JsonHashEntry *)hash_search(_state->json_hash, name, HASH_ENTER, &found); - /* * found being true indicates a duplicate. We don't do anything about * that, a later field with the same name overrides the earlier field. */ - hashentry->isnull = isnull; if (_state->save_json_start != NULL) { - int len = _state->lex->prev_token_terminator - _state->save_json_start; - char *val = (char *)palloc((len + 1) * sizeof(char)); - + int len = _state->lex->prev_token_terminator - _state->save_json_start; + char *val = (char *)palloc((len + 1) * sizeof(char)); errno_t rc = memcpy_s(val, (len + 1) * sizeof(char), _state->save_json_start, len); securec_check(rc, "\0", "\0"); val[len] = '\0'; @@ -2959,9 +2659,7 @@ populate_recordset_object_field_end(void *state, char *fname, bool isnull) /* * findJsonbValueFromSuperHeader() wrapper that sets up JsonbValue key string. */ -static JsonbValue * -findJsonbValueFromSuperHeaderLen(JsonbSuperHeader sheader, uint32 flags, - char *key, uint32 keylen) +static JsonbValue *findJsonbValueFromSuperHeaderLen(JsonbSuperHeader sheader, uint32 flags, char *key, uint32 keylen) { JsonbValue k; diff --git a/src/common/backend/utils/adt/numeric.cpp b/src/common/backend/utils/adt/numeric.cpp index e86de3dd4..0a36a623a 100644 --- a/src/common/backend/utils/adt/numeric.cpp +++ b/src/common/backend/utils/adt/numeric.cpp @@ -454,37 +454,32 @@ char* numeric_out_sci(Numeric num, int scale) * * Output function for numeric data type without trailing zeroes. 
 */
-char *
-numeric_normalize(Numeric num)
+char *numeric_normalize(Numeric num)
 {
-    NumericVar x;
-    char *str;
-    int orig, last;
+    NumericVar x;
+    char *str = NULL;
+    int orig, last;
 
-    /*
-     * Handle NaN
-     */
-    if (NUMERIC_IS_NAN(num))
-        return pstrdup("NaN");
+    /*
+     * Handle NaN
+     */
+    if (NUMERIC_IS_NAN(num)) {
+        return pstrdup("NaN");
+    }
+    init_var_from_num(num, &x);
+    str = get_str_from_var(&x);
+    orig = last = strlen(str) - 1;
 
-    init_var_from_num(num, &x);
-
-    str = get_str_from_var(&x);
-
-    orig = last = strlen(str) - 1;
-
-    for (;;)
-    {
-        if (last == 0 || str[last] != '0')
-            break;
-
-        last--;
-    }
-
-    if (last > 0 && last != orig)
-        str[last] = '\0';
-
-    return str;
+    for (;;) {
+        if (last == 0 || str[last] != '0') {
+            break;
+        }
+        last--;
+    }
+    if (last > 0 && last != orig) {
+        str[last] = '\0';
+    }
+    return str;
 }
diff --git a/src/gausskernel/optimizer/util/pgxcship.cpp b/src/gausskernel/optimizer/util/pgxcship.cpp
index 438d38194..39a7d0e47 100644
--- a/src/gausskernel/optimizer/util/pgxcship.cpp
+++ b/src/gausskernel/optimizer/util/pgxcship.cpp
@@ -2331,8 +2331,8 @@ bool pgxc_is_internal_agg_final_func(Oid funcid)
         case TIMESTAMPTZLISTAGGNOARG2FUNCOID: // timestamptz_listagg_noarg2_finalfn
         case INTERVALLISTAGGFUNCOID:          // interval_listagg_finalfn
         case INTERVALLISTAGGNOARG2FUNCOID:    // interval_listagg_noarg2_finalfn
-        case JSONAGGFUNCOID:                  //json_agg_finalfn
-        case JSONOBJECTAGGFUNCOID:            //json_object_agg_finalfn
+        case JSONAGGFUNCOID:                  // json_agg_finalfn
+        case JSONOBJECTAGGFUNCOID:            // json_object_agg_finalfn
             is_internal_func = false;
             break;
         default:
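(Illustrative aside, not part of the patch.) The jsonb getters reformatted earlier in this patch -- jsonb_object_field(), jsonb_object_field_text(), jsonb_array_element_text(), jsonb_each() -- share one access pattern: initialize an iterator over the root container with JsonbIteratorInit(VARDATA_ANY(jb)) and walk the WJB_* tokens until the wanted key or element index is reached, skipping nested containers after the first step. The sketch below condenses that walk for an object-key lookup; lookup_key_sketch is a hypothetical helper, and the length-plus-memcmp key comparison is an assumption, since the exact comparison lines fall outside the hunks shown.

/* Sketch only: the iterator walk shared by the jsonb_object_field* getters.
 * lookup_key_sketch is hypothetical; the key comparison below is an assumption. */
static Jsonb *lookup_key_sketch(Jsonb *jb, const char *key, uint32 keylen)
{
    JsonbIterator *it = NULL;
    JsonbValue v;
    int r;
    bool skipNested = false;

    if (!JB_ROOT_IS_OBJECT(jb)) {
        return NULL;                    /* the real getters ereport() on scalars and arrays */
    }
    it = JsonbIteratorInit(VARDATA_ANY(jb));
    while ((r = JsonbIteratorNext(&it, &v, skipNested)) != WJB_DONE) {
        skipNested = true;              /* after the first token, stay at the top level */
        if (r == WJB_KEY && v.string.len == (int) keylen &&
            memcmp(v.string.val, key, keylen) == 0) {
            /* the token following a matching WJB_KEY is its value */
            (void) JsonbIteratorNext(&it, &v, skipNested);
            return JsonbValueToJsonb(&v);
        }
    }
    return NULL;                        /* key not present */
}

jsonb_object_field() would hand the resulting Jsonb back with PG_RETURN_JSONB(); the *_text variants instead dequote jbvString values via cstring_to_text_with_len() and render any other value through JsonbToCString(), as the hunks above show.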