author     Alvaro Herrera <alvherre@alvh.no-ip.org>    2023-10-05 10:59:08 +0200
committer  Alvaro Herrera <alvherre@alvh.no-ip.org>    2023-10-05 10:59:08 +0200
commit     1c99cde2f3440c59f582d45b251412c9a9b54f62 (patch)
tree       b50d690b8c52dbe74ea8e8f14d9051708df20523 /src/backend/utils/adt/json.c
parent     a8a968a8212ee3ef7f22795c834b33d871fac262 (diff)
Improve JsonLexContext's freeability
Previously, the JSON code didn't have to worry too much about freeing JsonLexContext, because it was never too long-lived. With new features being added for SQL/JSON this is no longer the case. Add a routine that knows how to free this struct and apply that to a few places, to prevent this from becoming problematic. At the same time, we change the API of makeJsonLexContextCstringLen to make it receive a pointer to JsonLexContext for callers that want it to be stack-allocated; it can also be passed as NULL to get the original behavior of a palloc'ed one.

This also causes an ABI break due to the addition of flags to JsonLexContext, so we can't easily backpatch it. AFAICS that's not much of a problem; apparently some leaks might exist in JSON usage of text-search, for example via json_to_tsvector, but I haven't seen any complaints about that.

Per Coverity complaint about datum_to_jsonb_internal().

Discussion: https://postgr.es/m/20230808174110.oq3iymllsv6amkih@alvherre.pgsql
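
For reference, a minimal sketch of the two calling conventions the revised API supports, based on the description above and the hunks below. This is an illustrative backend-style fragment, not part of the commit; str and nbytes stand for an input buffer and its length, as in the json_recv hunk.

	/* Stack-allocated context: pass its address as the first argument. */
	JsonLexContext lex;

	makeJsonLexContextCstringLen(&lex, str, nbytes, GetDatabaseEncoding(), false);
	pg_parse_json_or_ereport(&lex, &nullSemAction);

	/* Or pass NULL to keep the original behavior of a palloc'ed context. */
	JsonLexContext *heap_lex = makeJsonLexContextCstringLen(NULL, str, nbytes,
															GetDatabaseEncoding(),
															false);

	pg_parse_json_or_ereport(heap_lex, &nullSemAction);
	freeJsonLexContext(heap_lex);	/* new routine; releases whatever the lexer owns */
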
Diffstat (limited to 'src/backend/utils/adt/json.c')
-rw-r--r--    src/backend/utils/adt/json.c    39
1 file changed, 22 insertions, 17 deletions
diff --git a/src/backend/utils/adt/json.c b/src/backend/utils/adt/json.c
index 2c620809b20..55413c0fdf2 100644
--- a/src/backend/utils/adt/json.c
+++ b/src/backend/utils/adt/json.c
@@ -106,11 +106,11 @@ json_in(PG_FUNCTION_ARGS)
{
char *json = PG_GETARG_CSTRING(0);
text *result = cstring_to_text(json);
- JsonLexContext *lex;
+ JsonLexContext lex;
/* validate it */
- lex = makeJsonLexContext(result, false);
- if (!pg_parse_json_or_errsave(lex, &nullSemAction, fcinfo->context))
+ makeJsonLexContext(&lex, result, false);
+ if (!pg_parse_json_or_errsave(&lex, &nullSemAction, fcinfo->context))
PG_RETURN_NULL();
/* Internal representation is the same as text */
@@ -152,13 +152,14 @@ json_recv(PG_FUNCTION_ARGS)
StringInfo buf = (StringInfo) PG_GETARG_POINTER(0);
char *str;
int nbytes;
- JsonLexContext *lex;
+ JsonLexContext lex;
str = pq_getmsgtext(buf, buf->len - buf->cursor, &nbytes);
/* Validate it. */
- lex = makeJsonLexContextCstringLen(str, nbytes, GetDatabaseEncoding(), false);
- pg_parse_json_or_ereport(lex, &nullSemAction);
+ makeJsonLexContextCstringLen(&lex, str, nbytes, GetDatabaseEncoding(),
+ false);
+ pg_parse_json_or_ereport(&lex, &nullSemAction);
PG_RETURN_TEXT_P(cstring_to_text_with_len(str, nbytes));
}
@@ -1625,14 +1626,16 @@ json_unique_object_field_start(void *_state, char *field, bool isnull)
bool
json_validate(text *json, bool check_unique_keys, bool throw_error)
{
- JsonLexContext *lex = makeJsonLexContext(json, check_unique_keys);
+ JsonLexContext lex;
JsonSemAction uniqueSemAction = {0};
JsonUniqueParsingState state;
JsonParseErrorType result;
+ makeJsonLexContext(&lex, json, check_unique_keys);
+
if (check_unique_keys)
{
- state.lex = lex;
+ state.lex = &lex;
state.stack = NULL;
state.id_counter = 0;
state.unique = true;
@@ -1644,12 +1647,12 @@ json_validate(text *json, bool check_unique_keys, bool throw_error)
uniqueSemAction.object_end = json_unique_object_end;
}
- result = pg_parse_json(lex, check_unique_keys ? &uniqueSemAction : &nullSemAction);
+ result = pg_parse_json(&lex, check_unique_keys ? &uniqueSemAction : &nullSemAction);
if (result != JSON_SUCCESS)
{
if (throw_error)
- json_errsave_error(result, lex, NULL);
+ json_errsave_error(result, &lex, NULL);
return false; /* invalid json */
}
@@ -1664,6 +1667,9 @@ json_validate(text *json, bool check_unique_keys, bool throw_error)
return false; /* not unique keys */
}
+ if (check_unique_keys)
+ freeJsonLexContext(&lex);
+
return true; /* ok */
}
@@ -1683,18 +1689,17 @@ Datum
json_typeof(PG_FUNCTION_ARGS)
{
text *json = PG_GETARG_TEXT_PP(0);
- JsonLexContext *lex = makeJsonLexContext(json, false);
+ JsonLexContext lex;
char *type;
- JsonTokenType tok;
JsonParseErrorType result;
/* Lex exactly one token from the input and check its type. */
- result = json_lex(lex);
+ makeJsonLexContext(&lex, json, false);
+ result = json_lex(&lex);
if (result != JSON_SUCCESS)
- json_errsave_error(result, lex, NULL);
- tok = lex->token_type;
+ json_errsave_error(result, &lex, NULL);
- switch (tok)
+ switch (lex.token_type)
{
case JSON_TOKEN_OBJECT_START:
type = "object";
@@ -1716,7 +1721,7 @@ json_typeof(PG_FUNCTION_ARGS)
type = "null";
break;
default:
- elog(ERROR, "unexpected json token: %d", tok);
+ elog(ERROR, "unexpected json token: %d", lex.token_type);
}
PG_RETURN_TEXT_P(cstring_to_text(type));
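
Usage note on the json_validate hunk above (an inference, not stated in the commit): the context now lives on the caller's stack, and freeJsonLexContext is only called when check_unique_keys is true because that is the need_escapes case, i.e. the only case in which the lexer allocates de-escaped field-name buffers worth releasing. Sketched with the names from that hunk:

	JsonLexContext lex;

	makeJsonLexContext(&lex, json, check_unique_keys);
	/* ... parse as in the hunk above ... */
	if (check_unique_keys)
		freeJsonLexContext(&lex);	/* struct itself is on the stack; this only
									 * frees what the lexer allocated */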