aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorDaniel Gustafsson <dgustafsson@postgresql.org>2025-04-23 11:02:05 +0200
committerDaniel Gustafsson <dgustafsson@postgresql.org>2025-04-23 11:02:05 +0200
commit994a100b37ad8c2fb8282a9fce91a16b4c832277 (patch)
treeb08a5e1e17d950c431d6b8c1c4ba3b84a1e81d83
parent0ff95e0a5be1372bfba9db284ea17c8e0e5da3a0 (diff)
downloadpostgresql-994a100b37ad8c2fb8282a9fce91a16b4c832277.tar.gz
postgresql-994a100b37ad8c2fb8282a9fce91a16b4c832277.zip
Allocate JsonLexContexts on the heap to avoid warnings
The stack-allocated JsonLexContexts, in combination with codepaths using goto, were causing warnings when compiling with LTO enabled, as the optimizer is unable to figure out that it is safe. Rather than contort the code with workarounds for this, simply heap-allocate the structs instead, as these are not in any performance-critical paths. Author: Daniel Gustafsson <daniel@yesql.se> Reported-by: Tom Lane <tgl@sss.pgh.pa.us> Reviewed-by: Jacob Champion <jacob.champion@enterprisedb.com> Reviewed-by: Tom Lane <tgl@sss.pgh.pa.us> Discussion: https://postgr.es/m/2074634.1744839761@sss.pgh.pa.us
-rw-r--r--src/interfaces/libpq/fe-auth-oauth.c12
-rw-r--r--src/test/modules/test_json_parser/test_json_parser_incremental.c23
2 files changed, 20 insertions, 15 deletions
diff --git a/src/interfaces/libpq/fe-auth-oauth.c b/src/interfaces/libpq/fe-auth-oauth.c
index cf1a25e2ccc..ab6a45e2aba 100644
--- a/src/interfaces/libpq/fe-auth-oauth.c
+++ b/src/interfaces/libpq/fe-auth-oauth.c
@@ -476,7 +476,7 @@ issuer_from_well_known_uri(PGconn *conn, const char *wkuri)
static bool
handle_oauth_sasl_error(PGconn *conn, const char *msg, int msglen)
{
- JsonLexContext lex = {0};
+ JsonLexContext *lex;
JsonSemAction sem = {0};
JsonParseErrorType err;
struct json_ctx ctx = {0};
@@ -504,8 +504,8 @@ handle_oauth_sasl_error(PGconn *conn, const char *msg, int msglen)
return false;
}
- makeJsonLexContextCstringLen(&lex, msg, msglen, PG_UTF8, true);
- setJsonLexContextOwnsTokens(&lex, true); /* must not leak on error */
+ lex = makeJsonLexContextCstringLen(NULL, msg, msglen, PG_UTF8, true);
+ setJsonLexContextOwnsTokens(lex, true); /* must not leak on error */
initPQExpBuffer(&ctx.errbuf);
sem.semstate = &ctx;
@@ -516,7 +516,7 @@ handle_oauth_sasl_error(PGconn *conn, const char *msg, int msglen)
sem.array_start = oauth_json_array_start;
sem.scalar = oauth_json_scalar;
- err = pg_parse_json(&lex, &sem);
+ err = pg_parse_json(lex, &sem);
if (err == JSON_SEM_ACTION_FAILED)
{
@@ -535,7 +535,7 @@ handle_oauth_sasl_error(PGconn *conn, const char *msg, int msglen)
}
}
else if (err != JSON_SUCCESS)
- errmsg = json_errdetail(err, &lex);
+ errmsg = json_errdetail(err, lex);
if (errmsg)
libpq_append_conn_error(conn,
@@ -544,7 +544,7 @@ handle_oauth_sasl_error(PGconn *conn, const char *msg, int msglen)
/* Don't need the error buffer or the JSON lexer anymore. */
termPQExpBuffer(&ctx.errbuf);
- freeJsonLexContext(&lex);
+ freeJsonLexContext(lex);
if (errmsg)
goto cleanup;
diff --git a/src/test/modules/test_json_parser/test_json_parser_incremental.c b/src/test/modules/test_json_parser/test_json_parser_incremental.c
index a529ee47e9b..d1e3e4ab4ea 100644
--- a/src/test/modules/test_json_parser/test_json_parser_incremental.c
+++ b/src/test/modules/test_json_parser/test_json_parser_incremental.c
@@ -84,7 +84,7 @@ main(int argc, char **argv)
char buff[BUFSIZE];
FILE *json_file;
JsonParseErrorType result;
- JsonLexContext lex;
+ JsonLexContext *lex;
StringInfoData json;
int n_read;
size_t chunk_size = DEFAULT_CHUNK_SIZE;
@@ -98,6 +98,10 @@ main(int argc, char **argv)
pg_logging_init(argv[0]);
+ lex = calloc(1, sizeof(JsonLexContext));
+ if (!lex)
+ pg_fatal("out of memory");
+
while ((c = getopt(argc, argv, "c:os")) != -1)
{
switch (c)
@@ -113,7 +117,7 @@ main(int argc, char **argv)
case 's': /* do semantic processing */
testsem = &sem;
sem.semstate = palloc(sizeof(struct DoState));
- ((struct DoState *) sem.semstate)->lex = &lex;
+ ((struct DoState *) sem.semstate)->lex = lex;
((struct DoState *) sem.semstate)->buf = makeStringInfo();
need_strings = true;
break;
@@ -131,8 +135,8 @@ main(int argc, char **argv)
exit(1);
}
- makeJsonLexContextIncremental(&lex, PG_UTF8, need_strings);
- setJsonLexContextOwnsTokens(&lex, lex_owns_tokens);
+ makeJsonLexContextIncremental(lex, PG_UTF8, need_strings);
+ setJsonLexContextOwnsTokens(lex, lex_owns_tokens);
initStringInfo(&json);
if ((json_file = fopen(testfile, PG_BINARY_R)) == NULL)
@@ -165,12 +169,12 @@ main(int argc, char **argv)
bytes_left -= n_read;
if (bytes_left > 0)
{
- result = pg_parse_json_incremental(&lex, testsem,
+ result = pg_parse_json_incremental(lex, testsem,
json.data, n_read,
false);
if (result != JSON_INCOMPLETE)
{
- fprintf(stderr, "%s\n", json_errdetail(result, &lex));
+ fprintf(stderr, "%s\n", json_errdetail(result, lex));
ret = 1;
goto cleanup;
}
@@ -178,12 +182,12 @@ main(int argc, char **argv)
}
else
{
- result = pg_parse_json_incremental(&lex, testsem,
+ result = pg_parse_json_incremental(lex, testsem,
json.data, n_read,
true);
if (result != JSON_SUCCESS)
{
- fprintf(stderr, "%s\n", json_errdetail(result, &lex));
+ fprintf(stderr, "%s\n", json_errdetail(result, lex));
ret = 1;
goto cleanup;
}
@@ -195,8 +199,9 @@ main(int argc, char **argv)
cleanup:
fclose(json_file);
- freeJsonLexContext(&lex);
+ freeJsonLexContext(lex);
free(json.data);
+ free(lex);
return ret;
}