From c5b661e24aff9d6ae54f922a289b92d39383322d Mon Sep 17 00:00:00 2001
From: Daniel Gustafsson <dgustafsson@postgresql.org>
Date: Thu, 17 Apr 2025 13:38:05 +0200
Subject: [PATCH] Allocate JsonLexContexts on the heap to avoid warnings

The stack-allocated JsonLexContexts, in combination with codepaths
using goto, were causing warnings when compiling with LTO enabled,
as the optimizer is unable to figure out that it is safe.  Rather
than contort the code with workarounds for this, simply heap-allocate
the structs instead, as these are not in any performance-critical paths.

Reported-by: Tom Lane <tgl@sss.pgh.pa.us>
Discussion: https://postgr.es/m/2074634.1744839761@sss.pgh.pa.us
---
 src/interfaces/libpq/fe-auth-oauth.c          | 23 ++++++++++++++-----
 .../test_json_parser_incremental.c            | 22 ++++++++++--------
 2 files changed, 30 insertions(+), 15 deletions(-)

diff --git a/src/interfaces/libpq/fe-auth-oauth.c b/src/interfaces/libpq/fe-auth-oauth.c
index cf1a25e2ccc..b4d69874c83 100644
--- a/src/interfaces/libpq/fe-auth-oauth.c
+++ b/src/interfaces/libpq/fe-auth-oauth.c
@@ -476,7 +476,7 @@ issuer_from_well_known_uri(PGconn *conn, const char *wkuri)
 static bool
 handle_oauth_sasl_error(PGconn *conn, const char *msg, int msglen)
 {
-	JsonLexContext lex = {0};
+	JsonLexContext *lex;
 	JsonSemAction sem = {0};
 	JsonParseErrorType err;
 	struct json_ctx ctx = {0};
@@ -504,8 +504,19 @@ handle_oauth_sasl_error(PGconn *conn, const char *msg, int msglen)
 		return false;
 	}
 
-	makeJsonLexContextCstringLen(&lex, msg, msglen, PG_UTF8, true);
-	setJsonLexContextOwnsTokens(&lex, true);	/* must not leak on error */
+	/*
+	 * Later error paths need to go via the cleanup label, but since ctx
+	 * hasn't been initialized yet we return immediately here.
+	 */
+	lex = calloc(1, sizeof(JsonLexContext));
+	if (!lex)
+	{
+		libpq_append_conn_error(conn, "out of memory");
+		return false;
+	}
+
+	makeJsonLexContextCstringLen(lex, msg, msglen, PG_UTF8, true);
+	setJsonLexContextOwnsTokens(lex, true);	/* must not leak on error */
 
 	initPQExpBuffer(&ctx.errbuf);
 	sem.semstate = &ctx;
@@ -516,7 +527,7 @@ handle_oauth_sasl_error(PGconn *conn, const char *msg, int msglen)
 	sem.array_start = oauth_json_array_start;
 	sem.scalar = oauth_json_scalar;
 
-	err = pg_parse_json(&lex, &sem);
+	err = pg_parse_json(lex, &sem);
 
 	if (err == JSON_SEM_ACTION_FAILED)
 	{
@@ -535,7 +546,7 @@ handle_oauth_sasl_error(PGconn *conn, const char *msg, int msglen)
 		}
 	}
 	else if (err != JSON_SUCCESS)
-		errmsg = json_errdetail(err, &lex);
+		errmsg = json_errdetail(err, lex);
 
 	if (errmsg)
 		libpq_append_conn_error(conn,
@@ -544,7 +555,7 @@ handle_oauth_sasl_error(PGconn *conn, const char *msg, int msglen)
 
 	/* Don't need the error buffer or the JSON lexer anymore. */
 	termPQExpBuffer(&ctx.errbuf);
-	freeJsonLexContext(&lex);
+	freeJsonLexContext(lex);
 
 	if (errmsg)
 		goto cleanup;
diff --git a/src/test/modules/test_json_parser/test_json_parser_incremental.c b/src/test/modules/test_json_parser/test_json_parser_incremental.c
index a529ee47e9b..357152e0ae0 100644
--- a/src/test/modules/test_json_parser/test_json_parser_incremental.c
+++ b/src/test/modules/test_json_parser/test_json_parser_incremental.c
@@ -84,7 +84,7 @@ main(int argc, char **argv)
 	char		buff[BUFSIZE];
 	FILE	   *json_file;
 	JsonParseErrorType result;
-	JsonLexContext lex;
+	JsonLexContext *lex;
 	StringInfoData json;
 	int			n_read;
 	size_t		chunk_size = DEFAULT_CHUNK_SIZE;
@@ -98,6 +98,10 @@ main(int argc, char **argv)
 
 	pg_logging_init(argv[0]);
 
+	lex = calloc(1, sizeof(JsonLexContext));
+	if (!lex)
+		pg_fatal("out of memory");
+
 	while ((c = getopt(argc, argv, "c:os")) != -1)
 	{
 		switch (c)
@@ -113,7 +117,7 @@ main(int argc, char **argv)
 			case 's':			/* do semantic processing */
 				testsem = &sem;
 				sem.semstate = palloc(sizeof(struct DoState));
-				((struct DoState *) sem.semstate)->lex = &lex;
+				((struct DoState *) sem.semstate)->lex = lex;
 				((struct DoState *) sem.semstate)->buf = makeStringInfo();
 				need_strings = true;
 				break;
@@ -131,8 +135,8 @@ main(int argc, char **argv)
 		exit(1);
 	}
 
-	makeJsonLexContextIncremental(&lex, PG_UTF8, need_strings);
-	setJsonLexContextOwnsTokens(&lex, lex_owns_tokens);
+	makeJsonLexContextIncremental(lex, PG_UTF8, need_strings);
+	setJsonLexContextOwnsTokens(lex, lex_owns_tokens);
 	initStringInfo(&json);
 
 	if ((json_file = fopen(testfile, PG_BINARY_R)) == NULL)
@@ -165,12 +169,12 @@ main(int argc, char **argv)
 		bytes_left -= n_read;
 		if (bytes_left > 0)
 		{
-			result = pg_parse_json_incremental(&lex, testsem,
+			result = pg_parse_json_incremental(lex, testsem,
 											   json.data, n_read,
 											   false);
 			if (result != JSON_INCOMPLETE)
 			{
-				fprintf(stderr, "%s\n", json_errdetail(result, &lex));
+				fprintf(stderr, "%s\n", json_errdetail(result, lex));
 				ret = 1;
 				goto cleanup;
 			}
@@ -178,12 +182,12 @@ main(int argc, char **argv)
 		}
 		else
 		{
-			result = pg_parse_json_incremental(&lex, testsem,
+			result = pg_parse_json_incremental(lex, testsem,
 											   json.data, n_read,
 											   true);
 			if (result != JSON_SUCCESS)
 			{
-				fprintf(stderr, "%s\n", json_errdetail(result, &lex));
+				fprintf(stderr, "%s\n", json_errdetail(result, lex));
 				ret = 1;
 				goto cleanup;
 			}
@@ -195,7 +199,7 @@ main(int argc, char **argv)
 
 cleanup:
 	fclose(json_file);
-	freeJsonLexContext(&lex);
+	freeJsonLexContext(lex);
 	free(json.data);
 
 	return ret;
-- 
2.39.3 (Apple Git-146)

