Allocate JsonLexContexts on the heap to avoid warnings
The stack-allocated JsonLexContexts, in combination with code paths using goto, were causing warnings when compiling with LTO enabled, as the optimizer is unable to figure out that it is safe. Rather than contort the code with workarounds for this, simply heap-allocate the structs instead, as these are not in any performance-critical paths.

Author: Daniel Gustafsson <daniel@yesql.se>
Reported-by: Tom Lane <tgl@sss.pgh.pa.us>
Reviewed-by: Jacob Champion <jacob.champion@enterprisedb.com>
Reviewed-by: Tom Lane <tgl@sss.pgh.pa.us>
Discussion: https://postgr.es/m/2074634.1744839761@sss.pgh.pa.us
This commit is contained in:
parent 0ff95e0a5b
commit 994a100b37
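To make the motivation concrete, here is a minimal sketch of the pattern the commit moves away from and toward. It is not PostgreSQL code: ParseCtx and parse_message() are hypothetical stand-ins for JsonLexContext and the affected callers. The point is only that a struct used across goto-based error paths is easier for the optimizer to reason about when it is heap-allocated and freed at the cleanup label than when it lives on the stack.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Hypothetical stand-in for JsonLexContext. */
typedef struct ParseCtx
{
    const char *input;
    size_t      pos;
} ParseCtx;

/*
 * Before the change, the equivalent of "ParseCtx ctx = {0};" lived on the
 * stack and its address was passed around while several error paths did
 * "goto cleanup", which is what the optimizer could not prove safe under
 * LTO.  The replacement pattern: allocate up front, free at the cleanup
 * label.
 */
static int
parse_message(const char *msg)
{
    ParseCtx   *ctx;
    int         ret = -1;

    ctx = calloc(1, sizeof(ParseCtx));
    if (!ctx)
        return -1;

    ctx->input = msg;
    if (strlen(msg) == 0)
        goto cleanup;           /* error path jumps straight to cleanup */

    /* ... lexing/parsing would happen here ... */
    ret = 0;

cleanup:
    free(ctx);
    return ret;
}

int
main(void)
{
    return parse_message("{\"status\": \"invalid_token\"}") == 0 ? 0 : 1;
}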
@@ -476,7 +476,7 @@ issuer_from_well_known_uri(PGconn *conn, const char *wkuri)
 static bool
 handle_oauth_sasl_error(PGconn *conn, const char *msg, int msglen)
 {
-    JsonLexContext lex = {0};
+    JsonLexContext *lex;
     JsonSemAction sem = {0};
     JsonParseErrorType err;
     struct json_ctx ctx = {0};
@@ -504,8 +504,8 @@ handle_oauth_sasl_error(PGconn *conn, const char *msg, int msglen)
         return false;
     }
 
-    makeJsonLexContextCstringLen(&lex, msg, msglen, PG_UTF8, true);
-    setJsonLexContextOwnsTokens(&lex, true); /* must not leak on error */
+    lex = makeJsonLexContextCstringLen(NULL, msg, msglen, PG_UTF8, true);
+    setJsonLexContextOwnsTokens(lex, true); /* must not leak on error */
 
     initPQExpBuffer(&ctx.errbuf);
     sem.semstate = &ctx;
@@ -516,7 +516,7 @@ handle_oauth_sasl_error(PGconn *conn, const char *msg, int msglen)
     sem.array_start = oauth_json_array_start;
     sem.scalar = oauth_json_scalar;
 
-    err = pg_parse_json(&lex, &sem);
+    err = pg_parse_json(lex, &sem);
 
     if (err == JSON_SEM_ACTION_FAILED)
     {
@@ -535,7 +535,7 @@ handle_oauth_sasl_error(PGconn *conn, const char *msg, int msglen)
         }
     }
     else if (err != JSON_SUCCESS)
-        errmsg = json_errdetail(err, &lex);
+        errmsg = json_errdetail(err, lex);
 
     if (errmsg)
         libpq_append_conn_error(conn,
@@ -544,7 +544,7 @@ handle_oauth_sasl_error(PGconn *conn, const char *msg, int msglen)
 
     /* Don't need the error buffer or the JSON lexer anymore. */
     termPQExpBuffer(&ctx.errbuf);
-    freeJsonLexContext(&lex);
+    freeJsonLexContext(lex);
 
     if (errmsg)
         goto cleanup;
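The hunks above, in handle_oauth_sasl_error(), rely on the fact that makeJsonLexContextCstringLen() allocates and returns a fresh JsonLexContext when its first argument is NULL, and that freeJsonLexContext() then releases both the lexer state and the struct. A condensed frontend-style sketch of that usage follows; parse_error_payload() is a hypothetical helper, and the semantic actions and PQExpBuffer handling of the real function are omitted, so an all-NULL JsonSemAction simply validates the payload.

#include "postgres_fe.h"

#include "common/jsonapi.h"
#include "mb/pg_wchar.h"

/*
 * Validate a JSON payload using a heap-allocated lexer, mirroring the
 * pattern introduced above.  A zero-filled JsonSemAction has no callbacks,
 * so the parse only checks that the input is well-formed JSON.
 */
bool
parse_error_payload(const char *msg, size_t msglen)
{
    JsonLexContext *lex;
    JsonSemAction sem = {0};
    JsonParseErrorType err;

    /* Passing NULL asks the lexer to allocate the context itself. */
    lex = makeJsonLexContextCstringLen(NULL, msg, msglen, PG_UTF8, true);
    if (lex == NULL)
        return false;           /* defensive out-of-memory check */

    setJsonLexContextOwnsTokens(lex, true); /* must not leak on error */

    err = pg_parse_json(lex, &sem);
    if (err != JSON_SUCCESS)
        fprintf(stderr, "%s\n", json_errdetail(err, lex));

    freeJsonLexContext(lex);    /* also frees the struct it allocated */
    return (err == JSON_SUCCESS);
}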
@@ -84,7 +84,7 @@ main(int argc, char **argv)
     char        buff[BUFSIZE];
     FILE       *json_file;
     JsonParseErrorType result;
-    JsonLexContext lex;
+    JsonLexContext *lex;
     StringInfoData json;
     int         n_read;
     size_t      chunk_size = DEFAULT_CHUNK_SIZE;
@@ -98,6 +98,10 @@ main(int argc, char **argv)
 
     pg_logging_init(argv[0]);
 
+    lex = calloc(1, sizeof(JsonLexContext));
+    if (!lex)
+        pg_fatal("out of memory");
+
     while ((c = getopt(argc, argv, "c:os")) != -1)
     {
         switch (c)
@@ -113,7 +117,7 @@ main(int argc, char **argv)
             case 's':           /* do semantic processing */
                 testsem = &sem;
                 sem.semstate = palloc(sizeof(struct DoState));
-                ((struct DoState *) sem.semstate)->lex = &lex;
+                ((struct DoState *) sem.semstate)->lex = lex;
                 ((struct DoState *) sem.semstate)->buf = makeStringInfo();
                 need_strings = true;
                 break;
@@ -131,8 +135,8 @@ main(int argc, char **argv)
         exit(1);
     }
 
-    makeJsonLexContextIncremental(&lex, PG_UTF8, need_strings);
-    setJsonLexContextOwnsTokens(&lex, lex_owns_tokens);
+    makeJsonLexContextIncremental(lex, PG_UTF8, need_strings);
+    setJsonLexContextOwnsTokens(lex, lex_owns_tokens);
     initStringInfo(&json);
 
     if ((json_file = fopen(testfile, PG_BINARY_R)) == NULL)
@@ -165,12 +169,12 @@ main(int argc, char **argv)
         bytes_left -= n_read;
         if (bytes_left > 0)
         {
-            result = pg_parse_json_incremental(&lex, testsem,
+            result = pg_parse_json_incremental(lex, testsem,
                                                json.data, n_read,
                                                false);
             if (result != JSON_INCOMPLETE)
             {
-                fprintf(stderr, "%s\n", json_errdetail(result, &lex));
+                fprintf(stderr, "%s\n", json_errdetail(result, lex));
                 ret = 1;
                 goto cleanup;
             }
@@ -178,12 +182,12 @@ main(int argc, char **argv)
         }
         else
         {
-            result = pg_parse_json_incremental(&lex, testsem,
+            result = pg_parse_json_incremental(lex, testsem,
                                                json.data, n_read,
                                                true);
             if (result != JSON_SUCCESS)
             {
-                fprintf(stderr, "%s\n", json_errdetail(result, &lex));
+                fprintf(stderr, "%s\n", json_errdetail(result, lex));
                 ret = 1;
                 goto cleanup;
             }
@@ -195,8 +199,9 @@ main(int argc, char **argv)
 
 cleanup:
     fclose(json_file);
-    freeJsonLexContext(&lex);
+    freeJsonLexContext(lex);
     free(json.data);
+    free(lex);
 
     return ret;
 }
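The test-program hunks keep an explicit calloc()/free() pair instead of passing NULL, because main() needs the lex pointer while parsing its command-line options, before the lexer is actually initialized with makeJsonLexContextIncremental(). A condensed sketch of the resulting shape, with a hard-coded input string and an illustrative chunk size standing in for the program's file handling and options, and a zero-filled JsonSemAction in place of its semantic-action setup:

#include "postgres_fe.h"

#include "common/jsonapi.h"
#include "mb/pg_wchar.h"

#define CHUNK_SIZE 4            /* illustrative only */

/*
 * Incrementally parse a JSON string in fixed-size chunks with a
 * heap-allocated JsonLexContext, mirroring the cleanup pattern above:
 * freeJsonLexContext() releases the lexer-internal state, free() releases
 * the struct we allocated ourselves.
 */
int
main(void)
{
    const char *json = "{\"a\": [1, 2, 3]}";
    size_t      len = strlen(json);
    size_t      off;
    JsonLexContext *lex;
    JsonSemAction sem = {0};    /* no semantic actions */
    JsonParseErrorType result = JSON_SUCCESS;
    int         ret = 0;

    lex = calloc(1, sizeof(JsonLexContext));
    if (!lex)
    {
        fprintf(stderr, "out of memory\n");
        return 1;
    }

    makeJsonLexContextIncremental(lex, PG_UTF8, false);

    for (off = 0; off < len; off += CHUNK_SIZE)
    {
        size_t      n = (len - off < CHUNK_SIZE) ? len - off : CHUNK_SIZE;
        bool        is_last = (off + n >= len);

        result = pg_parse_json_incremental(lex, &sem,
                                           json + off, n, is_last);

        /* intermediate chunks must report JSON_INCOMPLETE */
        if (result != (is_last ? JSON_SUCCESS : JSON_INCOMPLETE))
        {
            fprintf(stderr, "%s\n", json_errdetail(result, lex));
            ret = 1;
            break;
        }
    }

    freeJsonLexContext(lex);    /* lexer-internal state */
    free(lex);                  /* the struct itself */

    return ret;
}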