aboutsummaryrefslogtreecommitdiff
path: root/contrib/pg_stat_statements/pg_stat_statements.c
diff options
context:
space:
mode:
authorTom Lane <tgl@sss.pgh.pa.us>2021-10-31 19:13:48 -0400
committerTom Lane <tgl@sss.pgh.pa.us>2021-10-31 19:13:48 -0400
commit16d0da5c8dda7340d6b1e724ad183313e6a254f2 (patch)
treec65b9fe808541933a5f0a43b682673cb6e72872c /contrib/pg_stat_statements/pg_stat_statements.c
parent14b8d25d66bebd0becf03680a8684bb20f8f706c (diff)
downloadpostgresql-16d0da5c8dda7340d6b1e724ad183313e6a254f2.tar.gz
postgresql-16d0da5c8dda7340d6b1e724ad183313e6a254f2.zip
Don't try to read a multi-GB pg_stat_statements file in one call.
Windows fails on a request to read() more than INT_MAX bytes, and perhaps other platforms could have similar issues. Let's adjust this code to read at most 1GB per call. (One would not have thought the file could get that big, but now we have a field report of trouble, so it can. We likely ought to add some mechanism to limit the size of the query-texts file separately from the size of the hash table. That is not this patch, though.) Per bug #17254 from Yusuke Egashira. It's been like this for a while, so back-patch to all supported branches. Discussion: https://postgr.es/m/17254-a926c89dc03375c2@postgresql.org
Diffstat (limited to 'contrib/pg_stat_statements/pg_stat_statements.c')
-rw-r--r--contrib/pg_stat_statements/pg_stat_statements.c45
1 files changed, 29 insertions, 16 deletions
diff --git a/contrib/pg_stat_statements/pg_stat_statements.c b/contrib/pg_stat_statements/pg_stat_statements.c
index 490ade9e5fe..7c2452fda7c 100644
--- a/contrib/pg_stat_statements/pg_stat_statements.c
+++ b/contrib/pg_stat_statements/pg_stat_statements.c
@@ -1933,6 +1933,7 @@ qtext_load_file(Size *buffer_size)
char *buf;
int fd;
struct stat stat;
+ Size nread;
fd = OpenTransientFile(PGSS_TEXT_FILE, O_RDONLY | PG_BINARY);
if (fd < 0)
@@ -1973,23 +1974,35 @@ qtext_load_file(Size *buffer_size)
}
/*
- * OK, slurp in the file. If we get a short read and errno doesn't get
- * set, the reason is probably that garbage collection truncated the file
- * since we did the fstat(), so we don't log a complaint --- but we don't
- * return the data, either, since it's most likely corrupt due to
- * concurrent writes from garbage collection.
+ * OK, slurp in the file. Windows fails if we try to read more than
+ * INT_MAX bytes at once, and other platforms might not like that either,
+ * so read a very large file in 1GB segments.
*/
- errno = 0;
- if (read(fd, buf, stat.st_size) != stat.st_size)
+ nread = 0;
+ while (nread < stat.st_size)
{
- if (errno)
- ereport(LOG,
- (errcode_for_file_access(),
- errmsg("could not read file \"%s\": %m",
- PGSS_TEXT_FILE)));
- free(buf);
- CloseTransientFile(fd);
- return NULL;
+ int toread = Min(1024 * 1024 * 1024, stat.st_size - nread);
+
+ /*
+ * If we get a short read and errno doesn't get set, the reason is
+ * probably that garbage collection truncated the file since we did
+ * the fstat(), so we don't log a complaint --- but we don't return
+ * the data, either, since it's most likely corrupt due to concurrent
+ * writes from garbage collection.
+ */
+ errno = 0;
+ if (read(fd, buf + nread, toread) != toread)
+ {
+ if (errno)
+ ereport(LOG,
+ (errcode_for_file_access(),
+ errmsg("could not read file \"%s\": %m",
+ PGSS_TEXT_FILE)));
+ free(buf);
+ CloseTransientFile(fd);
+ return NULL;
+ }
+ nread += toread;
}
if (CloseTransientFile(fd))
@@ -1997,7 +2010,7 @@ qtext_load_file(Size *buffer_size)
(errcode_for_file_access(),
errmsg("could not close file \"%s\": %m", PGSS_TEXT_FILE)));
- *buffer_size = stat.st_size;
+ *buffer_size = nread;
return buf;
}