Commit 85f57069 authored by Poul-Henning Kamp's avatar Poul-Henning Kamp

Add a parameter to limit how big chunks we attempt to allocate

from storage.  Asking for more than 256M at a time will not
actually improve performance and is quite likely to hurt it.
parent 64008762
......@@ -118,6 +118,7 @@ struct params {
/* Fetcher hints */
unsigned fetch_chunksize;
unsigned fetch_maxchunksize;
#ifdef SENDFILE_WORKS
/* Sendfile object minimum size */
......
......@@ -629,6 +629,13 @@ static const struct parspec input_parspec[] = {
"above 128kb a dubious idea.",
EXPERIMENTAL,
"128", "kilobytes" },
{ "fetch_maxchunksize",
tweak_uint, &master.fetch_maxchunksize, 64, UINT_MAX / 1024.,
"The maximum chunksize we attempt to allocate from storage. "
"Making this too large may cause delays and storage "
"fragmentation.\n",
EXPERIMENTAL,
"262144", "kilobytes" },
#ifdef SENDFILE_WORKS
{ "sendfile_threshold",
tweak_uint, &master.sendfile_threshold, 0, UINT_MAX,
......
......@@ -171,6 +171,9 @@ stv_alloc(const struct sess *sp, size_t size)
}
CHECK_OBJ_NOTNULL(stv, STEVEDORE_MAGIC);
if (size > (size_t)(params->fetch_maxchunksize) << 10)
size = (size_t)(params->fetch_maxchunksize) << 10;
for (;;) {
/* try to allocate from it */
AN(stv->alloc);
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment