
Merge pull request #2330 from matrix-org/erikj/cache_size_factor

Increase default cache size
Erik Johnston authored on 2017-07-04 10:51:21 +01:00 (committed by GitHub)
commit f92d7416d7
8 changed files with 12 additions and 30 deletions

View file

@@ -359,7 +359,7 @@ https://www.archlinux.org/packages/community/any/matrix-synapse/, which should p
the necessary dependencies. If the default web client is to be served (enabled by default in
the generated config),
https://www.archlinux.org/packages/community/any/python2-matrix-angular-sdk/ will also need to
be installed.
be installed.
Alternatively, to install using pip a few changes may be needed as ArchLinux
defaults to python 3, but synapse currently assumes python 2.7 by default:
@@ -899,12 +899,9 @@ cache a lot of recent room data and metadata in RAM in order to speed up
common requests. We'll improve this in future, but for now the easiest
way to reduce the RAM usage (at the risk of slowing things down)
is to set the almost-undocumented ``SYNAPSE_CACHE_FACTOR`` environment
variable. Roughly speaking, a SYNAPSE_CACHE_FACTOR of 1.0 will max out
at around 3-4GB of resident memory - this is what we currently run the
matrix.org on. The default setting is currently 0.1, which is probably
around a ~700MB footprint. You can dial it down further to 0.02 if
desired, which targets roughly ~512MB. Conversely you can dial it up if
you need performance for lots of users and have a box with a lot of RAM.
variable. The default is 0.5, which can be decreased to reduce RAM usage
in memory-constrained environments, or increased if performance starts to
degrade.
.. _`key_management`: https://matrix.org/docs/spec/server_server/unstable.html#retrieving-server-keys
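
For illustration, a minimal sketch of how the factor is consumed, based on the definitions visible in the hunks below (the 100000 base size is the one used by the state cache in this commit; other caches declare their own base sizes):

    import os

    # Read the factor once; 0.5 is the new default introduced by this commit.
    CACHE_SIZE_FACTOR = float(os.environ.get("SYNAPSE_CACHE_FACTOR", 0.5))

    # Each cache scales its declared base capacity by the factor, e.g. the
    # state cache's base of 100000 entries becomes 50000 under the default
    # (it was 10000 when the default factor was 0.1).
    SIZE_OF_CACHE = int(100000 * CACHE_SIZE_FACTOR)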

View file

@@ -24,13 +24,13 @@ from synapse.api.constants import EventTypes
from synapse.api.errors import AuthError
from synapse.events.snapshot import EventContext
from synapse.util.async import Linearizer
from synapse.util.caches import CACHE_SIZE_FACTOR
from collections import namedtuple
from frozendict import frozendict
import logging
import hashlib
import os
logger = logging.getLogger(__name__)
@@ -38,9 +38,6 @@ logger = logging.getLogger(__name__)
KeyStateTuple = namedtuple("KeyStateTuple", ("context", "type", "state_key"))
CACHE_SIZE_FACTOR = float(os.environ.get("SYNAPSE_CACHE_FACTOR", 0.1))
SIZE_OF_CACHE = int(100000 * CACHE_SIZE_FACTOR)
EVICTION_TIMEOUT_SECONDS = 60 * 60

View file

@@ -16,6 +16,7 @@ import logging
from synapse.api.errors import StoreError
from synapse.util.logcontext import LoggingContext, PreserveLoggingContext
from synapse.util.caches import CACHE_SIZE_FACTOR
from synapse.util.caches.dictionary_cache import DictionaryCache
from synapse.util.caches.descriptors import Cache
from synapse.storage.engines import PostgresEngine
@@ -27,10 +28,6 @@ from twisted.internet import defer
import sys
import time
import threading
import os
CACHE_SIZE_FACTOR = float(os.environ.get("SYNAPSE_CACHE_FACTOR", 0.1))
logger = logging.getLogger(__name__)

View file

@@ -20,7 +20,8 @@ from twisted.internet import defer, reactor
from ._base import Cache
from . import background_updates
import os
from synapse.util.caches import CACHE_SIZE_FACTOR
logger = logging.getLogger(__name__)
@@ -30,9 +31,6 @@ logger = logging.getLogger(__name__)
LAST_SEEN_GRANULARITY = 120 * 1000
CACHE_SIZE_FACTOR = float(os.environ.get("SYNAPSE_CACHE_FACTOR", 0.1))
class ClientIpStore(background_updates.BackgroundUpdateStore):
def __init__(self, hs):
self.client_ip_last_seen = Cache(

View file

@@ -16,7 +16,7 @@
import synapse.metrics
import os
CACHE_SIZE_FACTOR = float(os.environ.get("SYNAPSE_CACHE_FACTOR", 0.1))
CACHE_SIZE_FACTOR = float(os.environ.get("SYNAPSE_CACHE_FACTOR", 0.5))
metrics = synapse.metrics.get_metrics_for("synapse.util.caches")
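
As a quick check of the changed default above (the helper below only mirrors the one-line os.environ lookup and is not part of the commit): with the variable unset the factor now resolves to 0.5, and operators can still export SYNAPSE_CACHE_FACTOR=0.1 to get the old behaviour:

    import os

    def resolve_factor():
        # Same lookup as the line changed in this hunk.
        return float(os.environ.get("SYNAPSE_CACHE_FACTOR", 0.5))

    os.environ.pop("SYNAPSE_CACHE_FACTOR", None)
    assert resolve_factor() == 0.5            # new default

    os.environ["SYNAPSE_CACHE_FACTOR"] = "0.1"
    assert resolve_factor() == 0.1            # previous default, opted back in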

View file

@@ -16,6 +16,7 @@ import logging
from synapse.util.async import ObservableDeferred
from synapse.util import unwrapFirstError, logcontext
from synapse.util.caches import CACHE_SIZE_FACTOR
from synapse.util.caches.lrucache import LruCache
from synapse.util.caches.treecache import TreeCache, iterate_tree_cache_entry
from synapse.util.stringutils import to_ascii
@@ -25,7 +26,6 @@ from . import register_cache
from twisted.internet import defer
from collections import namedtuple
import os
import functools
import inspect
import threading
@@ -37,9 +37,6 @@ logger = logging.getLogger(__name__)
_CacheSentinel = object()
CACHE_SIZE_FACTOR = float(os.environ.get("SYNAPSE_CACHE_FACTOR", 0.1))
class CacheEntry(object):
__slots__ = [
"deferred", "sequence", "callbacks", "invalidated"

View file

@@ -13,20 +13,16 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from synapse.util.caches import register_cache
from synapse.util.caches import register_cache, CACHE_SIZE_FACTOR
from blist import sorteddict
import logging
import os
logger = logging.getLogger(__name__)
CACHE_SIZE_FACTOR = float(os.environ.get("SYNAPSE_CACHE_FACTOR", 0.1))
class StreamChangeCache(object):
"""Keeps track of the stream positions of the latest change in a set of entities.

View file

@@ -241,7 +241,7 @@ class CacheDecoratorTestCase(unittest.TestCase):
callcount2 = [0]
class A(object):
@cached(max_entries=20) # HACK: This makes it 2 due to cache factor
@cached(max_entries=4) # HACK: This makes it 2 due to cache factor
def func(self, key):
callcount[0] += 1
return key
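
The max_entries tweak above follows from the new default: the test relies on an effective cache size of 2, and the cache factor is multiplied into max_entries (per the "HACK" comment), so 20 * 0.1 and 4 * 0.5 both come out to 2. A small sketch of that arithmetic, illustrative only rather than Synapse's actual scaling code:

    def effective_entries(max_entries, cache_size_factor):
        # The real scaling happens inside the cache implementation; the
        # arithmetic is the same.
        return int(max_entries * cache_size_factor)

    assert effective_entries(20, 0.1) == 2  # old default, hence max_entries=20
    assert effective_entries(4, 0.5) == 2   # new default, hence max_entries=4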