
Add environment variable SYNAPSE_CACHE_FACTOR, default it to 0.1 #612

Merged · 5 commits · Mar 2, 2016
19 changes: 18 additions & 1 deletion README.rst
@@ -565,4 +565,21 @@ sphinxcontrib-napoleon::
Building internal API documentation::

python setup.py build_sphinx




Halp!! Synapse eats all my RAM!
===============================

Synapse's architecture is currently quite RAM hungry - we deliberately
cache a lot of recent room data and metadata in RAM in order to speed up
common requests. We'll improve this in future, but for now the easiest
way to reduce RAM usage (at the risk of slowing things down) is to set
the almost-undocumented ``SYNAPSE_CACHE_FACTOR`` environment variable.
Roughly speaking, a SYNAPSE_CACHE_FACTOR of 1.0 will max out at around
3-4GB of resident memory - this is what we currently run matrix.org on.
The default setting is currently 0.1, which gives a footprint of roughly
700MB. You can dial it down further to 0.02 if desired, which targets
roughly 512MB. Conversely you can dial it up if you need performance for
lots of users and have a box with a lot of RAM.
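
For reference, the variable is read once when Synapse starts and simply
scales the declared size of every internal cache, as the changes below
show. A minimal sketch of the mechanism (illustrative only - the exact
memory footprint also depends on usage)::

    import os

    # 0.1 is the default when SYNAPSE_CACHE_FACTOR is unset. Roughly,
    # 1.0 corresponds to the 3-4GB footprint quoted above, 0.1 to ~700MB
    # and 0.02 to ~512MB.
    CACHE_SIZE_FACTOR = float(os.environ.get("SYNAPSE_CACHE_FACTOR", 0.1))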

1 change: 1 addition & 0 deletions jenkins.sh
@@ -3,6 +3,7 @@
: ${WORKSPACE:="$(pwd)"}

export PYTHONDONTWRITEBYTECODE=yep
export SYNAPSE_CACHE_FACTOR=1

# Output test results as junit xml
export TRIAL_FLAGS="--reporter=subunit"
4 changes: 4 additions & 0 deletions synapse/handlers/presence.py
@@ -130,6 +130,10 @@ def __init__(self, hs):
for state in active_presence
}

metrics.register_callback(
"user_to_current_state_size", lambda: len(self.user_to_current_state)
)

now = self.clock.time_msec()
for state in active_presence:
self.wheel_timer.insert(
2 changes: 0 additions & 2 deletions synapse/handlers/receipts.py
@@ -36,8 +36,6 @@ def __init__(self, hs):
)
self.clock = self.hs.get_clock()

self._receipt_cache = None

@defer.inlineCallbacks
def received_client_receipt(self, room_id, receipt_type, user_id,
event_id):
6 changes: 6 additions & 0 deletions synapse/util/caches/descriptors.py
@@ -28,6 +28,7 @@

from collections import OrderedDict

import os
import functools
import inspect
import threading
@@ -38,6 +39,9 @@
_CacheSentinel = object()


CACHE_SIZE_FACTOR = float(os.environ.get("SYNAPSE_CACHE_FACTOR", 0.1))


class Cache(object):

def __init__(self, name, max_entries=1000, keylen=1, lru=True, tree=False):
@@ -140,6 +144,8 @@ class CacheDescriptor(object):
"""
def __init__(self, orig, max_entries=1000, num_args=1, lru=True, tree=False,
inlineCallbacks=False):
max_entries = int(max_entries * CACHE_SIZE_FACTOR)

self.orig = orig

if inlineCallbacks:
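
To make the effect of this change concrete, here is a worked example
(illustrative arithmetic only) of the effective size a cache declared
with the default ``max_entries`` of 1000 ends up with under a few
values of the factor::

    # int(max_entries * CACHE_SIZE_FACTOR) for a declared size of 1000:
    for factor in (1.0, 0.1, 0.02):
        print("factor %.2f -> %4d entries" % (factor, int(1000 * factor)))
    # factor 1.00 -> 1000 entries
    # factor 0.10 ->  100 entries
    # factor 0.02 ->   20 entries
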
6 changes: 5 additions & 1 deletion synapse/util/caches/stream_change_cache.py
@@ -18,11 +18,15 @@

from blist import sorteddict
import logging
import os


logger = logging.getLogger(__name__)


CACHE_SIZE_FACTOR = float(os.environ.get("SYNAPSE_CACHE_FACTOR", 0.1))


class StreamChangeCache(object):
"""Keeps track of the stream positions of the latest change in a set of entities.

@@ -33,7 +37,7 @@ class StreamChangeCache(object):
old then the cache will simply return all given entities.
"""
def __init__(self, name, current_stream_pos, max_size=10000, prefilled_cache={}):
self._max_size = max_size
self._max_size = int(max_size * CACHE_SIZE_FACTOR)
self._entity_to_key = {}
self._cache = sorteddict()
self._earliest_known_stream_pos = current_stream_pos
12 changes: 7 additions & 5 deletions tests/storage/test_appservice.py
@@ -35,7 +35,8 @@ class ApplicationServiceStoreTestCase(unittest.TestCase):
def setUp(self):
self.as_yaml_files = []
config = Mock(
app_service_config_files=self.as_yaml_files
app_service_config_files=self.as_yaml_files,
event_cache_size=1,
)
hs = yield setup_test_homeserver(config=config)

@@ -109,7 +110,8 @@ def setUp(self):
self.as_yaml_files = []

config = Mock(
app_service_config_files=self.as_yaml_files
app_service_config_files=self.as_yaml_files,
event_cache_size=1,
)
hs = yield setup_test_homeserver(config=config)
self.db_pool = hs.get_db_pool()
@@ -438,7 +440,7 @@ def test_unique_works(self):
f1 = self._write_config(suffix="1")
f2 = self._write_config(suffix="2")

config = Mock(app_service_config_files=[f1, f2])
config = Mock(app_service_config_files=[f1, f2], event_cache_size=1)
hs = yield setup_test_homeserver(config=config, datastore=Mock())

ApplicationServiceStore(hs)
@@ -448,7 +450,7 @@ def test_duplicate_ids(self):
f1 = self._write_config(id="id", suffix="1")
f2 = self._write_config(id="id", suffix="2")

config = Mock(app_service_config_files=[f1, f2])
config = Mock(app_service_config_files=[f1, f2], event_cache_size=1)
hs = yield setup_test_homeserver(config=config, datastore=Mock())

with self.assertRaises(ConfigError) as cm:
@@ -464,7 +466,7 @@ def test_duplicate_as_tokens(self):
f1 = self._write_config(as_token="as_token", suffix="1")
f2 = self._write_config(as_token="as_token", suffix="2")

config = Mock(app_service_config_files=[f1, f2])
config = Mock(app_service_config_files=[f1, f2], event_cache_size=1)
hs = yield setup_test_homeserver(config=config, datastore=Mock())

with self.assertRaises(ConfigError) as cm: