
Merge pull request #836 from matrix-org/erikj/change_event_cache
Change the way we cache events
erikjohnston committed Jun 3, 2016
2 parents 040a560 + cffe464 commit 3210f4c
Showing 1 changed file with 59 additions and 54 deletions.
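In outline, the commit replaces the old three-part cache key (event_id, check_redacted, get_prev_content) with a single key of event_id alone, whose value pairs the original event with its pruned, redacted copy. A minimal sketch of that shape, using a plain dict in place of the real cache class (which this diff does not show):

from collections import namedtuple

# Same shape as the _EventCacheEntry added in this commit: the original event
# paired with its pruned copy (None when the event has not been redacted).
_EventCacheEntry = namedtuple("_EventCacheEntry", ("event", "redacted_event"))

# Plain dict standing in for the real event cache; the key is now (event_id,)
# rather than (event_id, check_redacted, get_prev_content).
event_cache = {}

def cache_event(event, redacted_event=None):
    # One entry per event, whatever flags later readers pass.
    event_cache[(event.event_id,)] = _EventCacheEntry(
        event=event,
        redacted_event=redacted_event,
    )

def invalidate_event(event_id):
    # A single key to drop, instead of every
    # (check_redacted, get_prev_content) combination.
    event_cache.pop((event_id,), None)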
113 changes: 59 additions & 54 deletions synapse/storage/events.py
@@ -139,6 +139,9 @@ def _get_drainining_queue(self, room_id):
pass


_EventCacheEntry = namedtuple("_EventCacheEntry", ("event", "redacted_event"))


class EventsStore(SQLBaseStore):
EVENT_ORIGIN_SERVER_TS_NAME = "event_origin_server_ts"

@@ -738,53 +741,65 @@ def _get_events(self, event_ids, check_redacted=True,
event_id_list = event_ids
event_ids = set(event_ids)

event_map = self._get_events_from_cache(
event_entry_map = self._get_events_from_cache(
event_ids,
check_redacted=check_redacted,
get_prev_content=get_prev_content,
allow_rejected=allow_rejected,
)

missing_events_ids = [e for e in event_ids if e not in event_map]
missing_events_ids = [e for e in event_ids if e not in event_entry_map]

if missing_events_ids:
missing_events = yield self._enqueue_events(
missing_events_ids,
check_redacted=check_redacted,
get_prev_content=get_prev_content,
allow_rejected=allow_rejected,
)

event_map.update(missing_events)
event_entry_map.update(missing_events)

defer.returnValue([
event_map[e_id] for e_id in event_id_list
if e_id in event_map and event_map[e_id]
])
events = []
for event_id in event_id_list:
entry = event_entry_map.get(event_id, None)
if not entry:
continue

if allow_rejected or not entry.event.rejected_reason:
if check_redacted and entry.redacted_event:
event = entry.redacted_event
else:
event = entry.event

events.append(event)

if get_prev_content:
if "replaces_state" in event.unsigned:
prev = yield self.get_event(
event.unsigned["replaces_state"],
get_prev_content=False,
allow_none=True,
)
if prev:
event.unsigned = dict(event.unsigned)
event.unsigned["prev_content"] = prev.content
event.unsigned["prev_sender"] = prev.sender

defer.returnValue(events)

def _invalidate_get_event_cache(self, event_id):
for check_redacted in (False, True):
for get_prev_content in (False, True):
self._get_event_cache.invalidate(
(event_id, check_redacted, get_prev_content)
)
self._get_event_cache.invalidate((event_id,))

def _get_events_from_cache(self, events, check_redacted, get_prev_content,
allow_rejected):
def _get_events_from_cache(self, events, allow_rejected):
event_map = {}

for event_id in events:
try:
ret = self._get_event_cache.get(
(event_id, check_redacted, get_prev_content,)
)
ret = self._get_event_cache.get((event_id,), None)
if not ret:
continue

if allow_rejected or not ret.rejected_reason:
event_map[event_id] = ret
else:
event_map[event_id] = None
except KeyError:
pass
if allow_rejected or not ret.event.rejected_reason:
event_map[event_id] = ret
else:
event_map[event_id] = None

return event_map
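
With one entry per event, the per-request flags are applied when the cache is read rather than when it is filled, as the rewritten _get_events and _get_events_from_cache above show. A rough standalone sketch of that resolution step, assuming an entry shaped like the namedtuple introduced at the top of the file:

def resolve_cached_event(entry, check_redacted=True, allow_rejected=False):
    # entry is assumed to be an _EventCacheEntry-style pair (event,
    # redacted_event). Returns None when the event was rejected and the
    # caller did not opt in to rejected events, mirroring _get_events.
    if not allow_rejected and entry.event.rejected_reason:
        return None
    if check_redacted and entry.redacted_event:
        return entry.redacted_event
    return entry.event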

@@ -855,8 +870,7 @@ def fire(evs):
reactor.callFromThread(fire, event_list)

@defer.inlineCallbacks
def _enqueue_events(self, events, check_redacted=True,
get_prev_content=False, allow_rejected=False):
def _enqueue_events(self, events, check_redacted=True, allow_rejected=False):
"""Fetches events from the database using the _event_fetch_list. This
allows batch and bulk fetching of events - it allows us to fetch events
without having to create a new transaction for each request for events.
@@ -894,8 +908,6 @@ def _enqueue_events(self, events, check_redacted=True,
[
preserve_fn(self._get_event_from_row)(
row["internal_metadata"], row["json"], row["redacts"],
check_redacted=check_redacted,
get_prev_content=get_prev_content,
rejected_reason=row["rejects"],
)
for row in rows
@@ -904,7 +916,7 @@
)

defer.returnValue({
e.event_id: e
e.event.event_id: e
for e in res if e
})

@@ -936,7 +948,6 @@ def _fetch_event_rows(self, txn, events):

@defer.inlineCallbacks
def _get_event_from_row(self, internal_metadata, js, redacted,
check_redacted=True, get_prev_content=False,
rejected_reason=None):
d = json.loads(js)
internal_metadata = json.loads(internal_metadata)
@@ -946,26 +957,27 @@ def _get_event_from_row(self, internal_metadata, js, redacted,
table="rejections",
keyvalues={"event_id": rejected_reason},
retcol="reason",
desc="_get_event_from_row",
desc="_get_event_from_row_rejected_reason",
)

ev = FrozenEvent(
original_ev = FrozenEvent(
d,
internal_metadata_dict=internal_metadata,
rejected_reason=rejected_reason,
)

if check_redacted and redacted:
ev = prune_event(ev)
redacted_event = None
if redacted:
redacted_event = prune_event(original_ev)

redaction_id = yield self._simple_select_one_onecol(
table="redactions",
keyvalues={"redacts": ev.event_id},
keyvalues={"redacts": redacted_event.event_id},
retcol="event_id",
desc="_get_event_from_row",
desc="_get_event_from_row_redactions",
)

ev.unsigned["redacted_by"] = redaction_id
redacted_event.unsigned["redacted_by"] = redaction_id
# Get the redaction event.

because = yield self.get_event(
@@ -977,23 +989,16 @@ def _get_event_from_row(self, internal_metadata, js, redacted,
if because:
# It's fine to add the event directly, since get_pdu_json
# will serialise this field correctly
ev.unsigned["redacted_because"] = because
redacted_event.unsigned["redacted_because"] = because

if get_prev_content and "replaces_state" in ev.unsigned:
prev = yield self.get_event(
ev.unsigned["replaces_state"],
get_prev_content=False,
allow_none=True,
)
if prev:
ev.unsigned["prev_content"] = prev.content
ev.unsigned["prev_sender"] = prev.sender

self._get_event_cache.prefill(
(ev.event_id, check_redacted, get_prev_content), ev
cache_entry = _EventCacheEntry(
event=original_ev,
redacted_event=redacted_event,
)

defer.returnValue(ev)
self._get_event_cache.prefill((original_ev.event_id,), cache_entry)

defer.returnValue(cache_entry)

@defer.inlineCallbacks
def count_daily_messages(self):
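The final hunk is where entries are built: _get_event_from_row now constructs the original FrozenEvent, prunes a redacted copy only when a redaction row exists, and prefills the cache under the event_id alone. A simplified, self-contained sketch of that flow, with prune_event and the cache passed in as stand-ins for the real helpers:

from collections import namedtuple

_EventCacheEntry = namedtuple("_EventCacheEntry", ("event", "redacted_event"))

def build_cache_entry(original_ev, redacted_by, prune_event, cache):
    # prune_event and cache stand in for Synapse's helpers; the cache is
    # assumed to expose a prefill(key, value) method as used in the diff.
    redacted_event = None
    if redacted_by is not None:
        # Only pay the pruning cost when a redaction actually exists.
        redacted_event = prune_event(original_ev)
        redacted_event.unsigned["redacted_by"] = redacted_by

    entry = _EventCacheEntry(event=original_ev, redacted_event=redacted_event)
    cache.prefill((original_ev.event_id,), entry)
    return entry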
