
Invalidate cache like #13796
Copying what #13796 is doing
MadLittleMods committed Sep 22, 2022
1 parent 1054f91 commit 2162ab5
Showing 2 changed files with 26 additions and 33 deletions.
34 changes: 1 addition & 33 deletions synapse/storage/controllers/persist_events.py
@@ -43,7 +43,7 @@
from twisted.internet import defer

from synapse.api.constants import EventTypes, Membership
from synapse.events import EventBase, relation_from_event
from synapse.events import EventBase
from synapse.events.snapshot import EventContext
from synapse.logging.context import PreserveLoggingContext, make_deferred_yieldable
from synapse.logging.opentracing import (
@@ -431,22 +431,6 @@ async def enqueue(
else:
events.append(event)

# We expect events to be persisted by this point and this makes
# mypy happy about `stream_ordering` not being optional below
assert event.internal_metadata.stream_ordering is not None
# Invalidate related caches after we persist a new event
relation = relation_from_event(event)
self.main_store._invalidate_caches_for_event(
stream_ordering=event.internal_metadata.stream_ordering,
event_id=event.event_id,
room_id=event.room_id,
etype=event.type,
state_key=event.state_key if hasattr(event, "state_key") else None,
redacts=event.redacts,
relates_to=relation.parent_id if relation else None,
backfilled=backfilled,
)

return (
events,
self.main_store.get_room_max_token(),
@@ -479,22 +463,6 @@ async def persist_event(
replaced_event = replaced_events.get(event.event_id)
if replaced_event:
event = await self.main_store.get_event(replaced_event)
else:
# We expect events to be persisted by this point and this makes
# mypy happy about `stream_ordering` not being optional below
assert event.internal_metadata.stream_ordering is not None
# Invalidate related caches after we persist a new event
relation = relation_from_event(event)
self.main_store._invalidate_caches_for_event(
stream_ordering=event.internal_metadata.stream_ordering,
event_id=event.event_id,
room_id=event.room_id,
etype=event.type,
state_key=event.state_key if hasattr(event, "state_key") else None,
redacts=event.redacts,
relates_to=relation.parent_id if relation else None,
backfilled=backfilled,
)

event_stream_id = event.internal_metadata.stream_ordering
# stream ordering should have been assigned by now
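For context, both blocks removed above derived relates_to by calling relation_from_event(event) on the freshly persisted event. As a rough, hypothetical sketch of that step (simplified shapes, not Synapse's actual relation_from_event), the helper can be thought of as reading the event's m.relates_to content and returning the parent event ID and relation type:

# Illustrative stand-in for relation_from_event: it reads the event's
# "m.relates_to" content and returns the parent event ID and relation type.
# The Relation dataclass and the sample content below are hypothetical.
from dataclasses import dataclass
from typing import Any, Dict, Optional

@dataclass
class Relation:
    parent_id: str
    rel_type: str

def relation_from_event_sketch(content: Dict[str, Any]) -> Optional[Relation]:
    relates_to = content.get("m.relates_to")
    if not isinstance(relates_to, dict):
        return None
    parent_id = relates_to.get("event_id")
    rel_type = relates_to.get("rel_type")
    if not parent_id or not rel_type:
        return None
    return Relation(parent_id=str(parent_id), rel_type=str(rel_type))

# The removed controller code then passed "relation.parent_id if relation
# else None" as relates_to when invalidating caches for the new event.
relation = relation_from_event_sketch(
    {"m.relates_to": {"rel_type": "m.annotation", "event_id": "$parent:example.org"}}
)
print(relation.parent_id if relation else None)  # -> $parent:example.org

With this commit, that lookup still happens, but inside _persist_events_txn in events.py (see the second file below) rather than in the persistence controller.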
25 changes: 25 additions & 0 deletions synapse/storage/databases/main/events.py
@@ -410,6 +410,31 @@ def _persist_events_txn(
assert min_stream_order
assert max_stream_order

# Once the txn completes, invalidate all of the relevant caches. Note that we do this
# up here because it captures all the events_and_contexts before any are removed.
for event, _ in events_and_contexts:
self.store.invalidate_get_event_cache_after_txn(txn, event.event_id)
if event.redacts:
self.store.invalidate_get_event_cache_after_txn(txn, event.redacts)

relates_to = None
relation = relation_from_event(event)
if relation:
relates_to = relation.parent_id

assert event.internal_metadata.stream_ordering is not None
txn.call_after(
self.store._invalidate_caches_for_event,
event.internal_metadata.stream_ordering,
event.event_id,
event.room_id,
event.type,
getattr(event, "state_key", None),
event.redacts,
relates_to,
backfilled=False,
)

self._update_forward_extremities_txn(
txn,
new_forward_extremities=new_forward_extremities,
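The key change in the added block is that cache invalidation is registered with txn.call_after instead of being performed directly, so _invalidate_caches_for_event only fires once the persist-events database transaction has actually committed. A minimal, self-contained sketch of that pattern under simplified assumptions (LoggingTransaction and _invalidate_caches_for_event below are stand-ins, not Synapse's real implementations):

from typing import Any, Callable, List, Optional, Tuple

class LoggingTransaction:
    """Stand-in transaction wrapper that queues callbacks until commit."""

    def __init__(self) -> None:
        self._after_callbacks: List[Tuple[Callable[..., None], tuple, dict]] = []

    def call_after(self, callback: Callable[..., None], *args: Any, **kwargs: Any) -> None:
        # Defer the callback: it runs only when the transaction commits, so
        # caches are never invalidated for writes that end up rolling back.
        self._after_callbacks.append((callback, args, kwargs))

    def commit(self) -> None:
        for callback, args, kwargs in self._after_callbacks:
            callback(*args, **kwargs)

def _invalidate_caches_for_event(
    stream_ordering: int,
    event_id: str,
    room_id: str,
    etype: str,
    state_key: Optional[str],
    redacts: Optional[str],
    relates_to: Optional[str],
    backfilled: bool = False,
) -> None:
    # Stand-in for the real store method: just report what would be invalidated.
    print(f"invalidating caches for {event_id} in {room_id}")

txn = LoggingTransaction()
# Inside the persist-events transaction, invalidation is queued, not run:
txn.call_after(
    _invalidate_caches_for_event,
    42,                           # stream_ordering
    "$event_id:example.org",
    "!room_id:example.org",
    "m.room.message",
    None,                         # state_key
    None,                         # redacts
    None,                         # relates_to
    backfilled=False,
)
# Only once the surrounding transaction commits do the callbacks fire:
txn.commit()

The upshot is that the controller-side invalidation removed in the first file is no longer needed: the same invalidation is now tied to the success of the transaction that persists the events.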
