This repository has been archived by the owner on Apr 26, 2024. It is now read-only.

Merge tag 'v0.33.3.1'
Synapse 0.33.3.1 (2018-09-06)
=============================

SECURITY FIXES
--------------

- Fix an issue where event signatures were not always correctly validated ([\#3796](https://github.com/matrix-org/synapse/issues/3796))
- Fix an issue where server_acls could be circumvented for incoming events ([\#3796](https://github.com/matrix-org/synapse/issues/3796))

Internal Changes
----------------

- Unignore synctl in .dockerignore to fix docker builds ([\#3802](#3802))
richvdh committed Sep 6, 2018
2 parents 74854a9 + 80189ed commit 2fd17b5
Showing 9 changed files with 157 additions and 47 deletions.
1 change: 0 additions & 1 deletion .dockerignore
@@ -3,6 +3,5 @@ Dockerfile
.gitignore
demo/etc
tox.ini
synctl
.git/*
.tox/*
14 changes: 14 additions & 0 deletions CHANGES.md
@@ -1,3 +1,17 @@
Synapse 0.33.3.1 (2018-09-06)
=============================

SECURITY FIXES
--------------

- Fix an issue where event signatures were not always correctly validated ([\#3796](https://github.com/matrix-org/synapse/issues/3796))
- Fix an issue where server_acls could be circumvented for incoming events ([\#3796](https://github.com/matrix-org/synapse/issues/3796))

Internal Changes
----------------

- Unignore synctl in .dockerignore to fix docker builds ([\#3802](https://github.com/matrix-org/synapse/issues/3802))

Synapse 0.33.3 (2018-08-22)
===========================

2 changes: 1 addition & 1 deletion synapse/__init__.py
@@ -17,4 +17,4 @@
""" This is a reference implementation of a Matrix home server.
"""

__version__ = "0.33.3"
__version__ = "0.33.3.1"
126 changes: 110 additions & 16 deletions synapse/federation/federation_base.py
@@ -13,17 +13,20 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from collections import namedtuple

import six

from twisted.internet import defer
from twisted.internet.defer import DeferredList

from synapse.api.constants import MAX_DEPTH
from synapse.api.constants import MAX_DEPTH, EventTypes, Membership
from synapse.api.errors import Codes, SynapseError
from synapse.crypto.event_signing import check_event_content_hash
from synapse.events import FrozenEvent
from synapse.events.utils import prune_event
from synapse.http.servlet import assert_params_in_dict
from synapse.types import get_domain_from_id
from synapse.util import logcontext, unwrapFirstError

logger = logging.getLogger(__name__)
@@ -133,34 +136,25 @@ def _check_sigs_and_hashes(self, pdus):
* throws a SynapseError if the signature check failed.
The deferreds run their callbacks in the sentinel logcontext.
"""

redacted_pdus = [
prune_event(pdu)
for pdu in pdus
]

deferreds = self.keyring.verify_json_objects_for_server([
(p.origin, p.get_pdu_json())
for p in redacted_pdus
])
deferreds = _check_sigs_on_pdus(self.keyring, pdus)

ctx = logcontext.LoggingContext.current_context()

def callback(_, pdu, redacted):
def callback(_, pdu):
with logcontext.PreserveLoggingContext(ctx):
if not check_event_content_hash(pdu):
logger.warn(
"Event content has been tampered, redacting %s: %s",
pdu.event_id, pdu.get_pdu_json()
)
return redacted
return prune_event(pdu)

if self.spam_checker.check_event_for_spam(pdu):
logger.warn(
"Event contains spam, redacting %s: %s",
pdu.event_id, pdu.get_pdu_json()
)
return redacted
return prune_event(pdu)

return pdu

@@ -173,16 +167,116 @@ def errback(failure, pdu):
)
return failure

for deferred, pdu, redacted in zip(deferreds, pdus, redacted_pdus):
for deferred, pdu in zip(deferreds, pdus):
deferred.addCallbacks(
callback, errback,
callbackArgs=[pdu, redacted],
callbackArgs=[pdu],
errbackArgs=[pdu],
)

return deferreds


class PduToCheckSig(namedtuple("PduToCheckSig", [
"pdu", "redacted_pdu_json", "event_id_domain", "sender_domain", "deferreds",
])):
pass


def _check_sigs_on_pdus(keyring, pdus):
"""Check that the given events are correctly signed
Args:
keyring (synapse.crypto.Keyring): keyring object to do the checks
pdus (Collection[EventBase]): the events to be checked
Returns:
List[Deferred]: a Deferred for each event in pdus, which will either succeed if
the signatures are valid, or fail (with a SynapseError) if not.
"""

# (currently this is written assuming the v1 room structure; we'll probably want a
# separate function for checking v2 rooms)

# we want to check that the event is signed by:
#
# (a) the server which created the event_id
#
# (b) the sender's server.
#
# - except in the case of invites created from a 3pid invite, which are exempt
# from this check, because the sender has to match that of the original 3pid
# invite, but the event may come from a different HS, for reasons that I don't
# entirely grok (why do the senders have to match? and if they do, why doesn't the
# joining server ask the inviting server to do the switcheroo with
# exchange_third_party_invite?).
#
# That's pretty awful, since redacting such an invite will render it invalid
# (because it will then look like a regular invite without a valid signature),
# and signatures are *supposed* to be valid whether or not an event has been
# redacted. But this isn't the worst of the ways that 3pid invites are broken.
#
# let's start by getting the domain for each pdu, and flattening the event back
# to JSON.
pdus_to_check = [
PduToCheckSig(
pdu=p,
redacted_pdu_json=prune_event(p).get_pdu_json(),
event_id_domain=get_domain_from_id(p.event_id),
sender_domain=get_domain_from_id(p.sender),
deferreds=[],
)
for p in pdus
]

# first make sure that the event is signed by the event_id's domain
deferreds = keyring.verify_json_objects_for_server([
(p.event_id_domain, p.redacted_pdu_json)
for p in pdus_to_check
])

for p, d in zip(pdus_to_check, deferreds):
p.deferreds.append(d)

# now let's look for events where the sender's domain is different to the
# event id's domain (normally only the case for joins/leaves), and add additional
# checks.
pdus_to_check_sender = [
p for p in pdus_to_check
if p.sender_domain != p.event_id_domain and not _is_invite_via_3pid(p.pdu)
]

more_deferreds = keyring.verify_json_objects_for_server([
(p.sender_domain, p.redacted_pdu_json)
for p in pdus_to_check_sender
])

for p, d in zip(pdus_to_check_sender, more_deferreds):
p.deferreds.append(d)

# replace lists of deferreds with single Deferreds
return [_flatten_deferred_list(p.deferreds) for p in pdus_to_check]


def _flatten_deferred_list(deferreds):
"""Given a list of one or more deferreds, either return the single deferred, or
combine into a DeferredList.
"""
if len(deferreds) > 1:
return DeferredList(deferreds, fireOnOneErrback=True, consumeErrors=True)
else:
assert len(deferreds) == 1
return deferreds[0]


def _is_invite_via_3pid(event):
return (
event.type == EventTypes.Member
and event.membership == Membership.INVITE
and "third_party_invite" in event.content
)


def event_from_pdu_json(pdu_json, outlier=False):
"""Construct a FrozenEvent from an event json received over federation
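The heart of the signature fix is `_check_sigs_on_pdus` above: every incoming PDU must now be signed by the server that allocated its event_id and, unless it is an invite created from a 3pid invite, also by the sender's server. Below is a minimal, self-contained sketch of that rule over plain dicts; `domains_that_must_sign` is a hypothetical helper, not Synapse's API, and the real code checks the redacted PDU JSON against those domains' keys via the `Keyring`.

```python
def domains_that_must_sign(pdu):
    """Sketch of the rule enforced by _check_sigs_on_pdus (hypothetical helper).

    Returns the set of server names whose signatures must validate on `pdu`.
    """
    event_id_domain = pdu["event_id"].split(":", 1)[1]
    sender_domain = pdu["sender"].split(":", 1)[1]

    # (a) the server which created the event_id must always have signed it
    domains = {event_id_domain}

    # invites derived from a 3pid invite are exempt from the sender check
    is_3pid_invite = (
        pdu["type"] == "m.room.member"
        and pdu.get("content", {}).get("membership") == "invite"
        and "third_party_invite" in pdu.get("content", {})
    )

    # (b) otherwise the sender's server must have signed it too
    if sender_domain != event_id_domain and not is_3pid_invite:
        domains.add(sender_domain)

    return domains


# An event whose sender lives on a different server needs both signatures:
assert domains_that_must_sign({
    "event_id": "$abc123:origin.example",
    "sender": "@alice:other.example",
    "type": "m.room.message",
    "content": {"body": "hi"},
}) == {"origin.example", "other.example"}
```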
20 changes: 10 additions & 10 deletions synapse/federation/federation_server.py
@@ -99,7 +99,7 @@ def on_backfill_request(self, origin, room_id, versions, limit):

@defer.inlineCallbacks
@log_function
def on_incoming_transaction(self, transaction_data):
def on_incoming_transaction(self, origin, transaction_data):
# keep this as early as possible to make the calculated origin ts as
# accurate as possible.
request_time = self._clock.time_msec()
@@ -108,34 +108,33 @@ def on_incoming_transaction(self, transaction_data):

if not transaction.transaction_id:
raise Exception("Transaction missing transaction_id")
if not transaction.origin:
raise Exception("Transaction missing origin")

logger.debug("[%s] Got transaction", transaction.transaction_id)

# use a linearizer to ensure that we don't process the same transaction
# multiple times in parallel.
with (yield self._transaction_linearizer.queue(
(transaction.origin, transaction.transaction_id),
(origin, transaction.transaction_id),
)):
result = yield self._handle_incoming_transaction(
transaction, request_time,
origin, transaction, request_time,
)

defer.returnValue(result)

@defer.inlineCallbacks
def _handle_incoming_transaction(self, transaction, request_time):
def _handle_incoming_transaction(self, origin, transaction, request_time):
""" Process an incoming transaction and return the HTTP response
Args:
origin (unicode): the server making the request
transaction (Transaction): incoming transaction
request_time (int): timestamp that the HTTP request arrived at
Returns:
Deferred[(int, object)]: http response code and body
"""
response = yield self.transaction_actions.have_responded(transaction)
response = yield self.transaction_actions.have_responded(origin, transaction)

if response:
logger.debug(
@@ -149,7 +148,7 @@ def _handle_incoming_transaction(self, transaction, request_time):

received_pdus_counter.inc(len(transaction.pdus))

origin_host, _ = parse_server_name(transaction.origin)
origin_host, _ = parse_server_name(origin)

pdus_by_room = {}

@@ -190,7 +189,7 @@ def process_pdus_for_room(room_id):
event_id = pdu.event_id
try:
yield self._handle_received_pdu(
transaction.origin, pdu
origin, pdu
)
pdu_results[event_id] = {}
except FederationError as e:
@@ -212,7 +211,7 @@ def process_pdus_for_room(room_id):
if hasattr(transaction, "edus"):
for edu in (Edu(**x) for x in transaction.edus):
yield self.received_edu(
transaction.origin,
origin,
edu.edu_type,
edu.content
)
@@ -224,6 +223,7 @@ def process_pdus_for_room(room_id):
logger.debug("Returning: %s", str(response))

yield self.transaction_actions.set_response(
origin,
transaction,
200, response
)
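This file carries the server_acls fix: `on_incoming_transaction` and everything downstream now use `origin`, which the transport layer establishes when it authenticates the request, rather than `transaction.origin`, a value the sender simply asserts in the request body. A schematic sketch of the distinction, with hypothetical helper names rather than Synapse's actual API:

```python
def handle_transaction(authenticated_origin, transaction_json, acl_allows):
    """Hypothetical sketch: evaluate the room's server ACL (m.room.server_acl)
    against the authenticated origin, never the origin claimed in the body."""
    # The old code effectively did:
    #     origin = transaction_json["origin"]
    # which a server blocked by the room ACL could set to any value,
    # letting its events through the ACL check.
    if not acl_allows(authenticated_origin):
        raise PermissionError(
            "%s is denied by this room's server ACL" % authenticated_origin
        )
    return len(transaction_json.get("pdus", []))


# A spoofed body origin no longer helps the blocked server:
blocked = {"evil.example"}
try:
    handle_transaction(
        "evil.example",
        {"origin": "innocent.example", "pdus": []},
        acl_allows=lambda server: server not in blocked,
    )
except PermissionError as exc:
    print(exc)
```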
8 changes: 4 additions & 4 deletions synapse/federation/persistence.py
@@ -36,7 +36,7 @@ def __init__(self, datastore):
self.store = datastore

@log_function
def have_responded(self, transaction):
def have_responded(self, origin, transaction):
""" Have we already responded to a transaction with the same id and
origin?
@@ -50,11 +50,11 @@ def have_responded(self, transaction):
"transaction_id")

return self.store.get_received_txn_response(
transaction.transaction_id, transaction.origin
transaction.transaction_id, origin
)

@log_function
def set_response(self, transaction, code, response):
def set_response(self, origin, transaction, code, response):
""" Persist how we responded to a transaction.
Returns:
@@ -66,7 +66,7 @@ def set_response(self, transaction, code, response):

return self.store.set_received_txn_response(
transaction.transaction_id,
transaction.origin,
origin,
code,
response,
)
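The persistence change is mechanical but follows the same theme: the transaction-response cache is keyed on the transaction_id together with the origin, and that origin is now the authenticated one. A toy illustration of the cache these two methods wrap (not the real store API):

```python
class TransactionResponseCache:
    """Toy stand-in for get_received_txn_response/set_received_txn_response."""

    def __init__(self):
        # (origin, transaction_id) -> (response_code, response_body)
        self._responses = {}

    def have_responded(self, origin, transaction_id):
        # A retried transaction from the same (authenticated) origin gets the
        # previously stored reply instead of being processed again.
        return self._responses.get((origin, transaction_id))

    def set_response(self, origin, transaction_id, code, body):
        self._responses[(origin, transaction_id)] = (code, body)
```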
2 changes: 1 addition & 1 deletion synapse/federation/transport/server.py
@@ -353,7 +353,7 @@ def on_PUT(self, origin, content, query, transaction_id):

try:
code, response = yield self.handler.on_incoming_transaction(
transaction_data
origin, transaction_data,
)
except Exception:
logger.exception("on_incoming_transaction failed")
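For context on where the `origin` argument comes from (not shown in this diff): federation requests are authenticated via an `Authorization: X-Matrix ...` header that names the sending server and carries a signature by one of its keys, and the servlet verifies that signature before handing `origin` to `on_incoming_transaction`. A rough, simplified sketch of extracting the header's parameters; the function name is hypothetical and real verification of `sig` is omitted:

```python
import re


def parse_x_matrix(header_value):
    """Extract (origin, key_id, sig) from an X-Matrix Authorization header.

    Simplified sketch only: a real server must also verify `sig` against the
    origin's published signing keys before trusting `origin`.
    """
    scheme, _, params = header_value.partition(" ")
    if scheme != "X-Matrix":
        raise ValueError("not an X-Matrix Authorization header")
    fields = {
        key: value.strip('"')
        for key, value in re.findall(r'(\w+)=("[^"]*"|[^,]*)', params)
    }
    return fields["origin"], fields["key"], fields["sig"]


origin, key_id, sig = parse_x_matrix(
    'X-Matrix origin=farm,key="ed25519:a_key",sig="b64signature"'
)
assert origin == "farm" and key_id == "ed25519:a_key"
```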
19 changes: 8 additions & 11 deletions tests/handlers/test_typing.py
@@ -33,7 +33,7 @@
)


def _expect_edu(destination, edu_type, content, origin="test"):
def _expect_edu_transaction(edu_type, content, origin="test"):
return {
"origin": origin,
"origin_server_ts": 1000000,
@@ -42,8 +42,8 @@ def _expect_edu(destination, edu_type, content, origin="test"):
}


def _make_edu_json(origin, edu_type, content):
return json.dumps(_expect_edu("test", edu_type, content, origin=origin)).encode(
def _make_edu_transaction_json(edu_type, content):
return json.dumps(_expect_edu_transaction(edu_type, content)).encode(
'utf8'
)

@@ -190,8 +190,7 @@ def test_started_typing_remote_send(self):
call(
"farm",
path="/_matrix/federation/v1/send/1000000/",
data=_expect_edu(
"farm",
data=_expect_edu_transaction(
"m.typing",
content={
"room_id": self.room_id,
@@ -221,19 +220,18 @@ def test_started_typing_remote_recv(self):

self.assertEquals(self.event_source.get_current_key(), 0)

yield self.mock_federation_resource.trigger(
(code, response) = yield self.mock_federation_resource.trigger(
"PUT",
"/_matrix/federation/v1/send/1000000/",
_make_edu_json(
"farm",
_make_edu_transaction_json(
"m.typing",
content={
"room_id": self.room_id,
"user_id": self.u_onion.to_string(),
"typing": True,
},
),
federation_auth=True,
federation_auth_origin=b'farm',
)

self.on_new_event.assert_has_calls(
@@ -264,8 +262,7 @@ def test_stopped_typing(self):
call(
"farm",
path="/_matrix/federation/v1/send/1000000/",
data=_expect_edu(
"farm",
data=_expect_edu_transaction(
"m.typing",
content={
"room_id": self.room_id,