Merge tag 'v1.26.0rc2' into social_login
Synapse 1.26.0rc2 (2021-01-25)
==============================

Bugfixes
--------

- Fix receipts and account data not being sent down sync. Introduced in v1.26.0rc1. ([\#9193](#9193), [\#9195](#9195))
- Fix chain cover update to handle events with duplicate auth events. Introduced in v1.26.0rc1. ([\#9210](#9210))

Internal Changes
----------------

- Add an `oidc-` prefix to any `idp_id`s which are given in the `oidc_providers` configuration. ([\#9189](#9189))
- Bump minimum `psycopg2` version to v2.8. ([\#9204](#9204))
richvdh committed Jan 25, 2021
2 parents 42a8e81 + 69961c7 commit 65fb3b2
Showing 12 changed files with 113 additions and 13 deletions.
17 changes: 17 additions & 0 deletions CHANGES.md
@@ -1,3 +1,20 @@
Synapse 1.26.0rc2 (2021-01-25)
==============================

Bugfixes
--------

- Fix receipts and account data not being sent down sync. Introduced in v1.26.0rc1. ([\#9193](https://github.com/matrix-org/synapse/issues/9193), [\#9195](https://github.com/matrix-org/synapse/issues/9195))
- Fix chain cover update to handle events with duplicate auth events. Introduced in v1.26.0rc1. ([\#9210](https://github.com/matrix-org/synapse/issues/9210))


Internal Changes
----------------

- Add an `oidc-` prefix to any `idp_id`s which are given in the `oidc_providers` configuration. ([\#9189](https://github.com/matrix-org/synapse/issues/9189))
- Bump minimum `psycopg2` version to v2.8. ([\#9204](https://github.com/matrix-org/synapse/issues/9204))


Synapse 1.26.0rc1 (2021-01-20)
==============================

1 change: 0 additions & 1 deletion changelog.d/9189.misc

This file was deleted.

2 changes: 1 addition & 1 deletion synapse/__init__.py
@@ -48,7 +48,7 @@
except ImportError:
pass

__version__ = "1.26.0rc1"
__version__ = "1.26.0rc2"

if bool(os.environ.get("SYNAPSE_TEST_PATCH_LOG_CONTEXTS", False)):
# We import here so that we don't have to install a bunch of deps when
4 changes: 2 additions & 2 deletions synapse/python_dependencies.py
@@ -86,8 +86,8 @@

CONDITIONAL_REQUIREMENTS = {
"matrix-synapse-ldap3": ["matrix-synapse-ldap3>=0.1"],
# we use execute_batch, which arrived in psycopg 2.7.
"postgres": ["psycopg2>=2.7"],
# we use execute_values with the fetch param, which arrived in psycopg 2.8.
"postgres": ["psycopg2>=2.8"],
# ACME support is required to provision TLS certificates from authorities
# that use the protocol, such as Let's Encrypt.
"acme": [
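The updated comment above refers to `execute_values` with its `fetch` parameter, which psycopg2 only gained in 2.8 — hence the version bump. A minimal sketch of that call pattern; the table, columns and connection string are illustrative, not taken from this commit:

```python
import psycopg2
from psycopg2.extras import execute_values

# Hypothetical DSN and table, used only to illustrate fetch=True.
conn = psycopg2.connect("dbname=synapse_example")
with conn, conn.cursor() as cur:
    returned = execute_values(
        cur,
        "INSERT INTO example_events (event_id, depth) VALUES %s RETURNING event_id",
        [("$abc", 1), ("$def", 2)],
        fetch=True,  # requires psycopg2 >= 2.8; returns the RETURNING rows
    )
    print(returned)
```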
2 changes: 1 addition & 1 deletion synapse/storage/databases/main/account_data.py
@@ -68,7 +68,7 @@ def __init__(self, database: DatabasePool, db_conn, hs):
# `StreamIdGenerator`, otherwise we use `SlavedIdTracker` which gets
# updated over replication. (Multiple writers are not supported for
# SQLite).
if hs.get_instance_name() in hs.config.worker.writers.events:
if hs.get_instance_name() in hs.config.worker.writers.account_data:
self._account_data_id_gen = StreamIdGenerator(
db_conn,
"room_account_data",
4 changes: 2 additions & 2 deletions synapse/storage/databases/main/receipts.py
@@ -45,7 +45,7 @@ def __init__(self, database: DatabasePool, db_conn, hs):
self._receipts_id_gen = MultiWriterIdGenerator(
db_conn=db_conn,
db=database,
stream_name="account_data",
stream_name="receipts",
instance_name=self._instance_name,
tables=[("receipts_linearized", "instance_name", "stream_id")],
sequence_name="receipts_sequence",
@@ -61,7 +61,7 @@ def __init__(self, database: DatabasePool, db_conn, hs):
# `StreamIdGenerator`, otherwise we use `SlavedIdTracker` which gets
# updated over replication. (Multiple writers are not supported for
# SQLite).
if hs.get_instance_name() in hs.config.worker.writers.events:
if hs.get_instance_name() in hs.config.worker.writers.receipts:
self._receipts_id_gen = StreamIdGenerator(
db_conn, "receipts_linearized", "stream_id"
)
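For context on the two hunks above: each store picks its ID generator based on whether the current instance is listed as that stream's writer, and the fix swaps a check against the `events` writer list for the stream's own list. A hedged sketch of that selection pattern — the wiring and import paths are assumptions mirroring the diff, not Synapse's actual constructor code:

```python
from synapse.replication.slave.storage._slaved_id_tracker import SlavedIdTracker
from synapse.storage.util.id_generators import StreamIdGenerator


def make_receipts_id_gen(hs, db_conn):
    # The instance configured to write receipts owns the stream ID allocator;
    # every other worker only tracks the position as it advances over replication.
    if hs.get_instance_name() in hs.config.worker.writers.receipts:
        return StreamIdGenerator(db_conn, "receipts_linearized", "stream_id")
    return SlavedIdTracker(db_conn, "receipts_linearized", "stream_id")
```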
New file: 18 additions & 0 deletions (SQL migration)
@@ -0,0 +1,18 @@
/* Copyright 2021 The Matrix.org Foundation C.I.C
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

-- We incorrectly populated these, so we delete them and let the
-- MultiWriterIdGenerator repopulate it.
DELETE FROM stream_positions WHERE stream_name = 'receipts' OR stream_name = 'account_data';
6 changes: 5 additions & 1 deletion synapse/storage/util/id_generators.py
@@ -261,7 +261,11 @@ def __init__(
# We check that the table and sequence haven't diverged.
for table, _, id_column in tables:
self._sequence_gen.check_consistency(
db_conn, table=table, id_column=id_column, positive=positive
db_conn,
table=table,
id_column=id_column,
stream_name=stream_name,
positive=positive,
)

# This goes and fills out the above state from the database.
56 changes: 53 additions & 3 deletions synapse/storage/util/sequence.py
@@ -45,6 +45,21 @@
See docs/postgres.md for more information.
"""

_INCONSISTENT_STREAM_ERROR = """
Postgres sequence '%(seq)s' is inconsistent with associated stream position
of '%(stream_name)s' in the 'stream_positions' table.
This is likely a programming error and should be reported at
https://github.com/matrix-org/synapse.
A temporary workaround to fix this error is to shut down Synapse (including
any and all workers) and run the following SQL:
DELETE FROM stream_positions WHERE stream_name = '%(stream_name)s';
This will need to be done every time the server is restarted.
"""


class SequenceGenerator(metaclass=abc.ABCMeta):
"""A class which generates a unique sequence of integers"""
@@ -60,14 +75,20 @@ def check_consistency(
db_conn: "LoggingDatabaseConnection",
table: str,
id_column: str,
stream_name: Optional[str] = None,
positive: bool = True,
):
"""Should be called during start up to test that the current value of
the sequence is greater than or equal to the maximum ID in the table.
This is to handle various cases where the sequence value can get out
of sync with the table, e.g. if Synapse gets rolled back to a previous
This is to handle various cases where the sequence value can get out of
sync with the table, e.g. if Synapse gets rolled back to a previous
version and then rolled forwards again.
If a stream name is given then this will check that any value in the
`stream_positions` table is less than or equal to the current sequence
value. If it isn't then it's likely that streams have been crossed
somewhere (e.g. two ID generators have the same stream name).
"""
...

@@ -93,8 +114,12 @@ def check_consistency(
db_conn: "LoggingDatabaseConnection",
table: str,
id_column: str,
stream_name: Optional[str] = None,
positive: bool = True,
):
"""See SequenceGenerator.check_consistency for docstring.
"""

txn = db_conn.cursor(txn_name="sequence.check_consistency")

# First we get the current max ID from the table.
@@ -118,6 +143,18 @@ def check_consistency(
"SELECT last_value, is_called FROM %(seq)s" % {"seq": self._sequence_name}
)
last_value, is_called = txn.fetchone()

# If we have an associated stream check the stream_positions table.
max_in_stream_positions = None
if stream_name:
txn.execute(
"SELECT MAX(stream_id) FROM stream_positions WHERE stream_name = ?",
(stream_name,),
)
row = txn.fetchone()
if row:
max_in_stream_positions = row[0]

txn.close()

# If `is_called` is False then `last_value` is actually the value that
@@ -138,6 +175,14 @@ def check_consistency(
% {"seq": self._sequence_name, "table": table, "max_id_sql": table_sql}
)

# If we have values in the stream positions table then they have to be
# less than or equal to `last_value`
if max_in_stream_positions and max_in_stream_positions > last_value:
raise IncorrectDatabaseSetup(
_INCONSISTENT_STREAM_ERROR
% {"seq": self._sequence_name, "stream_name": stream_name}
)


GetFirstCallbackType = Callable[[Cursor], int]

@@ -175,7 +220,12 @@ def get_next_id_txn(self, txn: Cursor) -> int:
return self._current_max_id

def check_consistency(
self, db_conn: Connection, table: str, id_column: str, positive: bool = True
self,
db_conn: Connection,
table: str,
id_column: str,
stream_name: Optional[str] = None,
positive: bool = True,
):
# There is nothing to do for in memory sequences
pass
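A hedged usage sketch of the extended `check_consistency` signature: passing `stream_name` opts into the new `stream_positions` cross-check, while omitting it keeps the old behaviour. The surrounding connection handling is assumed rather than taken from this commit:

```python
from synapse.storage.util.sequence import PostgresSequenceGenerator


def check_receipts_sequence(db_conn):
    # Compares the receipts sequence against both the backing table and, because
    # stream_name is given, the stream_positions table.
    seq = PostgresSequenceGenerator("receipts_sequence")
    seq.check_consistency(
        db_conn,
        table="receipts_linearized",
        id_column="stream_id",
        stream_name="receipts",  # new parameter added in this change
        positive=True,
    )
```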
2 changes: 1 addition & 1 deletion synapse/util/iterutils.py
@@ -78,7 +78,7 @@ def sorted_topologically(
if node not in degree_map:
continue

for edge in edges:
for edge in set(edges):
if edge in degree_map:
degree_map[node] += 1

12 changes: 12 additions & 0 deletions tests/util/test_itertools.py
@@ -92,3 +92,15 @@ def test_fork(self):
# Valid orderings are `[1, 3, 2, 4]` or `[1, 2, 3, 4]`, but we should
# always get the same one.
self.assertEqual(list(sorted_topologically([4, 3, 2, 1], graph)), [1, 2, 3, 4])

def test_duplicates(self):
"Test that a graph with duplicate edges work"
graph = {1: [], 2: [1, 1], 3: [2, 2], 4: [3]} # type: Dict[int, List[int]]

self.assertEqual(list(sorted_topologically([4, 3, 2, 1], graph)), [1, 2, 3, 4])

def test_multiple_paths(self):
"Test that a graph with multiple paths between two nodes work"
graph = {1: [], 2: [1], 3: [2], 4: [3, 2, 1]} # type: Dict[int, List[int]]

self.assertEqual(list(sorted_topologically([4, 3, 2, 1], graph)), [1, 2, 3, 4])
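The `set(edges)` change above is what the new `test_duplicates` case exercises: a duplicate edge must not inflate a node's dependency count, or that node (and everything after it) never becomes ready. A standalone sketch written to mirror the fixed degree-counting logic, not Synapse's actual implementation:

```python
from typing import Dict, Iterable, List


def sorted_topologically_sketch(
    nodes: Iterable[int], graph: Dict[int, List[int]]
) -> List[int]:
    # Count, for each node, how many distinct dependencies it has within `nodes`.
    degree = {node: 0 for node in nodes}
    for node, edges in graph.items():
        if node not in degree:
            continue
        for edge in set(edges):  # deduplicate, as in the fix above
            if edge in degree:
                degree[node] += 1

    # Repeatedly emit the smallest node with no outstanding dependencies.
    order: List[int] = []
    ready = sorted(n for n, d in degree.items() if d == 0)
    while ready:
        node = ready.pop(0)
        order.append(node)
        for other, edges in graph.items():
            if other in degree and degree[other] > 0 and node in set(edges):
                degree[other] -= 1
                if degree[other] == 0:
                    ready.append(other)
        ready.sort()
    return order


# With the duplicate edge 2 -> [1, 1], deduplication keeps the ordering intact:
assert sorted_topologically_sketch(
    [4, 3, 2, 1], {1: [], 2: [1, 1], 3: [2, 2], 4: [3]}
) == [1, 2, 3, 4]
```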
2 changes: 1 addition & 1 deletion tox.ini
@@ -117,7 +117,7 @@ commands =
# Make all greater-thans equals so we test the oldest version of our direct
# dependencies, but make the pyopenssl 17.0, which can work against an
# OpenSSL 1.1 compiled cryptography (as older ones don't compile on Travis).
/bin/sh -c 'python -m synapse.python_dependencies | sed -e "s/>=/==/g" -e "s/psycopg2==2.6//" -e "s/pyopenssl==16.0.0/pyopenssl==17.0.0/" | xargs -d"\n" pip install'
/bin/sh -c 'python -m synapse.python_dependencies | sed -e "s/>=/==/g" -e "/psycopg2/d" -e "s/pyopenssl==16.0.0/pyopenssl==17.0.0/" | xargs -d"\n" pip install'

# Install Synapse itself. This won't update any libraries.
pip install -e ".[test]"
