This repository has been archived by the owner on Apr 26, 2024. It is now read-only.

Fix flake8 #4519

Merged
2 commits merged on Jan 30, 2019
1 change: 1 addition & 0 deletions changelog.d/4519.misc
@@ -0,0 +1 @@
Fix code to comply with linting in PyFlakes 3.7.1.
4 changes: 2 additions & 2 deletions synapse/_scripts/register_new_matrix_user.py
@@ -46,7 +46,7 @@ def request_registration(
# Get the nonce
r = requests.get(url, verify=False)

- if r.status_code is not 200:
+ if r.status_code != 200:
_print("ERROR! Received %d %s" % (r.status_code, r.reason))
if 400 <= r.status_code < 500:
try:
@@ -84,7 +84,7 @@ def request_registration(
_print("Sending registration request...")
r = requests.post(url, json=data, verify=False)

- if r.status_code is not 200:
+ if r.status_code != 200:
_print("ERROR! Received %d %s" % (r.status_code, r.reason))
if 400 <= r.status_code < 500:
try:
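For context (not part of the diff): `is not` tests object identity rather than equality, so `r.status_code is not 200` only behaved as intended because CPython happens to cache small integers. The pyflakes shipped with flake8 3.7 flags such literal comparisons (F632, as I understand the error code), and Python 3.8+ warns about them at compile time. A minimal illustration with hypothetical values:

```python
# Illustration only: identity ("is") vs. equality ("==") for ints in CPython.
a = int("200")    # small int: CPython reuses a cached object
b = int("1024")   # large int: a fresh object is created at runtime

print(a == 200, b == 1024)  # True True  -- value comparison, always correct
print(a is 200)             # True, but only because of the small-int cache
print(b is 1024)            # False: equal values, different objects
```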
4 changes: 2 additions & 2 deletions synapse/handlers/directory.py
@@ -57,8 +57,8 @@ def _create_association(self, room_alias, room_id, servers=None, creator=None):
# general association creation for both human users and app services

for wchar in string.whitespace:
(indentation-only change: the same two lines re-indented)
    if wchar in room_alias.localpart:
        raise SynapseError(400, "Invalid characters in room alias")

if not self.hs.is_mine(room_alias):
raise SynapseError(400, "Room alias must be local")
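For the indentation-only hunks in this PR (this file and the similar whitespace-only hunks in clientformat.py, events.py and events_worker.py below), the trigger is most likely the over-indentation check added in the pycodestyle release bundled with flake8 3.7 (E117, to the best of my recollection). A standalone sketch of the pattern, not taken from the repository:

```python
# Over-indented block: legal Python, but flagged by newer pycodestyle.
for wchar in " \t\n":
        if wchar in "alias with space":   # body indented 8 spaces instead of 4
            print("invalid characters")

# Re-indented equivalent that passes the linter.
for wchar in " \t\n":
    if wchar in "alias with space":
        print("invalid characters")
```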
2 changes: 1 addition & 1 deletion synapse/handlers/federation.py
@@ -102,7 +102,7 @@ def __init__(self, hs):

self.hs = hs

- self.store = hs.get_datastore() # type: synapse.storage.DataStore
Review comment from a project member on this line: SADFACE

+ self.store = hs.get_datastore()
self.federation_client = hs.get_federation_client()
self.state_handler = hs.get_state_handler()
self.server_name = hs.hostname
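The only change here is dropping the `# type: synapse.storage.DataStore` comment (hence the SADFACE): the pyflakes shipped with flake8 3.7 started parsing type comments and reporting names in them that are not imported in the module, which presumably made this annotation a lint error. One common workaround, not what this PR does, is to import the name only for type checking; a sketch under that assumption:

```python
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Imported only while type checking, so no runtime (or circular) import cost.
    import synapse.storage


class FederationHandler(object):
    def __init__(self, hs):
        # The type comment now refers to an imported name, keeping the hint
        # without upsetting the linter.
        self.store = hs.get_datastore()  # type: synapse.storage.DataStore
```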
2 changes: 1 addition & 1 deletion synapse/push/clientformat.py
@@ -84,7 +84,7 @@ def _rule_to_template(rule):
templaterule["pattern"] = thecond["pattern"]

if unscoped_rule_id:
(indentation-only change)
    templaterule['rule_id'] = unscoped_rule_id
if 'default' in rule:
templaterule['default'] = rule['default']
return templaterule
2 changes: 1 addition & 1 deletion synapse/storage/__init__.py
@@ -317,7 +317,7 @@ def _count_r30_users(txn):
thirty_days_ago_in_secs))

for row in txn:
- if row[0] is 'unknown':
+ if row[0] == 'unknown':
pass
results[row[0]] = row[1]

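The same identity-versus-equality issue, this time with a string literal: whether two equal strings are the same object depends on interning, so `row[0] is 'unknown'` could silently never match, while `==` compares values. A small illustration, not from the repository:

```python
# Illustration only: equal strings are not necessarily the same object.
label = "".join(["un", "known"])   # built at runtime, typically not interned

print(label == "unknown")   # True  -- value equality, what the code intends
print(label is "unknown")   # False in CPython: two distinct string objects
```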
168 changes: 84 additions & 84 deletions synapse/storage/events.py
@@ -904,106 +904,106 @@ def _persist_events_txn(self, txn, events_and_contexts, backfilled,

(indentation-only hunk: the body of the for loop below was re-indented; the 84 removed and 84 added lines are otherwise identical, so the code is shown once)

def _update_current_state_txn(self, txn, state_delta_by_room, max_stream_order):
    for room_id, current_state_tuple in iteritems(state_delta_by_room):
        to_delete, to_insert = current_state_tuple

        # First we add entries to the current_state_delta_stream. We
        # do this before updating the current_state_events table so
        # that we can use it to calculate the `prev_event_id`. (This
        # allows us to not have to pull out the existing state
        # unnecessarily).
        sql = """
            INSERT INTO current_state_delta_stream
            (stream_id, room_id, type, state_key, event_id, prev_event_id)
            SELECT ?, ?, ?, ?, ?, (
                SELECT event_id FROM current_state_events
                WHERE room_id = ? AND type = ? AND state_key = ?
            )
        """
        txn.executemany(sql, (
            (
                max_stream_order, room_id, etype, state_key, None,
                room_id, etype, state_key,
            )
            for etype, state_key in to_delete
            # We sanity check that we're deleting rather than updating
            if (etype, state_key) not in to_insert
        ))
        txn.executemany(sql, (
            (
                max_stream_order, room_id, etype, state_key, ev_id,
                room_id, etype, state_key,
            )
            for (etype, state_key), ev_id in iteritems(to_insert)
        ))

        # Now we actually update the current_state_events table

        txn.executemany(
            "DELETE FROM current_state_events"
            " WHERE room_id = ? AND type = ? AND state_key = ?",
            (
                (room_id, etype, state_key)
                for etype, state_key in itertools.chain(to_delete, to_insert)
            ),
        )

        self._simple_insert_many_txn(
            txn,
            table="current_state_events",
            values=[
                {
                    "event_id": ev_id,
                    "room_id": room_id,
                    "type": key[0],
                    "state_key": key[1],
                }
                for key, ev_id in iteritems(to_insert)
            ],
        )

        txn.call_after(
            self._curr_state_delta_stream_cache.entity_has_changed,
            room_id, max_stream_order,
        )

        # Invalidate the various caches

        # Figure out the changes of membership to invalidate the
        # `get_rooms_for_user` cache.
        # We find out which membership events we may have deleted
        # and which we have added, then we invlidate the caches for all
        # those users.
        members_changed = set(
            state_key
            for ev_type, state_key in itertools.chain(to_delete, to_insert)
            if ev_type == EventTypes.Member
        )

        for member in members_changed:
            self._invalidate_cache_and_stream(
                txn, self.get_rooms_for_user_with_stream_ordering, (member,)
            )

        for host in set(get_domain_from_id(u) for u in members_changed):
            self._invalidate_cache_and_stream(
                txn, self.is_host_joined, (room_id, host)
            )
            self._invalidate_cache_and_stream(
                txn, self.was_host_joined, (room_id, host)
            )

        self._invalidate_cache_and_stream(
            txn, self.get_users_in_room, (room_id,)
        )

        self._invalidate_cache_and_stream(
            txn, self.get_room_summary, (room_id,)
        )

        self._invalidate_cache_and_stream(
            txn, self.get_current_state_ids, (room_id,)
        )

def _update_forward_extremities_txn(self, txn, new_forward_extremities,
max_stream_order):
for room_id, new_extrem in iteritems(new_forward_extremities):
2 changes: 1 addition & 1 deletion synapse/storage/events_worker.py
@@ -220,7 +220,7 @@ def _get_events(self, event_ids, check_redacted=True,
defer.returnValue(events)

def _invalidate_get_event_cache(self, event_id):
(indentation-only change)
    self._get_event_cache.invalidate((event_id,))

def _get_events_from_cache(self, events, allow_rejected, update_metrics=True):
"""Fetch events from the caches
2 changes: 1 addition & 1 deletion tests/storage/test_background_update.py
@@ -11,7 +11,7 @@ class BackgroundUpdateTestCase(unittest.TestCase):
def setUp(self):
hs = yield setup_test_homeserver(
self.addCleanup
- ) # type: synapse.server.HomeServer
+ )
self.store = hs.get_datastore()
self.clock = hs.get_clock()

3 changes: 0 additions & 3 deletions tests/storage/test_end_to_end_keys.py
@@ -20,9 +20,6 @@


class EndToEndKeyStoreTestCase(tests.unittest.TestCase):
- def __init__(self, *args, **kwargs):
-     super(EndToEndKeyStoreTestCase, self).__init__(*args, **kwargs)
-     self.store = None # type: synapse.storage.DataStore

@defer.inlineCallbacks
def setUp(self):
3 changes: 0 additions & 3 deletions tests/storage/test_keys.py
@@ -22,9 +22,6 @@


class KeyStoreTestCase(tests.unittest.TestCase):
- def __init__(self, *args, **kwargs):
-     super(KeyStoreTestCase, self).__init__(*args, **kwargs)
-     self.store = None # type: synapse.storage.keys.KeyStore

@defer.inlineCallbacks
def setUp(self):
3 changes: 0 additions & 3 deletions tests/storage/test_state.py
@@ -28,9 +28,6 @@


class StateStoreTestCase(tests.unittest.TestCase):
- def __init__(self, *args, **kwargs):
-     super(StateStoreTestCase, self).__init__(*args, **kwargs)
-     self.store = None # type: synapse.storage.DataStore

@defer.inlineCallbacks
def setUp(self):