
Port handlers/ to Python 3 (#3803)
hawkowl authored Sep 6, 2018
1 parent 4f8baab commit 2608ebc
Showing 8 changed files with 24 additions and 19 deletions.
1 change: 1 addition & 0 deletions .gitignore
@@ -44,6 +44,7 @@ media_store/
build/
venv/
venv*/
*venv/

localhost-800*/
static/client/register/register_config.js
1 change: 1 addition & 0 deletions changelog.d/3803.misc
@@ -0,0 +1 @@
+ handlers/ is now ported to Python 3.
8 changes: 5 additions & 3 deletions synapse/handlers/auth.py
@@ -895,22 +895,24 @@ def validate_hash(self, password, stored_hash):
Args:
password (unicode): Password to hash.
- stored_hash (unicode): Expected hash value.
+ stored_hash (bytes): Expected hash value.
Returns:
Deferred(bool): Whether self.hash(password) == stored_hash.
"""

def _do_validate_hash():
# Normalise the Unicode in the password
pw = unicodedata.normalize("NFKC", password)

return bcrypt.checkpw(
pw.encode('utf8') + self.hs.config.password_pepper.encode("utf8"),
- stored_hash.encode('utf8')
+ stored_hash
)

if stored_hash:
+ if not isinstance(stored_hash, bytes):
+ stored_hash = stored_hash.encode('ascii')

return make_deferred_yieldable(
threads.deferToThreadPool(
self.hs.get_reactor(),
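For context, a minimal sketch of the bytes/str issue this hunk handles: on Python 3, bcrypt.checkpw() only accepts bytes, while a hash read back from the database may arrive as a unicode string. This is illustrative only and not part of the commit; the literal password and the ASCII encoding choice are made up.

import bcrypt

# A password hash as it might come back from the database on Python 3: a str, not bytes.
stored_hash = bcrypt.hashpw(b"s3cret", bcrypt.gensalt()).decode('ascii')

# bcrypt.checkpw() needs bytes for both arguments on Python 3, so coerce the
# stored hash first, mirroring the isinstance() guard added above.
if not isinstance(stored_hash, bytes):
    stored_hash = stored_hash.encode('ascii')

print(bcrypt.checkpw(b"s3cret", stored_hash))  # True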
5 changes: 3 additions & 2 deletions synapse/handlers/e2e_keys.py
@@ -330,7 +330,8 @@ def _upload_one_time_keys_for_user(self, user_id, device_id, time_now,
(algorithm, key_id, ex_json, key)
)
else:
- new_keys.append((algorithm, key_id, encode_canonical_json(key)))
+ new_keys.append((
+ algorithm, key_id, encode_canonical_json(key).decode('ascii')))

yield self.store.add_e2e_one_time_keys(
user_id, device_id, time_now, new_keys
@@ -358,7 +359,7 @@ def _exception_to_failure(e):
# Note that some Exceptions (notably twisted's ResponseFailed etc) don't
# give a string for e.message, which json then fails to serialize.
return {
"status": 503, "message": str(e.message),
"status": 503, "message": str(e),
}


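For context, a rough sketch of the two Python 3 issues touched above: canonicaljson.encode_canonical_json() returns bytes, so the value is decoded to a str before being stored, and exceptions no longer carry a .message attribute, so str(e) is used instead. Illustrative only; the key data and error text are made up.

from canonicaljson import encode_canonical_json

key = {"key": "made+up+key+material", "signatures": {}}
ex_json = encode_canonical_json(key)  # bytes on both Python 2 and Python 3
new_key = ("signed_curve25519", "AAAAAQ", ex_json.decode('ascii'))  # store a str, not bytes

try:
    raise RuntimeError("remote server unreachable")
except Exception as e:
    # e.message no longer exists on Python 3; str(e) works on both.
    failure = {"status": 503, "message": str(e)}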
6 changes: 3 additions & 3 deletions synapse/handlers/federation.py
@@ -594,7 +594,7 @@ def backfill(self, dest, room_id, limit, extremities):

required_auth = set(
a_id
- for event in events + state_events.values() + auth_events.values()
+ for event in events + list(state_events.values()) + list(auth_events.values())
for a_id, _ in event.auth_events
)
auth_events.update({
@@ -802,7 +802,7 @@ def try_backfill(domains):
)
continue
except NotRetryingDestination as e:
- logger.info(e.message)
+ logger.info(str(e))
continue
except FederationDeniedError as e:
logger.info(e)
@@ -1358,7 +1358,7 @@ def get_state_ids_for_pdu(self, room_id, event_id):
)

if state_groups:
- _, state = state_groups.items().pop()
+ _, state = list(state_groups.items()).pop()
results = state

if event.is_state():
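For context, a small sketch of the dict-view behaviour behind these hunks: on Python 3, dict.values() and dict.items() return views, which cannot be concatenated with a list using + and have no .pop(), hence the explicit list(...) conversions. Illustrative only; the event IDs are made up.

events = ["$event1"]
state_events = {"$event2": "a state event"}
auth_events = {"$event3": "an auth event"}

# events + state_events.values() raises TypeError on Python 3, because
# .values() returns a view object rather than a list.
required = events + list(state_events.values()) + list(auth_events.values())

state_groups = {42: {("m.room.member", "@alice:example.com"): "$event4"}}
# Likewise, .items() views have no .pop(); wrap them in list() first.
group_id, state = list(state_groups.items()).pop()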
2 changes: 1 addition & 1 deletion synapse/handlers/room_list.py
@@ -162,7 +162,7 @@ def get_order_for_room(room_id):
# Filter out rooms that we don't want to return
rooms_to_scan = [
r for r in sorted_rooms
- if r not in newly_unpublished and rooms_to_num_joined[room_id] > 0
+ if r not in newly_unpublished and rooms_to_num_joined[r] > 0
]

total_room_count = len(rooms_to_scan)
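For context, a small sketch of the filter after the fix: the comprehension now looks up rooms_to_num_joined[r], the room currently being considered, instead of a room_id bound elsewhere in the function. Illustrative only; the room IDs and counts are made up.

rooms_to_num_joined = {"!abc:example.com": 3, "!def:example.com": 0}
sorted_rooms = ["!abc:example.com", "!def:example.com"]
newly_unpublished = set()

# Each candidate room is checked against its own join count.
rooms_to_scan = [
    r for r in sorted_rooms
    if r not in newly_unpublished and rooms_to_num_joined[r] > 0
]
# rooms_to_scan == ['!abc:example.com']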
14 changes: 7 additions & 7 deletions synapse/handlers/search.py
@@ -54,7 +54,7 @@ def search(self, user, content, batch=None):
batch_token = None
if batch:
try:
- b = decode_base64(batch)
+ b = decode_base64(batch).decode('ascii')
batch_group, batch_group_key, batch_token = b.split("\n")

assert batch_group is not None
@@ -258,18 +258,18 @@ def search(self, user, content, batch=None):
# it returns more from the same group (if applicable) rather
# than reverting to searching all results again.
if batch_group and batch_group_key:
- global_next_batch = encode_base64("%s\n%s\n%s" % (
+ global_next_batch = encode_base64(("%s\n%s\n%s" % (
batch_group, batch_group_key, pagination_token
- ))
+ )).encode('ascii'))
else:
- global_next_batch = encode_base64("%s\n%s\n%s" % (
+ global_next_batch = encode_base64(("%s\n%s\n%s" % (
"all", "", pagination_token
- ))
+ )).encode('ascii'))

for room_id, group in room_groups.items():
group["next_batch"] = encode_base64("%s\n%s\n%s" % (
group["next_batch"] = encode_base64(("%s\n%s\n%s" % (
"room_id", room_id, pagination_token
- ))
+ )).encode('ascii'))

allowed_events.extend(room_events)

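For context, a rough sketch of the base64 round trip these hunks adjust, assuming decode_base64()/encode_base64() come from the unpaddedbase64 package as elsewhere in Synapse: on Python 3, decode_base64() returns bytes and encode_base64() expects bytes, hence the added .decode('ascii') and .encode('ascii') calls. Illustrative only; the token values are made up.

from unpaddedbase64 import decode_base64, encode_base64

pagination_token = "t123-456"
next_batch = encode_base64(
    ("%s\n%s\n%s" % ("room_id", "!abc:example.com", pagination_token)).encode('ascii')
)

# decode_base64() hands back bytes, so decode before splitting on "\n".
b = decode_base64(next_batch).decode('ascii')
batch_group, batch_group_key, batch_token = b.split("\n")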
6 changes: 3 additions & 3 deletions synapse/handlers/sync.py
@@ -545,7 +545,7 @@ def compute_summary(self, room_id, sync_config, batch, state, now_token):

member_ids = {
state_key: event_id
- for (t, state_key), event_id in state_ids.iteritems()
+ for (t, state_key), event_id in iteritems(state_ids)
if t == EventTypes.Member
}
name_id = state_ids.get((EventTypes.Name, ''))
@@ -774,7 +774,7 @@ def compute_state_delta(self, room_id, batch, sync_config, since_token, now_toke
logger.debug("filtering state from %r...", state_ids)
state_ids = {
t: event_id
- for t, event_id in state_ids.iteritems()
+ for t, event_id in iteritems(state_ids)
if cache.get(t[1]) != event_id
}
logger.debug("...to %r", state_ids)
@@ -1753,7 +1753,7 @@ def _calculate_state(

if lazy_load_members:
p_ids.difference_update(
- e for t, e in timeline_start.iteritems()
+ e for t, e in iteritems(timeline_start)
if t[0] == EventTypes.Member
)

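For context, a minimal sketch of the iteritems() change: dictionaries have no .iteritems() on Python 3, so the comprehensions switch to an iteritems() helper, presumably six.iteritems as imported elsewhere in this module. Illustrative only; the state map is made up.

from six import iteritems

state_ids = {
    ("m.room.member", "@alice:example.com"): "$membership_event",
    ("m.room.name", ""): "$name_event",
}

# Lazy iteration on Python 2, plain .items() iteration on Python 3.
member_ids = {
    state_key: event_id
    for (t, state_key), event_id in iteritems(state_ids)
    if t == "m.room.member"
}
# member_ids == {'@alice:example.com': '$membership_event'}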
