This repository has been archived by the owner on Apr 26, 2024. It is now read-only.

s/private_user_data/account_data/ #386

Merged 2 commits on Nov 19, 2015
10 changes: 5 additions & 5 deletions synapse/api/filtering.py
@@ -54,7 +54,7 @@ def _check_valid_filter(self, user_filter_json):
         ]

         room_level_definitions = [
-            "state", "timeline", "ephemeral", "private_user_data"
+            "state", "timeline", "ephemeral", "account_data"
         ]

         for key in top_level_definitions:
@@ -131,8 +131,8 @@ def __init__(self, filter_json):
             self.filter_json.get("room", {}).get("ephemeral", {})
         )

-        self.room_private_user_data = Filter(
-            self.filter_json.get("room", {}).get("private_user_data", {})
+        self.room_account_data = Filter(
+            self.filter_json.get("room", {}).get("account_data", {})
         )

         self.presence_filter = Filter(
@@ -160,8 +160,8 @@ def filter_room_timeline(self, events):
     def filter_room_ephemeral(self, events):
         return self.room_ephemeral_filter.filter(events)

-    def filter_room_private_user_data(self, events):
-        return self.room_private_user_data.filter(events)
+    def filter_room_account_data(self, events):
+        return self.room_account_data.filter(events)


 class Filter(object):
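For orientation, a sketch of what a client filter body looks like after this rename; the field names come from the hunks above, while the specific values ("limit", "types") are illustrative rather than anything this PR defines.

# Illustrative filter JSON after the rename: the per-room section now uses
# "account_data" where it previously used "private_user_data".
user_filter_json = {
    "room": {
        "state": {},
        "timeline": {"limit": 10},
        "ephemeral": {"types": ["m.typing"]},
        "account_data": {"types": ["m.tag"]},  # formerly "private_user_data"
    },
}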
@@ -16,19 +16,19 @@
 from twisted.internet import defer


-class PrivateUserDataEventSource(object):
+class AccountDataEventSource(object):
     def __init__(self, hs):
         self.store = hs.get_datastore()

     def get_current_key(self, direction='f'):
-        return self.store.get_max_private_user_data_stream_id()
+        return self.store.get_max_account_data_stream_id()

     @defer.inlineCallbacks
     def get_new_events(self, user, from_key, **kwargs):
         user_id = user.to_string()
         last_stream_id = from_key

-        current_stream_id = yield self.store.get_max_private_user_data_stream_id()
+        current_stream_id = yield self.store.get_max_account_data_stream_id()
         tags = yield self.store.get_updated_tags(user_id, last_stream_id)

         results = []
12 changes: 6 additions & 6 deletions synapse/handlers/message.py
@@ -436,14 +436,14 @@ def handle_room(event):
                     for c in current_state.values()
                 ]

-                private_user_data = []
+                account_data = []
                 tags = tags_by_room.get(event.room_id)
                 if tags:
-                    private_user_data.append({
+                    account_data.append({
                         "type": "m.tag",
                         "content": {"tags": tags},
                     })
-                d["private_user_data"] = private_user_data
+                d["account_data"] = account_data
             except:
                 logger.exception("Failed to get snapshot")

@@ -498,14 +498,14 @@ def room_initial_sync(self, user_id, room_id, pagin_config=None, is_guest=False)
                 user_id, room_id, pagin_config, membership, member_event_id, is_guest
             )

-        private_user_data = []
+        account_data = []
         tags = yield self.store.get_tags_for_room(user_id, room_id)
         if tags:
-            private_user_data.append({
+            account_data.append({
                 "type": "m.tag",
                 "content": {"tags": tags},
             })
-        result["private_user_data"] = private_user_data
+        result["account_data"] = account_data

         defer.returnValue(result)

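As a rough guide to the payload this produces, the renamed key in an initial-sync result carries the list of bundled events built by the loops above; the tag name and its content below are made up for illustration.

# Illustrative fragment of an initial sync result after the rename.
# "u.work" and its content are example values, not something this diff defines.
result_fragment = {
    "account_data": [
        {
            "type": "m.tag",
            "content": {"tags": {"u.work": {"order": 0.5}}},
        },
    ],
}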
28 changes: 14 additions & 14 deletions synapse/handlers/sync.py
@@ -51,7 +51,7 @@ class JoinedSyncResult(collections.namedtuple("JoinedSyncResult", [
     "timeline",  # TimelineBatch
     "state",  # dict[(str, str), FrozenEvent]
     "ephemeral",
-    "private_user_data",
+    "account_data",
 ])):
     __slots__ = []

@@ -63,15 +63,15 @@ def __nonzero__(self):
             self.timeline
             or self.state
             or self.ephemeral
-            or self.private_user_data
+            or self.account_data
         )


 class ArchivedSyncResult(collections.namedtuple("JoinedSyncResult", [
     "room_id",  # str
     "timeline",  # TimelineBatch
     "state",  # dict[(str, str), FrozenEvent]
-    "private_user_data",
+    "account_data",
 ])):
     __slots__ = []

@@ -82,7 +82,7 @@ def __nonzero__(self):
         return bool(
             self.timeline
             or self.state
-            or self.private_user_data
+            or self.account_data
         )


@@ -261,20 +261,20 @@ def full_state_sync_for_joined_room(self, room_id, sync_config,
             timeline=batch,
             state=current_state,
             ephemeral=ephemeral_by_room.get(room_id, []),
-            private_user_data=self.private_user_data_for_room(
+            account_data=self.account_data_for_room(
                 room_id, tags_by_room
             ),
         ))

-    def private_user_data_for_room(self, room_id, tags_by_room):
-        private_user_data = []
+    def account_data_for_room(self, room_id, tags_by_room):
+        account_data = []
         tags = tags_by_room.get(room_id)
         if tags is not None:
-            private_user_data.append({
+            account_data.append({
                 "type": "m.tag",
                 "content": {"tags": tags},
             })
-        return private_user_data
+        return account_data

     @defer.inlineCallbacks
     def ephemeral_by_room(self, sync_config, now_token, since_token=None):
@@ -357,7 +357,7 @@ def full_state_sync_for_archived_room(self, room_id, sync_config,
             room_id=room_id,
             timeline=batch,
             state=leave_state,
-            private_user_data=self.private_user_data_for_room(
+            account_data=self.account_data_for_room(
                 room_id, tags_by_room
             ),
         ))
@@ -412,7 +412,7 @@ def incremental_sync_with_gap(self, sync_config, since_token):

         tags_by_room = yield self.store.get_updated_tags(
             sync_config.user.to_string(),
-            since_token.private_user_data_key,
+            since_token.account_data_key,
         )

         joined = []
@@ -468,7 +468,7 @@ def incremental_sync_with_gap(self, sync_config, since_token):
                 ),
                 state=state,
                 ephemeral=ephemeral_by_room.get(room_id, []),
-                private_user_data=self.private_user_data_for_room(
+                account_data=self.account_data_for_room(
                     room_id, tags_by_room
                 ),
             )
@@ -605,7 +605,7 @@ def incremental_sync_with_gap_for_room(self, room_id, sync_config,
             timeline=batch,
             state=state,
             ephemeral=ephemeral_by_room.get(room_id, []),
-            private_user_data=self.private_user_data_for_room(
+            account_data=self.account_data_for_room(
                 room_id, tags_by_room
             ),
         )
@@ -653,7 +653,7 @@ def incremental_sync_for_archived_room(self, sync_config, leave_event,
             room_id=leave_event.room_id,
             timeline=batch,
             state=state_events_delta,
-            private_user_data=self.private_user_data_for_room(
+            account_data=self.account_data_for_room(
                 leave_event.room_id, tags_by_room
             ),
         )
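One consequence of the __nonzero__ logic above is worth spelling out: a room whose only change is new account data still counts as non-empty and is therefore kept in the sync response. A minimal, self-contained sketch of that behaviour (room id and tag values are made up):

import collections

# Toy namedtuple mirroring the JoinedSyncResult fields after the rename.
JoinedSyncResult = collections.namedtuple(
    "JoinedSyncResult", ["room_id", "timeline", "state", "ephemeral", "account_data"]
)

room = JoinedSyncResult(
    room_id="!room:example.com",  # illustrative
    timeline=[],                  # no new timeline events
    state={},                     # no state changes
    ephemeral=[],                 # no typing/receipt events
    account_data=[{"type": "m.tag", "content": {"tags": {"u.work": {}}}}],
)

# Mirrors the __nonzero__ check: any non-empty field keeps the room in the result.
assert bool(room.timeline or room.state or room.ephemeral or room.account_data)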
6 changes: 3 additions & 3 deletions synapse/rest/client/v2_alpha/sync.py
@@ -282,8 +282,8 @@ def encode_room(room, filter, time_now, token_id, joined=True):
         )
         timeline_event_ids.append(event.event_id)

-    private_user_data = filter.filter_room_private_user_data(
-        room.private_user_data
+    account_data = filter.filter_room_account_data(
+        room.account_data
     )

     result = {
@@ -294,7 +294,7 @@ def encode_room(room, filter, time_now, token_id, joined=True):
             "limited": room.timeline.limited,
         },
         "state": {"events": state_event_ids},
-        "private_user_data": {"events": private_user_data},
+        "account_data": {"events": account_data},
     }

     if joined:
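Put together with the handler changes, the per-room section of a /sync response now serialises account data under the renamed key. A hedged sketch limited to the keys visible in the hunk above (event content is made up; other fields of the real response are omitted):

# Illustrative per-room fragment of a v2 sync response after the rename.
room_response_fragment = {
    "timeline": {
        "limited": False,  # other timeline fields omitted in this sketch
    },
    "state": {"events": []},
    "account_data": {
        "events": [
            {"type": "m.tag", "content": {"tags": {"u.work": {}}}},
        ],
    },
}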
4 changes: 2 additions & 2 deletions synapse/rest/client/v2_alpha/tags.py
@@ -81,7 +81,7 @@ def on_PUT(self, request, user_id, room_id, tag):
         max_id = yield self.store.add_tag_to_room(user_id, room_id, tag, body)

         yield self.notifier.on_new_event(
-            "private_user_data_key", max_id, users=[user_id]
+            "account_data_key", max_id, users=[user_id]
         )

         defer.returnValue((200, {}))
@@ -95,7 +95,7 @@ def on_DELETE(self, request, user_id, room_id, tag):
         max_id = yield self.store.remove_tag_from_room(user_id, room_id, tag)

         yield self.notifier.on_new_event(
-            "private_user_data_key", max_id, users=[user_id]
+            "account_data_key", max_id, users=[user_id]
         )

         defer.returnValue((200, {}))
2 changes: 1 addition & 1 deletion synapse/storage/prepare_database.py
@@ -25,7 +25,7 @@

 # Remember to update this number every time a change is made to database
 # schema files, so the users will be informed on server restarts.
-SCHEMA_VERSION = 25
+SCHEMA_VERSION = 26

 dir_path = os.path.abspath(os.path.dirname(__file__))

17 changes: 17 additions & 0 deletions synapse/storage/schema/delta/26/account_data.sql
@@ -0,0 +1,17 @@
+/* Copyright 2015 OpenMarket Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+ALTER TABLE private_user_data_max_stream_id RENAME TO account_data_max_stream_id;
18 changes: 9 additions & 9 deletions synapse/storage/tags.py
@@ -28,17 +28,17 @@ class TagsStore(SQLBaseStore):
     def __init__(self, hs):
         super(TagsStore, self).__init__(hs)

-        self._private_user_data_id_gen = StreamIdGenerator(
-            "private_user_data_max_stream_id", "stream_id"
+        self._account_data_id_gen = StreamIdGenerator(
+            "account_data_max_stream_id", "stream_id"
         )

-    def get_max_private_user_data_stream_id(self):
+    def get_max_account_data_stream_id(self):
         """Get the current max stream id for the private user data stream

         Returns:
             A deferred int.
         """
-        return self._private_user_data_id_gen.get_max_token(self)
+        return self._account_data_id_gen.get_max_token(self)

     @cached()
     def get_tags_for_user(self, user_id):
@@ -144,12 +144,12 @@ def add_tag_txn(txn, next_id):
             )
             self._update_revision_txn(txn, user_id, room_id, next_id)

-        with (yield self._private_user_data_id_gen.get_next(self)) as next_id:
+        with (yield self._account_data_id_gen.get_next(self)) as next_id:
             yield self.runInteraction("add_tag", add_tag_txn, next_id)

         self.get_tags_for_user.invalidate((user_id,))

-        result = yield self._private_user_data_id_gen.get_max_token(self)
+        result = yield self._account_data_id_gen.get_max_token(self)
         defer.returnValue(result)

     @defer.inlineCallbacks
@@ -166,12 +166,12 @@ def remove_tag_txn(txn, next_id):
             txn.execute(sql, (user_id, room_id, tag))
             self._update_revision_txn(txn, user_id, room_id, next_id)

-        with (yield self._private_user_data_id_gen.get_next(self)) as next_id:
+        with (yield self._account_data_id_gen.get_next(self)) as next_id:
             yield self.runInteraction("remove_tag", remove_tag_txn, next_id)

         self.get_tags_for_user.invalidate((user_id,))

-        result = yield self._private_user_data_id_gen.get_max_token(self)
+        result = yield self._account_data_id_gen.get_max_token(self)
         defer.returnValue(result)

     def _update_revision_txn(self, txn, user_id, room_id, next_id):
@@ -185,7 +185,7 @@ def _update_revision_txn(self, txn, user_id, room_id, next_id):
         """

         update_max_id_sql = (
-            "UPDATE private_user_data_max_stream_id"
+            "UPDATE account_data_max_stream_id"
             " SET stream_id = ?"
             " WHERE stream_id < ?"
         )
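To tie the storage rename back to the event source earlier in the diff, here is a self-contained toy of the stream-id flow: writing a tag bumps the account-data stream position, and a reader compares its last-seen position against the new maximum. The real code goes through Twisted deferreds and the StreamIdGenerator shown above; this synchronous stub only illustrates the naming and the ordering contract.

class ToyAccountDataStore(object):
    """Synchronous stand-in for the renamed storage methods; illustration only."""

    def __init__(self):
        self._max_stream_id = 0
        self._tag_updates = []  # list of (stream_id, user_id, room_id)

    def add_tag_to_room(self, user_id, room_id, tag, content):
        self._max_stream_id += 1
        self._tag_updates.append((self._max_stream_id, user_id, room_id))
        return self._max_stream_id

    def get_max_account_data_stream_id(self):
        return self._max_stream_id

    def get_updated_tags(self, user_id, last_stream_id):
        return {
            room_id
            for stream_id, uid, room_id in self._tag_updates
            if uid == user_id and stream_id > last_stream_id
        }


store = ToyAccountDataStore()
last_seen = store.get_max_account_data_stream_id()  # 0, nothing written yet
store.add_tag_to_room("@alice:example.com", "!room:example.com", "u.work", {})
assert store.get_updated_tags("@alice:example.com", last_seen) == {"!room:example.com"}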
8 changes: 4 additions & 4 deletions synapse/streams/events.py
@@ -21,7 +21,7 @@
 from synapse.handlers.room import RoomEventSource
 from synapse.handlers.typing import TypingNotificationEventSource
 from synapse.handlers.receipts import ReceiptEventSource
-from synapse.handlers.private_user_data import PrivateUserDataEventSource
+from synapse.handlers.account_data import AccountDataEventSource


 class EventSources(object):
@@ -30,7 +30,7 @@ class EventSources(object):
         "presence": PresenceEventSource,
         "typing": TypingNotificationEventSource,
         "receipt": ReceiptEventSource,
-        "private_user_data": PrivateUserDataEventSource,
+        "account_data": AccountDataEventSource,
     }

     def __init__(self, hs):
@@ -54,8 +54,8 @@ def get_current_token(self, direction='f'):
             receipt_key=(
                 yield self.sources["receipt"].get_current_key()
             ),
-            private_user_data_key=(
-                yield self.sources["private_user_data"].get_current_key()
+            account_data_key=(
+                yield self.sources["account_data"].get_current_key()
             ),
         )
         defer.returnValue(token)
4 changes: 2 additions & 2 deletions synapse/types.py
@@ -103,7 +103,7 @@ class StreamToken(
         "presence_key",
         "typing_key",
         "receipt_key",
-        "private_user_data_key",
+        "account_data_key",
     ))
 ):
     _SEPARATOR = "_"
@@ -138,7 +138,7 @@ def is_after(self, other):
             or (int(other.presence_key) < int(self.presence_key))
             or (int(other.typing_key) < int(self.typing_key))
             or (int(other.receipt_key) < int(self.receipt_key))
-            or (int(other.private_user_data_key) < int(self.private_user_data_key))
+            or (int(other.account_data_key) < int(self.account_data_key))
         )

     def copy_and_advance(self, key, new_value):
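Finally, a hedged sketch of what the comparison above means with the renamed field: a token is "after" another if it is ahead on any of the per-stream positions, now including account_data_key. The dicts below are stand-ins for real StreamToken instances and the numbers are made up.

# Simplified stand-in for the is_after comparison with the renamed field.
def is_after(token, other):
    return any(int(other[k]) < int(token[k]) for k in token)

older = {"presence_key": 4, "typing_key": 2, "receipt_key": 9, "account_data_key": 11}
newer = {"presence_key": 4, "typing_key": 2, "receipt_key": 9, "account_data_key": 12}

assert is_after(newer, older)      # ahead on the account data stream
assert not is_after(older, newer)  # behind or equal on every stream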