refactor(robot-server): consolidate DB transactions, fix a max analyses length bug #14904

Merged
2 changes: 1 addition & 1 deletion robot-server/robot_server/protocols/analysis_store.py
@@ -196,7 +196,7 @@ async def update(
                 completed_analysis
             ),
         )
-        await self._completed_store.add(
+        await self._completed_store.make_room_and_add(
             completed_analysis_resource=completed_analysis_resource
         )
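Note on the transaction consolidation: the method called here, make_room_and_add (diffed in the next file), folds the eviction delete and the new-analysis insert into a single database transaction instead of one transaction per statement. A minimal, self-contained sketch of that SQLAlchemy pattern, using a stand-in table and an in-memory engine rather than the robot-server schema:

    import sqlalchemy

    metadata = sqlalchemy.MetaData()
    analysis_table = sqlalchemy.Table(
        "analysis",
        metadata,
        sqlalchemy.Column("id", sqlalchemy.String, primary_key=True),
        sqlalchemy.Column("protocol_id", sqlalchemy.String),
    )

    engine = sqlalchemy.create_engine("sqlite://")  # throwaway in-memory database
    metadata.create_all(engine)

    ids_to_delete = ["analysis-id-0"]
    delete_statement = analysis_table.delete().where(
        analysis_table.c.id.in_(ids_to_delete)
    )
    insert_statement = analysis_table.insert().values(
        id="new-analysis-id", protocol_id="protocol-id"
    )

    # Both statements commit together (or roll back together on error), so there is
    # no window where old analyses are gone but the new one has not been written.
    with engine.begin() as transaction:
        transaction.execute(delete_statement)
        transaction.execute(insert_statement)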

48 changes: 21 additions & 27 deletions robot-server/robot_server/protocols/completed_analysis_store.py
@@ -336,40 +336,34 @@ def get_ids_by_protocol(self, protocol_id: str) -> List[str]:

         return result_ids

-    async def add(self, completed_analysis_resource: CompletedAnalysisResource) -> None:
-        """Add a resource to the store."""
-        self._make_room_for_new_analysis(completed_analysis_resource.protocol_id)
-        statement = analysis_table.insert().values(
-            await completed_analysis_resource.to_sql_values()
-        )
-        with self._sql_engine.begin() as transaction:
-            transaction.execute(statement)
-        self._memcache.insert(
-            completed_analysis_resource.id, completed_analysis_resource
-        )
-
-    def _make_room_for_new_analysis(self, protocol_id: str) -> None:
-        """Remove the oldest analyses in store if the number of analyses exceed the max allowed.
+    async def make_room_and_add(
+        self, completed_analysis_resource: CompletedAnalysisResource
+    ) -> None:
+        """Make room and add a resource to the store.

-        Unlike protocols, protocol analysis IDs are not stored by any DB entities
-        other than the analysis store itself. So we do not have to worry about cleaning up
-        any other tables.
+        Removes the oldest analyses in store if the number of analyses exceed
+        the max allowed, and then adds the new analysis.
         """
-        analyses_ids = self.get_ids_by_protocol(protocol_id)
+        analyses_ids = self.get_ids_by_protocol(completed_analysis_resource.protocol_id)

         # Delete all analyses exceeding max number allowed,
         # plus an additional one to create room for the new one.
         # Most existing databases will not have multiple extra analyses per protocol
         # but there would be some internally that added multiple analyses before
         # we started capping the number of analyses.
-        analyses_to_delete = analyses_ids[
-            : len(analyses_ids) - MAX_ANALYSES_TO_STORE + 1
-        ]
-
+        analyses_to_delete = analyses_ids[: -MAX_ANALYSES_TO_STORE + 1]
         for analysis_id in analyses_to_delete:
             self._memcache.remove(analysis_id)
-            delete_statement = sqlalchemy.delete(analysis_table).where(
-                analysis_table.c.id == analysis_id
-            )
-            with self._sql_engine.begin() as transaction:
-                transaction.execute(delete_statement)
+        delete_statement = analysis_table.delete().where(
+            analysis_table.c.id.in_(analyses_to_delete)
+        )
+
+        insert_statement = analysis_table.insert().values(
+            await completed_analysis_resource.to_sql_values()
+        )
+        with self._sql_engine.begin() as transaction:
+            transaction.execute(delete_statement)
+            transaction.execute(insert_statement)
+        self._memcache.insert(
+            completed_analysis_resource.id, completed_analysis_resource
+        )
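Note on the "max analyses length" part of this change: the previous slice stop, len(analyses_ids) - MAX_ANALYSES_TO_STORE + 1, goes negative whenever a protocol has fewer than MAX_ANALYSES_TO_STORE - 1 completed analyses, and Python then reads it as an offset from the end of the list, so existing analyses get evicted even though the cap has not been reached. The new stop, -MAX_ANALYSES_TO_STORE + 1, clamps to an empty slice in that case. A minimal standalone sketch of the difference (the constant value below is a stand-in chosen for illustration, not necessarily the module's real value):

    MAX_ANALYSES_TO_STORE = 5  # stand-in value for illustration only

    # Three stored analyses: below the cap, so nothing should be evicted.
    analyses_ids = ["analysis-id-0", "analysis-id-1", "analysis-id-2"]

    # Old stop: 3 - 5 + 1 == -1, so the slice wrongly selects the two oldest IDs.
    assert analyses_ids[: len(analyses_ids) - MAX_ANALYSES_TO_STORE + 1] == [
        "analysis-id-0",
        "analysis-id-1",
    ]

    # New stop: -5 + 1 == -4, which clamps to an empty slice for a 3-item list.
    assert analyses_ids[: -MAX_ANALYSES_TO_STORE + 1] == []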
2 changes: 1 addition & 1 deletion robot-server/tests/protocols/test_analysis_store.py
@@ -319,7 +319,7 @@ async def test_update_adds_rtp_values_and_defaults_to_completed_store(
         liquids=[],
     )
     decoy.verify(
-        await mock_completed_store.add(
+        await mock_completed_store.make_room_and_add(
             completed_analysis_resource=expected_completed_analysis_resource
         )
     )
32 changes: 21 additions & 11 deletions robot-server/tests/protocols/test_completed_analysis_store.py
@@ -126,7 +126,7 @@ async def test_get_by_analysis_id_falls_back_to_sql(
     """It should return analyses from sql if they are not cached."""
     resource = _completed_analysis_resource("analysis-id", "protocol-id")
     protocol_store.insert(make_dummy_protocol_resource("protocol-id"))
-    await subject.add(resource)
+    await subject.make_room_and_add(resource)
     # the analysis is not cached
     decoy.when(memcache.get("analysis-id")).then_raise(KeyError())
    analysis_from_sql = await subject.get_by_id("analysis-id")
@@ -143,7 +143,7 @@ async def test_get_by_analysis_id_stores_results_in_cache(
     """It should cache successful fetches from sql."""
     resource = _completed_analysis_resource("analysis-id", "protocol-id")
     protocol_store.insert(make_dummy_protocol_resource("protocol-id"))
-    await subject.add(resource)
+    await subject.make_room_and_add(resource)
     # the analysis is not cached
     decoy.when(memcache.get("analysis-id")).then_raise(KeyError())
     from_sql = await subject.get_by_id("analysis-id")
@@ -158,7 +158,7 @@ async def test_get_by_analysis_id_as_document(
     """It should return the analysis serialized as a JSON string."""
     resource = _completed_analysis_resource("analysis-id", "protocol-id")
     protocol_store.insert(make_dummy_protocol_resource("protocol-id"))
-    await subject.add(resource)
+    await subject.make_room_and_add(resource)
     result = await subject.get_by_id_as_document("analysis-id")
     assert result is not None
     assert json.loads(result) == {
@@ -184,9 +184,9 @@ async def test_get_ids_by_protocol(
     resource_3 = _completed_analysis_resource("analysis-id-3", "protocol-id-2")
     protocol_store.insert(make_dummy_protocol_resource("protocol-id-1"))
     protocol_store.insert(make_dummy_protocol_resource("protocol-id-2"))
-    await subject.add(resource_1)
-    await subject.add(resource_2)
-    await subject.add(resource_3)
+    await subject.make_room_and_add(resource_1)
+    await subject.make_room_and_add(resource_2)
+    await subject.make_room_and_add(resource_3)
     assert subject.get_ids_by_protocol("protocol-id-1") == [
         "analysis-id-1",
         "analysis-id-2",
@@ -208,9 +208,9 @@ async def test_get_by_protocol(
     decoy.when(memcache.insert("analysis-id-1", resource_1)).then_return(None)
     decoy.when(memcache.insert("analysis-id-2", resource_2)).then_return(None)
     decoy.when(memcache.insert("analysis-id-3", resource_3)).then_return(None)
-    await subject.add(resource_1)
-    await subject.add(resource_2)
-    await subject.add(resource_3)
+    await subject.make_room_and_add(resource_1)
+    await subject.make_room_and_add(resource_2)
+    await subject.make_room_and_add(resource_3)
     decoy.when(memcache.get("analysis-id-1")).then_raise(KeyError())
     decoy.when(memcache.get("analysis-id-2")).then_return(resource_2)
     decoy.when(memcache.contains("analysis-id-1")).then_return(False)
@@ -257,7 +257,7 @@ async def test_get_rtp_values_and_defaults_by_analysis_from_db(
         },
     )
     protocol_store.insert(make_dummy_protocol_resource("protocol-id"))
-    await subject.add(resource)
+    await subject.make_room_and_add(resource)
     # Not in memcache
     decoy.when(memcache.get("analysis-id")).then_raise(KeyError())
     result = await subject.get_rtp_values_and_defaults_by_analysis_id("analysis-id")
@@ -297,10 +297,20 @@ async def test_get_rtp_values_and_defaults_by_analysis_from_db(
             "new-analysis-id",
         ],
     ),
+    (
+        [f"analysis-id-{num}" for num in range(3)],
+        [
+            "analysis-id-0",
+            "analysis-id-1",
+            "analysis-id-2",
+            "new-analysis-id",
+        ],
+    ),
     (
         [f"analysis-id-{num}" for num in range(2)],
         ["analysis-id-0", "analysis-id-1", "new-analysis-id"],
     ),
     (["analysis-id-0"], ["analysis-id-0", "new-analysis-id"]),
     ([], ["new-analysis-id"]),
     ],
 )
@@ -330,7 +330,7 @@ async def test_add_makes_room_for_new_analysis(
         transaction.execute(statement)

     assert subject.get_ids_by_protocol("protocol-id") == existing_analysis_ids
-    await subject.add(
+    await subject.make_room_and_add(
         _completed_analysis_resource(
             analysis_id="new-analysis-id",
             protocol_id="protocol-id",