feat(api, robot-server): Add source key to the data_files database to support generated CSV files from the plate reader + other plate reader fixes. #16603

Merged
merged 24 commits on Oct 29, 2024
Commits
24 commits
369bb40
save
vegano1 Oct 18, 2024
b7b989f
Merge branch 'edge' into PLAT-451-handle-estop-with-plate-reader
vegano1 Oct 18, 2024
d42b4ba
save
vegano1 Oct 18, 2024
893da7a
working place labware command
vegano1 Oct 19, 2024
2fcce8e
feat(api): add unsafe/placeLabware command to place labware the gripp…
vegano1 Oct 19, 2024
0e201e0
feat(robot-server): add placeLabwareState and estopEngaged to runs/cu…
vegano1 Oct 20, 2024
3ae5e2f
clean up
vegano1 Oct 21, 2024
f0b4766
format
vegano1 Oct 21, 2024
a35161b
api, always move the plate reader lid to its dock
vegano1 Oct 22, 2024
c7e8264
feat(app, shared-data): add usePlacePlateReaderLid to EstopPressedMod…
vegano1 Oct 22, 2024
2a7a4b4
save
vegano1 Oct 22, 2024
2cb3765
conditionally call unsafe/placeLabware command based on the runs/curr…
vegano1 Oct 22, 2024
837fe9a
simplify usePlacePlateReaderLid hook
vegano1 Oct 23, 2024
74b02cc
Merge branch 'edge' into PLAT-451-handle-estop-with-plate-reader
vegano1 Oct 23, 2024
6861d3d
fix bad edge merge
vegano1 Oct 23, 2024
6c4de3e
move EstopTakeover component below maintenanceRunTakeover so we dont …
vegano1 Oct 23, 2024
de39cbe
refactor estopPressed
vegano1 Oct 24, 2024
3dee541
clean up
vegano1 Oct 24, 2024
39fb23c
Merge branch 'edge' into PLAT-451-handle-estop-with-plate-reader
vegano1 Oct 24, 2024
ddc24cb
default the filename arg of absorbance read command to None
vegano1 Oct 24, 2024
884b277
fix(api): raise CannotPerformModuleAction if the plate reader is init…
vegano1 Oct 24, 2024
ca2302a
feat(api, robot-server): add source column to the data_files table so…
vegano1 Oct 27, 2024
b7f4f24
update tests
vegano1 Oct 27, 2024
5f5784f
Merge branch 'edge' into plate_reader_fixes
vegano1 Oct 29, 2024
7 changes: 7 additions & 0 deletions api/src/opentrons/protocol_api/core/engine/module_core.py
@@ -567,6 +567,7 @@ class AbsorbanceReaderCore(ModuleCore, AbstractAbsorbanceReaderCore):

_sync_module_hardware: SynchronousAdapter[hw_modules.AbsorbanceReader]
_initialized_value: Optional[List[int]] = None
_ready_to_initialize: bool = False

def initialize(
self,
@@ -575,6 +576,11 @@ def initialize(
reference_wavelength: Optional[int] = None,
) -> None:
"""Initialize the Absorbance Reader by taking zero reading."""
if not self._ready_to_initialize:
raise CannotPerformModuleAction(
"Cannot perform Initialize action on Absorbance Reader without calling `.close_lid()` first."
)

# TODO: check that the wavelengths are within the supported wavelengths
self._engine_client.execute_command(
cmd.absorbance_reader.InitializeParams(
@@ -633,6 +639,7 @@ def close_lid(
moduleId=self.module_id,
)
)
self._ready_to_initialize = True

def open_lid(self) -> None:
"""Close the Absorbance Reader's lid."""
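With this guard in place, a protocol must close the reader's lid before initializing it. A minimal usage sketch of the intended call order (the module load name, deck slot, API level, and wavelength below are illustrative assumptions, not taken from this diff):

```python
from opentrons import protocol_api

requirements = {"robotType": "Flex", "apiLevel": "2.21"}  # assumed API level


def run(protocol: protocol_api.ProtocolContext) -> None:
    reader = protocol.load_module("absorbanceReaderV1", "D3")  # assumed load name and slot
    reader.close_lid()                  # sets the new _ready_to_initialize flag
    reader.initialize("single", [450])  # raises CannotPerformModuleAction if close_lid() was skipped
    reader.open_lid()
    # ...move the sample plate onto the reader, close the lid again, then read()
```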
@@ -5,7 +5,7 @@
from ..errors import StorageLimitReachedError


MAXIMUM_CSV_FILE_LIMIT = 40
MAXIMUM_CSV_FILE_LIMIT = 400
Contributor
This should be fine, but just as an aside we're using this same limit within an actual protocol run as well so someone could theoretically do 400 file writes during a single protocol. Not necessarily a problem, just means they'll use up all their write limit in one go.
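As a rough illustration of the shared budget described above (a sketch only, not robot-server's actual check):

```python
MAXIMUM_CSV_FILE_LIMIT = 400  # shared by user uploads and in-run CSV writes


def can_store_another_csv(current_file_count: int) -> bool:
    # A single protocol issuing 400 CSV writes would consume this entire
    # budget in one run, leaving no headroom for further files.
    return current_file_count < MAXIMUM_CSV_FILE_LIMIT
```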



class GenericCsvTransform:
@@ -11,6 +11,7 @@
from opentrons.protocol_engine.clients import SyncClient as EngineClient
from opentrons.protocol_api.core.engine.module_core import AbsorbanceReaderCore
from opentrons.protocol_api import MAX_SUPPORTED_VERSION
from opentrons.protocol_engine.errors.exceptions import CannotPerformModuleAction
from opentrons.protocol_engine.state.module_substates import AbsorbanceReaderSubState
from opentrons.protocol_engine.state.module_substates.absorbance_reader_substate import (
AbsorbanceReaderId,
@@ -67,6 +68,7 @@ def test_initialize(
decoy: Decoy, mock_engine_client: EngineClient, subject: AbsorbanceReaderCore
) -> None:
"""It should set the sample wavelength with the engine client."""
subject._ready_to_initialize = True
subject.initialize("single", [123])

decoy.verify(
@@ -115,10 +117,18 @@ def test_initialize(
assert subject._initialized_value == [124, 125, 126]


def test_initialize_not_ready(subject: AbsorbanceReaderCore) -> None:
"""It should raise CannotPerformModuleAction if you dont call .close_lid() command."""
subject._ready_to_initialize = False
with pytest.raises(CannotPerformModuleAction):
subject.initialize("single", [123])


def test_read(
decoy: Decoy, mock_engine_client: EngineClient, subject: AbsorbanceReaderCore
) -> None:
"""It should call absorbance reader to read with the engine client."""
subject._ready_to_initialize = True
subject._initialized_value = [123]
substate = AbsorbanceReaderSubState(
module_id=AbsorbanceReaderId(subject.module_id),
24 changes: 19 additions & 5 deletions robot-server/robot_server/data_files/data_files_store.py
@@ -15,8 +15,9 @@
analysis_csv_rtp_table,
run_csv_rtp_table,
)
from robot_server.persistence.tables.schema_7 import DataFileSourceSQLEnum

from .models import FileIdNotFoundError, FileInUseError
from .models import DataFileSource, FileIdNotFoundError, FileInUseError


@dataclass(frozen=True)
@@ -27,6 +28,7 @@ class DataFileInfo:
name: str
file_hash: str
created_at: datetime
source: DataFileSource


class DataFilesStore:
@@ -53,6 +55,7 @@ async def insert(self, file_info: DataFileInfo) -> None:
file_info_dict = {
"id": file_info.id,
"name": file_info.name,
"source": DataFileSourceSQLEnum(file_info.source.value),
"created_at": file_info.created_at,
"file_hash": file_info.file_hash,
}
@@ -80,14 +83,24 @@ def sql_get_all_from_engine(self) -> List[DataFileInfo]:
all_rows = transaction.execute(statement).all()
return [_convert_row_data_file_info(sql_row) for sql_row in all_rows]

def get_usage_info(self) -> List[FileUsageInfo]:
def get_usage_info(
self, source: Optional[DataFileSource] = None
) -> List[FileUsageInfo]:
"""Return information about usage of all the existing data files in runs & analyses.

Results are ordered with the oldest-added data file first.
"""
select_all_data_file_ids = sqlalchemy.select(data_files_table.c.id).order_by(
sqlite_rowid
)
if source is None:
select_all_data_file_ids = sqlalchemy.select(
data_files_table.c.id
).order_by(sqlite_rowid)
else:
select_all_data_file_ids = (
sqlalchemy.select(data_files_table.c.id)
.where(data_files_table.c.source.name == source.name)
.order_by(sqlite_rowid)
)

select_ids_used_in_analyses = sqlalchemy.select(
analysis_csv_rtp_table.c.file_id
).where(analysis_csv_rtp_table.c.file_id.is_not(None))
@@ -165,6 +178,7 @@ def _convert_row_data_file_info(row: sqlalchemy.engine.Row) -> DataFileInfo:
return DataFileInfo(
id=row.id,
name=row.name,
source=DataFileSource(row.source.value),
created_at=row.created_at,
file_hash=row.file_hash,
)
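For illustration, the new optional filter can be used like this (a usage sketch; store construction and surrounding code are omitted):

```python
from robot_server.data_files.models import DataFileSource

# Only user-uploaded files (what the auto-deleter now budgets against):
uploaded_usage = data_files_store.get_usage_info(DataFileSource.UPLOADED)

# Only files generated on the robot, e.g. plate reader CSV output:
generated_usage = data_files_store.get_usage_info(DataFileSource.GENERATED)

# No argument preserves the old behavior and returns usage info for every file:
all_usage = data_files_store.get_usage_info()
```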
11 changes: 6 additions & 5 deletions robot-server/robot_server/data_files/file_auto_deleter.py
@@ -1,14 +1,15 @@
"""Auto-delete old data files to make room for new ones."""
"""Auto-delete old user data files to make room for new ones."""
from logging import getLogger

from robot_server.data_files.data_files_store import DataFilesStore
from robot_server.data_files.models import DataFileSource
from robot_server.deletion_planner import DataFileDeletionPlanner

_log = getLogger(__name__)


class DataFileAutoDeleter:
"""Auto deleter for data files."""
"""Auto deleter for uploaded data files."""

def __init__(
self,
@@ -22,9 +23,9 @@ async def make_room_for_new_file(self) -> None:
"""Delete old data files to make room for a new one."""
# It feels wasteful to collect usage info of upto 50 files
# even when there's no need for deletion
data_file_usage_info = [
usage_info for usage_info in self._data_files_store.get_usage_info()
]
data_file_usage_info = self._data_files_store.get_usage_info(
DataFileSource.UPLOADED
)

Comment on lines +26 to +28
Contributor
Very nice, makes sense to separate our deletion methodology like this.

if len(data_file_usage_info) < self._deletion_planner.maximum_allowed_files:
return
19 changes: 16 additions & 3 deletions robot-server/robot_server/data_files/models.py
@@ -1,6 +1,7 @@
"""Data files models."""
from datetime import datetime
from typing import Literal, Set
from enum import Enum

from pydantic import Field

@@ -10,12 +11,24 @@
from robot_server.service.json_api import ResourceModel


class DataFileSource(Enum):
"""The source this data file is from."""

UPLOADED = "uploaded"
GENERATED = "generated"
Contributor
Generated should be fine in general, but to look forward do we want to categorize these as something like "generated_csv" or more specifically "plate_reader_csv"? Would there be a reason in the future to want to tell the difference between types of generated/protocol output files via the file source field?

Contributor
I think that is a possibility. I wouldn't mind either way. If you keep it like this for v8.2, changing it to the other way is an easy and cheap migration to do later, if we need to.
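For reference, the more granular categorization floated above might look something like this sketch (the value names are hypothetical and not part of this PR):

```python
from enum import Enum


class DataFileSource(Enum):
    """Where a data file came from (hypothetical, more granular variant)."""

    UPLOADED = "uploaded"
    PLATE_READER_CSV = "plate_reader_csv"  # CSVs written by the absorbance plate reader
    # other generated/protocol-output categories could be added here later
```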



class DataFile(ResourceModel):
"""A model representing an uploaded data file."""
"""A model representing a data file."""

id: str = Field(..., description="A unique identifier for this file.")
name: str = Field(..., description="Name of the uploaded file.")
createdAt: datetime = Field(..., description="When this data file was *uploaded*.")
name: str = Field(..., description="Name of the data file.")
source: DataFileSource = Field(
..., description="The origin of the file (uploaded or generated)"
)
createdAt: datetime = Field(
..., description="When this data file was uploaded or generated.."
)


class FileIdNotFoundError(GeneralError):
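With the new field, a data file resource returned by the data files endpoints would look roughly like this (the values are illustrative assumptions):

```python
example_data_file = {
    "id": "a1b2c3d4",                          # made-up identifier
    "name": "plate_1_results.csv",
    "createdAt": "2024-10-27T18:00:00+00:00",
    "source": "generated",                     # new field: "uploaded" or "generated"
}
```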
15 changes: 13 additions & 2 deletions robot-server/robot_server/data_files/router.py
@@ -22,7 +22,13 @@
)
from .data_files_store import DataFilesStore, DataFileInfo
from .file_auto_deleter import DataFileAutoDeleter
from .models import DataFile, FileIdNotFoundError, FileIdNotFound, FileInUseError
from .models import (
DataFile,
DataFileSource,
FileIdNotFoundError,
FileIdNotFound,
FileInUseError,
)
from ..protocols.dependencies import get_file_hasher, get_file_reader_writer
from ..service.dependencies import get_current_time, get_unique_id

@@ -137,6 +143,7 @@ async def upload_data_file(
id=existing_file_info.id,
name=existing_file_info.name,
createdAt=existing_file_info.created_at,
source=existing_file_info.source,
)
),
status_code=status.HTTP_200_OK,
@@ -151,6 +158,7 @@
name=buffered_file.name,
file_hash=file_hash,
created_at=created_at,
source=DataFileSource.UPLOADED,
)
await data_files_store.insert(file_info)
return await PydanticResponse.create(
@@ -159,6 +167,7 @@
id=file_info.id,
name=file_info.name,
createdAt=created_at,
source=DataFileSource.UPLOADED,
)
),
status_code=status.HTTP_201_CREATED,
@@ -195,6 +204,7 @@ async def get_data_file_info_by_id(
id=resource.id,
name=resource.name,
createdAt=resource.created_at,
source=resource.source,
)
),
status_code=status.HTTP_200_OK,
@@ -260,6 +270,7 @@ async def get_all_data_files(
id=data_file_info.id,
name=data_file_info.name,
createdAt=data_file_info.created_at,
source=data_file_info.source,
)
for data_file_info in data_files
],
@@ -282,7 +293,7 @@ async def delete_file_by_id(
dataFileId: str,
data_files_store: DataFilesStore = Depends(get_data_files_store),
) -> PydanticResponse[SimpleEmptyBody]:
"""Delete an uploaded data file by ID.
"""Delete an uploaded or generated data file by ID.

Arguments:
dataFileId: ID of the data file to delete, pulled from URL.
@@ -30,7 +30,7 @@ async def get_file_provider(
FileProviderWrapper, fastapi.Depends(get_file_provider_wrapper)
],
) -> FileProvider:
"""Return theengine `FileProvider` which accepts callbacks from FileProviderWrapper."""
"""Return the engine `FileProvider` which accepts callbacks from FileProviderWrapper."""
file_provider = FileProvider(
data_files_write_csv_callback=file_provider_wrapper.write_csv_callback,
data_files_filecount=file_provider_wrapper.csv_filecount_callback,
15 changes: 10 additions & 5 deletions robot-server/robot_server/file_provider/provider.py
@@ -9,8 +9,12 @@
get_data_files_directory,
get_data_files_store,
)
from robot_server.data_files.models import DataFileSource
from ..service.dependencies import get_current_time, get_unique_id
from robot_server.data_files.data_files_store import DataFilesStore, DataFileInfo
from robot_server.data_files.data_files_store import (
DataFilesStore,
DataFileInfo,
)
from opentrons.protocol_engine.resources.file_provider import GenericCsvTransform


@@ -62,13 +66,14 @@ async def write_csv_callback(
name=csv_data.filename,
file_hash="",
created_at=created_at,
source=DataFileSource.GENERATED,
)
await self._data_files_store.insert(file_info)
return file_id

async def csv_filecount_callback(self) -> int:
"""Return the current count of files stored within the data files directory."""
data_file_usage_info = [
usage_info for usage_info in self._data_files_store.get_usage_info()
]
"""Return the current count of generated files stored within the data files directory."""
data_file_usage_info = self._data_files_store.get_usage_info(
DataFileSource.GENERATED
)
return len(data_file_usage_info)
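Putting the pieces together, a generated file would typically originate from an absorbance read that names an output file. The sketch below assumes the Protocol API exposes the filename as `export_filename`, which is not shown in this diff:

```python
# Hypothetical protocol snippet (parameter name is an assumption):
data = reader.read(export_filename="plate_1_results.csv")

# On the robot-server side, write_csv_callback (above) stores that CSV with
# source=DataFileSource.GENERATED, so csv_filecount_callback counts it against
# the generated-file budget while the upload auto-deleter ignores it.
```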
34 changes: 32 additions & 2 deletions robot-server/robot_server/persistence/_migrations/v6_to_v7.py
@@ -3,6 +3,7 @@
Summary of changes from schema 6:

- Adds a new command_intent to store the commands intent in the commands table
- Adds a new source column to store the data file's origin in the data_files table
- Adds the `boolean_setting` table.
"""

@@ -15,7 +16,7 @@
import sqlalchemy

from ..database import sql_engine_ctx, sqlite_rowid
from ..tables import schema_7
from ..tables import DataFileSourceSQLEnum, schema_7
from .._folder_migrator import Migration

from ..file_and_directory_names import (
@@ -35,7 +36,7 @@ def migrate(self, source_dir: Path, dest_dir: Path) -> None:

dest_db_file = dest_dir / DB_FILE

# Append the new column to existing protocols in v4 database
# Append the new column to existing protocols and data_files in v6 database
with ExitStack() as exit_stack:
dest_engine = exit_stack.enter_context(sql_engine_ctx(dest_db_file))

@@ -59,10 +60,20 @@ def add_column(
schema_7.run_command_table.c.command_intent,
)

add_column(
dest_engine,
schema_7.data_files_table.name,
schema_7.data_files_table.c.source,
)

_migrate_command_table_with_new_command_intent_col(
dest_transaction=dest_transaction
)

_migrate_data_files_table_with_new_source_col(
dest_transaction=dest_transaction
)


def _migrate_command_table_with_new_command_intent_col(
dest_transaction: sqlalchemy.engine.Connection,
@@ -83,3 +94,22 @@ def _migrate_command_table_with_new_command_intent_col(
dest_transaction.execute(
f"UPDATE run_command SET command_intent='{new_command_intent}' WHERE row_id={row.row_id}"
)


def _migrate_data_files_table_with_new_source_col(
dest_transaction: sqlalchemy.engine.Connection,
) -> None:
"""Add a new 'source' column to data_files table."""
select_data_files = sqlalchemy.select(schema_7.data_files_table).order_by(
sqlite_rowid
)
insert_new_data = sqlalchemy.insert(schema_7.data_files_table)
for old_row in dest_transaction.execute(select_data_files).all():
dest_transaction.execute(
insert_new_data,
id=old_row.id,
name=old_row.name,
file_hash=old_row.file_hash,
created_at=old_row.created_at,
source=DataFileSourceSQLEnum.UPLOADED,
)
2 changes: 2 additions & 0 deletions robot-server/robot_server/persistence/tables/__init__.py
@@ -16,6 +16,7 @@
PrimitiveParamSQLEnum,
ProtocolKindSQLEnum,
BooleanSettingKey,
DataFileSourceSQLEnum,
)


@@ -34,4 +35,5 @@
"PrimitiveParamSQLEnum",
"ProtocolKindSQLEnum",
"BooleanSettingKey",
"DataFileSourceSQLEnum",
]