From d9f7d2ac5fbe7419dce30c2da94aabf0dbc419ff Mon Sep 17 00:00:00 2001 From: CaseyBatten Date: Sat, 19 Oct 2024 13:16:02 -0400 Subject: [PATCH] feat(api, robot-server): Plate Reader CSV output functionality (#16495) Covers PLAT-380, PLAT-468 Allows plate reader to save CSV files through a new FileProvider in protocol engine tool, created and passed in through robot-server. --- api/src/opentrons/cli/analyze.py | 1 + api/src/opentrons/execute.py | 1 + .../protocol_api/core/engine/module_core.py | 27 ++- api/src/opentrons/protocol_api/core/module.py | 2 +- .../opentrons/protocol_api/module_contexts.py | 17 +- .../commands/absorbance_reader/read.py | 104 +++++++++++- .../protocol_engine/commands/command.py | 1 + .../protocol_engine/create_protocol_engine.py | 10 +- .../protocol_engine/engine_support.py | 3 +- .../protocol_engine/errors/__init__.py | 2 + .../protocol_engine/errors/exceptions.py | 13 ++ .../protocol_engine/execution/__init__.py | 2 + .../execution/command_executor.py | 5 +- .../execution/create_queue_worker.py | 4 + .../protocol_engine/protocol_engine.py | 5 +- .../protocol_engine/resources/__init__.py | 2 + .../resources/file_provider.py | 157 ++++++++++++++++++ .../opentrons/protocol_engine/state/files.py | 59 +++++++ .../opentrons/protocol_engine/state/state.py | 13 ++ .../protocol_engine/state/state_summary.py | 1 + api/src/opentrons/simulate.py | 1 + api/tests/opentrons/conftest.py | 1 + .../engine/test_absorbance_reader_core.py | 23 ++- .../opentrons/protocol_engine/conftest.py | 7 + .../execution/test_command_executor.py | 10 +- .../robot_server/file_provider/__init__.py | 1 + .../file_provider/fastapi_dependencies.py | 39 +++++ .../robot_server/file_provider/provider.py | 74 +++++++++ .../maintenance_run_data_manager.py | 1 + .../robot_server/runs/router/base_router.py | 7 + .../robot_server/runs/run_data_manager.py | 6 + robot-server/robot_server/runs/run_models.py | 8 + .../runs/run_orchestrator_store.py | 3 + 
.../test_json_v6_protocol_run.tavern.yaml | 1 + .../test_json_v7_protocol_run.tavern.yaml | 1 + .../runs/test_protocol_run.tavern.yaml | 2 + ...t_run_queued_protocol_commands.tavern.yaml | 1 + ...t_run_with_run_time_parameters.tavern.yaml | 1 + .../tests/protocols/test_protocol_analyzer.py | 1 + robot-server/tests/runs/router/conftest.py | 17 ++ .../tests/runs/router/test_base_router.py | 25 +++ .../tests/runs/router/test_labware_router.py | 1 + .../tests/runs/test_run_controller.py | 1 + .../tests/runs/test_run_data_manager.py | 32 ++++ .../tests/runs/test_run_orchestrator_store.py | 12 ++ robot-server/tests/runs/test_run_store.py | 2 + shared-data/command/schemas/10.json | 5 + 47 files changed, 692 insertions(+), 20 deletions(-) create mode 100644 api/src/opentrons/protocol_engine/resources/file_provider.py create mode 100644 api/src/opentrons/protocol_engine/state/files.py create mode 100644 robot-server/robot_server/file_provider/__init__.py create mode 100644 robot-server/robot_server/file_provider/fastapi_dependencies.py create mode 100644 robot-server/robot_server/file_provider/provider.py diff --git a/api/src/opentrons/cli/analyze.py b/api/src/opentrons/cli/analyze.py index f311adce402..8489da83d68 100644 --- a/api/src/opentrons/cli/analyze.py +++ b/api/src/opentrons/cli/analyze.py @@ -332,6 +332,7 @@ async def _do_analyze( liquids=[], wells=[], hasEverEnteredErrorRecovery=False, + files=[], ), parameters=[], ) diff --git a/api/src/opentrons/execute.py b/api/src/opentrons/execute.py index ade74b1aadd..a9b3562d82b 100644 --- a/api/src/opentrons/execute.py +++ b/api/src/opentrons/execute.py @@ -546,6 +546,7 @@ def _create_live_context_pe( hardware_api=hardware_api_wrapped, config=_get_protocol_engine_config(), deck_configuration=entrypoint_util.get_deck_configuration(), + file_provider=None, error_recovery_policy=error_recovery_policy.never_recover, drop_tips_after_run=False, post_run_hardware_state=PostRunHardwareState.STAY_ENGAGED_IN_PLACE, diff --git 
a/api/src/opentrons/protocol_api/core/engine/module_core.py b/api/src/opentrons/protocol_api/core/engine/module_core.py index 47b49c54e23..1d800dee7ea 100644 --- a/api/src/opentrons/protocol_api/core/engine/module_core.py +++ b/api/src/opentrons/protocol_api/core/engine/module_core.py @@ -586,11 +586,20 @@ def initialize( ) self._initialized_value = wavelengths - def read(self) -> Optional[Dict[int, Dict[str, float]]]: - """Initiate a read on the Absorbance Reader, and return the results. During Analysis, this will return None.""" + def read(self, filename: Optional[str]) -> Dict[int, Dict[str, float]]: + """Initiate a read on the Absorbance Reader, and return the results. During Analysis, this will return a measurement of zero for all wells.""" + wavelengths = self._engine_client.state.modules.get_absorbance_reader_substate( + self.module_id + ).configured_wavelengths + if wavelengths is None: + raise CannotPerformModuleAction( + "Cannot perform Read action on Absorbance Reader without calling `.initialize(...)` first." + ) if self._initialized_value: self._engine_client.execute_command( - cmd.absorbance_reader.ReadAbsorbanceParams(moduleId=self.module_id) + cmd.absorbance_reader.ReadAbsorbanceParams( + moduleId=self.module_id, fileName=filename + ) ) if not self._engine_client.state.config.use_virtual_modules: read_result = ( @@ -603,7 +612,17 @@ def read(self) -> Optional[Dict[int, Dict[str, float]]]: raise CannotPerformModuleAction( "Absorbance Reader failed to return expected read result." 
) - return None + + # When using virtual modules, return all zeroes + virtual_asbsorbance_result: Dict[int, Dict[str, float]] = {} + for wavelength in wavelengths: + converted_values = ( + self._engine_client.state.modules.convert_absorbance_reader_data_points( + data=[0] * 96 + ) + ) + virtual_asbsorbance_result[wavelength] = converted_values + return virtual_asbsorbance_result def close_lid( self, diff --git a/api/src/opentrons/protocol_api/core/module.py b/api/src/opentrons/protocol_api/core/module.py index 90abea1d0ec..c93e8ce8de8 100644 --- a/api/src/opentrons/protocol_api/core/module.py +++ b/api/src/opentrons/protocol_api/core/module.py @@ -365,7 +365,7 @@ def initialize( """Initialize the Absorbance Reader by taking zero reading.""" @abstractmethod - def read(self) -> Optional[Dict[int, Dict[str, float]]]: + def read(self, filename: Optional[str]) -> Dict[int, Dict[str, float]]: """Get an absorbance reading from the Absorbance Reader.""" @abstractmethod diff --git a/api/src/opentrons/protocol_api/module_contexts.py b/api/src/opentrons/protocol_api/module_contexts.py index 5d182843dcc..f7541da1836 100644 --- a/api/src/opentrons/protocol_api/module_contexts.py +++ b/api/src/opentrons/protocol_api/module_contexts.py @@ -1035,6 +1035,17 @@ def initialize( ) @requires_version(2, 21) - def read(self) -> Optional[Dict[int, Dict[str, float]]]: - """Initiate read on the Absorbance Reader. Returns a dictionary of wavelengths to dictionary of values ordered by well name.""" - return self._core.read() + def read(self, export_filename: Optional[str]) -> Dict[int, Dict[str, float]]: + """Initiate read on the Absorbance Reader. + + Returns a dictionary of wavelengths to dictionary of values ordered by well name. + + :param export_filename: Optional, if a filename is provided a CSV file will be saved + as a result of the read action containing measurement data. The filename will + be modified to include the wavelength used during measurement. 
If multiple + measurements are taken, then a file will be generated for each wavelength provided. + + Example: If `export_filename="my_data"` and wavelengths 450 and 531 are used during + measurement, the output files will be "my_data_450.csv" and "my_data_531.csv". + """ + return self._core.read(filename=export_filename) diff --git a/api/src/opentrons/protocol_engine/commands/absorbance_reader/read.py b/api/src/opentrons/protocol_engine/commands/absorbance_reader/read.py index b101cdb70b8..caf8a738f09 100644 --- a/api/src/opentrons/protocol_engine/commands/absorbance_reader/read.py +++ b/api/src/opentrons/protocol_engine/commands/absorbance_reader/read.py @@ -1,14 +1,22 @@ """Command models to read absorbance.""" from __future__ import annotations -from typing import Optional, Dict, TYPE_CHECKING +from datetime import datetime +from typing import Optional, Dict, TYPE_CHECKING, List from typing_extensions import Literal, Type from pydantic import BaseModel, Field from ..command import AbstractCommandImpl, BaseCommand, BaseCommandCreate, SuccessData -from ...errors import CannotPerformModuleAction +from ...errors import CannotPerformModuleAction, StorageLimitReachedError from ...errors.error_occurrence import ErrorOccurrence +from ...resources.file_provider import ( + PlateReaderData, + ReadData, + MAXIMUM_CSV_FILE_LIMIT, +) +from ...resources import FileProvider + if TYPE_CHECKING: from opentrons.protocol_engine.state.state import StateView from opentrons.protocol_engine.execution import EquipmentHandler @@ -21,6 +29,10 @@ class ReadAbsorbanceParams(BaseModel): """Input parameters for an absorbance reading.""" moduleId: str = Field(..., description="Unique ID of the Absorbance Reader.") + fileName: Optional[str] = Field( + None, + description="Optional file name to use when storing the results of a measurement.", + ) class ReadAbsorbanceResult(BaseModel): @@ -29,6 +41,10 @@ class ReadAbsorbanceResult(BaseModel): data: Optional[Dict[int, Dict[str, float]]] = Field( 
..., description="Absorbance data points per wavelength." ) + fileIds: Optional[List[str]] = Field( + ..., + description="List of file IDs for files output as a result of a Read action.", + ) class ReadAbsorbanceImpl( @@ -40,18 +56,21 @@ def __init__( self, state_view: StateView, equipment: EquipmentHandler, + file_provider: FileProvider, **unused_dependencies: object, ) -> None: self._state_view = state_view self._equipment = equipment + self._file_provider = file_provider - async def execute( + async def execute( # noqa: C901 self, params: ReadAbsorbanceParams ) -> SuccessData[ReadAbsorbanceResult, None]: """Initiate an absorbance measurement.""" abs_reader_substate = self._state_view.modules.get_absorbance_reader_substate( module_id=params.moduleId ) + # Allow propagation of ModuleNotAttachedError. abs_reader = self._equipment.get_module_hardware_api( abs_reader_substate.module_id @@ -62,10 +81,29 @@ async def execute( "Cannot perform Read action on Absorbance Reader without calling `.initialize(...)` first." ) + # TODO: we need to return a file ID and increase the file count even when a module is not attached + if ( + params.fileName is not None + and abs_reader_substate.configured_wavelengths is not None + ): + # Validate that the amount of files we are about to generate does not put us higher than the limit + if ( + self._state_view.files.get_filecount() + + len(abs_reader_substate.configured_wavelengths) + > MAXIMUM_CSV_FILE_LIMIT + ): + raise StorageLimitReachedError( + message=f"Attempt to write file {params.fileName} exceeds file creation limit of {MAXIMUM_CSV_FILE_LIMIT} files."
+ ) + + asbsorbance_result: Dict[int, Dict[str, float]] = {} + transform_results = [] + # Handle the measurement and begin building data for return if abs_reader is not None: + start_time = datetime.now() results = await abs_reader.start_measure() + finish_time = datetime.now() if abs_reader._measurement_config is not None: - asbsorbance_result: Dict[int, Dict[str, float]] = {} sample_wavelengths = abs_reader._measurement_config.sample_wavelengths for wavelength, result in zip(sample_wavelengths, results): converted_values = ( @@ -74,13 +112,67 @@ async def execute( ) ) asbsorbance_result[wavelength] = converted_values + transform_results.append( + ReadData.construct(wavelength=wavelength, data=converted_values) + ) + # Handle the virtual module case for data creation (all zeroes) + elif self._state_view.config.use_virtual_modules: + start_time = finish_time = datetime.now() + if abs_reader_substate.configured_wavelengths is not None: + for wavelength in abs_reader_substate.configured_wavelengths: + converted_values = ( + self._state_view.modules.convert_absorbance_reader_data_points( + data=[0] * 96 + ) + ) + asbsorbance_result[wavelength] = converted_values + transform_results.append( + ReadData.construct(wavelength=wavelength, data=converted_values) + ) + else: + raise CannotPerformModuleAction( + "Plate Reader data cannot be requested with a module that has not been initialized." + ) + + # TODO (cb, 10-17-2024): FILE PROVIDER - Some day we may want to break the file provider behavior into a separate API function. + # When this happens, we probably will have to change the command results handler we utilize to track file IDs in engine. + # Today, the action handler for the FileStore looks for a ReadAbsorbanceResult command action, this will need to be delinked.
+ + # Begin interfacing with the file provider if the user provided a filename + file_ids = [] + if params.fileName is not None: + # Create the Plate Reader Transform + plate_read_result = PlateReaderData.construct( + read_results=transform_results, + reference_wavelength=abs_reader_substate.reference_wavelength, + start_time=start_time, + finish_time=finish_time, + serial_number=abs_reader.serial_number + if (abs_reader is not None and abs_reader.serial_number is not None) + else "VIRTUAL_SERIAL", + ) + + if isinstance(plate_read_result, PlateReaderData): + # Write a CSV file for each of the measurements taken + for measurement in plate_read_result.read_results: + file_id = await self._file_provider.write_csv( + write_data=plate_read_result.build_generic_csv( + filename=params.fileName, + measurement=measurement, + ) + ) + file_ids.append(file_id) + + # Return success data to api return SuccessData( - public=ReadAbsorbanceResult(data=asbsorbance_result), + public=ReadAbsorbanceResult( + data=asbsorbance_result, fileIds=file_ids + ), private=None, ) return SuccessData( - public=ReadAbsorbanceResult(data=None), + public=ReadAbsorbanceResult(data=asbsorbance_result, fileIds=file_ids), private=None, ) diff --git a/api/src/opentrons/protocol_engine/commands/command.py b/api/src/opentrons/protocol_engine/commands/command.py index 759606899c0..9ba9404af1f 100644 --- a/api/src/opentrons/protocol_engine/commands/command.py +++ b/api/src/opentrons/protocol_engine/commands/command.py @@ -274,6 +274,7 @@ def __init__( state_view: StateView, hardware_api: HardwareControlAPI, equipment: execution.EquipmentHandler, + file_provider: execution.FileProvider, movement: execution.MovementHandler, gantry_mover: execution.GantryMover, labware_movement: execution.LabwareMovementHandler, diff --git a/api/src/opentrons/protocol_engine/create_protocol_engine.py b/api/src/opentrons/protocol_engine/create_protocol_engine.py index d3d50da14df..dc66591eff2 100644 --- 
a/api/src/opentrons/protocol_engine/create_protocol_engine.py +++ b/api/src/opentrons/protocol_engine/create_protocol_engine.py @@ -10,7 +10,7 @@ from opentrons_shared_data.robot import load as load_robot from .protocol_engine import ProtocolEngine -from .resources import DeckDataProvider, ModuleDataProvider +from .resources import DeckDataProvider, ModuleDataProvider, FileProvider from .state.config import Config from .state.state import StateStore from .types import PostRunHardwareState, DeckConfigurationType @@ -26,6 +26,7 @@ async def create_protocol_engine( error_recovery_policy: ErrorRecoveryPolicy, load_fixed_trash: bool = False, deck_configuration: typing.Optional[DeckConfigurationType] = None, + file_provider: typing.Optional[FileProvider] = None, notify_publishers: typing.Optional[typing.Callable[[], None]] = None, ) -> ProtocolEngine: """Create a ProtocolEngine instance. @@ -37,6 +38,7 @@ async def create_protocol_engine( See documentation on `ErrorRecoveryPolicy`. load_fixed_trash: Automatically load fixed trash labware in engine. deck_configuration: The initial deck configuration the engine will be instantiated with. + file_provider: Provides access to robot server file writing procedures for protocol output. notify_publishers: Notifies robot server publishers of internal state change. 
""" deck_data = DeckDataProvider(config.deck_type) @@ -47,6 +49,7 @@ async def create_protocol_engine( module_calibration_offsets = ModuleDataProvider.load_module_calibrations() robot_definition = load_robot(config.robot_type) + state_store = StateStore( config=config, deck_definition=deck_definition, @@ -62,6 +65,7 @@ async def create_protocol_engine( return ProtocolEngine( state_store=state_store, hardware_api=hardware_api, + file_provider=file_provider, ) @@ -70,6 +74,7 @@ def create_protocol_engine_in_thread( hardware_api: HardwareControlAPI, config: Config, deck_configuration: typing.Optional[DeckConfigurationType], + file_provider: typing.Optional[FileProvider], error_recovery_policy: ErrorRecoveryPolicy, drop_tips_after_run: bool, post_run_hardware_state: PostRunHardwareState, @@ -97,6 +102,7 @@ def create_protocol_engine_in_thread( with async_context_manager_in_thread( _protocol_engine( hardware_api, + file_provider, config, deck_configuration, error_recovery_policy, @@ -114,6 +120,7 @@ def create_protocol_engine_in_thread( @contextlib.asynccontextmanager async def _protocol_engine( hardware_api: HardwareControlAPI, + file_provider: typing.Optional[FileProvider], config: Config, deck_configuration: typing.Optional[DeckConfigurationType], error_recovery_policy: ErrorRecoveryPolicy, @@ -123,6 +130,7 @@ async def _protocol_engine( ) -> typing.AsyncGenerator[ProtocolEngine, None]: protocol_engine = await create_protocol_engine( hardware_api=hardware_api, + file_provider=file_provider, config=config, error_recovery_policy=error_recovery_policy, load_fixed_trash=load_fixed_trash, diff --git a/api/src/opentrons/protocol_engine/engine_support.py b/api/src/opentrons/protocol_engine/engine_support.py index 9d6bdcbdd69..b822b97914d 100644 --- a/api/src/opentrons/protocol_engine/engine_support.py +++ b/api/src/opentrons/protocol_engine/engine_support.py @@ -6,7 +6,8 @@ def create_run_orchestrator( - hardware_api: HardwareControlAPI, protocol_engine: ProtocolEngine + 
hardware_api: HardwareControlAPI, + protocol_engine: ProtocolEngine, ) -> RunOrchestrator: """Create a RunOrchestrator instance.""" return RunOrchestrator( diff --git a/api/src/opentrons/protocol_engine/errors/__init__.py b/api/src/opentrons/protocol_engine/errors/__init__.py index 304f7db1fff..9bbe3aae9b8 100644 --- a/api/src/opentrons/protocol_engine/errors/__init__.py +++ b/api/src/opentrons/protocol_engine/errors/__init__.py @@ -75,6 +75,7 @@ IncompleteWellDefinitionError, OperationLocationNotInWellError, InvalidDispenseVolumeError, + StorageLimitReachedError, ) from .error_occurrence import ErrorOccurrence, ProtocolCommandFailedError @@ -158,4 +159,5 @@ "IncompleteWellDefinitionError", "OperationLocationNotInWellError", "InvalidDispenseVolumeError", + "StorageLimitReachedError", ] diff --git a/api/src/opentrons/protocol_engine/errors/exceptions.py b/api/src/opentrons/protocol_engine/errors/exceptions.py index dd9dc6e1d51..5656942b338 100644 --- a/api/src/opentrons/protocol_engine/errors/exceptions.py +++ b/api/src/opentrons/protocol_engine/errors/exceptions.py @@ -1108,3 +1108,16 @@ def __init__( ) -> None: """Build an OperationLocationNotInWellError.""" super().__init__(ErrorCodes.GENERAL_ERROR, message, details, wrapping) + + +class StorageLimitReachedError(ProtocolEngineError): + """Raised to indicate that a file cannot be created due to storage limitations.""" + + def __init__( + self, + message: Optional[str] = None, + detail: Optional[Dict[str, str]] = None, + wrapping: Optional[Sequence[EnumeratedError]] = None, + ) -> None: + """Build a StorageLimitReachedError.""" + super().__init__(ErrorCodes.GENERAL_ERROR, message, detail, wrapping) diff --git a/api/src/opentrons/protocol_engine/execution/__init__.py b/api/src/opentrons/protocol_engine/execution/__init__.py index 80f2dfd0d99..482a16d787f 100644 --- a/api/src/opentrons/protocol_engine/execution/__init__.py +++ b/api/src/opentrons/protocol_engine/execution/__init__.py @@ -21,6 +21,7 @@ from
.hardware_stopper import HardwareStopper from .door_watcher import DoorWatcher from .status_bar import StatusBarHandler +from ..resources.file_provider import FileProvider # .thermocycler_movement_flagger omitted from package's public interface. @@ -45,4 +46,5 @@ "DoorWatcher", "RailLightsHandler", "StatusBarHandler", + "FileProvider", ] diff --git a/api/src/opentrons/protocol_engine/execution/command_executor.py b/api/src/opentrons/protocol_engine/execution/command_executor.py index e9dd2ec73b9..1d30b8756d2 100644 --- a/api/src/opentrons/protocol_engine/execution/command_executor.py +++ b/api/src/opentrons/protocol_engine/execution/command_executor.py @@ -14,7 +14,7 @@ from opentrons.protocol_engine.commands.command import SuccessData from ..state.state import StateStore -from ..resources import ModelUtils +from ..resources import ModelUtils, FileProvider from ..commands import CommandStatus from ..actions import ( ActionDispatcher, @@ -72,6 +72,7 @@ class CommandExecutor: def __init__( self, hardware_api: HardwareControlAPI, + file_provider: FileProvider, state_store: StateStore, action_dispatcher: ActionDispatcher, equipment: EquipmentHandler, @@ -88,6 +89,7 @@ def __init__( ) -> None: """Initialize the CommandExecutor with access to its dependencies.""" self._hardware_api = hardware_api + self._file_provider = file_provider self._state_store = state_store self._action_dispatcher = action_dispatcher self._equipment = equipment @@ -116,6 +118,7 @@ async def execute(self, command_id: str) -> None: command_impl = queued_command._ImplementationCls( state_view=self._state_store, hardware_api=self._hardware_api, + file_provider=self._file_provider, equipment=self._equipment, movement=self._movement, gantry_mover=self._gantry_mover, diff --git a/api/src/opentrons/protocol_engine/execution/create_queue_worker.py b/api/src/opentrons/protocol_engine/execution/create_queue_worker.py index e449a013008..e37a2c0716b 100644 --- 
a/api/src/opentrons/protocol_engine/execution/create_queue_worker.py +++ b/api/src/opentrons/protocol_engine/execution/create_queue_worker.py @@ -6,6 +6,7 @@ from ..state.state import StateStore from ..actions import ActionDispatcher +from ..resources import FileProvider from .equipment import EquipmentHandler from .movement import MovementHandler from .gantry_mover import create_gantry_mover @@ -20,6 +21,7 @@ def create_queue_worker( hardware_api: HardwareControlAPI, + file_provider: FileProvider, state_store: StateStore, action_dispatcher: ActionDispatcher, command_generator: Callable[[], AsyncGenerator[str, None]], @@ -28,6 +30,7 @@ def create_queue_worker( Arguments: hardware_api: Hardware control API to pass down to dependencies. + file_provider: Provides access to robot server file writing procedures for protocol output. state_store: StateStore to pass down to dependencies. action_dispatcher: ActionDispatcher to pass down to dependencies. error_recovery_policy: ErrorRecoveryPolicy to pass down to dependencies. @@ -78,6 +81,7 @@ def create_queue_worker( command_executor = CommandExecutor( hardware_api=hardware_api, + file_provider=file_provider, state_store=state_store, action_dispatcher=action_dispatcher, equipment=equipment_handler, diff --git a/api/src/opentrons/protocol_engine/protocol_engine.py b/api/src/opentrons/protocol_engine/protocol_engine.py index c5219e889a3..d93ab5dd42d 100644 --- a/api/src/opentrons/protocol_engine/protocol_engine.py +++ b/api/src/opentrons/protocol_engine/protocol_engine.py @@ -20,7 +20,7 @@ from .errors import ProtocolCommandFailedError, ErrorOccurrence, CommandNotAllowedError from .errors.exceptions import EStopActivatedError from . 
import commands, slot_standardization -from .resources import ModelUtils, ModuleDataProvider +from .resources import ModelUtils, ModuleDataProvider, FileProvider from .types import ( LabwareOffset, LabwareOffsetCreate, @@ -95,6 +95,7 @@ def __init__( hardware_stopper: Optional[HardwareStopper] = None, door_watcher: Optional[DoorWatcher] = None, module_data_provider: Optional[ModuleDataProvider] = None, + file_provider: Optional[FileProvider] = None, ) -> None: """Initialize a ProtocolEngine instance. @@ -104,6 +105,7 @@ def __init__( Prefer the `create_protocol_engine()` factory function. """ self._hardware_api = hardware_api + self._file_provider = file_provider or FileProvider() self._state_store = state_store self._model_utils = model_utils or ModelUtils() self._action_dispatcher = action_dispatcher or ActionDispatcher( @@ -616,6 +618,7 @@ def set_and_start_queue_worker( assert self._queue_worker is None self._queue_worker = create_queue_worker( hardware_api=self._hardware_api, + file_provider=self._file_provider, state_store=self._state_store, action_dispatcher=self._action_dispatcher, command_generator=command_generator, diff --git a/api/src/opentrons/protocol_engine/resources/__init__.py b/api/src/opentrons/protocol_engine/resources/__init__.py index 94b71831589..a77075c95bb 100644 --- a/api/src/opentrons/protocol_engine/resources/__init__.py +++ b/api/src/opentrons/protocol_engine/resources/__init__.py @@ -9,6 +9,7 @@ from .deck_data_provider import DeckDataProvider, DeckFixedLabware from .labware_data_provider import LabwareDataProvider from .module_data_provider import ModuleDataProvider +from .file_provider import FileProvider from .ot3_validation import ensure_ot3_hardware @@ -18,6 +19,7 @@ "DeckDataProvider", "DeckFixedLabware", "ModuleDataProvider", + "FileProvider", "ensure_ot3_hardware", "pipette_data_provider", "labware_validation", diff --git a/api/src/opentrons/protocol_engine/resources/file_provider.py 
b/api/src/opentrons/protocol_engine/resources/file_provider.py new file mode 100644 index 00000000000..d4ed7b71522 --- /dev/null +++ b/api/src/opentrons/protocol_engine/resources/file_provider.py @@ -0,0 +1,157 @@ +"""File interaction resource provider.""" +from datetime import datetime +from typing import List, Optional, Callable, Awaitable, Dict +from pydantic import BaseModel +from ..errors import StorageLimitReachedError + + +MAXIMUM_CSV_FILE_LIMIT = 40 + + +class GenericCsvTransform: + """Generic CSV File Type data for rows of data to be separated by a delimiter.""" + + filename: str + rows: List[List[str]] + delimiter: str = "," + + @staticmethod + def build( + filename: str, rows: List[List[str]], delimiter: str = "," + ) -> "GenericCsvTransform": + """Build a Generic CSV datatype class.""" + if "." in filename and not filename.endswith(".csv"): + raise ValueError( + f"Provided filename {filename} invalid. Only CSV file format is accepted." + ) + elif "." not in filename: + filename = f"{filename}.csv" + csv = GenericCsvTransform() + csv.filename = filename + csv.rows = rows + csv.delimiter = delimiter + return csv + + +class ReadData(BaseModel): + """Read Data type containing the wavelength for a Plate Reader read alongside the Measurement Data of that read.""" + + wavelength: int + data: Dict[str, float] + + +class PlateReaderData(BaseModel): + """Data from an Opentrons Plate Reader Read. Can be converted to CSV template format.""" + + read_results: List[ReadData] + reference_wavelength: Optional[int] = None + start_time: datetime + finish_time: datetime + serial_number: str + + def build_generic_csv( # noqa: C901 + self, filename: str, measurement: ReadData + ) -> GenericCsvTransform: + """Builds a CSV compatible object containing Plate Reader Measurements. + + This will also automatically reformat the provided filename to include the wavelength of those measurements.
+ """ + plate_alpharows = ["A", "B", "C", "D", "E", "F", "G", "H"] + rows = [] + + rows.append(["", "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12"]) + for i in range(8): + row = [plate_alpharows[i]] + for j in range(12): + row.append(str(measurement.data[f"{plate_alpharows[i]}{j+1}"])) + rows.append(row) + for i in range(3): + rows.append([""]) + rows.append(["", "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12"]) + for i in range(8): + row = [plate_alpharows[i]] + for j in range(12): + row.append("") + rows.append(row) + for i in range(3): + rows.append([""]) + rows.append( + [ + "", + "ID", + "Well", + "Absorbance (OD)", + "Mean Absorbance (OD)", + "Absorbance %CV", + ] + ) + for i in range(3): + rows.append([""]) + rows.append( + [ + "", + "ID", + "Well", + "Absorbance (OD)", + "Mean Absorbance (OD)", + "Dilution Factor", + "Absorbance %CV", + ] + ) + rows.append(["1", "Sample 1", "", "", "", "1", "", "", "", "", "", ""]) + for i in range(3): + rows.append([""]) + + # end of file metadata + rows.append(["Protocol"]) + rows.append(["Assay"]) + rows.append(["Sample Wavelength (nm)", str(measurement.wavelength)]) + if self.reference_wavelength is not None: + rows.append(["Reference Wavelength (nm)", str(self.reference_wavelength)]) + rows.append(["Serial No.", self.serial_number]) + rows.append(["Measurement started at", str(self.start_time)]) + rows.append(["Measurement finished at", str(self.finish_time)]) + + # Ensure the filename adheres to ruleset contains the wavelength for a given measurement + if filename.endswith(".csv"): + filename = filename[:-4] + filename = filename + "_" + str(measurement.wavelength) + ".csv" + + return GenericCsvTransform.build( + filename=filename, + rows=rows, + delimiter=",", + ) + + +class FileProvider: + """Provider class to wrap file read write interactions to the data files directory in the engine.""" + + def __init__( + self, + data_files_write_csv_callback: Optional[ + 
Callable[[GenericCsvTransform], Awaitable[str]] + ] = None, + data_files_filecount: Optional[Callable[[], Awaitable[int]]] = None, + ) -> None: + """Initialize the interface callbacks of the File Provider for data file handling within the Protocol Engine. + + Params: + data_files_write_csv_callback: Callback to write a CSV file to the data files directory and add it to the database. + data_files_filecount: Callback to check the amount of data files already present in the data files directory. + """ + self._data_files_write_csv_callback = data_files_write_csv_callback + self._data_files_filecount = data_files_filecount + + async def write_csv(self, write_data: GenericCsvTransform) -> str: + """Writes the provided CSV object to a file in the Data Files directory. Returns the File ID of the file created.""" + if self._data_files_filecount is not None: + file_count = await self._data_files_filecount() + if file_count >= MAXIMUM_CSV_FILE_LIMIT: + raise StorageLimitReachedError( + f"Not enough space to store file {write_data.filename}." 
+ ) + if self._data_files_write_csv_callback is not None: + return await self._data_files_write_csv_callback(write_data) + # If we are in an analysis or simulation state, return an empty file ID + return "" diff --git a/api/src/opentrons/protocol_engine/state/files.py b/api/src/opentrons/protocol_engine/state/files.py new file mode 100644 index 00000000000..655d038df34 --- /dev/null +++ b/api/src/opentrons/protocol_engine/state/files.py @@ -0,0 +1,59 @@ +"""Basic protocol engine create file data state and store.""" +from dataclasses import dataclass +from typing import List + +from ._abstract_store import HasState, HandlesActions +from ..actions import Action, SucceedCommandAction +from ..commands import ( + Command, + absorbance_reader, +) + + +@dataclass +class FileState: + """State of Engine created files.""" + + file_ids: List[str] + + +class FileStore(HasState[FileState], HandlesActions): + """File state container.""" + + _state: FileState + + def __init__(self) -> None: + """Initialize a File store and its state.""" + self._state = FileState(file_ids=[]) + + def handle_action(self, action: Action) -> None: + """Modify state in reaction to an action.""" + if isinstance(action, SucceedCommandAction): + self._handle_command(action.command) + + def _handle_command(self, command: Command) -> None: + if isinstance(command.result, absorbance_reader.ReadAbsorbanceResult): + if command.result.fileIds is not None: + self._state.file_ids.extend(command.result.fileIds) + + +class FileView(HasState[FileState]): + """Read-only engine created file state view.""" + + _state: FileState + + def __init__(self, state: FileState) -> None: + """Initialize the view of file state. + + Arguments: + state: File state dataclass used for tracking file creation status. 
+ """ + self._state = state + + def get_filecount(self) -> int: + """Get the number of files currently created by the protocol.""" + return len(self._state.file_ids) + + def get_file_id_list(self) -> List[str]: + """Get the list of files by file ID created by the protocol.""" + return self._state.file_ids diff --git a/api/src/opentrons/protocol_engine/state/state.py b/api/src/opentrons/protocol_engine/state/state.py index 7fc23a8ee2f..6743e1f44fc 100644 --- a/api/src/opentrons/protocol_engine/state/state.py +++ b/api/src/opentrons/protocol_engine/state/state.py @@ -29,6 +29,7 @@ from .wells import WellState, WellView, WellStore from .geometry import GeometryView from .motion import MotionView +from .files import FileView, FileState, FileStore from .config import Config from .state_summary import StateSummary from ..types import DeckConfigurationType @@ -50,6 +51,7 @@ class State: liquids: LiquidState tips: TipState wells: WellState + files: FileState class StateView(HasState[State]): @@ -66,6 +68,7 @@ class StateView(HasState[State]): _wells: WellView _geometry: GeometryView _motion: MotionView + _files: FileView _config: Config @property @@ -118,6 +121,11 @@ def motion(self) -> MotionView: """Get state view selectors for derived motion state.""" return self._motion + @property + def files(self) -> FileView: + """Get state view selectors for engine create file state.""" + return self._files + @property def config(self) -> Config: """Get ProtocolEngine configuration.""" @@ -139,6 +147,7 @@ def get_summary(self) -> StateSummary: liquids=self._liquid.get_all(), wells=self._wells.get_all(), hasEverEnteredErrorRecovery=self._commands.get_has_entered_recovery_mode(), + files=self._state.files.file_ids, ) @@ -206,6 +215,7 @@ def __init__( self._liquid_store = LiquidStore() self._tip_store = TipStore() self._well_store = WellStore() + self._file_store = FileStore() self._substores: List[HandlesActions] = [ self._command_store, @@ -216,6 +226,7 @@ def __init__( 
self._liquid_store, self._tip_store, self._well_store, + self._file_store, ] self._config = config self._change_notifier = change_notifier or ChangeNotifier() @@ -333,6 +344,7 @@ def _get_next_state(self) -> State: liquids=self._liquid_store.state, tips=self._tip_store.state, wells=self._well_store.state, + files=self._file_store.state, ) def _initialize_state(self) -> None: @@ -349,6 +361,7 @@ def _initialize_state(self) -> None: self._liquid = LiquidView(state.liquids) self._tips = TipView(state.tips) self._wells = WellView(state.wells) + self._files = FileView(state.files) # Derived states self._geometry = GeometryView( diff --git a/api/src/opentrons/protocol_engine/state/state_summary.py b/api/src/opentrons/protocol_engine/state/state_summary.py index 66fc4249851..b1c4dd8f766 100644 --- a/api/src/opentrons/protocol_engine/state/state_summary.py +++ b/api/src/opentrons/protocol_engine/state/state_summary.py @@ -31,3 +31,4 @@ class StateSummary(BaseModel): completedAt: Optional[datetime] liquids: List[Liquid] = Field(default_factory=list) wells: List[LiquidHeightSummary] = Field(default_factory=list) + files: List[str] = Field(default_factory=list) diff --git a/api/src/opentrons/simulate.py b/api/src/opentrons/simulate.py index 23f6c7fdfb9..e565bab83e0 100644 --- a/api/src/opentrons/simulate.py +++ b/api/src/opentrons/simulate.py @@ -815,6 +815,7 @@ def _create_live_context_pe( robot_type, use_pe_virtual_hardware=use_pe_virtual_hardware ), deck_configuration=None, + file_provider=None, error_recovery_policy=error_recovery_policy.never_recover, drop_tips_after_run=False, post_run_hardware_state=PostRunHardwareState.STAY_ENGAGED_IN_PLACE, diff --git a/api/tests/opentrons/conftest.py b/api/tests/opentrons/conftest.py index a52e95248c0..cf8fdd0e97c 100755 --- a/api/tests/opentrons/conftest.py +++ b/api/tests/opentrons/conftest.py @@ -335,6 +335,7 @@ def _make_ot3_pe_ctx( block_on_door_open=False, ), deck_configuration=None, + file_provider=None, 
error_recovery_policy=error_recovery_policy.never_recover, drop_tips_after_run=False, post_run_hardware_state=PostRunHardwareState.STAY_ENGAGED_IN_PLACE, diff --git a/api/tests/opentrons/protocol_api/core/engine/test_absorbance_reader_core.py b/api/tests/opentrons/protocol_api/core/engine/test_absorbance_reader_core.py index 405d737d55b..a5fadde09cc 100644 --- a/api/tests/opentrons/protocol_api/core/engine/test_absorbance_reader_core.py +++ b/api/tests/opentrons/protocol_api/core/engine/test_absorbance_reader_core.py @@ -11,6 +11,11 @@ from opentrons.protocol_engine.clients import SyncClient as EngineClient from opentrons.protocol_api.core.engine.module_core import AbsorbanceReaderCore from opentrons.protocol_api import MAX_SUPPORTED_VERSION +from opentrons.protocol_engine.state.module_substates import AbsorbanceReaderSubState +from opentrons.protocol_engine.state.module_substates.absorbance_reader_substate import ( + AbsorbanceReaderId, + AbsorbanceReaderMeasureMode, +) SyncAbsorbanceReaderHardware = SynchronousAdapter[AbsorbanceReader] @@ -115,7 +120,23 @@ def test_read( ) -> None: """It should call absorbance reader to read with the engine client.""" subject._initialized_value = [123] - subject.read() + substate = AbsorbanceReaderSubState( + module_id=AbsorbanceReaderId(subject.module_id), + configured=True, + measured=False, + is_lid_on=True, + data=None, + configured_wavelengths=subject._initialized_value, + measure_mode=AbsorbanceReaderMeasureMode("single"), + reference_wavelength=None, + lid_id="pr_lid_labware", + ) + decoy.when( + mock_engine_client.state.modules.get_absorbance_reader_substate( + subject.module_id + ) + ).then_return(substate) + subject.read(filename=None) decoy.verify( mock_engine_client.execute_command( diff --git a/api/tests/opentrons/protocol_engine/conftest.py b/api/tests/opentrons/protocol_engine/conftest.py index 7040f8497ea..76c5d754f3e 100644 --- a/api/tests/opentrons/protocol_engine/conftest.py +++ 
b/api/tests/opentrons/protocol_engine/conftest.py @@ -22,6 +22,7 @@ from opentrons.hardware_control.api import API from opentrons.hardware_control.protocols.types import FlexRobotType, OT2RobotType from opentrons.protocol_engine.notes import CommandNoteAdder +from opentrons.protocol_engine.resources.file_provider import FileProvider if TYPE_CHECKING: from opentrons.hardware_control.ot3api import OT3API @@ -252,3 +253,9 @@ def supported_tip_fixture() -> pipette_definition.SupportedTipsDefinition: def mock_command_note_adder(decoy: Decoy) -> CommandNoteAdder: """Get a command note adder.""" return decoy.mock(cls=CommandNoteAdder) + + +@pytest.fixture +def file_provider(decoy: Decoy) -> FileProvider: + """Get a mocked out FileProvider.""" + return decoy.mock(cls=FileProvider) diff --git a/api/tests/opentrons/protocol_engine/execution/test_command_executor.py b/api/tests/opentrons/protocol_engine/execution/test_command_executor.py index 2df3a2cdd25..f5f0ec063b0 100644 --- a/api/tests/opentrons/protocol_engine/execution/test_command_executor.py +++ b/api/tests/opentrons/protocol_engine/execution/test_command_executor.py @@ -18,7 +18,7 @@ from opentrons.protocol_engine.errors.exceptions import ( EStopActivatedError as PE_EStopActivatedError, ) -from opentrons.protocol_engine.resources import ModelUtils +from opentrons.protocol_engine.resources import ModelUtils, FileProvider from opentrons.protocol_engine.state.state import StateStore from opentrons.protocol_engine.actions import ( ActionDispatcher, @@ -174,6 +174,7 @@ def subject( state_store: StateStore, action_dispatcher: ActionDispatcher, equipment: EquipmentHandler, + file_provider: FileProvider, movement: MovementHandler, mock_gantry_mover: GantryMover, labware_movement: LabwareMovementHandler, @@ -188,6 +189,7 @@ def subject( """Get a CommandExecutor test subject with its dependencies mocked out.""" return CommandExecutor( hardware_api=hardware_api, + file_provider=file_provider, state_store=state_store, 
action_dispatcher=action_dispatcher, equipment=equipment, @@ -234,6 +236,7 @@ async def test_execute( state_store: StateStore, action_dispatcher: ActionDispatcher, equipment: EquipmentHandler, + file_provider: FileProvider, movement: MovementHandler, mock_gantry_mover: GantryMover, labware_movement: LabwareMovementHandler, @@ -329,6 +332,7 @@ class _TestCommand( queued_command._ImplementationCls( state_view=state_store, hardware_api=hardware_api, + file_provider=file_provider, equipment=equipment, movement=movement, gantry_mover=mock_gantry_mover, @@ -392,6 +396,7 @@ async def test_execute_undefined_error( state_store: StateStore, action_dispatcher: ActionDispatcher, equipment: EquipmentHandler, + file_provider: FileProvider, movement: MovementHandler, mock_gantry_mover: GantryMover, labware_movement: LabwareMovementHandler, @@ -474,6 +479,7 @@ class _TestCommand( queued_command._ImplementationCls( state_view=state_store, hardware_api=hardware_api, + file_provider=file_provider, equipment=equipment, movement=movement, gantry_mover=mock_gantry_mover, @@ -528,6 +534,7 @@ async def test_execute_defined_error( state_store: StateStore, action_dispatcher: ActionDispatcher, equipment: EquipmentHandler, + file_provider: FileProvider, movement: MovementHandler, mock_gantry_mover: GantryMover, labware_movement: LabwareMovementHandler, @@ -610,6 +617,7 @@ class _TestCommand( queued_command._ImplementationCls( state_view=state_store, hardware_api=hardware_api, + file_provider=file_provider, equipment=equipment, movement=movement, gantry_mover=mock_gantry_mover, diff --git a/robot-server/robot_server/file_provider/__init__.py b/robot-server/robot_server/file_provider/__init__.py new file mode 100644 index 00000000000..6a8b56c58eb --- /dev/null +++ b/robot-server/robot_server/file_provider/__init__.py @@ -0,0 +1 @@ +"""The HTTP API, and supporting code, for the File Provider entity provided to the Protocol Engine.""" diff --git 
a/robot-server/robot_server/file_provider/fastapi_dependencies.py b/robot-server/robot_server/file_provider/fastapi_dependencies.py new file mode 100644 index 00000000000..65042288f9a --- /dev/null +++ b/robot-server/robot_server/file_provider/fastapi_dependencies.py @@ -0,0 +1,39 @@ +"""Dependency functions for use with `fastapi.Depends()`.""" +from pathlib import Path +from typing import Annotated + +import fastapi + +from robot_server.file_provider.provider import FileProviderWrapper +from robot_server.data_files.dependencies import ( + get_data_files_directory, + get_data_files_store, +) +from robot_server.data_files.data_files_store import DataFilesStore +from opentrons.protocol_engine.resources.file_provider import FileProvider + + +async def get_file_provider_wrapper( + data_files_directory: Annotated[Path, fastapi.Depends(get_data_files_directory)], + data_files_store: Annotated[DataFilesStore, fastapi.Depends(get_data_files_store)], +) -> FileProviderWrapper: + """Return the server's singleton `FileProviderWrapper` which provides the engine related callbacks for FileProvider.""" + file_provider_wrapper = FileProviderWrapper( + data_files_directory=data_files_directory, data_files_store=data_files_store + ) + + return file_provider_wrapper + + +async def get_file_provider( + file_provider_wrapper: Annotated[ + FileProviderWrapper, fastapi.Depends(get_file_provider_wrapper) + ], +) -> FileProvider: + """Return the engine `FileProvider` which accepts callbacks from FileProviderWrapper.""" + file_provider = FileProvider( + data_files_write_csv_callback=file_provider_wrapper.write_csv_callback, + data_files_filecount=file_provider_wrapper.csv_filecount_callback, + ) + + return file_provider diff --git a/robot-server/robot_server/file_provider/provider.py b/robot-server/robot_server/file_provider/provider.py new file mode 100644 index 00000000000..5cfeb640fef --- /dev/null +++ b/robot-server/robot_server/file_provider/provider.py @@ -0,0 +1,74 @@ +"""Wrapper to 
provide the callbacks utilized by the Protocol Engine File Provider.""" +import os +import asyncio +import csv +from pathlib import Path +from typing import Annotated +from fastapi import Depends +from robot_server.data_files.dependencies import ( + get_data_files_directory, + get_data_files_store, +) +from ..service.dependencies import get_current_time, get_unique_id +from robot_server.data_files.data_files_store import DataFilesStore, DataFileInfo +from opentrons.protocol_engine.resources.file_provider import GenericCsvTransform + + +class FileProviderWrapper: + """Wrapper to provide File Read and Write capabilities to Protocol Engine.""" + + def __init__( + self, + data_files_directory: Annotated[Path, Depends(get_data_files_directory)], + data_files_store: Annotated[DataFilesStore, Depends(get_data_files_store)], + ) -> None: + """Provides callbacks for data file manipulation for the Protocol Engine's File Provider class. + + Params: + data_files_directory: The directory to store engine-created files in during a protocol run. + data_files_store: The data files store utilized for database interaction when creating files. + """ + self._data_files_directory = data_files_directory + self._data_files_store = data_files_store + + # data file store is not generally safe for concurrent access. + self._lock = asyncio.Lock() + + async def write_csv_callback( + self, + csv_data: GenericCsvTransform, + ) -> str: + """Write the provided data transform to a CSV file. 
Returns the File ID of the created file.""" + async with self._lock: + file_id = await get_unique_id() + os.makedirs( + os.path.dirname( + self._data_files_directory / file_id / csv_data.filename + ), + exist_ok=True, + ) + with open( + file=self._data_files_directory / file_id / csv_data.filename, + mode="w", + newline="", + ) as csvfile: + writer = csv.writer(csvfile, delimiter=csv_data.delimiter) + writer.writerows(csv_data.rows) + + created_at = await get_current_time() + # TODO (cb, 10-14-24): Engine created files do not currently get a file_hash, unlike explicitly uploaded files. Do they need one? + file_info = DataFileInfo( + id=file_id, + name=csv_data.filename, + file_hash="", + created_at=created_at, + ) + await self._data_files_store.insert(file_info) + return file_id + + async def csv_filecount_callback(self) -> int: + """Return the current count of files stored within the data files directory.""" + data_file_usage_info = [ + usage_info for usage_info in self._data_files_store.get_usage_info() + ] + return len(data_file_usage_info) diff --git a/robot-server/robot_server/maintenance_runs/maintenance_run_data_manager.py b/robot-server/robot_server/maintenance_runs/maintenance_run_data_manager.py index 589aaf5614d..46b2c86bd40 100644 --- a/robot-server/robot_server/maintenance_runs/maintenance_run_data_manager.py +++ b/robot-server/robot_server/maintenance_runs/maintenance_run_data_manager.py @@ -33,6 +33,7 @@ def _build_run( modules=[], liquids=[], wells=[], + files=[], hasEverEnteredErrorRecovery=False, ) return MaintenanceRun.construct( diff --git a/robot-server/robot_server/runs/router/base_router.py b/robot-server/robot_server/runs/router/base_router.py index b9bd8cd24b2..639e6d91628 100644 --- a/robot-server/robot_server/runs/router/base_router.py +++ b/robot-server/robot_server/runs/router/base_router.py @@ -72,6 +72,10 @@ get_deck_configuration_store, ) from robot_server.deck_configuration.store import DeckConfigurationStore +from 
robot_server.file_provider.fastapi_dependencies import ( + get_file_provider, +) +from opentrons.protocol_engine.resources.file_provider import FileProvider +from robot_server.service.notifications import get_pe_notify_publishers log = logging.getLogger(__name__) @@ -187,6 +191,7 @@ async def create_run( # noqa: C901 deck_configuration_store: Annotated[ DeckConfigurationStore, Depends(get_deck_configuration_store) ], + file_provider: Annotated[FileProvider, Depends(get_file_provider)], notify_publishers: Annotated[Callable[[], None], Depends(get_pe_notify_publishers)], request_body: Optional[RequestModel[RunCreate]] = None, ) -> PydanticResponse[SimpleBody[Union[Run, BadRun]]]: @@ -206,6 +211,7 @@ async def create_run( # noqa: C901 resources to make room for the new run. check_estop: Dependency to verify the estop is in a valid state. deck_configuration_store: Dependency to fetch the deck configuration. + file_provider: Dependency to provide access to file reading and writing to the Protocol Engine. notify_publishers: Utilized by the engine to notify publishers of state changes. 
""" protocol_id = request_body.data.protocolId if request_body is not None else None @@ -260,6 +266,7 @@ async def create_run( # noqa: C901 created_at=created_at, labware_offsets=offsets, deck_configuration=deck_configuration, + file_provider=file_provider, run_time_param_values=rtp_values, run_time_param_paths=rtp_paths, protocol=protocol_resource, diff --git a/robot-server/robot_server/runs/run_data_manager.py b/robot-server/robot_server/runs/run_data_manager.py index 4168b1d4d5d..3edf89ef163 100644 --- a/robot-server/robot_server/runs/run_data_manager.py +++ b/robot-server/robot_server/runs/run_data_manager.py @@ -34,6 +34,7 @@ from .run_models import Run, BadRun, RunDataError from opentrons.protocol_engine.types import DeckConfigurationType, RunTimeParameter +from opentrons.protocol_engine.resources.file_provider import FileProvider _INITIAL_ERROR_RECOVERY_RULES: list[ErrorRecoveryRule] = [] @@ -65,6 +66,7 @@ def _build_run( completedAt=state_summary.completedAt, startedAt=state_summary.startedAt, liquids=state_summary.liquids, + outputFileIds=state_summary.files, runTimeParameters=run_time_parameters, ) @@ -79,6 +81,7 @@ def _build_run( modules=[], liquids=[], wells=[], + files=[], hasEverEnteredErrorRecovery=False, ) errors.append(state_summary.dataError) @@ -122,6 +125,7 @@ def _build_run( startedAt=state.startedAt, liquids=state.liquids, runTimeParameters=run_time_parameters, + outputFileIds=state.files, hasEverEnteredErrorRecovery=state.hasEverEnteredErrorRecovery, ) @@ -170,6 +174,7 @@ async def create( created_at: datetime, labware_offsets: List[LabwareOffsetCreate], deck_configuration: DeckConfigurationType, + file_provider: FileProvider, run_time_param_values: Optional[PrimitiveRunTimeParamValuesType], run_time_param_paths: Optional[CSVRuntimeParamPaths], notify_publishers: Callable[[], None], @@ -217,6 +222,7 @@ async def create( labware_offsets=labware_offsets, initial_error_recovery_policy=initial_error_recovery_policy, 
deck_configuration=deck_configuration, + file_provider=file_provider, protocol=protocol, run_time_param_values=run_time_param_values, run_time_param_paths=run_time_param_paths, diff --git a/robot-server/robot_server/runs/run_models.py b/robot-server/robot_server/runs/run_models.py index 962c3ab51e7..bac7c4c740c 100644 --- a/robot-server/robot_server/runs/run_models.py +++ b/robot-server/robot_server/runs/run_models.py @@ -146,6 +146,10 @@ class Run(ResourceModel): " if none are specified in the request." ), ) + outputFileIds: List[str] = Field( + ..., + description="File IDs of files output during a protocol run.", + ) protocolId: Optional[str] = Field( None, description=( @@ -223,6 +227,10 @@ class BadRun(ResourceModel): " if none are specified in the request." ), ) + outputFileIds: List[str] = Field( + ..., + description="File IDs of files output during a protocol run.", + ) protocolId: Optional[str] = Field( None, description=( diff --git a/robot-server/robot_server/runs/run_orchestrator_store.py b/robot-server/robot_server/runs/run_orchestrator_store.py index 03af7315ef9..e05bd3bd349 100644 --- a/robot-server/robot_server/runs/run_orchestrator_store.py +++ b/robot-server/robot_server/runs/run_orchestrator_store.py @@ -52,6 +52,7 @@ EngineStatus, ) from opentrons_shared_data.labware.types import LabwareUri +from opentrons.protocol_engine.resources.file_provider import FileProvider _log = logging.getLogger(__name__) @@ -193,6 +194,7 @@ async def create( labware_offsets: List[LabwareOffsetCreate], initial_error_recovery_policy: error_recovery_policy.ErrorRecoveryPolicy, deck_configuration: DeckConfigurationType, + file_provider: FileProvider, notify_publishers: Callable[[], None], protocol: Optional[ProtocolResource], run_time_param_values: Optional[PrimitiveRunTimeParamValuesType] = None, @@ -236,6 +238,7 @@ async def create( error_recovery_policy=initial_error_recovery_policy, load_fixed_trash=load_fixed_trash, deck_configuration=deck_configuration, + 
file_provider=file_provider, notify_publishers=notify_publishers, ) diff --git a/robot-server/tests/integration/http_api/runs/test_json_v6_protocol_run.tavern.yaml b/robot-server/tests/integration/http_api/runs/test_json_v6_protocol_run.tavern.yaml index 48e1088eb4c..fd98c29a2dc 100644 --- a/robot-server/tests/integration/http_api/runs/test_json_v6_protocol_run.tavern.yaml +++ b/robot-server/tests/integration/http_api/runs/test_json_v6_protocol_run.tavern.yaml @@ -52,6 +52,7 @@ stages: description: Liquid H2O displayColor: '#7332a8' runTimeParameters: [] + outputFileIds: [] protocolId: '{protocol_id}' - name: Execute a setup command diff --git a/robot-server/tests/integration/http_api/runs/test_json_v7_protocol_run.tavern.yaml b/robot-server/tests/integration/http_api/runs/test_json_v7_protocol_run.tavern.yaml index 0915fb69f12..3ab7386ba4f 100644 --- a/robot-server/tests/integration/http_api/runs/test_json_v7_protocol_run.tavern.yaml +++ b/robot-server/tests/integration/http_api/runs/test_json_v7_protocol_run.tavern.yaml @@ -47,6 +47,7 @@ stages: location: !anydict labwareOffsets: [] runTimeParameters: [] + outputFileIds: [] liquids: - id: waterId displayName: Water diff --git a/robot-server/tests/integration/http_api/runs/test_protocol_run.tavern.yaml b/robot-server/tests/integration/http_api/runs/test_protocol_run.tavern.yaml index 2bfa2ccd552..2ad0a92eb8c 100644 --- a/robot-server/tests/integration/http_api/runs/test_protocol_run.tavern.yaml +++ b/robot-server/tests/integration/http_api/runs/test_protocol_run.tavern.yaml @@ -44,6 +44,7 @@ stages: location: !anydict labwareOffsets: [] runTimeParameters: [] + outputFileIds: [] protocolId: '{protocol_id}' liquids: [] save: @@ -240,6 +241,7 @@ stages: startedAt: !re_fullmatch "\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}\\.\\d+(Z|([+-]\\d{2}:\\d{2}))" liquids: [] runTimeParameters: [] + outputFileIds: [] completedAt: !re_fullmatch "\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}\\.\\d+(Z|([+-]\\d{2}:\\d{2}))" errors: [] 
hasEverEnteredErrorRecovery: False diff --git a/robot-server/tests/integration/http_api/runs/test_run_queued_protocol_commands.tavern.yaml b/robot-server/tests/integration/http_api/runs/test_run_queued_protocol_commands.tavern.yaml index edec26c4e03..14ae502d800 100644 --- a/robot-server/tests/integration/http_api/runs/test_run_queued_protocol_commands.tavern.yaml +++ b/robot-server/tests/integration/http_api/runs/test_run_queued_protocol_commands.tavern.yaml @@ -96,6 +96,7 @@ stages: labwareOffsets: [] liquids: [] runTimeParameters: [] + outputFileIds: [] modules: [] pipettes: [] status: 'idle' diff --git a/robot-server/tests/integration/http_api/runs/test_run_with_run_time_parameters.tavern.yaml b/robot-server/tests/integration/http_api/runs/test_run_with_run_time_parameters.tavern.yaml index 2533769b56f..8916ebd1cf2 100644 --- a/robot-server/tests/integration/http_api/runs/test_run_with_run_time_parameters.tavern.yaml +++ b/robot-server/tests/integration/http_api/runs/test_run_with_run_time_parameters.tavern.yaml @@ -115,6 +115,7 @@ stages: file: id: '{data_file_id}' name: sample_plates.csv + outputFileIds: [] liquids: [] protocolId: '{protocol_id}' diff --git a/robot-server/tests/protocols/test_protocol_analyzer.py b/robot-server/tests/protocols/test_protocol_analyzer.py index 8448ded8870..5d3d9da8a13 100644 --- a/robot-server/tests/protocols/test_protocol_analyzer.py +++ b/robot-server/tests/protocols/test_protocol_analyzer.py @@ -190,6 +190,7 @@ async def test_analyze( labwareOffsets=[], liquids=[], wells=[], + files=[], hasEverEnteredErrorRecovery=False, ), parameters=[bool_parameter], diff --git a/robot-server/tests/runs/router/conftest.py b/robot-server/tests/runs/router/conftest.py index 700be63f4ef..0ca0c5cc4f5 100644 --- a/robot-server/tests/runs/router/conftest.py +++ b/robot-server/tests/runs/router/conftest.py @@ -12,7 +12,10 @@ ) from robot_server.deck_configuration.store import DeckConfigurationStore +from robot_server.file_provider.provider import 
FileProviderWrapper + from opentrons.protocol_engine import ProtocolEngine +from opentrons.protocol_engine.resources import FileProvider @pytest.fixture() @@ -63,3 +66,17 @@ def mock_maintenance_run_orchestrator_store( def mock_deck_configuration_store(decoy: Decoy) -> DeckConfigurationStore: """Get a mock DeckConfigurationStore.""" return decoy.mock(cls=DeckConfigurationStore) + + +@pytest.fixture() +def mock_file_provider_wrapper(decoy: Decoy) -> FileProviderWrapper: + """Return a mock FileProviderWrapper.""" + return decoy.mock(cls=FileProviderWrapper) + + +@pytest.fixture() +def mock_file_provider( + decoy: Decoy, mock_file_provider_wrapper: FileProviderWrapper +) -> FileProvider: + """Return a mock FileProvider.""" + return decoy.mock(cls=FileProvider) diff --git a/robot-server/tests/runs/router/test_base_router.py b/robot-server/tests/runs/router/test_base_router.py index 8a10af1940d..25c91f70ade 100644 --- a/robot-server/tests/runs/router/test_base_router.py +++ b/robot-server/tests/runs/router/test_base_router.py @@ -69,6 +69,10 @@ ) from robot_server.deck_configuration.store import DeckConfigurationStore +from opentrons.protocol_engine.resources.file_provider import ( + FileProvider, +) +from robot_server.file_provider.provider import FileProviderWrapper def mock_notify_publishers() -> None: @@ -125,8 +129,10 @@ async def test_create_run( mock_run_auto_deleter: RunAutoDeleter, labware_offset_create: pe_types.LabwareOffsetCreate, mock_deck_configuration_store: DeckConfigurationStore, + mock_file_provider_wrapper: FileProviderWrapper, mock_protocol_store: ProtocolStore, mock_data_files_store: DataFilesStore, + mock_file_provider: FileProvider, ) -> None: """It should be able to create a basic run.""" run_id = "run-id" @@ -145,17 +151,20 @@ async def test_create_run( labwareOffsets=[], status=pe_types.EngineStatus.IDLE, liquids=[], + outputFileIds=[], hasEverEnteredErrorRecovery=False, ) decoy.when( await mock_deck_configuration_store.get_deck_configuration() 
).then_return([]) + decoy.when( await mock_run_data_manager.create( run_id=run_id, created_at=run_created_at, labware_offsets=[labware_offset_create], deck_configuration=[], + file_provider=mock_file_provider, protocol=None, run_time_param_values=None, run_time_param_paths=None, @@ -175,6 +184,7 @@ async def test_create_run( run_auto_deleter=mock_run_auto_deleter, quick_transfer_run_auto_deleter=mock_run_auto_deleter, deck_configuration_store=mock_deck_configuration_store, + file_provider=mock_file_provider, notify_publishers=mock_notify_publishers, protocol_store=mock_protocol_store, check_estop=True, @@ -193,6 +203,7 @@ async def test_create_protocol_run( mock_run_auto_deleter: RunAutoDeleter, mock_deck_configuration_store: DeckConfigurationStore, mock_data_files_store: DataFilesStore, + mock_file_provider: FileProvider, ) -> None: """It should be able to create a protocol run.""" run_id = "run-id" @@ -228,6 +239,7 @@ async def test_create_protocol_run( labwareOffsets=[], status=pe_types.EngineStatus.IDLE, liquids=[], + outputFileIds=[], hasEverEnteredErrorRecovery=False, ) decoy.when(mock_data_files_store.get("file-id")).then_return( @@ -251,6 +263,7 @@ async def test_create_protocol_run( created_at=run_created_at, labware_offsets=[], deck_configuration=[], + file_provider=mock_file_provider, protocol=protocol_resource, run_time_param_values={"foo": "bar"}, run_time_param_paths={"my-csv-param": Path("/dev/null/file-id/abc.xyz")}, @@ -275,6 +288,7 @@ async def test_create_protocol_run( run_auto_deleter=mock_run_auto_deleter, quick_transfer_run_auto_deleter=mock_run_auto_deleter, deck_configuration_store=mock_deck_configuration_store, + file_provider=mock_file_provider, notify_publishers=mock_notify_publishers, check_estop=True, ) @@ -293,6 +307,7 @@ async def test_create_protocol_run_bad_protocol_id( mock_run_auto_deleter: RunAutoDeleter, mock_data_files_store: DataFilesStore, mock_data_files_directory: Path, + mock_file_provider: FileProvider, ) -> None: """It 
should 404 if a protocol for a run does not exist.""" error = ProtocolNotFoundError("protocol-id") @@ -309,6 +324,7 @@ async def test_create_protocol_run_bad_protocol_id( run_data_manager=mock_run_data_manager, data_files_store=mock_data_files_store, data_files_directory=mock_data_files_directory, + file_provider=mock_file_provider, run_id="run-id", created_at=datetime.now(), run_auto_deleter=mock_run_auto_deleter, @@ -329,6 +345,7 @@ async def test_create_run_conflict( mock_protocol_store: ProtocolStore, mock_data_files_store: DataFilesStore, mock_data_files_directory: Path, + mock_file_provider: FileProvider, ) -> None: """It should respond with a conflict error if multiple engines are created.""" created_at = datetime(year=2021, month=1, day=1) @@ -342,6 +359,7 @@ async def test_create_run_conflict( created_at=created_at, labware_offsets=[], deck_configuration=[], + file_provider=mock_file_provider, protocol=None, run_time_param_values=None, run_time_param_paths=None, @@ -361,6 +379,7 @@ async def test_create_run_conflict( deck_configuration_store=mock_deck_configuration_store, data_files_store=mock_data_files_store, data_files_directory=mock_data_files_directory, + file_provider=mock_file_provider, notify_publishers=mock_notify_publishers, check_estop=True, ) @@ -387,6 +406,7 @@ async def test_get_run_data_from_url( labware=[], labwareOffsets=[], liquids=[], + outputFileIds=[], hasEverEnteredErrorRecovery=False, ) @@ -434,6 +454,7 @@ async def test_get_run() -> None: labware=[], labwareOffsets=[], liquids=[], + outputFileIds=[], hasEverEnteredErrorRecovery=False, ) @@ -480,6 +501,7 @@ async def test_get_runs_not_empty( labware=[], labwareOffsets=[], liquids=[], + outputFileIds=[], hasEverEnteredErrorRecovery=False, ) @@ -496,6 +518,7 @@ async def test_get_runs_not_empty( labware=[], labwareOffsets=[], liquids=[], + outputFileIds=[], hasEverEnteredErrorRecovery=False, ) @@ -575,6 +598,7 @@ async def test_update_run_to_not_current( labware=[], labwareOffsets=[], 
liquids=[], + outputFileIds=[], hasEverEnteredErrorRecovery=False, ) @@ -610,6 +634,7 @@ async def test_update_current_none_noop( labware=[], labwareOffsets=[], liquids=[], + outputFileIds=[], hasEverEnteredErrorRecovery=False, ) diff --git a/robot-server/tests/runs/router/test_labware_router.py b/robot-server/tests/runs/router/test_labware_router.py index 1e3b929446d..900eac530f1 100644 --- a/robot-server/tests/runs/router/test_labware_router.py +++ b/robot-server/tests/runs/router/test_labware_router.py @@ -40,6 +40,7 @@ def run() -> Run: labwareOffsets=[], protocolId=None, liquids=[], + outputFileIds=[], hasEverEnteredErrorRecovery=False, ) diff --git a/robot-server/tests/runs/test_run_controller.py b/robot-server/tests/runs/test_run_controller.py index d60e9da6082..b069632a4e4 100644 --- a/robot-server/tests/runs/test_run_controller.py +++ b/robot-server/tests/runs/test_run_controller.py @@ -72,6 +72,7 @@ def engine_state_summary() -> StateSummary: modules=[], liquids=[], wells=[], + files=[], hasEverEnteredErrorRecovery=False, ) diff --git a/robot-server/tests/runs/test_run_data_manager.py b/robot-server/tests/runs/test_run_data_manager.py index 981a0e7177c..869f1c1c643 100644 --- a/robot-server/tests/runs/test_run_data_manager.py +++ b/robot-server/tests/runs/test_run_data_manager.py @@ -52,6 +52,8 @@ ) from robot_server.service.notifications import RunsPublisher from robot_server.service.task_runner import TaskRunner +from opentrons.protocol_engine.resources import FileProvider +from robot_server.file_provider.provider import FileProviderWrapper def mock_notify_publishers() -> None: @@ -141,6 +143,20 @@ def mock_nozzle_maps(decoy: Decoy) -> Dict[str, NozzleMap]: return {"mock-pipette-id": mock_nozzle_map} +@pytest.fixture() +def mock_file_provider_wrapper(decoy: Decoy) -> FileProviderWrapper: + """Return a mock FileProviderWrapper.""" + return decoy.mock(cls=FileProviderWrapper) + + +@pytest.fixture() +def mock_file_provider( + decoy: Decoy, 
mock_file_provider_wrapper: FileProviderWrapper +) -> FileProvider: + """Return a mock FileProvider.""" + return decoy.mock(cls=FileProvider) + + @pytest.fixture def run_resource() -> RunResource: """Get a StateSummary value object.""" @@ -210,6 +226,7 @@ async def test_create( initial_error_recovery_policy=sentinel.initial_error_recovery_policy, protocol=protocol, deck_configuration=sentinel.deck_configuration, + file_provider=sentinel.file_provider, run_time_param_values=sentinel.run_time_param_values, run_time_param_paths=sentinel.run_time_param_paths, notify_publishers=mock_notify_publishers, @@ -251,6 +268,7 @@ async def test_create( labware_offsets=sentinel.labware_offsets, protocol=protocol, deck_configuration=sentinel.deck_configuration, + file_provider=sentinel.file_provider, run_time_param_values=sentinel.run_time_param_values, run_time_param_paths=sentinel.run_time_param_paths, notify_publishers=mock_notify_publishers, @@ -271,6 +289,7 @@ async def test_create( modules=engine_state_summary.modules, liquids=engine_state_summary.liquids, runTimeParameters=[bool_parameter, file_parameter], + outputFileIds=engine_state_summary.files, ) decoy.verify( mock_run_store.insert_csv_rtp( @@ -284,6 +303,7 @@ async def test_create_engine_error( mock_run_orchestrator_store: RunOrchestratorStore, mock_run_store: RunStore, mock_error_recovery_setting_store: ErrorRecoverySettingStore, + mock_file_provider: FileProvider, subject: RunDataManager, ) -> None: """It should not create a resource if engine creation fails.""" @@ -307,6 +327,7 @@ async def test_create_engine_error( labware_offsets=[], protocol=None, deck_configuration=[], + file_provider=mock_file_provider, run_time_param_values=None, run_time_param_paths=None, notify_publishers=mock_notify_publishers, @@ -321,6 +342,7 @@ async def test_create_engine_error( labware_offsets=[], protocol=None, deck_configuration=[], + file_provider=mock_file_provider, run_time_param_values=None, run_time_param_paths=None, 
notify_publishers=mock_notify_publishers, @@ -374,6 +396,7 @@ async def test_get_current_run( modules=engine_state_summary.modules, liquids=engine_state_summary.liquids, runTimeParameters=run_time_parameters, + outputFileIds=engine_state_summary.files, ) assert subject.current_run_id == run_id @@ -416,6 +439,7 @@ async def test_get_historical_run( modules=engine_state_summary.modules, liquids=engine_state_summary.liquids, runTimeParameters=run_time_parameters, + outputFileIds=engine_state_summary.files, ) @@ -459,6 +483,7 @@ async def test_get_historical_run_no_data( modules=[], liquids=[], runTimeParameters=run_time_parameters, + outputFileIds=[], ) @@ -560,6 +585,7 @@ async def test_get_all_runs( modules=historical_run_data.modules, liquids=historical_run_data.liquids, runTimeParameters=historical_run_time_parameters, + outputFileIds=historical_run_data.files, ), Run( current=True, @@ -576,6 +602,7 @@ async def test_get_all_runs( modules=current_run_data.modules, liquids=current_run_data.liquids, runTimeParameters=current_run_time_parameters, + outputFileIds=current_run_data.files, ), ] @@ -674,6 +701,7 @@ async def test_update_current( modules=engine_state_summary.modules, liquids=engine_state_summary.liquids, runTimeParameters=run_time_parameters, + outputFileIds=engine_state_summary.files, ) @@ -730,6 +758,7 @@ async def test_update_current_noop( modules=engine_state_summary.modules, liquids=engine_state_summary.liquids, runTimeParameters=run_time_parameters, + outputFileIds=engine_state_summary.files, ) @@ -759,6 +788,7 @@ async def test_create_archives_existing( mock_run_orchestrator_store: RunOrchestratorStore, mock_run_store: RunStore, mock_error_recovery_setting_store: ErrorRecoverySettingStore, + mock_file_provider: FileProvider, subject: RunDataManager, ) -> None: """It should persist the previously current run when a new run is created.""" @@ -792,6 +822,7 @@ async def test_create_archives_existing( protocol=None, 
initial_error_recovery_policy=sentinel.initial_error_recovery_policy, deck_configuration=[], + file_provider=mock_file_provider, run_time_param_values=None, run_time_param_paths=None, notify_publishers=mock_notify_publishers, @@ -812,6 +843,7 @@ async def test_create_archives_existing( labware_offsets=[], protocol=None, deck_configuration=[], + file_provider=mock_file_provider, run_time_param_values=None, run_time_param_paths=None, notify_publishers=mock_notify_publishers, diff --git a/robot-server/tests/runs/test_run_orchestrator_store.py b/robot-server/tests/runs/test_run_orchestrator_store.py index e34c1340359..1774215acfd 100644 --- a/robot-server/tests/runs/test_run_orchestrator_store.py +++ b/robot-server/tests/runs/test_run_orchestrator_store.py @@ -24,6 +24,7 @@ NoRunOrchestrator, handle_estop_event, ) +from opentrons.protocol_engine.resources import FileProvider def mock_notify_publishers() -> None: @@ -61,6 +62,7 @@ async def test_create_engine(decoy: Decoy, subject: RunOrchestratorStore) -> Non labware_offsets=[], initial_error_recovery_policy=never_recover, protocol=None, + file_provider=FileProvider(), deck_configuration=[], notify_publishers=mock_notify_publishers, ) @@ -90,6 +92,7 @@ async def test_create_engine_uses_robot_type( initial_error_recovery_policy=never_recover, deck_configuration=[], protocol=None, + file_provider=FileProvider(), notify_publishers=mock_notify_publishers, ) @@ -112,6 +115,7 @@ async def test_create_engine_with_labware_offsets( initial_error_recovery_policy=never_recover, deck_configuration=[], protocol=None, + file_provider=FileProvider(), notify_publishers=mock_notify_publishers, ) @@ -136,6 +140,7 @@ async def test_archives_state_if_engine_already_exists( initial_error_recovery_policy=never_recover, deck_configuration=[], protocol=None, + file_provider=FileProvider(), notify_publishers=mock_notify_publishers, ) @@ -146,6 +151,7 @@ async def test_archives_state_if_engine_already_exists( 
initial_error_recovery_policy=never_recover, deck_configuration=[], protocol=None, + file_provider=FileProvider(), notify_publishers=mock_notify_publishers, ) @@ -160,6 +166,7 @@ async def test_clear_engine(subject: RunOrchestratorStore) -> None: initial_error_recovery_policy=never_recover, deck_configuration=[], protocol=None, + file_provider=FileProvider(), notify_publishers=mock_notify_publishers, ) assert subject._run_orchestrator is not None @@ -182,6 +189,7 @@ async def test_clear_engine_not_stopped_or_idle( initial_error_recovery_policy=never_recover, deck_configuration=[], protocol=None, + file_provider=FileProvider(), notify_publishers=mock_notify_publishers, ) assert subject._run_orchestrator is not None @@ -198,6 +206,7 @@ async def test_clear_idle_engine(subject: RunOrchestratorStore) -> None: initial_error_recovery_policy=never_recover, deck_configuration=[], protocol=None, + file_provider=FileProvider(), notify_publishers=mock_notify_publishers, ) assert subject._run_orchestrator is not None @@ -250,6 +259,7 @@ async def test_get_default_orchestrator_current_unstarted( initial_error_recovery_policy=never_recover, deck_configuration=[], protocol=None, + file_provider=FileProvider(), notify_publishers=mock_notify_publishers, ) @@ -265,6 +275,7 @@ async def test_get_default_orchestrator_conflict(subject: RunOrchestratorStore) initial_error_recovery_policy=never_recover, deck_configuration=[], protocol=None, + file_provider=FileProvider(), notify_publishers=mock_notify_publishers, ) subject.play() @@ -283,6 +294,7 @@ async def test_get_default_orchestrator_run_stopped( initial_error_recovery_policy=never_recover, deck_configuration=[], protocol=None, + file_provider=FileProvider(), notify_publishers=mock_notify_publishers, ) await subject.finish(error=None) diff --git a/robot-server/tests/runs/test_run_store.py b/robot-server/tests/runs/test_run_store.py index 55a1849e693..ce6f8326c22 100644 --- a/robot-server/tests/runs/test_run_store.py +++ 
b/robot-server/tests/runs/test_run_store.py @@ -132,6 +132,7 @@ def state_summary() -> StateSummary: status=EngineStatus.IDLE, liquids=liquids, wells=[], + files=[], hasEverEnteredErrorRecovery=False, ) @@ -216,6 +217,7 @@ def invalid_state_summary() -> StateSummary: status=EngineStatus.IDLE, liquids=liquids, wells=[], + files=[], ) diff --git a/shared-data/command/schemas/10.json b/shared-data/command/schemas/10.json index 6508269ac62..be8e870c5bb 100644 --- a/shared-data/command/schemas/10.json +++ b/shared-data/command/schemas/10.json @@ -4236,6 +4236,11 @@ "title": "Moduleid", "description": "Unique ID of the Absorbance Reader.", "type": "string" + }, + "fileName": { + "title": "Filename", + "description": "Optional file name to use when storing the results of a measurement.", + "type": "string" } }, "required": ["moduleId"]