From d9f7d2ac5fbe7419dce30c2da94aabf0dbc419ff Mon Sep 17 00:00:00 2001 From: CaseyBatten Date: Sat, 19 Oct 2024 13:16:02 -0400 Subject: [PATCH 1/9] feat(api, robot-server): Plate Reader CSV output functionality (#16495) Covers PLAT-380, PLAT-468 Allows plate reader to save CSV files through a new FileProvider in protocol engine tool, created and passed in through robot-server. --- api/src/opentrons/cli/analyze.py | 1 + api/src/opentrons/execute.py | 1 + .../protocol_api/core/engine/module_core.py | 27 ++- api/src/opentrons/protocol_api/core/module.py | 2 +- .../opentrons/protocol_api/module_contexts.py | 17 +- .../commands/absorbance_reader/read.py | 104 +++++++++++- .../protocol_engine/commands/command.py | 1 + .../protocol_engine/create_protocol_engine.py | 10 +- .../protocol_engine/engine_support.py | 3 +- .../protocol_engine/errors/__init__.py | 2 + .../protocol_engine/errors/exceptions.py | 13 ++ .../protocol_engine/execution/__init__.py | 2 + .../execution/command_executor.py | 5 +- .../execution/create_queue_worker.py | 4 + .../protocol_engine/protocol_engine.py | 5 +- .../protocol_engine/resources/__init__.py | 2 + .../resources/file_provider.py | 157 ++++++++++++++++++ .../opentrons/protocol_engine/state/files.py | 59 +++++++ .../opentrons/protocol_engine/state/state.py | 13 ++ .../protocol_engine/state/state_summary.py | 1 + api/src/opentrons/simulate.py | 1 + api/tests/opentrons/conftest.py | 1 + .../engine/test_absorbance_reader_core.py | 23 ++- .../opentrons/protocol_engine/conftest.py | 7 + .../execution/test_command_executor.py | 10 +- .../robot_server/file_provider/__init__.py | 1 + .../file_provider/fastapi_dependencies.py | 39 +++++ .../robot_server/file_provider/provider.py | 74 +++++++++ .../maintenance_run_data_manager.py | 1 + .../robot_server/runs/router/base_router.py | 7 + .../robot_server/runs/run_data_manager.py | 6 + robot-server/robot_server/runs/run_models.py | 8 + .../runs/run_orchestrator_store.py | 3 + .../test_json_v6_protocol_run.tavern.yaml | 1 + .../test_json_v7_protocol_run.tavern.yaml | 1 + .../runs/test_protocol_run.tavern.yaml | 2 + ...t_run_queued_protocol_commands.tavern.yaml | 1 + ...t_run_with_run_time_parameters.tavern.yaml | 1 + .../tests/protocols/test_protocol_analyzer.py | 1 + robot-server/tests/runs/router/conftest.py | 17 ++ .../tests/runs/router/test_base_router.py | 25 +++ .../tests/runs/router/test_labware_router.py | 1 + .../tests/runs/test_run_controller.py | 1 + .../tests/runs/test_run_data_manager.py | 32 ++++ .../tests/runs/test_run_orchestrator_store.py | 12 ++ robot-server/tests/runs/test_run_store.py | 2 + shared-data/command/schemas/10.json | 5 + 47 files changed, 692 insertions(+), 20 deletions(-) create mode 100644 api/src/opentrons/protocol_engine/resources/file_provider.py create mode 100644 api/src/opentrons/protocol_engine/state/files.py create mode 100644 robot-server/robot_server/file_provider/__init__.py create mode 100644 robot-server/robot_server/file_provider/fastapi_dependencies.py create mode 100644 robot-server/robot_server/file_provider/provider.py diff --git a/api/src/opentrons/cli/analyze.py b/api/src/opentrons/cli/analyze.py index f311adce402..8489da83d68 100644 --- a/api/src/opentrons/cli/analyze.py +++ b/api/src/opentrons/cli/analyze.py @@ -332,6 +332,7 @@ async def _do_analyze( liquids=[], wells=[], hasEverEnteredErrorRecovery=False, + files=[], ), parameters=[], ) diff --git a/api/src/opentrons/execute.py b/api/src/opentrons/execute.py index ade74b1aadd..a9b3562d82b 100644 --- 
a/api/src/opentrons/execute.py +++ b/api/src/opentrons/execute.py @@ -546,6 +546,7 @@ def _create_live_context_pe( hardware_api=hardware_api_wrapped, config=_get_protocol_engine_config(), deck_configuration=entrypoint_util.get_deck_configuration(), + file_provider=None, error_recovery_policy=error_recovery_policy.never_recover, drop_tips_after_run=False, post_run_hardware_state=PostRunHardwareState.STAY_ENGAGED_IN_PLACE, diff --git a/api/src/opentrons/protocol_api/core/engine/module_core.py b/api/src/opentrons/protocol_api/core/engine/module_core.py index 47b49c54e23..1d800dee7ea 100644 --- a/api/src/opentrons/protocol_api/core/engine/module_core.py +++ b/api/src/opentrons/protocol_api/core/engine/module_core.py @@ -586,11 +586,20 @@ def initialize( ) self._initialized_value = wavelengths - def read(self) -> Optional[Dict[int, Dict[str, float]]]: - """Initiate a read on the Absorbance Reader, and return the results. During Analysis, this will return None.""" + def read(self, filename: Optional[str]) -> Dict[int, Dict[str, float]]: + """Initiate a read on the Absorbance Reader, and return the results. During Analysis, this will return a measurement of zero for all wells.""" + wavelengths = self._engine_client.state.modules.get_absorbance_reader_substate( + self.module_id + ).configured_wavelengths + if wavelengths is None: + raise CannotPerformModuleAction( + "Cannot perform Read action on Absorbance Reader without calling `.initialize(...)` first." + ) if self._initialized_value: self._engine_client.execute_command( - cmd.absorbance_reader.ReadAbsorbanceParams(moduleId=self.module_id) + cmd.absorbance_reader.ReadAbsorbanceParams( + moduleId=self.module_id, fileName=filename + ) ) if not self._engine_client.state.config.use_virtual_modules: read_result = ( @@ -603,7 +612,17 @@ def read(self) -> Optional[Dict[int, Dict[str, float]]]: raise CannotPerformModuleAction( "Absorbance Reader failed to return expected read result." ) - return None + + # When using virtual modules, return all zeroes + virtual_asbsorbance_result: Dict[int, Dict[str, float]] = {} + for wavelength in wavelengths: + converted_values = ( + self._engine_client.state.modules.convert_absorbance_reader_data_points( + data=[0] * 96 + ) + ) + virtual_asbsorbance_result[wavelength] = converted_values + return virtual_asbsorbance_result def close_lid( self, diff --git a/api/src/opentrons/protocol_api/core/module.py b/api/src/opentrons/protocol_api/core/module.py index 90abea1d0ec..c93e8ce8de8 100644 --- a/api/src/opentrons/protocol_api/core/module.py +++ b/api/src/opentrons/protocol_api/core/module.py @@ -365,7 +365,7 @@ def initialize( """Initialize the Absorbance Reader by taking zero reading.""" @abstractmethod - def read(self) -> Optional[Dict[int, Dict[str, float]]]: + def read(self, filename: Optional[str]) -> Dict[int, Dict[str, float]]: """Get an absorbance reading from the Absorbance Reader.""" @abstractmethod diff --git a/api/src/opentrons/protocol_api/module_contexts.py b/api/src/opentrons/protocol_api/module_contexts.py index 5d182843dcc..f7541da1836 100644 --- a/api/src/opentrons/protocol_api/module_contexts.py +++ b/api/src/opentrons/protocol_api/module_contexts.py @@ -1035,6 +1035,17 @@ def initialize( ) @requires_version(2, 21) - def read(self) -> Optional[Dict[int, Dict[str, float]]]: - """Initiate read on the Absorbance Reader. 
Returns a dictionary of wavelengths to dictionary of values ordered by well name.""" - return self._core.read() + def read(self, export_filename: Optional[str]) -> Dict[int, Dict[str, float]]: + """Initiate read on the Absorbance Reader. + + Returns a dictionary of wavelengths to dictionary of values ordered by well name. + + :param export_filename: Optional, if a filename is provided a CSV file will be saved + as a result of the read action containing measurement data. The filename will + be modified to include the wavelength used during measurement. If multiple + measurements are taken, then a file will be generated for each wavelength provided. + + Example: If `export_filename="my_data"` and wavelengths 450 and 531 are used during + measurement, the output files will be "my_data_450.csv" and "my_data_531.csv". + """ + return self._core.read(filename=export_filename) diff --git a/api/src/opentrons/protocol_engine/commands/absorbance_reader/read.py b/api/src/opentrons/protocol_engine/commands/absorbance_reader/read.py index b101cdb70b8..caf8a738f09 100644 --- a/api/src/opentrons/protocol_engine/commands/absorbance_reader/read.py +++ b/api/src/opentrons/protocol_engine/commands/absorbance_reader/read.py @@ -1,14 +1,22 @@ """Command models to read absorbance.""" from __future__ import annotations -from typing import Optional, Dict, TYPE_CHECKING +from datetime import datetime +from typing import Optional, Dict, TYPE_CHECKING, List from typing_extensions import Literal, Type from pydantic import BaseModel, Field from ..command import AbstractCommandImpl, BaseCommand, BaseCommandCreate, SuccessData -from ...errors import CannotPerformModuleAction +from ...errors import CannotPerformModuleAction, StorageLimitReachedError from ...errors.error_occurrence import ErrorOccurrence +from ...resources.file_provider import ( + PlateReaderData, + ReadData, + MAXIMUM_CSV_FILE_LIMIT, +) +from ...resources import FileProvider + if TYPE_CHECKING: from opentrons.protocol_engine.state.state import StateView from opentrons.protocol_engine.execution import EquipmentHandler @@ -21,6 +29,10 @@ class ReadAbsorbanceParams(BaseModel): """Input parameters for an absorbance reading.""" moduleId: str = Field(..., description="Unique ID of the Absorbance Reader.") + fileName: Optional[str] = Field( + None, + description="Optional file name to use when storing the results of a measurement.", + ) class ReadAbsorbanceResult(BaseModel): @@ -29,6 +41,10 @@ class ReadAbsorbanceResult(BaseModel): data: Optional[Dict[int, Dict[str, float]]] = Field( ..., description="Absorbance data points per wavelength." ) + fileIds: Optional[List[str]] = Field( + ..., + description="List of file IDs for files output as a result of a Read action.", + ) class ReadAbsorbanceImpl( @@ -40,18 +56,21 @@ def __init__( self, state_view: StateView, equipment: EquipmentHandler, + file_provider: FileProvider, **unused_dependencies: object, ) -> None: self._state_view = state_view self._equipment = equipment + self._file_provider = file_provider - async def execute( + async def execute( # noqa: C901 self, params: ReadAbsorbanceParams ) -> SuccessData[ReadAbsorbanceResult, None]: """Initiate an absorbance measurement.""" abs_reader_substate = self._state_view.modules.get_absorbance_reader_substate( module_id=params.moduleId ) + # Allow propagation of ModuleNotAttachedError. 
abs_reader = self._equipment.get_module_hardware_api( abs_reader_substate.module_id ) if abs_reader_substate.configured is False: raise CannotPerformModuleAction( "Cannot perform Read action on Absorbance Reader without calling `.initialize(...)` first." ) + # TODO: we need to return a file ID and increase the file count even when a module is not attached + if ( + params.fileName is not None + and abs_reader_substate.configured_wavelengths is not None + ): + # Validate that the number of files we are about to generate does not exceed the limit + if ( + self._state_view.files.get_filecount() + + len(abs_reader_substate.configured_wavelengths) + > MAXIMUM_CSV_FILE_LIMIT + ): + raise StorageLimitReachedError( + message=f"Attempt to write file {params.fileName} exceeds file creation limit of {MAXIMUM_CSV_FILE_LIMIT} files." + ) + + asbsorbance_result: Dict[int, Dict[str, float]] = {} + transform_results = [] + # Handle the measurement and begin building data for return if abs_reader is not None: + start_time = datetime.now() results = await abs_reader.start_measure() + finish_time = datetime.now() if abs_reader._measurement_config is not None: - asbsorbance_result: Dict[int, Dict[str, float]] = {} sample_wavelengths = abs_reader._measurement_config.sample_wavelengths for wavelength, result in zip(sample_wavelengths, results): converted_values = ( @@ -74,13 +112,67 @@ async def execute( ) ) asbsorbance_result[wavelength] = converted_values + transform_results.append( + ReadData.construct(wavelength=wavelength, data=converted_values) + ) + # Handle the virtual module case for data creation (all zeroes) + elif self._state_view.config.use_virtual_modules: + start_time = finish_time = datetime.now() + if abs_reader_substate.configured_wavelengths is not None: + for wavelength in abs_reader_substate.configured_wavelengths: + converted_values = ( + self._state_view.modules.convert_absorbance_reader_data_points( + data=[0] * 96 + ) + ) + asbsorbance_result[wavelength] = converted_values + transform_results.append( + ReadData.construct(wavelength=wavelength, data=converted_values) + ) + else: + raise CannotPerformModuleAction( + "Plate Reader data cannot be requested with a module that has not been initialized." + ) + + # TODO (cb, 10-17-2024): FILE PROVIDER - Some day we may want to break the file provider behavior into a separate API function. + # When this happens, we will probably have to change the command results handler we utilize to track file IDs in the engine. + # Today, the action handler for the FileStore looks for a ReadAbsorbanceResult command action; this will need to be delinked.
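As a rough usage sketch (not part of the patch itself), a protocol targeting API level 2.21 might drive this path as follows; the module load name, deck slot, and `initialize()` arguments are assumptions for illustration, while `read(export_filename=...)` and the per-wavelength CSV names follow the docstring earlier in this patch.

requirements = {"robotType": "Flex", "apiLevel": "2.21"}

def run(protocol):
    # Load name and deck slot are assumed; lid and plate handling are omitted.
    reader = protocol.load_module("absorbanceReaderV1", "D3")
    # Arguments assumed: a multi-wavelength initialization at 450 nm and 531 nm.
    reader.initialize("multi", [450, 531])
    # One CSV per configured wavelength is written and counted against the
    # 40-file limit enforced above: my_data_450.csv and my_data_531.csv.
    data = reader.read(export_filename="my_data")
    protocol.comment(f"A1 at 450 nm: {data[450]['A1']}")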
+ + # Begin interfacing with the file provider if the user provided a filename + file_ids = [] + if params.fileName is not None: + # Create the Plate Reader Transform + plate_read_result = PlateReaderData.construct( + read_results=transform_results, + reference_wavelength=abs_reader_substate.reference_wavelength, + start_time=start_time, + finish_time=finish_time, + serial_number=abs_reader.serial_number + if (abs_reader is not None and abs_reader.serial_number is not None) + else "VIRTUAL_SERIAL", + ) + + if isinstance(plate_read_result, PlateReaderData): + # Write a CSV file for each of the measurements taken + for measurement in plate_read_result.read_results: + file_id = await self._file_provider.write_csv( + write_data=plate_read_result.build_generic_csv( + filename=params.fileName, + measurement=measurement, + ) + ) + file_ids.append(file_id) + + # Return success data to api return SuccessData( - public=ReadAbsorbanceResult(data=asbsorbance_result), + public=ReadAbsorbanceResult( + data=asbsorbance_result, fileIds=file_ids + ), private=None, ) return SuccessData( - public=ReadAbsorbanceResult(data=None), + public=ReadAbsorbanceResult(data=asbsorbance_result, fileIds=file_ids), private=None, ) diff --git a/api/src/opentrons/protocol_engine/commands/command.py b/api/src/opentrons/protocol_engine/commands/command.py index 759606899c0..9ba9404af1f 100644 --- a/api/src/opentrons/protocol_engine/commands/command.py +++ b/api/src/opentrons/protocol_engine/commands/command.py @@ -274,6 +274,7 @@ def __init__( state_view: StateView, hardware_api: HardwareControlAPI, equipment: execution.EquipmentHandler, + file_provider: execution.FileProvider, movement: execution.MovementHandler, gantry_mover: execution.GantryMover, labware_movement: execution.LabwareMovementHandler, diff --git a/api/src/opentrons/protocol_engine/create_protocol_engine.py b/api/src/opentrons/protocol_engine/create_protocol_engine.py index d3d50da14df..dc66591eff2 100644 --- a/api/src/opentrons/protocol_engine/create_protocol_engine.py +++ b/api/src/opentrons/protocol_engine/create_protocol_engine.py @@ -10,7 +10,7 @@ from opentrons_shared_data.robot import load as load_robot from .protocol_engine import ProtocolEngine -from .resources import DeckDataProvider, ModuleDataProvider +from .resources import DeckDataProvider, ModuleDataProvider, FileProvider from .state.config import Config from .state.state import StateStore from .types import PostRunHardwareState, DeckConfigurationType @@ -26,6 +26,7 @@ async def create_protocol_engine( error_recovery_policy: ErrorRecoveryPolicy, load_fixed_trash: bool = False, deck_configuration: typing.Optional[DeckConfigurationType] = None, + file_provider: typing.Optional[FileProvider] = None, notify_publishers: typing.Optional[typing.Callable[[], None]] = None, ) -> ProtocolEngine: """Create a ProtocolEngine instance. @@ -37,6 +38,7 @@ async def create_protocol_engine( See documentation on `ErrorRecoveryPolicy`. load_fixed_trash: Automatically load fixed trash labware in engine. deck_configuration: The initial deck configuration the engine will be instantiated with. + file_provider: Provides access to robot server file writing procedures for protocol output. notify_publishers: Notifies robot server publishers of internal state change. 
""" deck_data = DeckDataProvider(config.deck_type) @@ -47,6 +49,7 @@ async def create_protocol_engine( module_calibration_offsets = ModuleDataProvider.load_module_calibrations() robot_definition = load_robot(config.robot_type) + state_store = StateStore( config=config, deck_definition=deck_definition, @@ -62,6 +65,7 @@ async def create_protocol_engine( return ProtocolEngine( state_store=state_store, hardware_api=hardware_api, + file_provider=file_provider, ) @@ -70,6 +74,7 @@ def create_protocol_engine_in_thread( hardware_api: HardwareControlAPI, config: Config, deck_configuration: typing.Optional[DeckConfigurationType], + file_provider: typing.Optional[FileProvider], error_recovery_policy: ErrorRecoveryPolicy, drop_tips_after_run: bool, post_run_hardware_state: PostRunHardwareState, @@ -97,6 +102,7 @@ def create_protocol_engine_in_thread( with async_context_manager_in_thread( _protocol_engine( hardware_api, + file_provider, config, deck_configuration, error_recovery_policy, @@ -114,6 +120,7 @@ def create_protocol_engine_in_thread( @contextlib.asynccontextmanager async def _protocol_engine( hardware_api: HardwareControlAPI, + file_provider: typing.Optional[FileProvider], config: Config, deck_configuration: typing.Optional[DeckConfigurationType], error_recovery_policy: ErrorRecoveryPolicy, @@ -123,6 +130,7 @@ async def _protocol_engine( ) -> typing.AsyncGenerator[ProtocolEngine, None]: protocol_engine = await create_protocol_engine( hardware_api=hardware_api, + file_provider=file_provider, config=config, error_recovery_policy=error_recovery_policy, load_fixed_trash=load_fixed_trash, diff --git a/api/src/opentrons/protocol_engine/engine_support.py b/api/src/opentrons/protocol_engine/engine_support.py index 9d6bdcbdd69..b822b97914d 100644 --- a/api/src/opentrons/protocol_engine/engine_support.py +++ b/api/src/opentrons/protocol_engine/engine_support.py @@ -6,7 +6,8 @@ def create_run_orchestrator( - hardware_api: HardwareControlAPI, protocol_engine: ProtocolEngine + hardware_api: HardwareControlAPI, + protocol_engine: ProtocolEngine, ) -> RunOrchestrator: """Create a RunOrchestrator instance.""" return RunOrchestrator( diff --git a/api/src/opentrons/protocol_engine/errors/__init__.py b/api/src/opentrons/protocol_engine/errors/__init__.py index 304f7db1fff..9bbe3aae9b8 100644 --- a/api/src/opentrons/protocol_engine/errors/__init__.py +++ b/api/src/opentrons/protocol_engine/errors/__init__.py @@ -75,6 +75,7 @@ IncompleteWellDefinitionError, OperationLocationNotInWellError, InvalidDispenseVolumeError, + StorageLimitReachedError, ) from .error_occurrence import ErrorOccurrence, ProtocolCommandFailedError @@ -158,4 +159,5 @@ "IncompleteWellDefinitionError", "OperationLocationNotInWellError", "InvalidDispenseVolumeError", + "StorageLimitReachedError", ] diff --git a/api/src/opentrons/protocol_engine/errors/exceptions.py b/api/src/opentrons/protocol_engine/errors/exceptions.py index dd9dc6e1d51..5656942b338 100644 --- a/api/src/opentrons/protocol_engine/errors/exceptions.py +++ b/api/src/opentrons/protocol_engine/errors/exceptions.py @@ -1108,3 +1108,16 @@ def __init__( ) -> None: """Build an OperationLocationNotInWellError.""" super().__init__(ErrorCodes.GENERAL_ERROR, message, details, wrapping) + + +class StorageLimitReachedError(ProtocolEngineError): + """Raised to indicate that a file cannot be created due to storage limitations.""" + + def __init__( + self, + message: Optional[str] = None, + detail: Optional[Dict[str, str]] = None, + wrapping: Optional[Sequence[EnumeratedError]] = None, + ) -> 
None: + """Build an StorageLimitReached.""" + super().__init__(ErrorCodes.GENERAL_ERROR, message, detail, wrapping) diff --git a/api/src/opentrons/protocol_engine/execution/__init__.py b/api/src/opentrons/protocol_engine/execution/__init__.py index 80f2dfd0d99..482a16d787f 100644 --- a/api/src/opentrons/protocol_engine/execution/__init__.py +++ b/api/src/opentrons/protocol_engine/execution/__init__.py @@ -21,6 +21,7 @@ from .hardware_stopper import HardwareStopper from .door_watcher import DoorWatcher from .status_bar import StatusBarHandler +from ..resources.file_provider import FileProvider # .thermocycler_movement_flagger omitted from package's public interface. @@ -45,4 +46,5 @@ "DoorWatcher", "RailLightsHandler", "StatusBarHandler", + "FileProvider", ] diff --git a/api/src/opentrons/protocol_engine/execution/command_executor.py b/api/src/opentrons/protocol_engine/execution/command_executor.py index e9dd2ec73b9..1d30b8756d2 100644 --- a/api/src/opentrons/protocol_engine/execution/command_executor.py +++ b/api/src/opentrons/protocol_engine/execution/command_executor.py @@ -14,7 +14,7 @@ from opentrons.protocol_engine.commands.command import SuccessData from ..state.state import StateStore -from ..resources import ModelUtils +from ..resources import ModelUtils, FileProvider from ..commands import CommandStatus from ..actions import ( ActionDispatcher, @@ -72,6 +72,7 @@ class CommandExecutor: def __init__( self, hardware_api: HardwareControlAPI, + file_provider: FileProvider, state_store: StateStore, action_dispatcher: ActionDispatcher, equipment: EquipmentHandler, @@ -88,6 +89,7 @@ def __init__( ) -> None: """Initialize the CommandExecutor with access to its dependencies.""" self._hardware_api = hardware_api + self._file_provider = file_provider self._state_store = state_store self._action_dispatcher = action_dispatcher self._equipment = equipment @@ -116,6 +118,7 @@ async def execute(self, command_id: str) -> None: command_impl = queued_command._ImplementationCls( state_view=self._state_store, hardware_api=self._hardware_api, + file_provider=self._file_provider, equipment=self._equipment, movement=self._movement, gantry_mover=self._gantry_mover, diff --git a/api/src/opentrons/protocol_engine/execution/create_queue_worker.py b/api/src/opentrons/protocol_engine/execution/create_queue_worker.py index e449a013008..e37a2c0716b 100644 --- a/api/src/opentrons/protocol_engine/execution/create_queue_worker.py +++ b/api/src/opentrons/protocol_engine/execution/create_queue_worker.py @@ -6,6 +6,7 @@ from ..state.state import StateStore from ..actions import ActionDispatcher +from ..resources import FileProvider from .equipment import EquipmentHandler from .movement import MovementHandler from .gantry_mover import create_gantry_mover @@ -20,6 +21,7 @@ def create_queue_worker( hardware_api: HardwareControlAPI, + file_provider: FileProvider, state_store: StateStore, action_dispatcher: ActionDispatcher, command_generator: Callable[[], AsyncGenerator[str, None]], @@ -28,6 +30,7 @@ def create_queue_worker( Arguments: hardware_api: Hardware control API to pass down to dependencies. + file_provider: Provides access to robot server file writing procedures for protocol output. state_store: StateStore to pass down to dependencies. action_dispatcher: ActionDispatcher to pass down to dependencies. error_recovery_policy: ErrorRecoveryPolicy to pass down to dependencies. 
@@ -78,6 +81,7 @@ def create_queue_worker( command_executor = CommandExecutor( hardware_api=hardware_api, + file_provider=file_provider, state_store=state_store, action_dispatcher=action_dispatcher, equipment=equipment_handler, diff --git a/api/src/opentrons/protocol_engine/protocol_engine.py b/api/src/opentrons/protocol_engine/protocol_engine.py index c5219e889a3..d93ab5dd42d 100644 --- a/api/src/opentrons/protocol_engine/protocol_engine.py +++ b/api/src/opentrons/protocol_engine/protocol_engine.py @@ -20,7 +20,7 @@ from .errors import ProtocolCommandFailedError, ErrorOccurrence, CommandNotAllowedError from .errors.exceptions import EStopActivatedError from . import commands, slot_standardization -from .resources import ModelUtils, ModuleDataProvider +from .resources import ModelUtils, ModuleDataProvider, FileProvider from .types import ( LabwareOffset, LabwareOffsetCreate, @@ -95,6 +95,7 @@ def __init__( hardware_stopper: Optional[HardwareStopper] = None, door_watcher: Optional[DoorWatcher] = None, module_data_provider: Optional[ModuleDataProvider] = None, + file_provider: Optional[FileProvider] = None, ) -> None: """Initialize a ProtocolEngine instance. @@ -104,6 +105,7 @@ def __init__( Prefer the `create_protocol_engine()` factory function. """ self._hardware_api = hardware_api + self._file_provider = file_provider or FileProvider() self._state_store = state_store self._model_utils = model_utils or ModelUtils() self._action_dispatcher = action_dispatcher or ActionDispatcher( @@ -616,6 +618,7 @@ def set_and_start_queue_worker( assert self._queue_worker is None self._queue_worker = create_queue_worker( hardware_api=self._hardware_api, + file_provider=self._file_provider, state_store=self._state_store, action_dispatcher=self._action_dispatcher, command_generator=command_generator, diff --git a/api/src/opentrons/protocol_engine/resources/__init__.py b/api/src/opentrons/protocol_engine/resources/__init__.py index 94b71831589..a77075c95bb 100644 --- a/api/src/opentrons/protocol_engine/resources/__init__.py +++ b/api/src/opentrons/protocol_engine/resources/__init__.py @@ -9,6 +9,7 @@ from .deck_data_provider import DeckDataProvider, DeckFixedLabware from .labware_data_provider import LabwareDataProvider from .module_data_provider import ModuleDataProvider +from .file_provider import FileProvider from .ot3_validation import ensure_ot3_hardware @@ -18,6 +19,7 @@ "DeckDataProvider", "DeckFixedLabware", "ModuleDataProvider", + "FileProvider", "ensure_ot3_hardware", "pipette_data_provider", "labware_validation", diff --git a/api/src/opentrons/protocol_engine/resources/file_provider.py b/api/src/opentrons/protocol_engine/resources/file_provider.py new file mode 100644 index 00000000000..d4ed7b71522 --- /dev/null +++ b/api/src/opentrons/protocol_engine/resources/file_provider.py @@ -0,0 +1,157 @@ +"""File interaction resource provider.""" +from datetime import datetime +from typing import List, Optional, Callable, Awaitable, Dict +from pydantic import BaseModel +from ..errors import StorageLimitReachedError + + +MAXIMUM_CSV_FILE_LIMIT = 40 + + +class GenericCsvTransform: + """Generic CSV File Type data for rows of data to be seperated by a delimeter.""" + + filename: str + rows: List[List[str]] + delimiter: str = "," + + @staticmethod + def build( + filename: str, rows: List[List[str]], delimiter: str = "," + ) -> "GenericCsvTransform": + """Build a Generic CSV datatype class.""" + if "." in filename and not filename.endswith(".csv"): + raise ValueError( + f"Provided filename {filename} invalid. 
Only CSV file format is accepted." + ) + elif "." not in filename: + filename = f"{filename}.csv" + csv = GenericCsvTransform() + csv.filename = filename + csv.rows = rows + csv.delimiter = delimiter + return csv + + +class ReadData(BaseModel): + """Read Data type containing the wavelength for a Plate Reader read alongside the Measurement Data of that read.""" + + wavelength: int + data: Dict[str, float] + + +class PlateReaderData(BaseModel): + """Data from a Opentrons Plate Reader Read. Can be converted to CSV template format.""" + + read_results: List[ReadData] + reference_wavelength: Optional[int] = None + start_time: datetime + finish_time: datetime + serial_number: str + + def build_generic_csv( # noqa: C901 + self, filename: str, measurement: ReadData + ) -> GenericCsvTransform: + """Builds a CSV compatible object containing Plate Reader Measurements. + + This will also automatically reformat the provided filename to include the wavelength of those measurements. + """ + plate_alpharows = ["A", "B", "C", "D", "E", "F", "G", "H"] + rows = [] + + rows.append(["", "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12"]) + for i in range(8): + row = [plate_alpharows[i]] + for j in range(12): + row.append(str(measurement.data[f"{plate_alpharows[i]}{j+1}"])) + rows.append(row) + for i in range(3): + rows.append([""]) + rows.append(["", "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12"]) + for i in range(8): + row = [plate_alpharows[i]] + for j in range(12): + row.append("") + rows.append(row) + for i in range(3): + rows.append([""]) + rows.append( + [ + "", + "ID", + "Well", + "Absorbance (OD)", + "Mean Absorbance (OD)", + "Absorbance %CV", + ] + ) + for i in range(3): + rows.append([""]) + rows.append( + [ + "", + "ID", + "Well", + "Absorbance (OD)", + "Mean Absorbance (OD)", + "Dilution Factor", + "Absorbance %CV", + ] + ) + rows.append(["1", "Sample 1", "", "", "", "1", "", "", "", "", "", ""]) + for i in range(3): + rows.append([""]) + + # end of file metadata + rows.append(["Protocol"]) + rows.append(["Assay"]) + rows.append(["Sample Wavelength (nm)", str(measurement.wavelength)]) + if self.reference_wavelength is not None: + rows.append(["Reference Wavelength (nm)", str(self.reference_wavelength)]) + rows.append(["Serial No.", self.serial_number]) + rows.append(["Measurement started at", str(self.start_time)]) + rows.append(["Measurement finished at", str(self.finish_time)]) + + # Ensure the filename adheres to ruleset contains the wavelength for a given measurement + if filename.endswith(".csv"): + filename = filename[:-4] + filename = filename + "_" + str(measurement.wavelength) + ".csv" + + return GenericCsvTransform.build( + filename=filename, + rows=rows, + delimiter=",", + ) + + +class FileProvider: + """Provider class to wrap file read write interactions to the data files directory in the engine.""" + + def __init__( + self, + data_files_write_csv_callback: Optional[ + Callable[[GenericCsvTransform], Awaitable[str]] + ] = None, + data_files_filecount: Optional[Callable[[], Awaitable[int]]] = None, + ) -> None: + """Initialize the interface callbacks of the File Provider for data file handling within the Protocol Engine. + + Params: + data_files_write_csv_callback: Callback to write a CSV file to the data files directory and add it to the database. + data_files_filecount: Callback to check the amount of data files already present in the data files directory. 
+ """ + self._data_files_write_csv_callback = data_files_write_csv_callback + self._data_files_filecount = data_files_filecount + + async def write_csv(self, write_data: GenericCsvTransform) -> str: + """Writes the provided CSV object to a file in the Data Files directory. Returns the File ID of the file created.""" + if self._data_files_filecount is not None: + file_count = await self._data_files_filecount() + if file_count >= MAXIMUM_CSV_FILE_LIMIT: + raise StorageLimitReachedError( + f"Not enough space to store file {write_data.filename}." + ) + if self._data_files_write_csv_callback is not None: + return await self._data_files_write_csv_callback(write_data) + # If we are in an analysis or simulation state, return an empty file ID + return "" diff --git a/api/src/opentrons/protocol_engine/state/files.py b/api/src/opentrons/protocol_engine/state/files.py new file mode 100644 index 00000000000..655d038df34 --- /dev/null +++ b/api/src/opentrons/protocol_engine/state/files.py @@ -0,0 +1,59 @@ +"""Basic protocol engine create file data state and store.""" +from dataclasses import dataclass +from typing import List + +from ._abstract_store import HasState, HandlesActions +from ..actions import Action, SucceedCommandAction +from ..commands import ( + Command, + absorbance_reader, +) + + +@dataclass +class FileState: + """State of Engine created files.""" + + file_ids: List[str] + + +class FileStore(HasState[FileState], HandlesActions): + """File state container.""" + + _state: FileState + + def __init__(self) -> None: + """Initialize a File store and its state.""" + self._state = FileState(file_ids=[]) + + def handle_action(self, action: Action) -> None: + """Modify state in reaction to an action.""" + if isinstance(action, SucceedCommandAction): + self._handle_command(action.command) + + def _handle_command(self, command: Command) -> None: + if isinstance(command.result, absorbance_reader.ReadAbsorbanceResult): + if command.result.fileIds is not None: + self._state.file_ids.extend(command.result.fileIds) + + +class FileView(HasState[FileState]): + """Read-only engine created file state view.""" + + _state: FileState + + def __init__(self, state: FileState) -> None: + """Initialize the view of file state. + + Arguments: + state: File state dataclass used for tracking file creation status. 
+ """ + self._state = state + + def get_filecount(self) -> int: + """Get the number of files currently created by the protocol.""" + return len(self._state.file_ids) + + def get_file_id_list(self) -> List[str]: + """Get the list of files by file ID created by the protocol.""" + return self._state.file_ids diff --git a/api/src/opentrons/protocol_engine/state/state.py b/api/src/opentrons/protocol_engine/state/state.py index 7fc23a8ee2f..6743e1f44fc 100644 --- a/api/src/opentrons/protocol_engine/state/state.py +++ b/api/src/opentrons/protocol_engine/state/state.py @@ -29,6 +29,7 @@ from .wells import WellState, WellView, WellStore from .geometry import GeometryView from .motion import MotionView +from .files import FileView, FileState, FileStore from .config import Config from .state_summary import StateSummary from ..types import DeckConfigurationType @@ -50,6 +51,7 @@ class State: liquids: LiquidState tips: TipState wells: WellState + files: FileState class StateView(HasState[State]): @@ -66,6 +68,7 @@ class StateView(HasState[State]): _wells: WellView _geometry: GeometryView _motion: MotionView + _files: FileView _config: Config @property @@ -118,6 +121,11 @@ def motion(self) -> MotionView: """Get state view selectors for derived motion state.""" return self._motion + @property + def files(self) -> FileView: + """Get state view selectors for engine create file state.""" + return self._files + @property def config(self) -> Config: """Get ProtocolEngine configuration.""" @@ -139,6 +147,7 @@ def get_summary(self) -> StateSummary: liquids=self._liquid.get_all(), wells=self._wells.get_all(), hasEverEnteredErrorRecovery=self._commands.get_has_entered_recovery_mode(), + files=self._state.files.file_ids, ) @@ -206,6 +215,7 @@ def __init__( self._liquid_store = LiquidStore() self._tip_store = TipStore() self._well_store = WellStore() + self._file_store = FileStore() self._substores: List[HandlesActions] = [ self._command_store, @@ -216,6 +226,7 @@ def __init__( self._liquid_store, self._tip_store, self._well_store, + self._file_store, ] self._config = config self._change_notifier = change_notifier or ChangeNotifier() @@ -333,6 +344,7 @@ def _get_next_state(self) -> State: liquids=self._liquid_store.state, tips=self._tip_store.state, wells=self._well_store.state, + files=self._file_store.state, ) def _initialize_state(self) -> None: @@ -349,6 +361,7 @@ def _initialize_state(self) -> None: self._liquid = LiquidView(state.liquids) self._tips = TipView(state.tips) self._wells = WellView(state.wells) + self._files = FileView(state.files) # Derived states self._geometry = GeometryView( diff --git a/api/src/opentrons/protocol_engine/state/state_summary.py b/api/src/opentrons/protocol_engine/state/state_summary.py index 66fc4249851..b1c4dd8f766 100644 --- a/api/src/opentrons/protocol_engine/state/state_summary.py +++ b/api/src/opentrons/protocol_engine/state/state_summary.py @@ -31,3 +31,4 @@ class StateSummary(BaseModel): completedAt: Optional[datetime] liquids: List[Liquid] = Field(default_factory=list) wells: List[LiquidHeightSummary] = Field(default_factory=list) + files: List[str] = Field(default_factory=list) diff --git a/api/src/opentrons/simulate.py b/api/src/opentrons/simulate.py index 23f6c7fdfb9..e565bab83e0 100644 --- a/api/src/opentrons/simulate.py +++ b/api/src/opentrons/simulate.py @@ -815,6 +815,7 @@ def _create_live_context_pe( robot_type, use_pe_virtual_hardware=use_pe_virtual_hardware ), deck_configuration=None, + file_provider=None, 
error_recovery_policy=error_recovery_policy.never_recover, drop_tips_after_run=False, post_run_hardware_state=PostRunHardwareState.STAY_ENGAGED_IN_PLACE, diff --git a/api/tests/opentrons/conftest.py b/api/tests/opentrons/conftest.py index a52e95248c0..cf8fdd0e97c 100755 --- a/api/tests/opentrons/conftest.py +++ b/api/tests/opentrons/conftest.py @@ -335,6 +335,7 @@ def _make_ot3_pe_ctx( block_on_door_open=False, ), deck_configuration=None, + file_provider=None, error_recovery_policy=error_recovery_policy.never_recover, drop_tips_after_run=False, post_run_hardware_state=PostRunHardwareState.STAY_ENGAGED_IN_PLACE, diff --git a/api/tests/opentrons/protocol_api/core/engine/test_absorbance_reader_core.py b/api/tests/opentrons/protocol_api/core/engine/test_absorbance_reader_core.py index 405d737d55b..a5fadde09cc 100644 --- a/api/tests/opentrons/protocol_api/core/engine/test_absorbance_reader_core.py +++ b/api/tests/opentrons/protocol_api/core/engine/test_absorbance_reader_core.py @@ -11,6 +11,11 @@ from opentrons.protocol_engine.clients import SyncClient as EngineClient from opentrons.protocol_api.core.engine.module_core import AbsorbanceReaderCore from opentrons.protocol_api import MAX_SUPPORTED_VERSION +from opentrons.protocol_engine.state.module_substates import AbsorbanceReaderSubState +from opentrons.protocol_engine.state.module_substates.absorbance_reader_substate import ( + AbsorbanceReaderId, + AbsorbanceReaderMeasureMode, +) SyncAbsorbanceReaderHardware = SynchronousAdapter[AbsorbanceReader] @@ -115,7 +120,23 @@ def test_read( ) -> None: """It should call absorbance reader to read with the engine client.""" subject._initialized_value = [123] - subject.read() + substate = AbsorbanceReaderSubState( + module_id=AbsorbanceReaderId(subject.module_id), + configured=True, + measured=False, + is_lid_on=True, + data=None, + configured_wavelengths=subject._initialized_value, + measure_mode=AbsorbanceReaderMeasureMode("single"), + reference_wavelength=None, + lid_id="pr_lid_labware", + ) + decoy.when( + mock_engine_client.state.modules.get_absorbance_reader_substate( + subject.module_id + ) + ).then_return(substate) + subject.read(filename=None) decoy.verify( mock_engine_client.execute_command( diff --git a/api/tests/opentrons/protocol_engine/conftest.py b/api/tests/opentrons/protocol_engine/conftest.py index 7040f8497ea..76c5d754f3e 100644 --- a/api/tests/opentrons/protocol_engine/conftest.py +++ b/api/tests/opentrons/protocol_engine/conftest.py @@ -22,6 +22,7 @@ from opentrons.hardware_control.api import API from opentrons.hardware_control.protocols.types import FlexRobotType, OT2RobotType from opentrons.protocol_engine.notes import CommandNoteAdder +from opentrons.protocol_engine.resources.file_provider import FileProvider if TYPE_CHECKING: from opentrons.hardware_control.ot3api import OT3API @@ -252,3 +253,9 @@ def supported_tip_fixture() -> pipette_definition.SupportedTipsDefinition: def mock_command_note_adder(decoy: Decoy) -> CommandNoteAdder: """Get a command note adder.""" return decoy.mock(cls=CommandNoteAdder) + + +@pytest.fixture +def file_provider(decoy: Decoy) -> FileProvider: + """Get a mocked out FileProvider.""" + return decoy.mock(cls=FileProvider) diff --git a/api/tests/opentrons/protocol_engine/execution/test_command_executor.py b/api/tests/opentrons/protocol_engine/execution/test_command_executor.py index 2df3a2cdd25..f5f0ec063b0 100644 --- a/api/tests/opentrons/protocol_engine/execution/test_command_executor.py +++ 
b/api/tests/opentrons/protocol_engine/execution/test_command_executor.py @@ -18,7 +18,7 @@ from opentrons.protocol_engine.errors.exceptions import ( EStopActivatedError as PE_EStopActivatedError, ) -from opentrons.protocol_engine.resources import ModelUtils +from opentrons.protocol_engine.resources import ModelUtils, FileProvider from opentrons.protocol_engine.state.state import StateStore from opentrons.protocol_engine.actions import ( ActionDispatcher, @@ -174,6 +174,7 @@ def subject( state_store: StateStore, action_dispatcher: ActionDispatcher, equipment: EquipmentHandler, + file_provider: FileProvider, movement: MovementHandler, mock_gantry_mover: GantryMover, labware_movement: LabwareMovementHandler, @@ -188,6 +189,7 @@ def subject( """Get a CommandExecutor test subject with its dependencies mocked out.""" return CommandExecutor( hardware_api=hardware_api, + file_provider=file_provider, state_store=state_store, action_dispatcher=action_dispatcher, equipment=equipment, @@ -234,6 +236,7 @@ async def test_execute( state_store: StateStore, action_dispatcher: ActionDispatcher, equipment: EquipmentHandler, + file_provider: FileProvider, movement: MovementHandler, mock_gantry_mover: GantryMover, labware_movement: LabwareMovementHandler, @@ -329,6 +332,7 @@ class _TestCommand( queued_command._ImplementationCls( state_view=state_store, hardware_api=hardware_api, + file_provider=file_provider, equipment=equipment, movement=movement, gantry_mover=mock_gantry_mover, @@ -392,6 +396,7 @@ async def test_execute_undefined_error( state_store: StateStore, action_dispatcher: ActionDispatcher, equipment: EquipmentHandler, + file_provider: FileProvider, movement: MovementHandler, mock_gantry_mover: GantryMover, labware_movement: LabwareMovementHandler, @@ -474,6 +479,7 @@ class _TestCommand( queued_command._ImplementationCls( state_view=state_store, hardware_api=hardware_api, + file_provider=file_provider, equipment=equipment, movement=movement, gantry_mover=mock_gantry_mover, @@ -528,6 +534,7 @@ async def test_execute_defined_error( state_store: StateStore, action_dispatcher: ActionDispatcher, equipment: EquipmentHandler, + file_provider: FileProvider, movement: MovementHandler, mock_gantry_mover: GantryMover, labware_movement: LabwareMovementHandler, @@ -610,6 +617,7 @@ class _TestCommand( queued_command._ImplementationCls( state_view=state_store, hardware_api=hardware_api, + file_provider=file_provider, equipment=equipment, movement=movement, gantry_mover=mock_gantry_mover, diff --git a/robot-server/robot_server/file_provider/__init__.py b/robot-server/robot_server/file_provider/__init__.py new file mode 100644 index 00000000000..6a8b56c58eb --- /dev/null +++ b/robot-server/robot_server/file_provider/__init__.py @@ -0,0 +1 @@ +"""The HTTP API, and supporting code, for the File Provider entity provided to the Protocol Engine.""" diff --git a/robot-server/robot_server/file_provider/fastapi_dependencies.py b/robot-server/robot_server/file_provider/fastapi_dependencies.py new file mode 100644 index 00000000000..65042288f9a --- /dev/null +++ b/robot-server/robot_server/file_provider/fastapi_dependencies.py @@ -0,0 +1,39 @@ +"""Dependency functions for use with `fastapi.Depends()`.""" +from pathlib import Path +from typing import Annotated + +import fastapi + +from robot_server.file_provider.provider import FileProviderWrapper +from robot_server.data_files.dependencies import ( + get_data_files_directory, + get_data_files_store, +) +from robot_server.data_files.data_files_store import DataFilesStore +from 
opentrons.protocol_engine.resources.file_provider import FileProvider + + +async def get_file_provider_wrapper( + data_files_directory: Annotated[Path, fastapi.Depends(get_data_files_directory)], + data_files_store: Annotated[DataFilesStore, fastapi.Depends(get_data_files_store)], +) -> FileProviderWrapper: + """Return the server's singleton `FileProviderWrapper` which provides the engine related callbacks for FileProvider.""" + file_provider_wrapper = FileProviderWrapper( + data_files_directory=data_files_directory, data_files_store=data_files_store + ) + + return file_provider_wrapper + + +async def get_file_provider( + file_provider_wrapper: Annotated[ + FileProviderWrapper, fastapi.Depends(get_file_provider_wrapper) + ], +) -> FileProvider: + """Return the engine `FileProvider` which accepts callbacks from FileProviderWrapper.""" + file_provider = FileProvider( + data_files_write_csv_callback=file_provider_wrapper.write_csv_callback, + data_files_filecount=file_provider_wrapper.csv_filecount_callback, + ) + + return file_provider diff --git a/robot-server/robot_server/file_provider/provider.py new file mode 100644 index 00000000000..5cfeb640fef --- /dev/null +++ b/robot-server/robot_server/file_provider/provider.py @@ -0,0 +1,74 @@ +"""Wrapper to provide the callbacks utilized by the Protocol Engine File Provider.""" +import os +import asyncio +import csv +from pathlib import Path +from typing import Annotated +from fastapi import Depends +from robot_server.data_files.dependencies import ( + get_data_files_directory, + get_data_files_store, +) +from ..service.dependencies import get_current_time, get_unique_id +from robot_server.data_files.data_files_store import DataFilesStore, DataFileInfo +from opentrons.protocol_engine.resources.file_provider import GenericCsvTransform + + +class FileProviderWrapper: + """Wrapper to provide File Read and Write capabilities to Protocol Engine.""" + + def __init__( + self, + data_files_directory: Annotated[Path, Depends(get_data_files_directory)], + data_files_store: Annotated[DataFilesStore, Depends(get_data_files_store)], + ) -> None: + """Provides callbacks for data file manipulation for the Protocol Engine's File Provider class. + + Params: + data_files_directory: The directory to store engine-created files in during a protocol run. + data_files_store: The data files store utilized for database interaction when creating files. + """ + self._data_files_directory = data_files_directory + self._data_files_store = data_files_store + + # The data file store is not generally safe for concurrent access. + self._lock = asyncio.Lock() + + async def write_csv_callback( + self, + csv_data: GenericCsvTransform, + ) -> str: + """Write the provided data transform to a CSV file. Returns the File ID of the created file.""" + async with self._lock: + file_id = await get_unique_id() + os.makedirs( + os.path.dirname( + self._data_files_directory / file_id / csv_data.filename + ), + exist_ok=True, + ) + with open( + file=self._data_files_directory / file_id / csv_data.filename, + mode="w", + newline="", + ) as csvfile: + writer = csv.writer(csvfile, delimiter=csv_data.delimiter) + writer.writerows(csv_data.rows) + + created_at = await get_current_time() + # TODO (cb, 10-14-24): Engine created files do not currently get a file_hash, unlike explicitly uploaded files. Do they need one?
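As a minimal sketch (assuming the modules added in this patch are importable), this is the hand-off those callbacks serve: `GenericCsvTransform.build` normalizes the filename, and `FileProvider.write_csv` either forwards the transform to `write_csv_callback` or, when no callbacks are wired as in analysis and simulation, returns an empty file ID.

import asyncio

from opentrons.protocol_engine.resources.file_provider import (
    FileProvider,
    GenericCsvTransform,
)

# ".csv" is appended automatically when the provided name has no extension.
transform = GenericCsvTransform.build(
    filename="my_data",
    rows=[["", "1", "2"], ["A", "0.25", "0.26"]],
    delimiter=",",
)
assert transform.filename == "my_data.csv"

async def demo() -> None:
    provider = FileProvider()  # no callbacks wired: analysis/simulation behavior
    file_id = await provider.write_csv(write_data=transform)
    assert file_id == ""  # robot-server wires FileProviderWrapper callbacks that return a real file ID

asyncio.run(demo())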
+ file_info = DataFileInfo( + id=file_id, + name=csv_data.filename, + file_hash="", + created_at=created_at, + ) + await self._data_files_store.insert(file_info) + return file_id + + async def csv_filecount_callback(self) -> int: + """Return the current count of files stored within the data files directory.""" + data_file_usage_info = [ + usage_info for usage_info in self._data_files_store.get_usage_info() + ] + return len(data_file_usage_info) diff --git a/robot-server/robot_server/maintenance_runs/maintenance_run_data_manager.py b/robot-server/robot_server/maintenance_runs/maintenance_run_data_manager.py index 589aaf5614d..46b2c86bd40 100644 --- a/robot-server/robot_server/maintenance_runs/maintenance_run_data_manager.py +++ b/robot-server/robot_server/maintenance_runs/maintenance_run_data_manager.py @@ -33,6 +33,7 @@ def _build_run( modules=[], liquids=[], wells=[], + files=[], hasEverEnteredErrorRecovery=False, ) return MaintenanceRun.construct( diff --git a/robot-server/robot_server/runs/router/base_router.py b/robot-server/robot_server/runs/router/base_router.py index b9bd8cd24b2..639e6d91628 100644 --- a/robot-server/robot_server/runs/router/base_router.py +++ b/robot-server/robot_server/runs/router/base_router.py @@ -72,6 +72,10 @@ get_deck_configuration_store, ) from robot_server.deck_configuration.store import DeckConfigurationStore +from robot_server.file_provider.fastapi_dependencies import ( + get_file_provider, +) +from opentrons.protocol_engine.resources.file_provider import FileProvider from robot_server.service.notifications import get_pe_notify_publishers log = logging.getLogger(__name__) @@ -187,6 +191,7 @@ async def create_run( # noqa: C901 deck_configuration_store: Annotated[ DeckConfigurationStore, Depends(get_deck_configuration_store) ], + file_provider: Annotated[FileProvider, Depends(get_file_provider)], notify_publishers: Annotated[Callable[[], None], Depends(get_pe_notify_publishers)], request_body: Optional[RequestModel[RunCreate]] = None, ) -> PydanticResponse[SimpleBody[Union[Run, BadRun]]]: @@ -206,6 +211,7 @@ async def create_run( # noqa: C901 resources to make room for the new run. check_estop: Dependency to verify the estop is in a valid state. deck_configuration_store: Dependency to fetch the deck configuration. + file_provider: Dependency to provide access to file Reading and Writing to Protocol engine. notify_publishers: Utilized by the engine to notify publishers of state changes. 
""" protocol_id = request_body.data.protocolId if request_body is not None else None @@ -260,6 +266,7 @@ async def create_run( # noqa: C901 created_at=created_at, labware_offsets=offsets, deck_configuration=deck_configuration, + file_provider=file_provider, run_time_param_values=rtp_values, run_time_param_paths=rtp_paths, protocol=protocol_resource, diff --git a/robot-server/robot_server/runs/run_data_manager.py b/robot-server/robot_server/runs/run_data_manager.py index 4168b1d4d5d..3edf89ef163 100644 --- a/robot-server/robot_server/runs/run_data_manager.py +++ b/robot-server/robot_server/runs/run_data_manager.py @@ -34,6 +34,7 @@ from .run_models import Run, BadRun, RunDataError from opentrons.protocol_engine.types import DeckConfigurationType, RunTimeParameter +from opentrons.protocol_engine.resources.file_provider import FileProvider _INITIAL_ERROR_RECOVERY_RULES: list[ErrorRecoveryRule] = [] @@ -65,6 +66,7 @@ def _build_run( completedAt=state_summary.completedAt, startedAt=state_summary.startedAt, liquids=state_summary.liquids, + outputFileIds=state_summary.files, runTimeParameters=run_time_parameters, ) @@ -79,6 +81,7 @@ def _build_run( modules=[], liquids=[], wells=[], + files=[], hasEverEnteredErrorRecovery=False, ) errors.append(state_summary.dataError) @@ -122,6 +125,7 @@ def _build_run( startedAt=state.startedAt, liquids=state.liquids, runTimeParameters=run_time_parameters, + outputFileIds=state.files, hasEverEnteredErrorRecovery=state.hasEverEnteredErrorRecovery, ) @@ -170,6 +174,7 @@ async def create( created_at: datetime, labware_offsets: List[LabwareOffsetCreate], deck_configuration: DeckConfigurationType, + file_provider: FileProvider, run_time_param_values: Optional[PrimitiveRunTimeParamValuesType], run_time_param_paths: Optional[CSVRuntimeParamPaths], notify_publishers: Callable[[], None], @@ -217,6 +222,7 @@ async def create( labware_offsets=labware_offsets, initial_error_recovery_policy=initial_error_recovery_policy, deck_configuration=deck_configuration, + file_provider=file_provider, protocol=protocol, run_time_param_values=run_time_param_values, run_time_param_paths=run_time_param_paths, diff --git a/robot-server/robot_server/runs/run_models.py b/robot-server/robot_server/runs/run_models.py index 962c3ab51e7..bac7c4c740c 100644 --- a/robot-server/robot_server/runs/run_models.py +++ b/robot-server/robot_server/runs/run_models.py @@ -146,6 +146,10 @@ class Run(ResourceModel): " if none are specified in the request." ), ) + outputFileIds: List[str] = Field( + ..., + description="File IDs of files output during a protocol run.", + ) protocolId: Optional[str] = Field( None, description=( @@ -223,6 +227,10 @@ class BadRun(ResourceModel): " if none are specified in the request." 
), ) + outputFileIds: List[str] = Field( + ..., + description="File IDs of files output during a protocol run.", + ) protocolId: Optional[str] = Field( None, description=( diff --git a/robot-server/robot_server/runs/run_orchestrator_store.py b/robot-server/robot_server/runs/run_orchestrator_store.py index 03af7315ef9..e05bd3bd349 100644 --- a/robot-server/robot_server/runs/run_orchestrator_store.py +++ b/robot-server/robot_server/runs/run_orchestrator_store.py @@ -52,6 +52,7 @@ EngineStatus, ) from opentrons_shared_data.labware.types import LabwareUri +from opentrons.protocol_engine.resources.file_provider import FileProvider _log = logging.getLogger(__name__) @@ -193,6 +194,7 @@ async def create( labware_offsets: List[LabwareOffsetCreate], initial_error_recovery_policy: error_recovery_policy.ErrorRecoveryPolicy, deck_configuration: DeckConfigurationType, + file_provider: FileProvider, notify_publishers: Callable[[], None], protocol: Optional[ProtocolResource], run_time_param_values: Optional[PrimitiveRunTimeParamValuesType] = None, @@ -236,6 +238,7 @@ async def create( error_recovery_policy=initial_error_recovery_policy, load_fixed_trash=load_fixed_trash, deck_configuration=deck_configuration, + file_provider=file_provider, notify_publishers=notify_publishers, ) diff --git a/robot-server/tests/integration/http_api/runs/test_json_v6_protocol_run.tavern.yaml b/robot-server/tests/integration/http_api/runs/test_json_v6_protocol_run.tavern.yaml index 48e1088eb4c..fd98c29a2dc 100644 --- a/robot-server/tests/integration/http_api/runs/test_json_v6_protocol_run.tavern.yaml +++ b/robot-server/tests/integration/http_api/runs/test_json_v6_protocol_run.tavern.yaml @@ -52,6 +52,7 @@ stages: description: Liquid H2O displayColor: '#7332a8' runTimeParameters: [] + outputFileIds: [] protocolId: '{protocol_id}' - name: Execute a setup command diff --git a/robot-server/tests/integration/http_api/runs/test_json_v7_protocol_run.tavern.yaml b/robot-server/tests/integration/http_api/runs/test_json_v7_protocol_run.tavern.yaml index 0915fb69f12..3ab7386ba4f 100644 --- a/robot-server/tests/integration/http_api/runs/test_json_v7_protocol_run.tavern.yaml +++ b/robot-server/tests/integration/http_api/runs/test_json_v7_protocol_run.tavern.yaml @@ -47,6 +47,7 @@ stages: location: !anydict labwareOffsets: [] runTimeParameters: [] + outputFileIds: [] liquids: - id: waterId displayName: Water diff --git a/robot-server/tests/integration/http_api/runs/test_protocol_run.tavern.yaml b/robot-server/tests/integration/http_api/runs/test_protocol_run.tavern.yaml index 2bfa2ccd552..2ad0a92eb8c 100644 --- a/robot-server/tests/integration/http_api/runs/test_protocol_run.tavern.yaml +++ b/robot-server/tests/integration/http_api/runs/test_protocol_run.tavern.yaml @@ -44,6 +44,7 @@ stages: location: !anydict labwareOffsets: [] runTimeParameters: [] + outputFileIds: [] protocolId: '{protocol_id}' liquids: [] save: @@ -240,6 +241,7 @@ stages: startedAt: !re_fullmatch "\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}\\.\\d+(Z|([+-]\\d{2}:\\d{2}))" liquids: [] runTimeParameters: [] + outputFileIds: [] completedAt: !re_fullmatch "\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}\\.\\d+(Z|([+-]\\d{2}:\\d{2}))" errors: [] hasEverEnteredErrorRecovery: False diff --git a/robot-server/tests/integration/http_api/runs/test_run_queued_protocol_commands.tavern.yaml b/robot-server/tests/integration/http_api/runs/test_run_queued_protocol_commands.tavern.yaml index edec26c4e03..14ae502d800 100644 --- 
a/robot-server/tests/integration/http_api/runs/test_run_queued_protocol_commands.tavern.yaml +++ b/robot-server/tests/integration/http_api/runs/test_run_queued_protocol_commands.tavern.yaml @@ -96,6 +96,7 @@ stages: labwareOffsets: [] liquids: [] runTimeParameters: [] + outputFileIds: [] modules: [] pipettes: [] status: 'idle' diff --git a/robot-server/tests/integration/http_api/runs/test_run_with_run_time_parameters.tavern.yaml b/robot-server/tests/integration/http_api/runs/test_run_with_run_time_parameters.tavern.yaml index 2533769b56f..8916ebd1cf2 100644 --- a/robot-server/tests/integration/http_api/runs/test_run_with_run_time_parameters.tavern.yaml +++ b/robot-server/tests/integration/http_api/runs/test_run_with_run_time_parameters.tavern.yaml @@ -115,6 +115,7 @@ stages: file: id: '{data_file_id}' name: sample_plates.csv + outputFileIds: [] liquids: [] protocolId: '{protocol_id}' diff --git a/robot-server/tests/protocols/test_protocol_analyzer.py b/robot-server/tests/protocols/test_protocol_analyzer.py index 8448ded8870..5d3d9da8a13 100644 --- a/robot-server/tests/protocols/test_protocol_analyzer.py +++ b/robot-server/tests/protocols/test_protocol_analyzer.py @@ -190,6 +190,7 @@ async def test_analyze( labwareOffsets=[], liquids=[], wells=[], + files=[], hasEverEnteredErrorRecovery=False, ), parameters=[bool_parameter], diff --git a/robot-server/tests/runs/router/conftest.py b/robot-server/tests/runs/router/conftest.py index 700be63f4ef..0ca0c5cc4f5 100644 --- a/robot-server/tests/runs/router/conftest.py +++ b/robot-server/tests/runs/router/conftest.py @@ -12,7 +12,10 @@ ) from robot_server.deck_configuration.store import DeckConfigurationStore +from robot_server.file_provider.provider import FileProviderWrapper + from opentrons.protocol_engine import ProtocolEngine +from opentrons.protocol_engine.resources import FileProvider @pytest.fixture() @@ -63,3 +66,17 @@ def mock_maintenance_run_orchestrator_store( def mock_deck_configuration_store(decoy: Decoy) -> DeckConfigurationStore: """Get a mock DeckConfigurationStore.""" return decoy.mock(cls=DeckConfigurationStore) + + +@pytest.fixture() +def mock_file_provider_wrapper(decoy: Decoy) -> FileProviderWrapper: + """Return a mock FileProviderWrapper.""" + return decoy.mock(cls=FileProviderWrapper) + + +@pytest.fixture() +def mock_file_provider( + decoy: Decoy, mock_file_provider_wrapper: FileProviderWrapper +) -> FileProvider: + """Return a mock FileProvider.""" + return decoy.mock(cls=FileProvider) diff --git a/robot-server/tests/runs/router/test_base_router.py b/robot-server/tests/runs/router/test_base_router.py index 8a10af1940d..25c91f70ade 100644 --- a/robot-server/tests/runs/router/test_base_router.py +++ b/robot-server/tests/runs/router/test_base_router.py @@ -69,6 +69,10 @@ ) from robot_server.deck_configuration.store import DeckConfigurationStore +from opentrons.protocol_engine.resources.file_provider import ( + FileProvider, +) +from robot_server.file_provider.provider import FileProviderWrapper def mock_notify_publishers() -> None: @@ -125,8 +129,10 @@ async def test_create_run( mock_run_auto_deleter: RunAutoDeleter, labware_offset_create: pe_types.LabwareOffsetCreate, mock_deck_configuration_store: DeckConfigurationStore, + mock_file_provider_wrapper: FileProviderWrapper, mock_protocol_store: ProtocolStore, mock_data_files_store: DataFilesStore, + mock_file_provider: FileProvider, ) -> None: """It should be able to create a basic run.""" run_id = "run-id" @@ -145,17 +151,20 @@ async def test_create_run( labwareOffsets=[], 
status=pe_types.EngineStatus.IDLE, liquids=[], + outputFileIds=[], hasEverEnteredErrorRecovery=False, ) decoy.when( await mock_deck_configuration_store.get_deck_configuration() ).then_return([]) + decoy.when( await mock_run_data_manager.create( run_id=run_id, created_at=run_created_at, labware_offsets=[labware_offset_create], deck_configuration=[], + file_provider=mock_file_provider, protocol=None, run_time_param_values=None, run_time_param_paths=None, @@ -175,6 +184,7 @@ async def test_create_run( run_auto_deleter=mock_run_auto_deleter, quick_transfer_run_auto_deleter=mock_run_auto_deleter, deck_configuration_store=mock_deck_configuration_store, + file_provider=mock_file_provider, notify_publishers=mock_notify_publishers, protocol_store=mock_protocol_store, check_estop=True, @@ -193,6 +203,7 @@ async def test_create_protocol_run( mock_run_auto_deleter: RunAutoDeleter, mock_deck_configuration_store: DeckConfigurationStore, mock_data_files_store: DataFilesStore, + mock_file_provider: FileProvider, ) -> None: """It should be able to create a protocol run.""" run_id = "run-id" @@ -228,6 +239,7 @@ async def test_create_protocol_run( labwareOffsets=[], status=pe_types.EngineStatus.IDLE, liquids=[], + outputFileIds=[], hasEverEnteredErrorRecovery=False, ) decoy.when(mock_data_files_store.get("file-id")).then_return( @@ -251,6 +263,7 @@ async def test_create_protocol_run( created_at=run_created_at, labware_offsets=[], deck_configuration=[], + file_provider=mock_file_provider, protocol=protocol_resource, run_time_param_values={"foo": "bar"}, run_time_param_paths={"my-csv-param": Path("/dev/null/file-id/abc.xyz")}, @@ -275,6 +288,7 @@ async def test_create_protocol_run( run_auto_deleter=mock_run_auto_deleter, quick_transfer_run_auto_deleter=mock_run_auto_deleter, deck_configuration_store=mock_deck_configuration_store, + file_provider=mock_file_provider, notify_publishers=mock_notify_publishers, check_estop=True, ) @@ -293,6 +307,7 @@ async def test_create_protocol_run_bad_protocol_id( mock_run_auto_deleter: RunAutoDeleter, mock_data_files_store: DataFilesStore, mock_data_files_directory: Path, + mock_file_provider: FileProvider, ) -> None: """It should 404 if a protocol for a run does not exist.""" error = ProtocolNotFoundError("protocol-id") @@ -309,6 +324,7 @@ async def test_create_protocol_run_bad_protocol_id( run_data_manager=mock_run_data_manager, data_files_store=mock_data_files_store, data_files_directory=mock_data_files_directory, + file_provider=mock_file_provider, run_id="run-id", created_at=datetime.now(), run_auto_deleter=mock_run_auto_deleter, @@ -329,6 +345,7 @@ async def test_create_run_conflict( mock_protocol_store: ProtocolStore, mock_data_files_store: DataFilesStore, mock_data_files_directory: Path, + mock_file_provider: FileProvider, ) -> None: """It should respond with a conflict error if multiple engines are created.""" created_at = datetime(year=2021, month=1, day=1) @@ -342,6 +359,7 @@ async def test_create_run_conflict( created_at=created_at, labware_offsets=[], deck_configuration=[], + file_provider=mock_file_provider, protocol=None, run_time_param_values=None, run_time_param_paths=None, @@ -361,6 +379,7 @@ async def test_create_run_conflict( deck_configuration_store=mock_deck_configuration_store, data_files_store=mock_data_files_store, data_files_directory=mock_data_files_directory, + file_provider=mock_file_provider, notify_publishers=mock_notify_publishers, check_estop=True, ) @@ -387,6 +406,7 @@ async def test_get_run_data_from_url( labware=[], labwareOffsets=[], 
liquids=[], + outputFileIds=[], hasEverEnteredErrorRecovery=False, ) @@ -434,6 +454,7 @@ async def test_get_run() -> None: labware=[], labwareOffsets=[], liquids=[], + outputFileIds=[], hasEverEnteredErrorRecovery=False, ) @@ -480,6 +501,7 @@ async def test_get_runs_not_empty( labware=[], labwareOffsets=[], liquids=[], + outputFileIds=[], hasEverEnteredErrorRecovery=False, ) @@ -496,6 +518,7 @@ async def test_get_runs_not_empty( labware=[], labwareOffsets=[], liquids=[], + outputFileIds=[], hasEverEnteredErrorRecovery=False, ) @@ -575,6 +598,7 @@ async def test_update_run_to_not_current( labware=[], labwareOffsets=[], liquids=[], + outputFileIds=[], hasEverEnteredErrorRecovery=False, ) @@ -610,6 +634,7 @@ async def test_update_current_none_noop( labware=[], labwareOffsets=[], liquids=[], + outputFileIds=[], hasEverEnteredErrorRecovery=False, ) diff --git a/robot-server/tests/runs/router/test_labware_router.py b/robot-server/tests/runs/router/test_labware_router.py index 1e3b929446d..900eac530f1 100644 --- a/robot-server/tests/runs/router/test_labware_router.py +++ b/robot-server/tests/runs/router/test_labware_router.py @@ -40,6 +40,7 @@ def run() -> Run: labwareOffsets=[], protocolId=None, liquids=[], + outputFileIds=[], hasEverEnteredErrorRecovery=False, ) diff --git a/robot-server/tests/runs/test_run_controller.py b/robot-server/tests/runs/test_run_controller.py index d60e9da6082..b069632a4e4 100644 --- a/robot-server/tests/runs/test_run_controller.py +++ b/robot-server/tests/runs/test_run_controller.py @@ -72,6 +72,7 @@ def engine_state_summary() -> StateSummary: modules=[], liquids=[], wells=[], + files=[], hasEverEnteredErrorRecovery=False, ) diff --git a/robot-server/tests/runs/test_run_data_manager.py b/robot-server/tests/runs/test_run_data_manager.py index 981a0e7177c..869f1c1c643 100644 --- a/robot-server/tests/runs/test_run_data_manager.py +++ b/robot-server/tests/runs/test_run_data_manager.py @@ -52,6 +52,8 @@ ) from robot_server.service.notifications import RunsPublisher from robot_server.service.task_runner import TaskRunner +from opentrons.protocol_engine.resources import FileProvider +from robot_server.file_provider.provider import FileProviderWrapper def mock_notify_publishers() -> None: @@ -141,6 +143,20 @@ def mock_nozzle_maps(decoy: Decoy) -> Dict[str, NozzleMap]: return {"mock-pipette-id": mock_nozzle_map} +@pytest.fixture() +def mock_file_provider_wrapper(decoy: Decoy) -> FileProviderWrapper: + """Return a mock FileProviderWrapper.""" + return decoy.mock(cls=FileProviderWrapper) + + +@pytest.fixture() +def mock_file_provider( + decoy: Decoy, mock_file_provider_wrapper: FileProviderWrapper +) -> FileProvider: + """Return a mock FileProvider.""" + return decoy.mock(cls=FileProvider) + + @pytest.fixture def run_resource() -> RunResource: """Get a StateSummary value object.""" @@ -210,6 +226,7 @@ async def test_create( initial_error_recovery_policy=sentinel.initial_error_recovery_policy, protocol=protocol, deck_configuration=sentinel.deck_configuration, + file_provider=sentinel.file_provider, run_time_param_values=sentinel.run_time_param_values, run_time_param_paths=sentinel.run_time_param_paths, notify_publishers=mock_notify_publishers, @@ -251,6 +268,7 @@ async def test_create( labware_offsets=sentinel.labware_offsets, protocol=protocol, deck_configuration=sentinel.deck_configuration, + file_provider=sentinel.file_provider, run_time_param_values=sentinel.run_time_param_values, run_time_param_paths=sentinel.run_time_param_paths, notify_publishers=mock_notify_publishers, 
@@ -271,6 +289,7 @@ async def test_create( modules=engine_state_summary.modules, liquids=engine_state_summary.liquids, runTimeParameters=[bool_parameter, file_parameter], + outputFileIds=engine_state_summary.files, ) decoy.verify( mock_run_store.insert_csv_rtp( @@ -284,6 +303,7 @@ async def test_create_engine_error( mock_run_orchestrator_store: RunOrchestratorStore, mock_run_store: RunStore, mock_error_recovery_setting_store: ErrorRecoverySettingStore, + mock_file_provider: FileProvider, subject: RunDataManager, ) -> None: """It should not create a resource if engine creation fails.""" @@ -307,6 +327,7 @@ async def test_create_engine_error( labware_offsets=[], protocol=None, deck_configuration=[], + file_provider=mock_file_provider, run_time_param_values=None, run_time_param_paths=None, notify_publishers=mock_notify_publishers, @@ -321,6 +342,7 @@ async def test_create_engine_error( labware_offsets=[], protocol=None, deck_configuration=[], + file_provider=mock_file_provider, run_time_param_values=None, run_time_param_paths=None, notify_publishers=mock_notify_publishers, @@ -374,6 +396,7 @@ async def test_get_current_run( modules=engine_state_summary.modules, liquids=engine_state_summary.liquids, runTimeParameters=run_time_parameters, + outputFileIds=engine_state_summary.files, ) assert subject.current_run_id == run_id @@ -416,6 +439,7 @@ async def test_get_historical_run( modules=engine_state_summary.modules, liquids=engine_state_summary.liquids, runTimeParameters=run_time_parameters, + outputFileIds=engine_state_summary.files, ) @@ -459,6 +483,7 @@ async def test_get_historical_run_no_data( modules=[], liquids=[], runTimeParameters=run_time_parameters, + outputFileIds=[], ) @@ -560,6 +585,7 @@ async def test_get_all_runs( modules=historical_run_data.modules, liquids=historical_run_data.liquids, runTimeParameters=historical_run_time_parameters, + outputFileIds=historical_run_data.files, ), Run( current=True, @@ -576,6 +602,7 @@ async def test_get_all_runs( modules=current_run_data.modules, liquids=current_run_data.liquids, runTimeParameters=current_run_time_parameters, + outputFileIds=current_run_data.files, ), ] @@ -674,6 +701,7 @@ async def test_update_current( modules=engine_state_summary.modules, liquids=engine_state_summary.liquids, runTimeParameters=run_time_parameters, + outputFileIds=engine_state_summary.files, ) @@ -730,6 +758,7 @@ async def test_update_current_noop( modules=engine_state_summary.modules, liquids=engine_state_summary.liquids, runTimeParameters=run_time_parameters, + outputFileIds=engine_state_summary.files, ) @@ -759,6 +788,7 @@ async def test_create_archives_existing( mock_run_orchestrator_store: RunOrchestratorStore, mock_run_store: RunStore, mock_error_recovery_setting_store: ErrorRecoverySettingStore, + mock_file_provider: FileProvider, subject: RunDataManager, ) -> None: """It should persist the previously current run when a new run is created.""" @@ -792,6 +822,7 @@ async def test_create_archives_existing( protocol=None, initial_error_recovery_policy=sentinel.initial_error_recovery_policy, deck_configuration=[], + file_provider=mock_file_provider, run_time_param_values=None, run_time_param_paths=None, notify_publishers=mock_notify_publishers, @@ -812,6 +843,7 @@ async def test_create_archives_existing( labware_offsets=[], protocol=None, deck_configuration=[], + file_provider=mock_file_provider, run_time_param_values=None, run_time_param_paths=None, notify_publishers=mock_notify_publishers, diff --git a/robot-server/tests/runs/test_run_orchestrator_store.py 
b/robot-server/tests/runs/test_run_orchestrator_store.py index e34c1340359..1774215acfd 100644 --- a/robot-server/tests/runs/test_run_orchestrator_store.py +++ b/robot-server/tests/runs/test_run_orchestrator_store.py @@ -24,6 +24,7 @@ NoRunOrchestrator, handle_estop_event, ) +from opentrons.protocol_engine.resources import FileProvider def mock_notify_publishers() -> None: @@ -61,6 +62,7 @@ async def test_create_engine(decoy: Decoy, subject: RunOrchestratorStore) -> Non labware_offsets=[], initial_error_recovery_policy=never_recover, protocol=None, + file_provider=FileProvider(), deck_configuration=[], notify_publishers=mock_notify_publishers, ) @@ -90,6 +92,7 @@ async def test_create_engine_uses_robot_type( initial_error_recovery_policy=never_recover, deck_configuration=[], protocol=None, + file_provider=FileProvider(), notify_publishers=mock_notify_publishers, ) @@ -112,6 +115,7 @@ async def test_create_engine_with_labware_offsets( initial_error_recovery_policy=never_recover, deck_configuration=[], protocol=None, + file_provider=FileProvider(), notify_publishers=mock_notify_publishers, ) @@ -136,6 +140,7 @@ async def test_archives_state_if_engine_already_exists( initial_error_recovery_policy=never_recover, deck_configuration=[], protocol=None, + file_provider=FileProvider(), notify_publishers=mock_notify_publishers, ) @@ -146,6 +151,7 @@ async def test_archives_state_if_engine_already_exists( initial_error_recovery_policy=never_recover, deck_configuration=[], protocol=None, + file_provider=FileProvider(), notify_publishers=mock_notify_publishers, ) @@ -160,6 +166,7 @@ async def test_clear_engine(subject: RunOrchestratorStore) -> None: initial_error_recovery_policy=never_recover, deck_configuration=[], protocol=None, + file_provider=FileProvider(), notify_publishers=mock_notify_publishers, ) assert subject._run_orchestrator is not None @@ -182,6 +189,7 @@ async def test_clear_engine_not_stopped_or_idle( initial_error_recovery_policy=never_recover, deck_configuration=[], protocol=None, + file_provider=FileProvider(), notify_publishers=mock_notify_publishers, ) assert subject._run_orchestrator is not None @@ -198,6 +206,7 @@ async def test_clear_idle_engine(subject: RunOrchestratorStore) -> None: initial_error_recovery_policy=never_recover, deck_configuration=[], protocol=None, + file_provider=FileProvider(), notify_publishers=mock_notify_publishers, ) assert subject._run_orchestrator is not None @@ -250,6 +259,7 @@ async def test_get_default_orchestrator_current_unstarted( initial_error_recovery_policy=never_recover, deck_configuration=[], protocol=None, + file_provider=FileProvider(), notify_publishers=mock_notify_publishers, ) @@ -265,6 +275,7 @@ async def test_get_default_orchestrator_conflict(subject: RunOrchestratorStore) initial_error_recovery_policy=never_recover, deck_configuration=[], protocol=None, + file_provider=FileProvider(), notify_publishers=mock_notify_publishers, ) subject.play() @@ -283,6 +294,7 @@ async def test_get_default_orchestrator_run_stopped( initial_error_recovery_policy=never_recover, deck_configuration=[], protocol=None, + file_provider=FileProvider(), notify_publishers=mock_notify_publishers, ) await subject.finish(error=None) diff --git a/robot-server/tests/runs/test_run_store.py b/robot-server/tests/runs/test_run_store.py index 55a1849e693..ce6f8326c22 100644 --- a/robot-server/tests/runs/test_run_store.py +++ b/robot-server/tests/runs/test_run_store.py @@ -132,6 +132,7 @@ def state_summary() -> StateSummary: status=EngineStatus.IDLE, liquids=liquids, 
wells=[], + files=[], hasEverEnteredErrorRecovery=False, ) @@ -216,6 +217,7 @@ def invalid_state_summary() -> StateSummary: status=EngineStatus.IDLE, liquids=liquids, wells=[], + files=[], ) diff --git a/shared-data/command/schemas/10.json b/shared-data/command/schemas/10.json index 6508269ac62..be8e870c5bb 100644 --- a/shared-data/command/schemas/10.json +++ b/shared-data/command/schemas/10.json @@ -4236,6 +4236,11 @@ "title": "Moduleid", "description": "Unique ID of the Absorbance Reader.", "type": "string" + }, + "fileName": { + "title": "Filename", + "description": "Optional file name to use when storing the results of a measurement.", + "type": "string" } }, "required": ["moduleId"] From 3002d0b695b18776081615b8fc26b4c4e1523a5e Mon Sep 17 00:00:00 2001 From: Nick Diehl <47604184+ncdiehl11@users.noreply.github.com> Date: Mon, 21 Oct 2024 09:45:25 -0400 Subject: [PATCH 2/9] feat(protocol-designer): update StepSummary render logic and copy (#16512) This PR updates when StepSummaries show and updates copy to reflect final decisions. The intended behavior is to only show StepSummary when a step is hovered or selected, but not when the step form is open. Hovered StepSummaries take precedence over those of selected steps. I also add an optional second element to StepSummary that will display the step's notes if they exists. Closes AUTH-886 --- .../localization/en/protocol_steps.json | 9 +- .../Designer/ProtocolSteps/StepSummary.tsx | 109 +++++++++++++++--- .../pages/Designer/ProtocolSteps/index.tsx | 13 ++- 3 files changed, 108 insertions(+), 23 deletions(-) diff --git a/protocol-designer/src/assets/localization/en/protocol_steps.json b/protocol-designer/src/assets/localization/en/protocol_steps.json index dfc3e0c6345..42ccaa9c15b 100644 --- a/protocol-designer/src/assets/localization/en/protocol_steps.json +++ b/protocol-designer/src/assets/localization/en/protocol_steps.json @@ -27,6 +27,7 @@ "from": "from", "heater_shaker": { "active": { + "ambient": "ambient", "latch": "Latch", "shake": "Shaking at", "temperature": "{{module}}set to", @@ -49,7 +50,7 @@ "mix_times": "Mix repititions", "mix_volume": "Mix volume", "mix": "Mix", - "mix_step": "Mixing{{times}} times in{{labware}}", + "mix_step": "Mixing{{times}} times in{{wells}} of {{labware}}", "mix_repetitions": "Mix repetitions", "module": "Module", "move_labware": { @@ -57,9 +58,9 @@ "no_gripper": "Move{{labware}}to" }, "move_liquid": { - "consolidate": "Consolidatingfrom{{source}}to{{destination}}", - "distribute": "Distributingfrom{{source}}to{{destination}}", - "transfer": "Transferringfrom{{source}}to{{destination}}" + "consolidate": "Consolidatingfrom{{sourceWells}} of {{source}}to{{destinationWells}} of {{destination}}", + "distribute": "Distributingfrom{{sourceWells}} of {{source}}to{{destinationWells}} of {{destination}}", + "transfer": "Transferringfrom{{sourceWells}} of {{source}}to{{destinationWells}} of {{destination}}" }, "multi_dispense_options": "Distribute options", "multiAspirate": "Consolidate path", diff --git a/protocol-designer/src/pages/Designer/ProtocolSteps/StepSummary.tsx b/protocol-designer/src/pages/Designer/ProtocolSteps/StepSummary.tsx index 39855667e64..e993496dba3 100644 --- a/protocol-designer/src/pages/Designer/ProtocolSteps/StepSummary.tsx +++ b/protocol-designer/src/pages/Designer/ProtocolSteps/StepSummary.tsx @@ -1,6 +1,8 @@ import { useSelector } from 'react-redux' import { Trans, useTranslation } from 'react-i18next' - +import first from 'lodash/first' +import flatten from 'lodash/flatten' +import 
last from 'lodash/last' import { ALIGN_CENTER, BORDERS, @@ -14,7 +16,10 @@ import { } from '@opentrons/components' import { getModuleDisplayName } from '@opentrons/shared-data' -import { getModuleEntities } from '../../../step-forms/selectors' +import { + getLabwareEntities, + getModuleEntities, +} from '../../../step-forms/selectors' import { getLabwareNicknamesById } from '../../../ui/labware/selectors' import type { FormData } from '../../../form-types' @@ -43,16 +48,35 @@ function StyledTrans(props: StyledTransProps): JSX.Element { ) } +const getWellsForStepSummary = ( + targetWells: string[], + labwareWells: string[] +): string => { + if (targetWells.length === 1) { + return targetWells[0] + } + const firstElementIndexOffset = labwareWells.indexOf(targetWells[0]) + const isInOrder = targetWells.every( + (targetWell, i) => + labwareWells.indexOf(targetWell) === firstElementIndexOffset + i + ) + return isInOrder + ? `${first(targetWells)}-${last(targetWells)}` + : `${targetWells.length} wells` +} + interface StepSummaryProps { currentStep: FormData | null + stepDetails?: string } export function StepSummary(props: StepSummaryProps): JSX.Element | null { - const { currentStep } = props + const { currentStep, stepDetails } = props const { t } = useTranslation(['protocol_steps', 'application']) const labwareNicknamesById = useSelector(getLabwareNicknamesById) + const labwareEntities = useSelector(getLabwareEntities) const modules = useSelector(getModuleEntities) if (currentStep?.stepType == null) { return null @@ -62,13 +86,28 @@ export function StepSummary(props: StepSummaryProps): JSX.Element | null { let stepSummaryContent: JSX.Element | null = null switch (stepType) { case 'mix': - const { labware: mixLabwareId, volume: mixVolume, times } = currentStep + const { + labware: mixLabwareId, + volume: mixVolume, + times, + wells: mix_wells, + labware: mixLabware, + } = currentStep const mixLabwareDisplayName = labwareNicknamesById[mixLabwareId] + const mixWellsDisplay = getWellsForStepSummary( + mix_wells as string[], + flatten(labwareEntities[mixLabware].def.ordering) + ) + stepSummaryContent = ( ) break @@ -254,13 +293,29 @@ export function StepSummary(props: StepSummaryProps): JSX.Element | null { } else { moveLiquidType = 'transfer' } - const { aspirate_labware, dispense_labware, volume } = currentStep + const { + aspirate_labware, + aspirate_wells, + dispense_wells, + dispense_labware, + volume, + } = currentStep const sourceLabwareName = labwareNicknamesById[aspirate_labware] const destinationLabwareName = labwareNicknamesById[dispense_labware] + const aspirateWellsDisplay = getWellsForStepSummary( + aspirate_wells as string[], + flatten(labwareEntities[aspirate_labware].def.ordering) + ) + const dispenseWellsDisplay = getWellsForStepSummary( + dispense_wells as string[], + flatten(labwareEntities[dispense_labware].def.ordering) + ) stepSummaryContent = ( {targetSpeed != null ? ( - {stepSummaryContent} + {stepSummaryContent != null ? ( + + {stepSummaryContent} + + ) : null} + {stepDetails != null && stepDetails !== '' ? ( + + {stepDetails} + + ) : null} ) : null } diff --git a/protocol-designer/src/pages/Designer/ProtocolSteps/index.tsx b/protocol-designer/src/pages/Designer/ProtocolSteps/index.tsx index 4d1585040a7..468ea45e01f 100644 --- a/protocol-designer/src/pages/Designer/ProtocolSteps/index.tsx +++ b/protocol-designer/src/pages/Designer/ProtocolSteps/index.tsx @@ -63,10 +63,10 @@ export function ProtocolSteps(): JSX.Element { ? 
savedStepForms[currentstepIdForStepSummary] : null + const stepDetails = currentStep?.stepDetails ?? null return ( ) : null} - + {deckView === leftString ? ( ) : ( )} {formData == null ? ( - + ) : null} From 397f079e0e0e51cee54efab9c463c134071d2ee3 Mon Sep 17 00:00:00 2001 From: TamarZanzouri Date: Mon, 21 Oct 2024 09:48:48 -0400 Subject: [PATCH 3/9] feature(api): add a defined over pressure error to prepare to aspirate (#16518) Partially closes EXEC-557 Raise a defined over pressure error for prepare for aspirate command. --- .../commands/prepare_to_aspirate.py | 75 +++++++++++++++---- .../commands/test_prepare_to_aspirate.py | 69 +++++++++++++++-- 2 files changed, 123 insertions(+), 21 deletions(-) diff --git a/api/src/opentrons/protocol_engine/commands/prepare_to_aspirate.py b/api/src/opentrons/protocol_engine/commands/prepare_to_aspirate.py index d427b38dc1e..d63e42a7f90 100644 --- a/api/src/opentrons/protocol_engine/commands/prepare_to_aspirate.py +++ b/api/src/opentrons/protocol_engine/commands/prepare_to_aspirate.py @@ -1,18 +1,28 @@ """Prepare to aspirate command request, result, and implementation models.""" from __future__ import annotations +from opentrons_shared_data.errors.exceptions import PipetteOverpressureError from pydantic import BaseModel -from typing import TYPE_CHECKING, Optional, Type +from typing import TYPE_CHECKING, Optional, Type, Union from typing_extensions import Literal from .pipetting_common import ( + OverpressureError, PipetteIdMixin, ) -from .command import AbstractCommandImpl, BaseCommand, BaseCommandCreate, SuccessData +from .command import ( + AbstractCommandImpl, + BaseCommand, + BaseCommandCreate, + DefinedErrorData, + SuccessData, +) from ..errors.error_occurrence import ErrorOccurrence if TYPE_CHECKING: - from ..execution.pipetting import PipettingHandler + from ..execution import PipettingHandler, GantryMover + from ..resources import ModelUtils + PrepareToAspirateCommandType = Literal["prepareToAspirate"] @@ -29,25 +39,60 @@ class PrepareToAspirateResult(BaseModel): pass +_ExecuteReturn = Union[ + SuccessData[PrepareToAspirateResult, None], + DefinedErrorData[OverpressureError], +] + + class PrepareToAspirateImplementation( - AbstractCommandImpl[ - PrepareToAspirateParams, SuccessData[PrepareToAspirateResult, None] - ] + AbstractCommandImpl[PrepareToAspirateParams, _ExecuteReturn] ): """Prepare for aspirate command implementation.""" - def __init__(self, pipetting: PipettingHandler, **kwargs: object) -> None: + def __init__( + self, + pipetting: PipettingHandler, + model_utils: ModelUtils, + gantry_mover: GantryMover, + **kwargs: object, + ) -> None: self._pipetting_handler = pipetting + self._model_utils = model_utils + self._gantry_mover = gantry_mover - async def execute( - self, params: PrepareToAspirateParams - ) -> SuccessData[PrepareToAspirateResult, None]: + async def execute(self, params: PrepareToAspirateParams) -> _ExecuteReturn: """Prepare the pipette to aspirate.""" - await self._pipetting_handler.prepare_for_aspirate( - pipette_id=params.pipetteId, - ) - - return SuccessData(public=PrepareToAspirateResult(), private=None) + current_position = await self._gantry_mover.get_position(params.pipetteId) + try: + await self._pipetting_handler.prepare_for_aspirate( + pipette_id=params.pipetteId, + ) + except PipetteOverpressureError as e: + return DefinedErrorData( + public=OverpressureError( + id=self._model_utils.generate_id(), + createdAt=self._model_utils.get_timestamp(), + wrappedErrors=[ + ErrorOccurrence.from_failed( + 
id=self._model_utils.generate_id(), + createdAt=self._model_utils.get_timestamp(), + error=e, + ) + ], + errorInfo=( + { + "retryLocation": ( + current_position.x, + current_position.y, + current_position.z, + ) + } + ), + ), + ) + else: + return SuccessData(public=PrepareToAspirateResult(), private=None) class PrepareToAspirate( diff --git a/api/tests/opentrons/protocol_engine/commands/test_prepare_to_aspirate.py b/api/tests/opentrons/protocol_engine/commands/test_prepare_to_aspirate.py index b11254af481..45e8db96837 100644 --- a/api/tests/opentrons/protocol_engine/commands/test_prepare_to_aspirate.py +++ b/api/tests/opentrons/protocol_engine/commands/test_prepare_to_aspirate.py @@ -1,25 +1,41 @@ """Test prepare to aspirate commands.""" - -from decoy import Decoy +from datetime import datetime +from opentrons.types import Point +import pytest +from decoy import Decoy, matchers from opentrons.protocol_engine.execution import ( PipettingHandler, ) -from opentrons.protocol_engine.commands.command import SuccessData +from opentrons.protocol_engine.commands.command import DefinedErrorData, SuccessData from opentrons.protocol_engine.commands.prepare_to_aspirate import ( PrepareToAspirateParams, PrepareToAspirateImplementation, PrepareToAspirateResult, ) +from opentrons.protocol_engine.execution.gantry_mover import GantryMover +from opentrons.protocol_engine.resources.model_utils import ModelUtils +from opentrons.protocol_engine.commands.pipetting_common import OverpressureError +from opentrons_shared_data.errors.exceptions import PipetteOverpressureError + + +@pytest.fixture +def subject( + pipetting: PipettingHandler, + model_utils: ModelUtils, + gantry_mover: GantryMover, +) -> PrepareToAspirateImplementation: + """Get the implementation subject.""" + return PrepareToAspirateImplementation( + pipetting=pipetting, model_utils=model_utils, gantry_mover=gantry_mover + ) async def test_prepare_to_aspirate_implmenetation( - decoy: Decoy, pipetting: PipettingHandler + decoy: Decoy, subject: PrepareToAspirateImplementation, pipetting: PipettingHandler ) -> None: """A PrepareToAspirate command should have an executing implementation.""" - subject = PrepareToAspirateImplementation(pipetting=pipetting) - data = PrepareToAspirateParams(pipetteId="some id") decoy.when(await pipetting.prepare_for_aspirate(pipette_id="some id")).then_return( @@ -28,3 +44,44 @@ async def test_prepare_to_aspirate_implmenetation( result = await subject.execute(data) assert result == SuccessData(public=PrepareToAspirateResult(), private=None) + + +async def test_overpressure_error( + decoy: Decoy, + gantry_mover: GantryMover, + pipetting: PipettingHandler, + subject: PrepareToAspirateImplementation, + model_utils: ModelUtils, +) -> None: + """It should return an overpressure error if the hardware API indicates that.""" + pipette_id = "pipette-id" + + position = Point(x=1, y=2, z=3) + + error_id = "error-id" + error_timestamp = datetime(year=2020, month=1, day=2) + + data = PrepareToAspirateParams( + pipetteId=pipette_id, + ) + + decoy.when( + await pipetting.prepare_for_aspirate( + pipette_id=pipette_id, + ), + ).then_raise(PipetteOverpressureError()) + + decoy.when(model_utils.generate_id()).then_return(error_id) + decoy.when(model_utils.get_timestamp()).then_return(error_timestamp) + decoy.when(await gantry_mover.get_position(pipette_id)).then_return(position) + + result = await subject.execute(data) + + assert result == DefinedErrorData( + public=OverpressureError.construct( + id=error_id, + createdAt=error_timestamp, + 
wrappedErrors=[matchers.Anything()], + errorInfo={"retryLocation": (position.x, position.y, position.z)}, + ), + ) From d2829dd92e37724cb82833025b7a567f356f9556 Mon Sep 17 00:00:00 2001 From: koji Date: Mon, 21 Oct 2024 10:30:34 -0400 Subject: [PATCH 4/9] fix(protocol-designer): add setHovered functions to clear unnecessary item (#16538) * fix(protocol-designer): add setHovered functions to clear unnecessary item --- .../src/pages/Designer/DeckSetup/DeckSetupTools.tsx | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/protocol-designer/src/pages/Designer/DeckSetup/DeckSetupTools.tsx b/protocol-designer/src/pages/Designer/DeckSetup/DeckSetupTools.tsx index 20975e76c88..0df813cf114 100644 --- a/protocol-designer/src/pages/Designer/DeckSetup/DeckSetupTools.tsx +++ b/protocol-designer/src/pages/Designer/DeckSetup/DeckSetupTools.tsx @@ -161,6 +161,8 @@ export function DeckSetupTools(props: DeckSetupToolsProps): JSX.Element | null { } const handleClear = (): void => { + onDeckProps?.setHoveredModule(null) + onDeckProps?.setHoveredFixture(null) if (slot !== 'offDeck') { // clear module from slot if (createdModuleForSlot != null) { @@ -312,6 +314,7 @@ export function DeckSetupTools(props: DeckSetupToolsProps): JSX.Element | null { }} setHovered={() => { if (onDeckProps?.setHoveredModule != null) { + onDeckProps.setHoveredFixture(null) onDeckProps.setHoveredModule(model) } }} @@ -390,6 +393,7 @@ export function DeckSetupTools(props: DeckSetupToolsProps): JSX.Element | null { }} setHovered={() => { if (onDeckProps?.setHoveredFixture != null) { + onDeckProps.setHoveredModule(null) onDeckProps.setHoveredFixture(fixture) } }} From a5ae3fe4329477953b522c073483d027bd6bffcf Mon Sep 17 00:00:00 2001 From: Jamey Huffnagle Date: Mon, 21 Oct 2024 12:19:46 -0400 Subject: [PATCH 5/9] feat(app): add overpressure during prepare to aspirate to Error Recovery (#16549) Closes EXEC-557 --- .../RecoveryOptions/SelectRecoveryOption.tsx | 18 +++++++++++++----- .../utils/__tests__/getErrorKind.test.ts | 5 +++++ .../ErrorRecoveryFlows/utils/getErrorKind.ts | 7 +++++-- 3 files changed, 23 insertions(+), 7 deletions(-) diff --git a/app/src/organisms/ErrorRecoveryFlows/RecoveryOptions/SelectRecoveryOption.tsx b/app/src/organisms/ErrorRecoveryFlows/RecoveryOptions/SelectRecoveryOption.tsx index 8acc69c8ab6..c44252e2da9 100644 --- a/app/src/organisms/ErrorRecoveryFlows/RecoveryOptions/SelectRecoveryOption.tsx +++ b/app/src/organisms/ErrorRecoveryFlows/RecoveryOptions/SelectRecoveryOption.tsx @@ -1,8 +1,10 @@ import { useState, useEffect } from 'react' import head from 'lodash/head' import { useTranslation } from 'react-i18next' +import { css } from 'styled-components' import { + RESPONSIVENESS, DIRECTION_COLUMN, Flex, SPACING, @@ -108,11 +110,7 @@ export function RecoveryOptions({ isOnDevice, }: RecoveryOptionsProps): JSX.Element { return ( - + {validRecoveryOptions.map((recoveryOption: RecoveryRoute) => { const optionName = getRecoveryOptionCopy(recoveryOption, errorKind) return ( @@ -133,6 +131,16 @@ export function RecoveryOptions({ ) } +const RECOVERY_OPTION_CONTAINER_STYLE = css` + flex-direction: ${DIRECTION_COLUMN}; + grid-gap: ${SPACING.spacing4}; + width: 100%; + + @media ${RESPONSIVENESS.touchscreenMediaQuerySpecs} { + grid-gap: ${SPACING.spacing8}; + } +` + // Pre-fetch tip attachment status. Users are not blocked from proceeding at this step. 
export function useCurrentTipStatus( determineTipStatus: () => Promise diff --git a/app/src/organisms/ErrorRecoveryFlows/utils/__tests__/getErrorKind.test.ts b/app/src/organisms/ErrorRecoveryFlows/utils/__tests__/getErrorKind.test.ts index 1aa7080a52a..fb9eea82c63 100644 --- a/app/src/organisms/ErrorRecoveryFlows/utils/__tests__/getErrorKind.test.ts +++ b/app/src/organisms/ErrorRecoveryFlows/utils/__tests__/getErrorKind.test.ts @@ -7,6 +7,11 @@ import type { RunCommandError, RunTimeCommand } from '@opentrons/shared-data' describe('getErrorKind', () => { it.each([ + { + commandType: 'prepareToAspirate', + errorType: DEFINED_ERROR_TYPES.OVERPRESSURE, + expectedError: ERROR_KINDS.OVERPRESSURE_PREPARE_TO_ASPIRATE, + }, { commandType: 'aspirate', errorType: DEFINED_ERROR_TYPES.OVERPRESSURE, diff --git a/app/src/organisms/ErrorRecoveryFlows/utils/getErrorKind.ts b/app/src/organisms/ErrorRecoveryFlows/utils/getErrorKind.ts index a537c3cf295..30fc4783473 100644 --- a/app/src/organisms/ErrorRecoveryFlows/utils/getErrorKind.ts +++ b/app/src/organisms/ErrorRecoveryFlows/utils/getErrorKind.ts @@ -13,9 +13,12 @@ export function getErrorKind(failedCommand: RunTimeCommand | null): ErrorKind { const errorType = failedCommand?.error?.errorType if (errorIsDefined) { - // todo(mm, 2024-07-02): Also handle aspirateInPlace and dispenseInPlace. - // https://opentrons.atlassian.net/browse/EXEC-593 if ( + commandType === 'prepareToAspirate' && + errorType === DEFINED_ERROR_TYPES.OVERPRESSURE + ) { + return ERROR_KINDS.OVERPRESSURE_PREPARE_TO_ASPIRATE + } else if ( (commandType === 'aspirate' || commandType === 'aspirateInPlace') && errorType === DEFINED_ERROR_TYPES.OVERPRESSURE ) { From 31a0340bf5286954d37ef2f238a67f1aa6a52e1f Mon Sep 17 00:00:00 2001 From: Seth Foster Date: Mon, 21 Oct 2024 12:24:13 -0400 Subject: [PATCH 6/9] refactor(app-shell-odd): overhaul odd system update (#16486) Flex robots are capable of updating their own software from the Internet or from USB sticks containing update files. This functionality is handled by the ODD's node side in `app-shell-odd`. It was implemented early in the flex development life cycle before we'd gotten a strong understanding of how everything would work, and was therefore a little scattershot and not really conformant to the way we now think about and structure our code. It also had some unexpected behaviors and was generally a little buggy. This PR refactors `app-shell-odd`'s system update mechanisms, ideally fixing said bugs. ## non-goals ### changing the frontend or the redux interaction patterns this was already a pretty big pr, so I didn't touch these. They probably should be touched, though; sourcing updates from web or from usb is a different thing from sourcing updates from web or from the user's file browser, which is what the UI side and the redux messages are designed to implement, and the concepts don't map terribly well. 
The redux message flow and semantics should not change in this PR, and this PR doesn't touch any app code ### changing the precedence or user-visible functionality of system updates We still have this idea that - Updates should be presented when an available update is different than the current system version - Updates from USB sticks have higher precedence than updates from the internet, even if the update from the internet has a higher version than the update from the USB stick - If multiple updates from sources of the same precedence are available (i.e., a USB stick with multiple update files is connected) then the update with the highest version takes precedence Changing this would require changing the frontend to have more than a "press a button to start an available update" type of flow, and require changing the redux messages. ## goals ### make the code easier to reason about The code was split sort of weirdly between a single ts file and a module of ts files; code to handle USB updates was scattered around the code to handle internet updates; it was generally a tough assignment to figure out what would happen when user actions are taken or redux messages are sent. Make it easier to understand ### make the code more robust The code didn't really uniformly handle errors or handle errors in the same way in multiple places; this is pretty much guaranteed because the code didn't really have tests. ## changes - move all the system update code into `system-update` - separate the code strongly by layer. the layer that handles the redux messages is in `system-update/handler.ts`, and it imports functions from lower levels in the `from-usb` and `from-web` modules. - separate the code strongly by source. all code dealing with updates from the web is in the `system-updates/from-web` moduels; the code dealing with updates from usb is in `system-updates/from-usb`. Each module presents a closure with a well-defined interface that the handler can use. - remove global mutable state in favor of closed-over mutable state for better testability - add tests, for everything (this is where the size of the diff is from) ## review requests - this is a pr that is mostly motivated by readability and robustness, so opinions on how readable and robust the code is are very welcome and needed - is the behavior correct from the description above? ## reviewing This ended up being a pretty big PR, sorry! I think a good way to approach looking at is to structure into the mostly-separate changes, which only affect each other implicitly: - the new cancellation mechanics in HTTP as its own thing - the action distribution changes in `main` - the `app.getPath()` changes in `main` and in `config` - the mechanics of `system-update/handler` first, then the two update providers underneath; the logic here is separated into "conveying the status of updates" in `handler.ts` and "getting updates" in the providers - the usb device sensing changes, which handle mass storage devices that are like `/dev/sda` but mounted at `/media/VOLUMENAME-sda` and fix excessive timeouts in the recursive enumeration ## testing this needs to be tested on a flex (conveniently, the OT-2 doesn't run this code). 
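To make the precedence rules described above concrete, here is a minimal TypeScript sketch. It is not the code in this PR; `UpdateCandidate`, `pickUpdate`, and the naive dotted-version comparison are assumptions for illustration only, and whether the winner is actually presented still depends on it differing from the current system version.

```ts
// Illustrative only: USB-sourced updates beat web-sourced ones regardless of
// version; within the winning source, the highest version is chosen.
interface UpdateCandidate {
  version: string
  source: 'usb' | 'web'
}

// naive dotted-version comparison, e.g. '7.2.0' > '7.1.9'
const compareVersions = (a: string, b: string): number => {
  const pa = a.split('.').map(Number)
  const pb = b.split('.').map(Number)
  for (let i = 0; i < Math.max(pa.length, pb.length); i++) {
    const diff = (pa[i] ?? 0) - (pb[i] ?? 0)
    if (diff !== 0) return diff
  }
  return 0
}

export function pickUpdate(
  candidates: UpdateCandidate[]
): UpdateCandidate | null {
  const usb = candidates.filter(c => c.source === 'usb')
  const pool = usb.length > 0 ? usb : candidates
  return pool.reduce<UpdateCandidate | null>(
    (best, candidate) =>
      best == null || compareVersions(candidate.version, best.version) > 0
        ? candidate
        : best,
    null
  )
}
```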
things that should be tested are - [x] when the robot is connected to the internet on an update channel that has an available update that is higher than the current robot version, does the system download, present, and apply the update - [x] when the robot is connected to the internet on an update channel that has an available update that is lower than or the same as the current robot version, does the system download but not present the update - [x] when the robot is connected to the internet on an update channel that has an available update, then disconnected after downloading the available update, does the system present and apply the update - [x] when the robot is disconnected from the internet while downloading an available update, does the system not present the update - [x] when you change the update channel, does the presented update get properly cleared and re-queried, including - [x] changing from a channel with an update to a channel with a different update - [x] changing from a channel with an update to a channel without an update - [x] changing from a channel without an update to a channel with an update - [x] when you plug in a usb stick with an update, it gets detected, presented, and applied - [x] including when an update is available from the internet - [x] including when the internet update is of a higher version than the usb update - [x] including when there are multiple updates on the usb stick - [x] including when the update on the usb stick is a lower version than the current version of the robot Closes RQA-3060 --- .github/workflows/app-test-build-deploy.yaml | 8 +- app-shell-odd/src/__tests__/http.test.ts | 1 + app-shell-odd/src/__tests__/update.test.ts | 47 -- app-shell-odd/src/actions.ts | 2 + app-shell-odd/src/config/index.ts | 16 +- app-shell-odd/src/constants.ts | 2 + app-shell-odd/src/early.ts | 22 + app-shell-odd/src/http.ts | 52 +- app-shell-odd/src/log.ts | 4 +- app-shell-odd/src/main.ts | 66 +- .../system-update/__tests__/handler.test.ts | 777 ++++++++++++++++++ .../__tests__/release-files.test.ts | 72 -- .../__tests__/release-manifest.test.ts | 42 - app-shell-odd/src/system-update/constants.ts | 11 + .../src/system-update/directories.ts | 15 +- .../from-usb/__tests__/provider.test.ts | 205 +++++ .../from-usb/__tests__/scan-device.test.ts | 59 ++ .../from-usb/__tests__/scan-zip.test.ts | 151 ++++ .../src/system-update/from-usb/index.ts | 2 + .../src/system-update/from-usb/provider.ts | 111 +++ .../src/system-update/from-usb/scan-device.ts | 37 + .../src/system-update/from-usb/scan-zip.ts | 88 ++ .../from-web/__tests__/latest-update.test.ts | 40 + .../from-web/__tests__/provider.test.ts | 774 +++++++++++++++++ .../from-web/__tests__/release-files.test.ts | 514 ++++++++++++ .../__tests__/release-manifest.test.ts | 185 +++++ .../src/system-update/from-web/index.ts | 2 + .../system-update/from-web/latest-update.ts | 28 + .../src/system-update/from-web/provider.ts | 209 +++++ .../system-update/from-web/release-files.ts | 243 ++++++ .../from-web/release-manifest.ts | 101 +++ app-shell-odd/src/system-update/handler.ts | 380 +++++++++ app-shell-odd/src/system-update/index.ts | 394 +-------- .../src/system-update/release-files.ts | 148 ---- .../src/system-update/release-manifest.ts | 29 - app-shell-odd/src/system-update/types.ts | 59 +- app-shell-odd/src/system-update/update.ts | 25 - app-shell-odd/src/system-update/utils.ts | 18 + app-shell-odd/src/system.ts | 22 + app-shell-odd/src/types.ts | 2 + app-shell-odd/src/update.ts | 113 --- app-shell-odd/src/usb.ts | 
120 ++- app-shell/src/config/actions.ts | 2 + app-shell/src/main.ts | 1 - app-shell/src/types.ts | 2 + app/src/redux/config/actions.ts | 2 + app/src/redux/config/types.ts | 2 + app/src/redux/shell/update.ts | 19 +- update-server/oe_upload.py | 2 +- 49 files changed, 4238 insertions(+), 988 deletions(-) delete mode 100644 app-shell-odd/src/__tests__/update.test.ts create mode 100644 app-shell-odd/src/early.ts create mode 100644 app-shell-odd/src/system-update/__tests__/handler.test.ts delete mode 100644 app-shell-odd/src/system-update/__tests__/release-files.test.ts delete mode 100644 app-shell-odd/src/system-update/__tests__/release-manifest.test.ts create mode 100644 app-shell-odd/src/system-update/constants.ts create mode 100644 app-shell-odd/src/system-update/from-usb/__tests__/provider.test.ts create mode 100644 app-shell-odd/src/system-update/from-usb/__tests__/scan-device.test.ts create mode 100644 app-shell-odd/src/system-update/from-usb/__tests__/scan-zip.test.ts create mode 100644 app-shell-odd/src/system-update/from-usb/index.ts create mode 100644 app-shell-odd/src/system-update/from-usb/provider.ts create mode 100644 app-shell-odd/src/system-update/from-usb/scan-device.ts create mode 100644 app-shell-odd/src/system-update/from-usb/scan-zip.ts create mode 100644 app-shell-odd/src/system-update/from-web/__tests__/latest-update.test.ts create mode 100644 app-shell-odd/src/system-update/from-web/__tests__/provider.test.ts create mode 100644 app-shell-odd/src/system-update/from-web/__tests__/release-files.test.ts create mode 100644 app-shell-odd/src/system-update/from-web/__tests__/release-manifest.test.ts create mode 100644 app-shell-odd/src/system-update/from-web/index.ts create mode 100644 app-shell-odd/src/system-update/from-web/latest-update.ts create mode 100644 app-shell-odd/src/system-update/from-web/provider.ts create mode 100644 app-shell-odd/src/system-update/from-web/release-files.ts create mode 100644 app-shell-odd/src/system-update/from-web/release-manifest.ts create mode 100644 app-shell-odd/src/system-update/handler.ts delete mode 100644 app-shell-odd/src/system-update/release-files.ts delete mode 100644 app-shell-odd/src/system-update/release-manifest.ts delete mode 100644 app-shell-odd/src/system-update/update.ts create mode 100644 app-shell-odd/src/system-update/utils.ts create mode 100644 app-shell-odd/src/system.ts delete mode 100644 app-shell-odd/src/update.ts diff --git a/.github/workflows/app-test-build-deploy.yaml b/.github/workflows/app-test-build-deploy.yaml index 8c3bd21503d..d3c03e1f500 100644 --- a/.github/workflows/app-test-build-deploy.yaml +++ b/.github/workflows/app-test-build-deploy.yaml @@ -97,7 +97,11 @@ jobs: strategy: matrix: os: ['windows-2022', 'ubuntu-22.04', 'macos-latest'] - name: 'opentrons app backend unit tests on ${{matrix.os}}' + shell: ['app-shell', 'app-shell-odd', 'discovery-client'] + exclude: + - os: 'windows-2022' + shell: 'app-shell-odd' + name: 'opentrons ${{matrix.shell}} unit tests on ${{matrix.os}}' timeout-minutes: 60 runs-on: ${{ matrix.os }} steps: @@ -144,7 +148,7 @@ jobs: yarn config set cache-folder ${{ github.workspace }}/.yarn-cache make setup-js - name: 'test native(er) packages' - run: make test-js-internal tests="app-shell/src app-shell-odd/src discovery-client/src" cov_opts="--coverage=true" + run: make test-js-internal tests="${{}matrix.shell}/src" cov_opts="--coverage=true" - name: 'Upload coverage report' uses: 'codecov/codecov-action@v3' with: diff --git a/app-shell-odd/src/__tests__/http.test.ts 
b/app-shell-odd/src/__tests__/http.test.ts index 7b2c72578c0..c7ea4443a96 100644 --- a/app-shell-odd/src/__tests__/http.test.ts +++ b/app-shell-odd/src/__tests__/http.test.ts @@ -9,6 +9,7 @@ import type { Request, Response } from 'node-fetch' vi.mock('../config') vi.mock('node-fetch') +vi.mock('../log') describe('app-shell main http module', () => { beforeEach(() => { diff --git a/app-shell-odd/src/__tests__/update.test.ts b/app-shell-odd/src/__tests__/update.test.ts deleted file mode 100644 index 26adb67684b..00000000000 --- a/app-shell-odd/src/__tests__/update.test.ts +++ /dev/null @@ -1,47 +0,0 @@ -// app-shell self-update tests -import { when } from 'vitest-when' -import { describe, it, vi, beforeEach, afterEach, expect } from 'vitest' -import * as http from '../http' -import { registerUpdate, FLEX_MANIFEST_URL } from '../update' -import * as Cfg from '../config' - -import type { Dispatch } from '../types' - -vi.unmock('electron-updater') -vi.mock('electron-updater') -vi.mock('../log') -vi.mock('../config') -vi.mock('../http') -vi.mock('fs-extra') - -describe('update', () => { - let dispatch: Dispatch - let handleAction: Dispatch - - beforeEach(() => { - dispatch = vi.fn() - handleAction = registerUpdate(dispatch) - }) - - afterEach(() => { - vi.resetAllMocks() - }) - - it('handles shell:CHECK_UPDATE with available update', () => { - when(vi.mocked(Cfg.getConfig)) - // @ts-expect-error getConfig mock not recognizing correct type overload - .calledWith('update') - .thenReturn({ - channel: 'latest', - } as any) - - when(vi.mocked(http.fetchJson)) - .calledWith(FLEX_MANIFEST_URL) - .thenResolve({ production: { '5.0.0': {}, '6.0.0': {} } }) - handleAction({ type: 'shell:CHECK_UPDATE', meta: { shell: true } }) - - expect(vi.mocked(Cfg.getConfig)).toHaveBeenCalledWith('update') - - expect(vi.mocked(http.fetchJson)).toHaveBeenCalledWith(FLEX_MANIFEST_URL) - }) -}) diff --git a/app-shell-odd/src/actions.ts b/app-shell-odd/src/actions.ts index 588dc88b3e4..bb7c0450210 100644 --- a/app-shell-odd/src/actions.ts +++ b/app-shell-odd/src/actions.ts @@ -119,6 +119,7 @@ import type { export const configInitialized = (config: Config): ConfigInitializedAction => ({ type: CONFIG_INITIALIZED, payload: { config }, + meta: { shell: true }, }) // config value has been updated @@ -128,6 +129,7 @@ export const configValueUpdated = ( ): ConfigValueUpdatedAction => ({ type: VALUE_UPDATED, payload: { path, value }, + meta: { shell: true }, }) export const customLabwareList = ( diff --git a/app-shell-odd/src/config/index.ts b/app-shell-odd/src/config/index.ts index df8e0cf317d..a67655976d9 100644 --- a/app-shell-odd/src/config/index.ts +++ b/app-shell-odd/src/config/index.ts @@ -5,7 +5,6 @@ import get from 'lodash/get' import forEach from 'lodash/forEach' import mergeOptions from 'merge-options' import yargsParser from 'yargs-parser' - import { UI_INITIALIZED } from '../constants' import * as Cfg from '../constants' import { configInitialized, configValueUpdated } from '../actions' @@ -13,6 +12,7 @@ import systemd from '../systemd' import { createLogger } from '../log' import { DEFAULTS_V12, migrate } from './migrate' import { shouldUpdate, getNextValue } from './update' +import { setUserDataPath } from '../early' import type { ConfigV12, @@ -24,8 +24,6 @@ import type { Config, Overrides } from './types' export * from './types' -export const ODD_DIR = '/data/ODD' - // make sure all arguments are included in production const argv = process.argv0.endsWith('defaultApp') ? 
process.argv.slice(2) @@ -48,8 +46,7 @@ const store = (): Store => { // perform store migration if loading for the first time _store = (new Store({ defaults: DEFAULTS_V12, - // dont overwrite config dir if in dev mode because it causes issues - ...(process.env.NODE_ENV === 'production' && { cwd: ODD_DIR }), + cwd: setUserDataPath(), }) as unknown) as Store _store.store = migrate((_store.store as unknown) as ConfigV12) } @@ -66,7 +63,14 @@ const log = (): Logger => _log ?? (_log = createLogger('config')) export function registerConfig(dispatch: Dispatch): (action: Action) => void { return function handleIncomingAction(action: Action) { if (action.type === UI_INITIALIZED) { + log().info('initializing configuration') dispatch(configInitialized(getFullConfig())) + log().info( + `flow route: ${ + getConfig('onDeviceDisplaySettings').unfinishedUnboxingFlowRoute + }` + ) + log().info('configuration initialized') } else if ( action.type === Cfg.UPDATE_VALUE || action.type === Cfg.RESET_VALUE || @@ -120,8 +124,8 @@ export function getOverrides(path?: string): unknown { return path != null ? get(overrides(), path) : overrides() } -export function getConfig
<P extends keyof Config>
(path: P): Config[P] export function getConfig(): Config +export function getConfig
<P extends keyof Config>
(path: P): Config[P] export function getConfig(path?: any): any { const result = store().get(path) const over = getOverrides(path as string | undefined) diff --git a/app-shell-odd/src/constants.ts b/app-shell-odd/src/constants.ts index a78e9274ae0..8b92e639cf6 100644 --- a/app-shell-odd/src/constants.ts +++ b/app-shell-odd/src/constants.ts @@ -257,3 +257,5 @@ export const FAILURE_STATUSES = { } as const export const SEND_FILE_PATHS: 'shell:SEND_FILE_PATHS' = 'shell:SEND_FILE_PATHS' + +export const ODD_DATA_DIR = '/data/ODD' diff --git a/app-shell-odd/src/early.ts b/app-shell-odd/src/early.ts new file mode 100644 index 00000000000..134c8957804 --- /dev/null +++ b/app-shell-odd/src/early.ts @@ -0,0 +1,22 @@ +// things intended to execute early in app-shell initialization +// do as little as possible in this file and do none of it at import time + +import { app } from 'electron' +import { ODD_DATA_DIR } from './constants' + +let path: string + +export const setUserDataPath = (): string => { + if (path == null) { + console.log( + `node env is ${process.env.NODE_ENV}, path is ${app.getPath('userData')}` + ) + if (process.env.NODE_ENV === 'production') { + console.log(`setting app path to ${ODD_DATA_DIR}`) + app.setPath('userData', ODD_DATA_DIR) + } + path = app.getPath('userData') + console.log(`app path becomes ${app.getPath('userData')}`) + } + return app.getPath('userData') +} diff --git a/app-shell-odd/src/http.ts b/app-shell-odd/src/http.ts index 6392340fbe7..90d01530da8 100644 --- a/app-shell-odd/src/http.ts +++ b/app-shell-odd/src/http.ts @@ -7,10 +7,13 @@ import FormData from 'form-data' import { Transform } from 'stream' import { HTTP_API_VERSION } from './constants' +import { createLogger } from './log' import type { Readable } from 'stream' import type { Request, RequestInit, Response } from 'node-fetch' +const log = createLogger('http') + type RequestInput = Request | string export interface DownloadProgress { @@ -18,6 +21,16 @@ export interface DownloadProgress { size: number | null } +export class LocalAbortError extends Error { + declare readonly name: 'LocalAbortError' + declare readonly type: 'aborted' + constructor(message: string) { + super(message) + this.name = 'LocalAbortError' + this.type = 'aborted' + } +} + export function fetch( input: RequestInput, init?: RequestInit @@ -35,21 +48,29 @@ export function fetch( }) } -export function fetchJson(input: RequestInput): Promise { - return fetch(input).then(response => response.json()) +export function fetchJson( + input: RequestInput, + init?: RequestInit +): Promise { + return fetch(input, init).then(response => response.json()) +} + +export function fetchText(input: Request, init?: RequestInit): Promise { + return fetch(input, init).then(response => response.text()) } -export function fetchText(input: Request): Promise { - return fetch(input).then(response => response.text()) +export interface FetchToFileOptions { + onProgress: (progress: DownloadProgress) => unknown + signal: AbortSignal } // TODO(mc, 2019-07-02): break this function up and test its components export function fetchToFile( input: RequestInput, destination: string, - options?: Partial<{ onProgress: (progress: DownloadProgress) => unknown }> + options?: Partial ): Promise { - return fetch(input).then(response => { + return fetch(input, { signal: options?.signal }).then(response => { let downloaded = 0 const size = Number(response.headers.get('Content-Length')) || null @@ -75,13 +96,26 @@ export function fetchToFile( // pump calls stream.pipe, handles teardown 
if streams error, and calls // its callbacks when the streams are done pump(inputStream, progressReader, outputStream, error => { - // eslint-disable-next-line @typescript-eslint/strict-boolean-expressions - if (error) { + const handleError = (problem: Error): void => { // if we error out, delete the temp dir to clean up - return remove(destination).then(() => { + log.error(`Aborting fetchToFile: ${problem.name}: ${problem.message}`) + remove(destination).then(() => { reject(error) }) } + const listener = (): void => { + handleError( + new LocalAbortError( + (options?.signal?.reason as string | null) ?? 'aborted' + ) + ) + } + options?.signal?.addEventListener('abort', listener, { once: true }) + // eslint-disable-next-line @typescript-eslint/strict-boolean-expressions + if (error) { + handleError(error) + } + options?.signal?.removeEventListener('abort', listener, {}) resolve(destination) }) }) diff --git a/app-shell-odd/src/log.ts b/app-shell-odd/src/log.ts index 0c6a087be3f..100c7f275fb 100644 --- a/app-shell-odd/src/log.ts +++ b/app-shell-odd/src/log.ts @@ -4,13 +4,13 @@ import path from 'path' import dateFormat from 'dateformat' import winston from 'winston' +import { setUserDataPath } from './early' import { getConfig } from './config' import type Transport from 'winston-transport' import type { Config } from './config' -const ODD_DIR = '/data/ODD' -const LOG_DIR = path.join(ODD_DIR, 'logs') +const LOG_DIR = path.join(setUserDataPath(), 'logs') const ERROR_LOG = path.join(LOG_DIR, 'error.log') const COMBINED_LOG = path.join(LOG_DIR, 'combined.log') diff --git a/app-shell-odd/src/main.ts b/app-shell-odd/src/main.ts index d271bb1dc87..b0f285fa194 100644 --- a/app-shell-odd/src/main.ts +++ b/app-shell-odd/src/main.ts @@ -6,11 +6,7 @@ import path from 'path' import { createUi, waitForRobotServerAndShowMainWindow } from './ui' import { createLogger } from './log' import { registerDiscovery } from './discovery' -import { - registerUpdate, - updateLatestVersion, - registerUpdateBrightness, -} from './update' +import { registerUpdateBrightness } from './system' import { registerRobotSystemUpdate } from './system-update' import { registerAppRestart } from './restart' import { @@ -19,7 +15,6 @@ import { getOverrides, registerConfig, resetStore, - ODD_DIR, } from './config' import systemd from './systemd' import { registerDataFiles, watchForMassStorage } from './usb' @@ -28,7 +23,9 @@ import { establishBrokerConnection, closeBrokerConnection, } from './notifications' +import { setUserDataPath } from './early' +import type { OTLogger } from './log' import type { BrowserWindow } from 'electron' import type { Action, Dispatch, Logger } from './types' import type { LogEntry } from 'winston' @@ -39,6 +36,7 @@ import type { LogEntry } from 'winston' * https://github.com/node-fetch/node-fetch/issues/1624 */ dns.setDefaultResultOrder('ipv4first') +setUserDataPath() systemd.sendStatus('starting app') const config = getConfig() @@ -87,12 +85,14 @@ function startUp(): void { log.info('Starting App') console.log('Starting App') const storeNeedsReset = fse.existsSync( - path.join(ODD_DIR, `_CONFIG_TO_BE_DELETED_ON_REBOOT`) + path.join(setUserDataPath(), `_CONFIG_TO_BE_DELETED_ON_REBOOT`) ) if (storeNeedsReset) { log.debug('store marked to be reset, resetting store') resetStore() - fse.removeSync(path.join(ODD_DIR, `_CONFIG_TO_BE_DELETED_ON_REBOOT`)) + fse.removeSync( + path.join(app.getPath('userData'), `_CONFIG_TO_BE_DELETED_ON_REBOOT`) + ) } systemd.sendStatus('loading app') 
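As a side note on the `fetchToFile` change above: it now accepts an optional `AbortSignal` alongside the existing `onProgress` callback, and it removes the partially written file when the transfer fails or is aborted, with `LocalAbortError` covering the abort path. A hypothetical caller might look like the sketch below; the URL, destination path, and timeout are made up for illustration, and the relative import path will differ depending on where the caller lives.

```ts
import { fetchToFile } from './http'

// Hypothetical helper: download a file but give up after 30 seconds.
async function downloadWithTimeout(
  url: string,
  destination: string
): Promise<string | null> {
  const controller = new AbortController()
  const timer = setTimeout(() => controller.abort('timed out'), 30_000)
  try {
    return await fetchToFile(url, destination, {
      signal: controller.signal,
      onProgress: ({ downloaded, size }) => {
        console.log(`downloaded ${downloaded} of ${size ?? '?'} bytes`)
      },
    })
  } catch (error) {
    if (controller.signal.aborted) {
      // cancelled: fetchToFile has already cleaned up the partial download
      return null
    }
    throw error
  } finally {
    clearTimeout(timer)
  }
}
```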
process.on('uncaughtException', error => log.error('Uncaught: ', { error })) @@ -102,11 +102,28 @@ function startUp(): void { // wire modules to UI dispatches const dispatch: Dispatch = action => { - // eslint-disable-next-line @typescript-eslint/strict-boolean-expressions - if (mainWindow) { - log.silly('Sending action via IPC to renderer', { action }) - mainWindow.webContents.send('dispatch', action) - } + // This function now dispatches actions to all the handlers in the app shell. That would make it + // vulnerable to infinite recursion: + // - handler handles action A + // - handler dispatches action A as a response (calls this function) + // - this function calls handler with action A + // By deferring to nextTick(), we would still be executing the code over and over but we should have + // broken the stack. + process.nextTick(() => { + // eslint-disable-next-line @typescript-eslint/strict-boolean-expressions + if (mainWindow) { + log.silly('Sending action via IPC to renderer', { action }) + mainWindow.webContents.send('dispatch', action) + } + log.debug( + `bouncing action ${action.type} to ${actionHandlers.length} handlers` + ) + // Make actions that are sourced from the shell also go to the app shell without needing + // round tripping. This call is the reason for the nextTick() above. + actionHandlers.forEach(handler => { + handler(action) + }) + }) } mainWindow = createUi(dispatch) @@ -114,15 +131,9 @@ function startUp(): void { void establishBrokerConnection() mainWindow.once('closed', () => (mainWindow = null)) - log.info('Fetching latest software version') - updateLatestVersion().catch((error: Error) => { - log.error('Error fetching latest software version: ', { error }) - }) - const actionHandlers: Dispatch[] = [ registerConfig(dispatch), registerDiscovery(dispatch), - registerUpdate(dispatch), registerRobotSystemUpdate(dispatch), registerAppRestart(), registerUpdateBrightness(), @@ -143,8 +154,19 @@ function startUp(): void { log.info('First dispatch, showing') systemd.sendStatus('started') systemd.ready() - const stopWatching = watchForMassStorage(dispatch) - ipcMain.once('quit', stopWatching) + try { + const stopWatching = watchForMassStorage(dispatch) + ipcMain.once('quit', stopWatching) + } catch (err: any) { + if (err instanceof Error) { + console.log( + `Failed to watch for mass storage: ${err.name}: ${err.message}`, + err + ) + } else { + console.log(`Failed to watch for mass storage: ${err}`) + } + } // TODO: This is where we render the main window for the first time. See ui.ts // in the createUI function for more. 
if (!!!mainWindow) { @@ -155,7 +177,7 @@ function startUp(): void { }) } -function createRendererLogger(): Logger { +function createRendererLogger(): OTLogger { log.info('Creating renderer logger') const logger = createLogger('renderer') diff --git a/app-shell-odd/src/system-update/__tests__/handler.test.ts b/app-shell-odd/src/system-update/__tests__/handler.test.ts new file mode 100644 index 00000000000..65769c93729 --- /dev/null +++ b/app-shell-odd/src/system-update/__tests__/handler.test.ts @@ -0,0 +1,777 @@ +// app-shell self-update tests +import { when } from 'vitest-when' +import { rm } from 'fs-extra' +import { describe, it, vi, beforeEach, afterEach, expect } from 'vitest' +import tempy from 'tempy' + +import * as Cfg from '../../config' +import { CONFIG_INITIALIZED, VALUE_UPDATED } from '../../constants' +import { + manageDriver, + createUpdateDriver, + CURRENT_SYSTEM_VERSION, +} from '../handler' +import { FLEX_MANIFEST_URL } from '../constants' +import { getSystemUpdateDir as _getSystemUpdateDir } from '../directories' +import { getProvider as _getWebProvider } from '../from-web' +import { getProvider as _getUsbProvider } from '../from-usb' + +import type { UpdateProvider } from '../types' +import type { UpdateDriver } from '../handler' +import type { WebUpdateSource } from '../from-web' +import type { USBUpdateSource } from '../from-usb' +import type { Dispatch } from '../../types' + +import type { + ConfigInitializedAction, + ConfigValueUpdatedAction, +} from '@opentrons/app/src/redux/config' + +vi.unmock('electron-updater') // ? +vi.mock('electron-updater') +vi.mock('../../log') +vi.mock('../../config') +vi.mock('../../http') +vi.mock('../directories') +vi.mock('../from-web') +vi.mock('../from-usb') + +const getSystemUpdateDir = vi.mocked(_getSystemUpdateDir) +const getConfig = vi.mocked(Cfg.getConfig) +const getWebProvider = vi.mocked(_getWebProvider) +const getUsbProvider = vi.mocked(_getUsbProvider) + +describe('update driver manager', () => { + let dispatch: Dispatch + let testDir: string = '' + beforeEach(() => { + const thisTd = tempy.directory() + testDir = thisTd + dispatch = vi.fn() + when(getSystemUpdateDir).calledWith().thenReturn(thisTd) + }) + + afterEach(() => { + vi.resetAllMocks() + const oldTd = testDir + testDir = '' + return oldTd === '' + ? 
new Promise(resolve => resolve()) + : rm(oldTd, { recursive: true, force: true }) + }) + + it('creates a driver once config is loaded', () => { + when(getConfig) + .calledWith('update') + .thenReturn(({ channel: 'alpha' } as any) as Cfg.Config['update']) + const driver = manageDriver(dispatch) + expect(driver.getUpdateDriver()).toBeNull() + expect(getConfig).not.toHaveBeenCalled() + return driver + .handleAction({ + type: CONFIG_INITIALIZED, + } as ConfigInitializedAction) + .then(() => { + expect(driver.getUpdateDriver()).not.toBeNull() + expect(getConfig).toHaveBeenCalledOnce() + expect(getWebProvider).toHaveBeenCalledWith({ + manifestUrl: FLEX_MANIFEST_URL, + channel: 'alpha', + updateCacheDirectory: testDir, + currentVersion: CURRENT_SYSTEM_VERSION, + }) + }) + }) + + it('reloads the web driver when appropriate', () => { + when(getConfig) + .calledWith('update') + .thenReturn(({ channel: 'alpha' } as any) as Cfg.Config['update']) + const fakeProvider = { + teardown: vi.fn(), + refreshUpdateCache: vi.fn(), + getUpdateDetails: vi.fn(), + lockUpdateCache: vi.fn(), + unlockUpdateCache: vi.fn(), + name: vi.fn(), + source: () => (({ channel: 'alpha' } as any) as WebUpdateSource), + } + const fakeProvider2 = { + ...fakeProvider, + source: () => (({ channel: 'beta' } as any) as WebUpdateSource), + } + when(getWebProvider) + .calledWith({ + manifestUrl: FLEX_MANIFEST_URL, + channel: 'alpha', + updateCacheDirectory: testDir, + currentVersion: CURRENT_SYSTEM_VERSION, + }) + .thenReturn(fakeProvider) + when(getWebProvider) + .calledWith({ + manifestUrl: FLEX_MANIFEST_URL, + channel: 'beta', + updateCacheDirectory: testDir, + currentVersion: CURRENT_SYSTEM_VERSION, + }) + .thenReturn(fakeProvider2) + const driverManager = manageDriver(dispatch) + return driverManager + .handleAction({ + type: CONFIG_INITIALIZED, + } as ConfigInitializedAction) + .then(() => { + expect(getWebProvider).toHaveBeenCalledWith({ + manifestUrl: FLEX_MANIFEST_URL, + channel: 'alpha', + updateCacheDirectory: testDir, + currentVersion: CURRENT_SYSTEM_VERSION, + }) + expect(driverManager.getUpdateDriver()).not.toBeNull() + when(fakeProvider.teardown).calledWith().thenResolve() + return driverManager.handleAction({ + type: VALUE_UPDATED, + } as ConfigValueUpdatedAction) + }) + .then(() => { + expect(getWebProvider).toHaveBeenCalledOnce() + when(getConfig) + .calledWith('update') + .thenReturn(({ + channel: 'beta', + } as any) as Cfg.Config['update']) + return driverManager.handleAction({ + type: VALUE_UPDATED, + } as ConfigValueUpdatedAction) + }) + .then(() => { + expect(getWebProvider).toHaveBeenCalledWith({ + manifestUrl: FLEX_MANIFEST_URL, + channel: 'alpha', + updateCacheDirectory: testDir, + currentVersion: CURRENT_SYSTEM_VERSION, + }) + }) + }) +}) + +describe('update driver', () => { + let dispatch: Dispatch + let testDir: string = '' + let subject: UpdateDriver | null = null + const fakeProvider: UpdateProvider = { + teardown: vi.fn(), + refreshUpdateCache: vi.fn(), + getUpdateDetails: vi.fn(), + lockUpdateCache: vi.fn(), + unlockUpdateCache: vi.fn(), + name: vi.fn(), + source: () => (({ channel: 'alpha' } as any) as WebUpdateSource), + } + const fakeUsbProviders: Record> = { + first: { + teardown: vi.fn(), + refreshUpdateCache: vi.fn(), + getUpdateDetails: vi.fn(), + lockUpdateCache: vi.fn(), + unlockUpdateCache: vi.fn(), + name: () => '/some/usb/path', + source: () => + (({ + massStorageRootPath: '/some/usb/path', + } as any) as USBUpdateSource), + }, + } + + beforeEach(() => { + const thisTd = tempy.directory() + 
testDir = thisTd + dispatch = vi.fn() + when(getSystemUpdateDir).calledWith().thenReturn(thisTd) + when(getConfig) + .calledWith('update') + .thenReturn(({ channel: 'alpha' } as any) as Cfg.Config['update']) + when(getWebProvider) + .calledWith({ + manifestUrl: FLEX_MANIFEST_URL, + channel: 'alpha', + updateCacheDirectory: testDir, + currentVersion: CURRENT_SYSTEM_VERSION, + }) + .thenReturn(fakeProvider) + fakeUsbProviders.first = { + teardown: vi.fn(), + refreshUpdateCache: vi.fn(), + getUpdateDetails: vi.fn(), + lockUpdateCache: vi.fn(), + unlockUpdateCache: vi.fn(), + name: () => '/some/usb/path', + source: () => + (({ + massStorageRootPath: '/some/usb/path', + } as any) as USBUpdateSource), + } + fakeUsbProviders.second = { + teardown: vi.fn(), + refreshUpdateCache: vi.fn(), + getUpdateDetails: vi.fn(), + lockUpdateCache: vi.fn(), + unlockUpdateCache: vi.fn(), + name: () => '/some/other/usb/path', + source: () => + (({ + massStorageRootPath: '/some/other/usb/path', + } as any) as USBUpdateSource), + } + subject = createUpdateDriver(dispatch) + }) + + afterEach(() => { + vi.resetAllMocks() + const oldTd = testDir + testDir = '' + return ( + subject?.teardown() || new Promise(resolve => resolve()) + ).then(() => + oldTd === '' + ? new Promise(resolve => resolve()) + : rm(oldTd, { recursive: true, force: true }) + ) + }) + + it('checks updates when told to check updates', () => { + const thisSubject = subject as UpdateDriver + when(fakeProvider.refreshUpdateCache) + .calledWith(expect.any(Function)) + .thenDo( + progress => + new Promise(resolve => { + progress({ + version: null, + files: null, + downloadProgress: 0, + releaseNotes: null, + }) + resolve({ + version: null, + files: null, + downloadProgress: 0, + releaseNotes: null, + }) + }) + ) + return thisSubject + .handleAction({ type: 'shell:CHECK_UPDATE', meta: { shell: true } }) + .then(() => { + expect(dispatch).toHaveBeenCalledWith({ + type: 'robotUpdate:UPDATE_INFO', + payload: { + version: null, + releaseNotes: null, + force: false, + target: 'flex', + }, + }) + expect(dispatch).toHaveBeenCalledWith({ + type: 'robotUpdate:UPDATE_VERSION', + payload: { version: null, force: false, target: 'flex' }, + }) + }) + }) + it('forwards in-progress downloads when no USB updates are present', () => { + const thisSubject = subject as UpdateDriver + when(fakeProvider.refreshUpdateCache) + .calledWith(expect.any(Function)) + .thenDo( + progress => + new Promise(resolve => { + progress({ + version: null, + files: null, + downloadProgress: 0, + releaseNotes: null, + }) + progress({ + version: '1.2.3', + files: null, + downloadProgress: 0, + releaseNotes: null, + }) + progress({ + version: '1.2.3', + files: null, + downloadProgress: 50, + releaseNotes: null, + }) + progress({ + version: '1.2.3', + files: { + system: '/some/path', + releaseNotes: '/some/other/path', + }, + downloadProgress: 100, + releaseNotes: 'some release notes', + }) + resolve({ + version: '1.2.3', + files: { + system: '/some/path', + releaseNotes: '/some/other/path', + }, + downloadProgress: 100, + releaseNotes: 'some release notes', + }) + }) + ) + return thisSubject + .handleAction({ type: 'shell:CHECK_UPDATE', meta: { shell: true } }) + .then(() => { + expect(dispatch).toHaveBeenNthCalledWith(1, { + type: 'robotUpdate:UPDATE_VERSION', + payload: { version: '1.2.3', force: false, target: 'flex' }, + }) + expect(dispatch).toHaveBeenNthCalledWith(2, { + type: 'robotUpdate:DOWNLOAD_PROGRESS', + payload: { progress: 50, target: 'flex' }, + }) + 
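For orientation while reading these assertions: every object the tests push through the provider's progress callback shares one shape, reconstructed below from the fixtures (the real declarations live in system-update/types and may name or refine these differently):

// Partially resolved while a download is in flight (files still null),
// fully resolved once the system file and release notes are on disk.
interface UpdateReport {
  version: string | null
  files: { system: string; releaseNotes: string | null } | null
  releaseNotes: string | null
  downloadProgress: number
}

type ReportCallback = (report: UpdateReport) => void

// e.g. the mid-download report fed to the progress callback above:
const midDownload: UpdateReport = {
  version: '1.2.3',
  files: null,
  releaseNotes: null,
  downloadProgress: 50,
}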
expect(dispatch).toHaveBeenNthCalledWith(3, { + type: 'robotUpdate:UPDATE_INFO', + payload: { + version: '1.2.3', + releaseNotes: 'some release notes', + force: false, + target: 'flex', + }, + }) + expect(dispatch).toHaveBeenNthCalledWith(4, { + type: 'robotUpdate:UPDATE_VERSION', + payload: { version: '1.2.3', force: false, target: 'flex' }, + }) + expect(dispatch).toHaveBeenNthCalledWith(5, { + type: 'robotUpdate:UPDATE_INFO', + payload: { + version: '1.2.3', + releaseNotes: 'some release notes', + force: false, + target: 'flex', + }, + }) + expect(dispatch).toHaveBeenNthCalledWith(6, { + type: 'robotUpdate:UPDATE_VERSION', + payload: { version: '1.2.3', force: false, target: 'flex' }, + }) + }) + }) + it('creates a usb provider when it gets a message that a usb device was added', () => { + const thisSubject = subject as UpdateDriver + when(getUsbProvider) + .calledWith({ + currentVersion: CURRENT_SYSTEM_VERSION, + massStorageDeviceRoot: '/some/usb/path', + massStorageDeviceFiles: ['/some/file', '/some/other/file'], + }) + .thenReturn(fakeUsbProviders.first) + when(fakeUsbProviders.first.refreshUpdateCache) + .calledWith(expect.any(Function)) + .thenResolve({ + version: '1.2.3', + files: { system: '/some/file', releaseNotes: null }, + releaseNotes: 'some fake notes', + downloadProgress: 100, + }) + return thisSubject + .handleAction({ + type: 'shell:ROBOT_MASS_STORAGE_DEVICE_ENUMERATED', + payload: { + rootPath: '/some/usb/path', + filePaths: ['/some/file', '/some/other/file'], + }, + meta: { shell: true }, + }) + .then(() => { + expect(getUsbProvider).toHaveBeenCalledWith({ + currentVersion: CURRENT_SYSTEM_VERSION, + massStorageDeviceRoot: '/some/usb/path', + massStorageDeviceFiles: ['/some/file', '/some/other/file'], + }) + }) + }) + it('does not create a usb provider if it already has one for a path', () => { + const thisSubject = subject as UpdateDriver + when(getUsbProvider) + .calledWith({ + currentVersion: CURRENT_SYSTEM_VERSION, + massStorageDeviceRoot: '/some/usb/path', + massStorageDeviceFiles: ['/some/file', '/some/other/file'], + }) + .thenReturn(fakeUsbProviders.first) + when(fakeUsbProviders.first.refreshUpdateCache) + .calledWith(expect.any(Function)) + .thenResolve({ + version: '0.1.2', + files: { system: '/some/file', releaseNotes: null }, + releaseNotes: 'some fake notes', + downloadProgress: 100, + }) + when(fakeUsbProviders.first.getUpdateDetails) + .calledWith() + .thenReturn({ + version: '0.1.2', + files: { system: '/some/file', releaseNotes: null }, + releaseNotes: 'some fake notes', + downloadProgress: 100, + }) + return thisSubject + .handleAction({ + type: 'shell:ROBOT_MASS_STORAGE_DEVICE_ENUMERATED', + payload: { + rootPath: '/some/usb/path', + filePaths: ['/some/file', '/some/other/file'], + }, + meta: { shell: true }, + }) + .then(() => { + expect(getUsbProvider).toHaveBeenCalledWith({ + currentVersion: CURRENT_SYSTEM_VERSION, + massStorageDeviceRoot: '/some/usb/path', + massStorageDeviceFiles: ['/some/file', '/some/other/file'], + }) + return thisSubject.handleAction({ + type: 'shell:ROBOT_MASS_STORAGE_DEVICE_ENUMERATED', + payload: { + rootPath: '/some/usb/path', + filePaths: ['/some/file', '/some/other/file'], + }, + meta: { shell: true }, + }) + }) + .then(() => { + expect(getUsbProvider).toHaveBeenCalledOnce() + expect(dispatch).toHaveBeenCalledWith({ + type: 'robotUpdate:UPDATE_INFO', + payload: { + releaseNotes: 'some fake notes', + version: '0.1.2', + force: true, + target: 'flex', + }, + }) + expect(dispatch).toHaveBeenCalledWith({ + type: 
'robotUpdate:UPDATE_VERSION', + payload: { + version: '0.1.2', + force: true, + target: 'flex', + }, + }) + }) + .then(() => { + vi.mocked(dispatch).mockReset() + return thisSubject.handleAction({ + type: 'robotUpdate:READ_SYSTEM_FILE', + payload: { target: 'flex' }, + meta: { shell: true }, + }) + }) + .then(() => { + expect(dispatch).toHaveBeenCalledWith({ + type: 'robotUpdate:FILE_INFO', + payload: { + systemFile: '/some/file', + version: '0.1.2', + isManualFile: false, + }, + }) + }) + }) + it('tears down a usb provider when it is removed', () => { + const thisSubject = subject as UpdateDriver + when(getUsbProvider) + .calledWith({ + currentVersion: CURRENT_SYSTEM_VERSION, + massStorageDeviceRoot: '/some/usb/path', + massStorageDeviceFiles: ['/some/file', '/some/other/file'], + }) + .thenReturn(fakeUsbProviders.first) + when(fakeUsbProviders.first.refreshUpdateCache) + .calledWith(expect.any(Function)) + .thenResolve({ + version: '1.2.3', + files: { system: '/some/file', releaseNotes: null }, + releaseNotes: 'some fake notes', + downloadProgress: 100, + }) + return thisSubject + .handleAction({ + type: 'shell:ROBOT_MASS_STORAGE_DEVICE_ENUMERATED', + payload: { + rootPath: '/some/usb/path', + filePaths: ['/some/file', '/some/other/file'], + }, + meta: { shell: true }, + }) + .then(() => { + expect(getUsbProvider).toHaveBeenCalledWith({ + currentVersion: CURRENT_SYSTEM_VERSION, + massStorageDeviceRoot: '/some/usb/path', + massStorageDeviceFiles: ['/some/file', '/some/other/file'], + }) + when(fakeUsbProviders.first.teardown).calledWith().thenResolve() + return thisSubject.handleAction({ + type: 'shell:ROBOT_MASS_STORAGE_DEVICE_REMOVED', + payload: { rootPath: '/some/usb/path' }, + meta: { shell: true }, + }) + }) + .then(() => { + expect(fakeUsbProviders.first.teardown).toHaveBeenCalledOnce() + }) + }) + it('re-adds a usb provider if it is inserted after being removed', () => { + const thisSubject = subject as UpdateDriver + when(getUsbProvider) + .calledWith({ + currentVersion: CURRENT_SYSTEM_VERSION, + massStorageDeviceRoot: '/some/usb/path', + massStorageDeviceFiles: ['/some/file', '/some/other/file'], + }) + .thenReturn(fakeUsbProviders.first) + when(fakeUsbProviders.first.refreshUpdateCache) + .calledWith(expect.any(Function)) + .thenResolve({ + version: '1.2.3', + files: { system: '/some/file', releaseNotes: null }, + releaseNotes: 'some fake notes', + downloadProgress: 100, + }) + return thisSubject + .handleAction({ + type: 'shell:ROBOT_MASS_STORAGE_DEVICE_ENUMERATED', + payload: { + rootPath: '/some/usb/path', + filePaths: ['/some/file', '/some/other/file'], + }, + meta: { shell: true }, + }) + .then(() => { + expect(getUsbProvider).toHaveBeenCalledWith({ + currentVersion: CURRENT_SYSTEM_VERSION, + massStorageDeviceRoot: '/some/usb/path', + massStorageDeviceFiles: ['/some/file', '/some/other/file'], + }) + when(fakeUsbProviders.first.teardown).calledWith().thenResolve() + return thisSubject.handleAction({ + type: 'shell:ROBOT_MASS_STORAGE_DEVICE_REMOVED', + payload: { rootPath: '/some/usb/path' }, + meta: { shell: true }, + }) + }) + .then(() => { + expect(fakeUsbProviders.first.teardown).toHaveBeenCalledOnce() + return thisSubject.handleAction({ + type: 'shell:ROBOT_MASS_STORAGE_DEVICE_ENUMERATED', + payload: { + rootPath: '/some/usb/path', + filePaths: ['/some/file', '/some/other/file'], + }, + meta: { shell: true }, + }) + }) + .then(() => { + expect(getUsbProvider).toHaveBeenCalledTimes(2) + }) + }) + it('prefers usb updates to web updates', () => { + const thisSubject = 
subject as UpdateDriver + when(getUsbProvider) + .calledWith({ + currentVersion: CURRENT_SYSTEM_VERSION, + massStorageDeviceRoot: '/some/usb/path', + massStorageDeviceFiles: ['/some/file', '/some/other/file'], + }) + .thenReturn(fakeUsbProviders.first) + when(fakeUsbProviders.first.getUpdateDetails) + .calledWith() + .thenReturn({ + version: '0.1.2', + files: { system: '/some/file', releaseNotes: null }, + releaseNotes: 'some fake notes', + downloadProgress: 100, + }) + when(fakeUsbProviders.first.refreshUpdateCache) + .calledWith(expect.any(Function)) + .thenResolve({ + version: '0.1.2', + files: { system: '/some/file', releaseNotes: null }, + releaseNotes: 'some fake notes', + downloadProgress: 100, + }) + when(fakeProvider.refreshUpdateCache) + .calledWith(expect.any(Function)) + .thenResolve({ + version: '1.2.3', + files: { + system: '/some/file/from/the/web', + releaseNotes: null, + }, + releaseNotes: 'some other notes', + downloadProgress: 100, + }) + return thisSubject + .handleAction({ + type: 'shell:ROBOT_MASS_STORAGE_DEVICE_ENUMERATED', + payload: { + rootPath: '/some/usb/path', + filePaths: ['/some/file', '/some/other/file'], + }, + meta: { shell: true }, + }) + .then(() => + thisSubject.handleAction({ + type: 'shell:CHECK_UPDATE', + meta: { shell: true }, + }) + ) + .then(() => { + expect(dispatch).toHaveBeenLastCalledWith({ + type: 'robotUpdate:UPDATE_VERSION', + payload: { version: '0.1.2', force: true, target: 'flex' }, + }) + }) + .then(() => { + vi.mocked(dispatch).mockReset() + return thisSubject.handleAction({ + type: 'robotUpdate:READ_SYSTEM_FILE', + payload: { target: 'flex' }, + meta: { shell: true }, + }) + }) + .then(() => { + expect(dispatch).toHaveBeenCalledWith({ + type: 'robotUpdate:FILE_INFO', + payload: { + systemFile: '/some/file', + version: '0.1.2', + isManualFile: false, + }, + }) + }) + }) + it('selects the highest version usb update', () => { + const thisSubject = subject as UpdateDriver + when(getUsbProvider) + .calledWith({ + currentVersion: CURRENT_SYSTEM_VERSION, + massStorageDeviceRoot: '/some/usb/path', + massStorageDeviceFiles: ['/some/file', '/some/other/file'], + }) + .thenReturn(fakeUsbProviders.first) + when(getUsbProvider) + .calledWith({ + currentVersion: CURRENT_SYSTEM_VERSION, + massStorageDeviceRoot: '/some/other/usb/path', + massStorageDeviceFiles: ['/some/third/file', '/some/fourth/file'], + }) + .thenReturn(fakeUsbProviders.second) + when(fakeUsbProviders.first.refreshUpdateCache) + .calledWith(expect.any(Function)) + .thenResolve({ + version: '1.2.3', + files: { system: '/some/file', releaseNotes: null }, + releaseNotes: 'some fake notes', + downloadProgress: 100, + }) + when(fakeUsbProviders.second.refreshUpdateCache) + .calledWith(expect.any(Function)) + .thenResolve({ + version: '0.1.2', + files: { system: '/some/other/file', releaseNotes: null }, + releaseNotes: 'some other fake notes', + downloadProgress: 100, + }) + when(fakeUsbProviders.first.getUpdateDetails) + .calledWith() + .thenReturn({ + version: '1.2.3', + files: { system: '/some/file', releaseNotes: null }, + releaseNotes: 'some fake notes', + downloadProgress: 100, + }) + when(fakeUsbProviders.second.getUpdateDetails) + .calledWith() + .thenReturn({ + version: '0.1.2', + files: { system: '/some/other/filefile', releaseNotes: null }, + releaseNotes: 'some other fake notes', + downloadProgress: 100, + }) + return thisSubject + .handleAction({ + type: 'shell:ROBOT_MASS_STORAGE_DEVICE_ENUMERATED', + payload: { + rootPath: '/some/usb/path', + filePaths: ['/some/file', 
'/some/other/file'], + }, + meta: { shell: true }, + }) + .then(() => { + expect(getUsbProvider).toHaveBeenCalledWith({ + currentVersion: CURRENT_SYSTEM_VERSION, + massStorageDeviceRoot: '/some/usb/path', + massStorageDeviceFiles: ['/some/file', '/some/other/file'], + }) + vi.mocked(dispatch).mockReset() + return thisSubject.handleAction({ + type: 'shell:ROBOT_MASS_STORAGE_DEVICE_ENUMERATED', + payload: { + rootPath: '/some/other/usb/path', + filePaths: ['/some/third/file', '/some/fourth/file'], + }, + meta: { shell: true }, + }) + }) + .then(() => { + expect(getUsbProvider).toHaveBeenCalledWith({ + currentVersion: CURRENT_SYSTEM_VERSION, + massStorageDeviceRoot: '/some/usb/path', + massStorageDeviceFiles: ['/some/file', '/some/other/file'], + }) + expect(dispatch).toHaveBeenNthCalledWith(1, { + type: 'robotUpdate:UPDATE_INFO', + payload: { + releaseNotes: 'some fake notes', + version: '1.2.3', + force: true, + target: 'flex', + }, + }) + expect(dispatch).toHaveBeenNthCalledWith(2, { + type: 'robotUpdate:UPDATE_VERSION', + payload: { + version: '1.2.3', + force: true, + target: 'flex', + }, + }) + }) + .then(() => { + vi.mocked(dispatch).mockReset() + return thisSubject.handleAction({ + type: 'robotUpdate:READ_SYSTEM_FILE', + payload: { target: 'flex' }, + meta: { shell: true }, + }) + }) + .then(() => { + expect(dispatch).toHaveBeenCalledWith({ + type: 'robotUpdate:FILE_INFO', + payload: { + systemFile: '/some/file', + version: '1.2.3', + isManualFile: false, + }, + }) + }) + }) +}) diff --git a/app-shell-odd/src/system-update/__tests__/release-files.test.ts b/app-shell-odd/src/system-update/__tests__/release-files.test.ts deleted file mode 100644 index bd2a421b910..00000000000 --- a/app-shell-odd/src/system-update/__tests__/release-files.test.ts +++ /dev/null @@ -1,72 +0,0 @@ -// TODO(mc, 2020-06-11): test all release-files functions -import { vi, describe, it, expect, afterAll } from 'vitest' -import path from 'path' -import { promises as fs } from 'fs' -import fse from 'fs-extra' -import tempy from 'tempy' - -import { cleanupReleaseFiles } from '../release-files' -vi.mock('electron-store') -vi.mock('../../log') - -describe('system release files utilities', () => { - const tempDirs: string[] = [] - const makeEmptyDir = (): string => { - const dir: string = tempy.directory() - tempDirs.push(dir) - return dir - } - - afterAll(async () => { - await Promise.all(tempDirs.map(d => fse.remove(d))) - }) - - describe('cleanupReleaseFiles', () => { - it('should leave current version files alone', () => { - const dir = makeEmptyDir() - const releaseDir = path.join(dir, '4.0.0') - - return fs - .mkdir(releaseDir) - .then(() => cleanupReleaseFiles(dir, '4.0.0')) - .then(() => fs.readdir(dir)) - .then(files => { - expect(files).toEqual(['4.0.0']) - }) - }) - - it('should leave support files alone', () => { - const dir = makeEmptyDir() - const releaseDir = path.join(dir, '4.0.0') - const releaseManifest = path.join(dir, 'releases.json') - - return Promise.all([ - fs.mkdir(releaseDir), - fse.writeJson(releaseManifest, { hello: 'world' }), - ]) - .then(() => cleanupReleaseFiles(dir, '4.0.0')) - .then(() => fs.readdir(dir)) - .then(files => { - expect(files).toEqual(['4.0.0', 'releases.json']) - }) - }) - - it('should delete other directories', () => { - const dir = makeEmptyDir() - const releaseDir = path.join(dir, '4.0.0') - const oldReleaseDir = path.join(dir, '3.9.0') - const olderReleaseDir = path.join(dir, '3.8.0') - - return Promise.all([ - fs.mkdir(releaseDir), - fs.mkdir(oldReleaseDir), - 
fs.mkdir(olderReleaseDir), - ]) - .then(() => cleanupReleaseFiles(dir, '4.0.0')) - .then(() => fs.readdir(dir)) - .then(files => { - expect(files).toEqual(['4.0.0']) - }) - }) - }) -}) diff --git a/app-shell-odd/src/system-update/__tests__/release-manifest.test.ts b/app-shell-odd/src/system-update/__tests__/release-manifest.test.ts deleted file mode 100644 index 89091d2731c..00000000000 --- a/app-shell-odd/src/system-update/__tests__/release-manifest.test.ts +++ /dev/null @@ -1,42 +0,0 @@ -import { describe, it, vi, beforeEach, afterEach, expect } from 'vitest' -import * as Http from '../../http' -import * as Dirs from '../directories' -import { downloadAndCacheReleaseManifest } from '../release-manifest' - -vi.mock('../../http') -vi.mock('../directories') -vi.mock('../../log') -vi.mock('electron-store') -const fetchJson = Http.fetchJson -const getManifestCacheDir = Dirs.getManifestCacheDir - -const MOCK_DIR = 'mock_dir' -const MANIFEST_URL = 'http://example.com/releases.json' -const MOCK_MANIFEST = {} as any - -describe('release manifest utilities', () => { - beforeEach(() => { - vi.mocked(getManifestCacheDir).mockReturnValue(MOCK_DIR) - vi.mocked(fetchJson).mockResolvedValue(MOCK_MANIFEST) - }) - - afterEach(() => { - vi.resetAllMocks() - }) - - it('should download and save the manifest from a url', async () => { - await expect( - downloadAndCacheReleaseManifest(MANIFEST_URL) - ).resolves.toEqual(MOCK_MANIFEST) - expect(fetchJson).toHaveBeenCalledWith(MANIFEST_URL) - }) - - it('should pull the manifest from the file if the manifest download fails', async () => { - const error = new Error('Failed to download') - vi.mocked(fetchJson).mockRejectedValue(error) - await expect( - downloadAndCacheReleaseManifest(MANIFEST_URL) - ).resolves.toEqual(MOCK_MANIFEST) - expect(fetchJson).toHaveBeenCalledWith(MANIFEST_URL) - }) -}) diff --git a/app-shell-odd/src/system-update/constants.ts b/app-shell-odd/src/system-update/constants.ts new file mode 100644 index 00000000000..575b64230b5 --- /dev/null +++ b/app-shell-odd/src/system-update/constants.ts @@ -0,0 +1,11 @@ +const OPENTRONS_PROJECT: string = _OPENTRONS_PROJECT_ + +export const FLEX_MANIFEST_URL = + OPENTRONS_PROJECT && OPENTRONS_PROJECT.includes('robot-stack') + ? 
'https://builds.opentrons.com/ot3-oe/releases.json' + : 'https://ot3-development.builds.opentrons.com/ot3-oe/releases.json' + +export const SYSTEM_UPDATE_DIRECTORY = '__ot_system_update__' +export const VERSION_FILENAME = 'VERSION.json' +export const REASONABLE_VERSION_FILE_SIZE_B = 4096 +export const SYSTEM_FILENAME = 'system-update.zip' diff --git a/app-shell-odd/src/system-update/directories.ts b/app-shell-odd/src/system-update/directories.ts index c2723153505..757f47bc44a 100644 --- a/app-shell-odd/src/system-update/directories.ts +++ b/app-shell-odd/src/system-update/directories.ts @@ -1,15 +1,6 @@ import { app } from 'electron' import path from 'path' +import { SYSTEM_UPDATE_DIRECTORY } from './constants' -const SYSTEM_UPDATE_DIRECTORY = path.join( - app.getPath('sessionData'), - '__ot_system_update__' -) - -export const getSystemUpdateDir = (): string => SYSTEM_UPDATE_DIRECTORY - -export const getFileDownloadDir = (version: string): string => - path.join(SYSTEM_UPDATE_DIRECTORY, version) - -export const getManifestCacheDir = (): string => - path.join(SYSTEM_UPDATE_DIRECTORY, 'releases.json') +export const getSystemUpdateDir = (): string => + path.join(app.getPath('userData'), SYSTEM_UPDATE_DIRECTORY) diff --git a/app-shell-odd/src/system-update/from-usb/__tests__/provider.test.ts b/app-shell-odd/src/system-update/from-usb/__tests__/provider.test.ts new file mode 100644 index 00000000000..cbdf79435dc --- /dev/null +++ b/app-shell-odd/src/system-update/from-usb/__tests__/provider.test.ts @@ -0,0 +1,205 @@ +import { it, describe, vi, afterEach, expect } from 'vitest' +import { when } from 'vitest-when' +import { getProvider } from '../provider' +import { getLatestMassStorageUpdateFile as _getLatestMassStorageUpdateFile } from '../scan-device' + +vi.mock('../scan-device') +vi.mock('../../../log') + +const getLatestMassStorageUpdateFile = vi.mocked( + _getLatestMassStorageUpdateFile +) + +describe('system-update/from-usb/provider', () => { + afterEach(() => { + vi.resetAllMocks() + }) + it('signals available updates when given available updates', () => { + when(getLatestMassStorageUpdateFile) + .calledWith(['/storage/valid-release.zip']) + .thenResolve({ path: '/storage/valid-release.zip', version: '1.2.3' }) + const progress = vi.fn() + const provider = getProvider({ + currentVersion: '1.0.0', + massStorageDeviceRoot: '/storage', + massStorageDeviceFiles: ['/storage/valid-release.zip'], + }) + const expectedUpdate = { + version: '1.2.3', + files: { + system: '/storage/valid-release.zip', + releaseNotes: expect.any(String), + }, + releaseNotes: expect.any(String), + downloadProgress: 100, + } + return expect(provider.refreshUpdateCache(progress)) + .resolves.toEqual(expectedUpdate) + .then(() => { + expect(progress).toHaveBeenLastCalledWith(expectedUpdate) + }) + }) + it('signals no available update when given no available updates', () => { + when(getLatestMassStorageUpdateFile) + .calledWith(['/storage/blahblah']) + .thenResolve(null) + const progress = vi.fn() + const provider = getProvider({ + currentVersion: '1.0.0', + massStorageDeviceRoot: '/storage', + massStorageDeviceFiles: ['/storage/blahblah'], + }) + const expectedUpdate = { + version: null, + files: null, + releaseNotes: null, + downloadProgress: 0, + } + return expect(provider.refreshUpdateCache(progress)) + .resolves.toEqual(expectedUpdate) + .then(() => { + expect(progress).toHaveBeenLastCalledWith(expectedUpdate) + }) + }) + it('signals no available update when the scan throws', () => { + 
when(getLatestMassStorageUpdateFile) + .calledWith(['/storage/blahblah']) + .thenReject(new Error('oh no')) + const progress = vi.fn() + const provider = getProvider({ + currentVersion: '1.0.0', + massStorageDeviceRoot: '/storage', + massStorageDeviceFiles: ['/storage/blahblah'], + }) + const expectedUpdate = { + version: null, + files: null, + releaseNotes: null, + downloadProgress: 0, + } + return expect(provider.refreshUpdateCache(progress)) + .resolves.toEqual(expectedUpdate) + .then(() => { + expect(progress).toHaveBeenLastCalledWith(expectedUpdate) + }) + }) + it('signals no available update when the highest version update is the same version as current', () => { + when(getLatestMassStorageUpdateFile) + .calledWith(['/storage/valid-release.zip']) + .thenResolve({ path: '/storage/valid-release.zip', version: '1.0.0' }) + const progress = vi.fn() + const provider = getProvider({ + currentVersion: '1.0.0', + massStorageDeviceRoot: '/storage', + massStorageDeviceFiles: ['/storage/valid-release.zip'], + }) + const expectedUpdate = { + version: null, + files: null, + releaseNotes: null, + downloadProgress: 0, + } + return expect(provider.refreshUpdateCache(progress)) + .resolves.toEqual(expectedUpdate) + .then(() => { + expect(progress).toHaveBeenLastCalledWith(expectedUpdate) + }) + }) + it('throws when torn down before scanning', () => { + const provider = getProvider({ + currentVersion: '1.0.0', + massStorageDeviceRoot: '/', + massStorageDeviceFiles: [], + }) + const progress = vi.fn() + return provider + .teardown() + .then(() => + expect(provider.refreshUpdateCache(progress)).rejects.toThrow() + ) + .then(() => + expect(progress).toHaveBeenLastCalledWith({ + version: null, + files: null, + releaseNotes: null, + downloadProgress: 0, + }) + ) + }) + it('throws when torn down right after scanning', () => { + const provider = getProvider({ + currentVersion: '1.0.0', + massStorageDeviceRoot: '/', + massStorageDeviceFiles: [], + }) + const progress = vi.fn() + when(getLatestMassStorageUpdateFile) + .calledWith(['/storage/valid-release.zip']) + .thenDo(() => + provider.teardown().then(() => ({ + path: '/storage/valid-release.zip', + version: '1.0.0', + })) + ) + return provider + .teardown() + .then(() => + expect(provider.refreshUpdateCache(progress)).rejects.toThrow() + ) + .then(() => + expect(progress).toHaveBeenLastCalledWith({ + version: null, + files: null, + releaseNotes: null, + downloadProgress: 0, + }) + ) + }) + it('will not run two checks at once', () => { + when(getLatestMassStorageUpdateFile) + .calledWith(['/storage/valid-release.zip']) + .thenResolve({ path: '/storage/valid-release.zip', version: '1.0.0' }) + const progress = vi.fn() + const provider = getProvider({ + currentVersion: '1.0.0', + massStorageDeviceRoot: '/storage', + massStorageDeviceFiles: ['/storage/valid-release.zip'], + }) + const expectedUpdate = { + version: null, + files: null, + releaseNotes: null, + downloadProgress: 0, + } + const first = provider.refreshUpdateCache(progress) + const second = provider.refreshUpdateCache(progress) + return Promise.all([ + expect(first).resolves.toEqual(expectedUpdate), + expect(second).rejects.toThrow(), + ]).then(() => expect(getLatestMassStorageUpdateFile).toHaveBeenCalledOnce()) + }) + it('will run a second check after the first ends', () => { + when(getLatestMassStorageUpdateFile) + .calledWith(['/storage/valid-release.zip']) + .thenResolve({ path: '/storage/valid-release.zip', version: '1.0.0' }) + const progress = vi.fn() + const provider = getProvider({ + 
currentVersion: '1.0.0', + massStorageDeviceRoot: '/storage', + massStorageDeviceFiles: ['/storage/valid-release.zip'], + }) + const expectedUpdate = { + version: null, + files: null, + releaseNotes: null, + downloadProgress: 0, + } + return expect(provider.refreshUpdateCache(progress)) + .resolves.toEqual(expectedUpdate) + .then(() => + expect(provider.refreshUpdateCache(progress)).resolves.toEqual( + expectedUpdate + ) + ) + }) +}) diff --git a/app-shell-odd/src/system-update/from-usb/__tests__/scan-device.test.ts b/app-shell-odd/src/system-update/from-usb/__tests__/scan-device.test.ts new file mode 100644 index 00000000000..ff51e89abf3 --- /dev/null +++ b/app-shell-odd/src/system-update/from-usb/__tests__/scan-device.test.ts @@ -0,0 +1,59 @@ +import { describe, it, expect, vi, afterEach } from 'vitest' +import { when } from 'vitest-when' + +import { getVersionFromZipIfValid as _getVersionFromZipIfValid } from '../scan-zip' +import { getLatestMassStorageUpdateFile } from '../scan-device' +vi.mock('../../../log') +vi.mock('../scan-zip') +const getVersionFromZipIfValid = vi.mocked(_getVersionFromZipIfValid) + +describe('system-update/from-usb/scan-device', () => { + afterEach(() => { + vi.resetAllMocks() + }) + it('returns the single file passed in', () => { + when(getVersionFromZipIfValid) + .calledWith('/some/random/zip/file.zip') + .thenResolve({ path: '/some/random/zip/file.zip', version: '0.0.1' }) + return expect( + getLatestMassStorageUpdateFile(['/some/random/zip/file.zip']) + ).resolves.toEqual({ path: '/some/random/zip/file.zip', version: '0.0.1' }) + }) + it('returns null if no files are passed in', () => + expect(getLatestMassStorageUpdateFile([])).resolves.toBeNull()) + it('returns null if no suitable zips are found', () => { + when(getVersionFromZipIfValid) + .calledWith('/some/random/zip/file.zip') + .thenReject(new Error('no version found')) + return expect( + getLatestMassStorageUpdateFile(['/some/random/zip/file.zip']) + ).resolves.toBeNull() + }) + it('checks only the zip file', () => { + when(getVersionFromZipIfValid) + .calledWith('/some/random/zip/file.zip') + .thenResolve({ path: '/some/random/zip/file.zip', version: '0.0.1' }) + return expect( + getLatestMassStorageUpdateFile([ + '/some/random/zip/file.zip', + '/some/other/random/file', + ]) + ) + .resolves.toEqual({ path: '/some/random/zip/file.zip', version: '0.0.1' }) + .then(() => expect(getVersionFromZipIfValid).toHaveBeenCalledOnce()) + }) + it('returns the highest version', () => { + when(getVersionFromZipIfValid) + .calledWith('higher-version.zip') + .thenResolve({ path: 'higher-version.zip', version: '1.0.0' }) + when(getVersionFromZipIfValid) + .calledWith('lower-version.zip') + .thenResolve({ path: 'higher-version.zip', version: '1.0.0-alpha.0' }) + return expect( + getLatestMassStorageUpdateFile([ + 'higher-version.zip', + 'lower-version.zip', + ]) + ).resolves.toEqual({ path: 'higher-version.zip', version: '1.0.0' }) + }) +}) diff --git a/app-shell-odd/src/system-update/from-usb/__tests__/scan-zip.test.ts b/app-shell-odd/src/system-update/from-usb/__tests__/scan-zip.test.ts new file mode 100644 index 00000000000..226267a5a11 --- /dev/null +++ b/app-shell-odd/src/system-update/from-usb/__tests__/scan-zip.test.ts @@ -0,0 +1,151 @@ +import { it, describe, expect, vi } from 'vitest' +import path from 'path' +import { exec as _exec } from 'child_process' +import { promisify } from 'util' +import { writeFile, mkdir } from 'fs/promises' +import { REASONABLE_VERSION_FILE_SIZE_B } from '../../constants' +import 
{ directoryWithCleanup } from '../../utils' +import { getVersionFromZipIfValid } from '../scan-zip' + +vi.mock('../../../log') +const exec = promisify(_exec) + +const zipCommand = ( + tempDir: string, + zipName?: string, + zipContentSubDirectory?: string +): string => + `zip -j ${path.join(tempDir, zipName ?? 'test.zip')} ${path.join( + tempDir, + zipContentSubDirectory ?? 'test', + '*' + )}` + +describe('system-update/from-usb/scan-zip', () => { + it('should read version data from a valid zip file', () => + directoryWithCleanup(directory => + mkdir(path.join(directory, 'test')) + .then(() => + writeFile( + path.join(directory, 'test', 'VERSION.json'), + JSON.stringify({ + robot_type: 'OT-3 Standard', + opentrons_api_version: '1.2.3', + }) + ) + ) + .then(() => exec(zipCommand(directory))) + .then(() => + expect( + getVersionFromZipIfValid(path.join(directory, 'test.zip')) + ).resolves.toEqual({ + path: path.join(directory, 'test.zip'), + version: '1.2.3', + }) + ) + )) + + it('should throw if there is no version file', () => + directoryWithCleanup(directory => + mkdir(path.join(directory, 'test')) + .then(() => writeFile(path.join(directory, 'test', 'dummy'), 'lalala')) + .then(() => exec(zipCommand(directory))) + .then(() => + expect( + getVersionFromZipIfValid(path.join(directory, 'test.zip')) + ).rejects.toThrow() + ) + )) + it('should throw if the version file is too big', () => + directoryWithCleanup(directory => + mkdir(path.join(directory, 'test')) + .then(() => + writeFile( + path.join(directory, 'test', 'VERSION.json'), + `{data: "${'a'.repeat(REASONABLE_VERSION_FILE_SIZE_B + 1)}"}` + ) + ) + .then(() => + exec( + `head -c ${ + REASONABLE_VERSION_FILE_SIZE_B + 1 + } /dev/zero > ${path.join(directory, 'test', 'VERSION.json')} ` + ) + ) + .then(() => exec(zipCommand(directory))) + .then(() => + expect( + getVersionFromZipIfValid(path.join(directory, 'test.zip')) + ).rejects.toThrow() + ) + )) + it('should throw if the version file is not valid json', () => + directoryWithCleanup(directory => + mkdir(path.join(directory, 'test')) + .then(() => + writeFile(path.join(directory, 'test', 'VERSION.json'), 'asdaasdas') + ) + .then(() => exec(zipCommand(directory))) + .then(() => + expect( + getVersionFromZipIfValid(path.join(directory, 'test.zip')) + ).rejects.toThrow() + ) + )) + it('should throw if the version file is for OT-2', () => + directoryWithCleanup(directory => + mkdir(path.join(directory, 'test')) + .then(() => + writeFile( + path.join(directory, 'test', 'VERSION.json'), + JSON.stringify({ + robot_type: 'OT-2 Standard', + opentrons_api_version: '1.2.3', + }) + ) + ) + .then(() => exec(zipCommand(directory))) + .then(() => + expect( + getVersionFromZipIfValid(path.join(directory, 'test.zip')) + ).rejects.toThrow() + ) + )) + it('should throw if not given a zip file', () => + directoryWithCleanup(directory => + mkdir(path.join(directory, 'test')) + .then(() => writeFile(path.join(directory, 'test.zip'), 'aosidasdasd')) + .then(() => + expect( + getVersionFromZipIfValid(path.join(directory, 'test.zip')) + ).rejects.toThrow() + ) + )) + it('should throw if given a zip file with internal directories', () => + directoryWithCleanup(directory => + mkdir(path.join(directory, 'test')) + .then(() => + writeFile( + path.join(directory, 'test', 'VERSION.json'), + JSON.stringify({ + robot_type: 'OT-3 Standard', + opentrons_api_version: '1.2.3', + }) + ) + ) + .then(() => + exec( + `zip ${path.join(directory, 'test.zip')} ${path.join( + directory, + 'test', + '*' + )}` + ) + ) + .then(() => 
+ expect( + getVersionFromZipIfValid(path.join(directory, 'test.zip')) + ).rejects.toThrow() + ) + )) +}) diff --git a/app-shell-odd/src/system-update/from-usb/index.ts b/app-shell-odd/src/system-update/from-usb/index.ts new file mode 100644 index 00000000000..9ae1d7e4751 --- /dev/null +++ b/app-shell-odd/src/system-update/from-usb/index.ts @@ -0,0 +1,2 @@ +export { getProvider } from './provider' +export type { USBUpdateSource } from './provider' diff --git a/app-shell-odd/src/system-update/from-usb/provider.ts b/app-shell-odd/src/system-update/from-usb/provider.ts new file mode 100644 index 00000000000..53913fab790 --- /dev/null +++ b/app-shell-odd/src/system-update/from-usb/provider.ts @@ -0,0 +1,111 @@ +import tempy from 'tempy' +import path from 'path' +import { rm, writeFile } from 'fs/promises' +import type { UpdateProvider, ResolvedUpdate, ProgressCallback } from '../types' +import { getLatestMassStorageUpdateFile } from './scan-device' +import { createLogger } from '../../log' + +export interface USBUpdateSource { + currentVersion: string + massStorageDeviceRoot: string + massStorageDeviceFiles: string[] +} + +const fakeReleaseNotesForMassStorage = (version: string): string => ` +# Opentrons Robot Software Version ${version} + +This update is from a USB mass storage device connected to your Flex, and release notes cannot be shown. + +Don't remove the USB mass storage device while the update is in progress. +` +const log = createLogger('system-updates/from-usb') + +export function getProvider( + from: USBUpdateSource +): UpdateProvider { + const noUpdate = { + version: null, + files: null, + releaseNotes: null, + downloadProgress: 0, + } as const + let currentUpdate: ResolvedUpdate = noUpdate + let canceller = new AbortController() + let currentCheck: Promise | null = null + const tempdir = tempy.directory() + let tornDown = false + + const checkUpdates = async ( + progress: ProgressCallback + ): Promise => { + const myCanceller = canceller + if (myCanceller.signal.aborted || tornDown) { + progress(noUpdate) + throw new Error('cache torn down') + } + const updateFile = await getLatestMassStorageUpdateFile( + from.massStorageDeviceFiles + ).catch(() => null) + if (myCanceller.signal.aborted) { + progress(noUpdate) + throw new Error('cache torn down') + } + if (updateFile == null) { + log.info(`No update file in presented files`) + progress(noUpdate) + currentUpdate = noUpdate + return noUpdate + } + log.info(`Update file found for version ${updateFile.version}`) + if (updateFile.version === from.currentVersion) { + progress(noUpdate) + currentUpdate = noUpdate + return noUpdate + } + await writeFile( + path.join(tempdir, 'dummy-release-notes.md'), + fakeReleaseNotesForMassStorage(updateFile.version) + ) + if (myCanceller.signal.aborted) { + progress(noUpdate) + throw new Error('cache torn down') + } + const update = { + version: updateFile.version, + files: { + system: updateFile.path, + releaseNotes: path.join(tempdir, 'dummy-release-notes.md'), + }, + releaseNotes: fakeReleaseNotesForMassStorage(updateFile.version), + downloadProgress: 100, + } as const + currentUpdate = update + progress(update) + return update + } + return { + refreshUpdateCache: progressCallback => { + if (currentCheck != null) { + return new Promise((resolve, reject) => { + reject(new Error('Check already ongoing')) + }) + } + const updatePromise = checkUpdates(progressCallback) + currentCheck = updatePromise + return updatePromise.finally(() => { + currentCheck = null + }) + }, + getUpdateDetails: () => 
currentUpdate, + lockUpdateCache: () => {}, + unlockUpdateCache: () => {}, + teardown: () => { + canceller.abort() + tornDown = true + canceller = new AbortController() + return rm(tempdir, { recursive: true, force: true }) + }, + name: () => `USBUpdateProvider from ${from.massStorageDeviceRoot}`, + source: () => from, + } +} diff --git a/app-shell-odd/src/system-update/from-usb/scan-device.ts b/app-shell-odd/src/system-update/from-usb/scan-device.ts new file mode 100644 index 00000000000..0c0e7f3e40c --- /dev/null +++ b/app-shell-odd/src/system-update/from-usb/scan-device.ts @@ -0,0 +1,37 @@ +import Semver from 'semver' +import { getVersionFromZipIfValid } from './scan-zip' +import type { FileDetails } from './scan-zip' + +import { createLogger } from '../../log' +const log = createLogger('system-udpate/from-usb/scan-device') + +const higherVersion = (a: FileDetails | null, b: FileDetails): FileDetails => + a == null ? b : Semver.gt(a.version, b.version) ? a : b + +const mostRecentUpdateOf = (candidates: FileDetails[]): FileDetails | null => + candidates.reduce( + (prev, current) => higherVersion(prev, current), + null + ) + +const getMassStorageUpdateFiles = ( + filePaths: string[] +): Promise => + Promise.all( + filePaths.map(path => + path.endsWith('.zip') + ? getVersionFromZipIfValid(path).catch(() => null) + : new Promise(resolve => { + resolve(null) + }) + ) + ).then(values => { + const filtered = values.filter(entry => entry != null) as FileDetails[] + log.debug(`scan device found ${filtered}`) + return filtered + }) + +export const getLatestMassStorageUpdateFile = ( + filePaths: string[] +): Promise => + getMassStorageUpdateFiles(filePaths).then(mostRecentUpdateOf) diff --git a/app-shell-odd/src/system-update/from-usb/scan-zip.ts b/app-shell-odd/src/system-update/from-usb/scan-zip.ts new file mode 100644 index 00000000000..b6bce376096 --- /dev/null +++ b/app-shell-odd/src/system-update/from-usb/scan-zip.ts @@ -0,0 +1,88 @@ +import StreamZip from 'node-stream-zip' +import Semver from 'semver' +import { createLogger } from '../../log' +import { REASONABLE_VERSION_FILE_SIZE_B, VERSION_FILENAME } from '../constants' + +const log = createLogger('system-update/from-usb/scan-zip') + +export interface FileDetails { + path: string + version: string +} + +export const getVersionFromZipIfValid = (path: string): Promise => + new Promise((resolve, reject) => { + const zip = new StreamZip({ file: path, storeEntries: true }) + zip.on('ready', () => { + log.info(`Reading zip from ${path}`) + getVersionFromOpenedZipIfValid(zip) + .then(version => { + log.info(`Zip at ${path} has version ${version}`) + zip.close() + resolve({ version, path }) + }) + .catch(err => { + log.info( + `Zip at ${path} was read but could not be parsed: ${err.name}: ${err.message}` + ) + zip.close() + reject(err) + }) + }) + zip.on('error', err => { + log.info(`Zip at ${path} could not be read: ${err.name}: ${err.message}`) + zip.close() + reject(err) + }) + }) + +export const getVersionFromOpenedZipIfValid = ( + zip: StreamZip +): Promise => + new Promise((resolve, reject) => { + const found = Object.values(zip.entries()).reduce((prev, entry) => { + log.debug( + `Checking if ${entry.name} is ${VERSION_FILENAME}, is a file (${entry.isFile}), and ${entry.size}<${REASONABLE_VERSION_FILE_SIZE_B}` + ) + if ( + entry.isFile && + entry.name === VERSION_FILENAME && + entry.size < REASONABLE_VERSION_FILE_SIZE_B + ) { + log.debug(`${entry.name} is a version file candidate`) + const contents = 
zip.entryDataSync(entry.name).toString('ascii') + log.debug(`version contents: ${contents}`) + try { + const parsedContents = JSON.parse(contents) + if (parsedContents?.robot_type !== 'OT-3 Standard') { + reject(new Error('not a Flex release file')) + } + const fileVersion = parsedContents?.opentrons_api_version + const version = Semver.valid(fileVersion as string) + if (version === null) { + reject(new Error(`${fileVersion} is not a valid version`)) + return prev + } else { + log.info(`Found version file version ${version}`) + resolve(version) + return true + } + } catch (err: any) { + if (err instanceof Error) { + log.error( + `Failed to read ${entry.name}: ${err.name}: ${err.message}` + ) + } else { + log.error(`Failed to ready ${entry.name}: ${err}`) + } + reject(err) + return prev + } + } else { + return prev + } + }, false) + if (!found) { + reject(new Error('No version file found in zip')) + } + }) diff --git a/app-shell-odd/src/system-update/from-web/__tests__/latest-update.test.ts b/app-shell-odd/src/system-update/from-web/__tests__/latest-update.test.ts new file mode 100644 index 00000000000..b07d6947861 --- /dev/null +++ b/app-shell-odd/src/system-update/from-web/__tests__/latest-update.test.ts @@ -0,0 +1,40 @@ +import { describe, it, expect } from 'vitest' +import { latestVersionForChannel } from '../latest-update' + +describe('latest-update', () => { + it.each([ + ['8.0.0', '7.0.0', '8.0.0', ''], + ['7.0.0', '8.0.0', '8.0.0', ''], + ['8.10.0', '8.9.0', '8.10.0', ''], + ['8.9.0', '8.10.0', '8.10.0', ''], + ['8.0.0-alpha.0', '8.0.0-alpha.1', '8.0.0-alpha.1', 'alpha'], + ['8.0.0-alpha.1', '8.0.0-alpha.0', '8.0.0-alpha.1', 'alpha'], + ['8.1.0-alpha.0', '8.0.0-alpha.1', '8.1.0-alpha.0', 'alpha'], + ['8.0.0-alpha.1', '8.1.0-alpha.0', '8.1.0-alpha.0', 'alpha'], + ])( + 'choosing between %s and %s should result in %s', + (first, second, higher, channel) => { + expect(latestVersionForChannel([first, second], channel)).toEqual(higher) + } + ) + it('ignores updates from different channels', () => { + expect( + latestVersionForChannel( + ['8.0.0', '9.0.0-alpha.0', '10.0.0-beta.1', '2.0.0'], + 'production' + ) + ).toEqual('8.0.0') + expect( + latestVersionForChannel( + ['8.0.0', '9.0.0-alpha.0', '10.0.0-beta.1', '2.0.0'], + 'alpha' + ) + ).toEqual('9.0.0-alpha.0') + expect( + latestVersionForChannel( + ['8.0.0', '9.0.0-alpha.0', '10.0.0-beta.1', '2.0.0'], + 'beta' + ) + ).toEqual('10.0.0-beta.1') + }) +}) diff --git a/app-shell-odd/src/system-update/from-web/__tests__/provider.test.ts b/app-shell-odd/src/system-update/from-web/__tests__/provider.test.ts new file mode 100644 index 00000000000..3ffe2e4ec08 --- /dev/null +++ b/app-shell-odd/src/system-update/from-web/__tests__/provider.test.ts @@ -0,0 +1,774 @@ +import { vi, describe, it, expect, afterEach } from 'vitest' +import { when } from 'vitest-when' + +import { LocalAbortError } from '../../../http' +import { getProvider } from '../provider' +import { getOrDownloadManifest as _getOrDownloadManifest } from '../release-manifest' +import { cleanUpAndGetOrDownloadReleaseFiles as _cleanUpAndGetOrDownloadReleaseFiles } from '../release-files' + +vi.mock('../../../log') +vi.mock('../release-manifest', async importOriginal => { + // eslint-disable-next-line @typescript-eslint/consistent-type-imports + const original = await importOriginal() + return { + ...original, + getOrDownloadManifest: vi.fn(), + } +}) +vi.mock('../release-files') + +const getOrDownloadManifest = vi.mocked(_getOrDownloadManifest) +const 
cleanUpAndGetOrDownloadReleaseFiles = vi.mocked( + _cleanUpAndGetOrDownloadReleaseFiles +) + +describe('provider.refreshUpdateCache happy paths', () => { + afterEach(() => { + vi.resetAllMocks() + }) + it('says there is no update if the latest version is the current version', () => { + when(getOrDownloadManifest) + .calledWith( + 'http://opentrons.com/releases.json', + '/some/random/directory', + expect.any(AbortController) + ) + .thenResolve({ + production: { + '1.2.3': { + system: 'http://opentrons.com/system.zip', + fullImage: 'http://opentrons.com/fullImage.zip', + version: 'http://opentrons.com/version.json', + releaseNotes: 'http://opentrons.com/releaseNotes.md', + }, + }, + }) + const progressCallback = vi.fn() + const provider = getProvider({ + manifestUrl: 'http://opentrons.com/releases.json', + channel: 'release', + updateCacheDirectory: '/some/random/directory', + currentVersion: '1.2.3', + }) + expect(provider.getUpdateDetails()).toEqual({ + version: null, + files: null, + releaseNotes: null, + downloadProgress: 0, + }) + return expect(provider.refreshUpdateCache(progressCallback)) + .resolves.toEqual({ + version: null, + files: null, + releaseNotes: null, + downloadProgress: 0, + }) + .then(() => { + expect(progressCallback).toHaveBeenCalledWith({ + version: null, + files: null, + releaseNotes: null, + downloadProgress: 0, + }) + expect(provider.getUpdateDetails()).toEqual({ + version: null, + files: null, + releaseNotes: null, + downloadProgress: 0, + }) + expect(cleanUpAndGetOrDownloadReleaseFiles).not.toHaveBeenCalled() + }) + }) + it('says there is an update if a cached update is needed', () => { + const releaseUrls = { + system: 'http://opentrons.com/system.zip', + fullImage: 'http://opentrons.com/fullImage.zip', + version: 'http://opentrons.com/version.json', + releaseNotes: 'http://opentrons.com/releaseNotes.md', + } + const releaseFiles = { + system: '/some/random/directory/cached-release-1.2.3/ot3-system.zip', + releaseNotes: + '/some/random/directory/cached-release-1.2.3/releaseNotes.md', + } + const releaseData = { + ...releaseFiles, + releaseNotesContent: 'oh look some release notes cool', + } + when(getOrDownloadManifest) + .calledWith( + 'http://opentrons.com/releases.json', + '/some/random/directory', + expect.any(AbortController) + ) + .thenResolve({ + production: { + '1.2.3': releaseUrls, + }, + }) + + when(cleanUpAndGetOrDownloadReleaseFiles) + .calledWith( + releaseUrls, + '/some/random/directory/versions', + '1.2.3', + expect.any(Function), + expect.any(Object) + ) + .thenResolve(releaseData) + + const progressCallback = vi.fn() + const provider = getProvider({ + manifestUrl: 'http://opentrons.com/releases.json', + channel: 'release', + updateCacheDirectory: '/some/random/directory', + currentVersion: '1.0.0', + }) + expect(provider.getUpdateDetails()).toEqual({ + version: null, + files: null, + releaseNotes: null, + downloadProgress: 0, + }) + return expect(provider.refreshUpdateCache(progressCallback)) + .resolves.toEqual({ + version: '1.2.3', + files: releaseFiles, + releaseNotes: 'oh look some release notes cool', + downloadProgress: 100, + }) + .then(() => + expect(progressCallback).toHaveBeenCalledWith({ + version: '1.2.3', + files: releaseFiles, + releaseNotes: 'oh look some release notes cool', + downloadProgress: 100, + }) + ) + }) + it('says there is an update and forwards progress if an update download is needed', () => { + const releaseUrls = { + system: 'http://opentrons.com/system.zip', + fullImage: 'http://opentrons.com/fullImage.zip', + 
version: 'http://opentrons.com/version.json', + releaseNotes: 'http://opentrons.com/releaseNotes.md', + } + const releaseFiles = { + system: '/some/random/directory/cached-release-1.2.3/ot3-system.zip', + releaseNotes: + '/some/random/directory/cached-release-1.2.3/releaseNotes.md', + } + const releaseData = { + ...releaseFiles, + releaseNotesContent: 'oh look some release notes sweet', + } + when(getOrDownloadManifest) + .calledWith( + 'http://opentrons.com/releases.json', + '/some/random/directory', + expect.any(AbortController) + ) + .thenResolve({ + production: { + '1.2.3': releaseUrls, + }, + }) + + when(cleanUpAndGetOrDownloadReleaseFiles) + .calledWith( + releaseUrls, + '/some/random/directory/versions', + '1.2.3', + expect.any(Function), + expect.any(Object) + ) + .thenDo( + ( + _releaseUrls, + _cacheDir, + _version, + progressCallback, + _abortController + ) => + new Promise(resolve => { + progressCallback({ size: 100, downloaded: 0 }) + resolve() + }) + .then( + () => + new Promise(resolve => { + progressCallback({ size: 100, downloaded: 50 }) + resolve() + }) + ) + .then( + () => + new Promise(resolve => { + progressCallback({ size: 100, downloaded: 100 }) + resolve(releaseData) + }) + ) + ) + + const progressCallback = vi.fn() + const provider = getProvider({ + manifestUrl: 'http://opentrons.com/releases.json', + channel: 'release', + updateCacheDirectory: '/some/random/directory', + currentVersion: '1.0.0', + }) + expect(provider.getUpdateDetails()).toEqual({ + version: null, + files: null, + releaseNotes: null, + downloadProgress: 0, + }) + return expect(provider.refreshUpdateCache(progressCallback)) + .resolves.toEqual({ + version: '1.2.3', + files: releaseFiles, + releaseNotes: 'oh look some release notes sweet', + downloadProgress: 100, + }) + .then(() => { + expect(progressCallback).toHaveBeenCalledWith({ + version: '1.2.3', + files: null, + releaseNotes: null, + downloadProgress: 0, + }) + expect(progressCallback).toHaveBeenCalledWith({ + version: '1.2.3', + files: null, + releaseNotes: null, + downloadProgress: 50, + }) + expect(progressCallback).toHaveBeenCalledWith({ + version: '1.2.3', + files: null, + releaseNotes: null, + downloadProgress: 100, + }) + expect(progressCallback).toHaveBeenCalledWith({ + version: '1.2.3', + files: releaseFiles, + releaseNotes: 'oh look some release notes sweet', + downloadProgress: 100, + }) + expect(provider.getUpdateDetails()).toEqual({ + version: '1.2.3', + files: releaseFiles, + releaseNotes: 'oh look some release notes sweet', + downloadProgress: 100, + }) + }) + }) +}) + +describe('provider.refreshUpdateCache locking', () => { + afterEach(() => { + vi.resetAllMocks() + }) + it('will not start a refresh when locked', () => { + const provider = getProvider({ + manifestUrl: 'http://opentrons.com/releases.json', + channel: 'release', + updateCacheDirectory: '/some/random/directory', + currentVersion: '1.0.0', + }) + provider.lockUpdateCache() + return expect(provider.refreshUpdateCache(vi.fn())).rejects.toThrow() + }) + it('will start a refresh when locked then unlocked', () => { + const provider = getProvider({ + manifestUrl: 'http://opentrons.com/releases.json', + channel: 'release', + updateCacheDirectory: '/some/random/directory', + currentVersion: '1.2.3', + }) + when(getOrDownloadManifest) + .calledWith( + 'http://opentrons.com/releases.json', + '/some/random/directory', + expect.any(AbortController) + ) + .thenResolve({ + production: { + '1.2.3': { + system: 'http://opentrons.com/system.zip', + fullImage: 
'http://opentrons.com/fullImage.zip', + version: 'http://opentrons.com/version.json', + releaseNotes: 'http://opentrons.com/releaseNotes.md', + }, + }, + }) + provider.lockUpdateCache() + provider.unlockUpdateCache() + return expect(provider.refreshUpdateCache(vi.fn())).resolves.toEqual({ + version: null, + files: null, + releaseNotes: null, + downloadProgress: 0, + }) + }) + it('will abort when locked in the manifest phase and return the previous update', () => { + const provider = getProvider({ + manifestUrl: 'http://opentrons.com/releases.json', + channel: 'release', + updateCacheDirectory: '/some/random/directory', + currentVersion: '1.0.0', + }) + const releaseUrls = { + system: 'http://opentrons.com/system.zip', + fullImage: 'http://opentrons.com/fullImage.zip', + version: 'http://opentrons.com/version.json', + releaseNotes: 'http://opentrons.com/releaseNotes.md', + } + when(getOrDownloadManifest) + .calledWith( + 'http://opentrons.com/releases.json', + '/some/random/directory', + expect.any(AbortController) + ) + .thenResolve({ + production: { + '1.2.3': releaseUrls, + }, + }) + const releaseFiles = { + system: '/some/random/directory/cached-release-1.2.3/ot3-system.zip', + releaseNotes: + '/some/random/directory/cached-release-1.2.3/releaseNotes.md', + } + const releaseData = { ...releaseFiles, releaseNotesContent: 'oh hello' } + when(cleanUpAndGetOrDownloadReleaseFiles) + .calledWith( + releaseUrls, + '/some/random/directory/versions', + '1.2.3', + expect.any(Function), + expect.any(Object) + ) + .thenResolve(releaseData) + + return expect(provider.refreshUpdateCache(vi.fn())) + .resolves.toEqual({ + version: '1.2.3', + files: releaseFiles, + releaseNotes: 'oh hello', + downloadProgress: 100, + }) + .then(() => { + when(getOrDownloadManifest) + .calledWith( + 'http://opentrons.com/releases.json', + '/some/random/directory', + expect.any(AbortController) + ) + .thenDo( + (_manifestUrl, _cacheDirectory, abortController) => + new Promise((resolve, reject) => { + abortController.signal.addEventListener( + 'abort', + () => { + reject(new LocalAbortError(abortController.signal.reason)) + }, + { once: true } + ) + provider.lockUpdateCache() + }) + ) + const progress = vi.fn() + return expect(provider.refreshUpdateCache(progress)) + .rejects.toThrow() + .then(() => + expect(progress).toHaveBeenCalledWith({ + version: '1.2.3', + files: releaseFiles, + releaseNotes: 'oh hello', + downloadProgress: 100, + }) + ) + }) + .then(() => + expect(provider.getUpdateDetails()).toEqual({ + version: '1.2.3', + files: releaseFiles, + releaseNotes: 'oh hello', + downloadProgress: 100, + }) + ) + }) + it('will abort when locked between manifest and download phases and return the previous update', () => { + const provider = getProvider({ + manifestUrl: 'http://opentrons.com/releases.json', + channel: 'release', + updateCacheDirectory: '/some/random/directory', + currentVersion: '1.0.0', + }) + const releaseUrls = { + system: 'http://opentrons.com/system.zip', + fullImage: 'http://opentrons.com/fullImage.zip', + version: 'http://opentrons.com/version.json', + releaseNotes: 'http://opentrons.com/releaseNotes.md', + } + when(getOrDownloadManifest) + .calledWith( + 'http://opentrons.com/releases.json', + '/some/random/directory', + expect.any(AbortController) + ) + .thenResolve({ + production: { + '1.2.3': releaseUrls, + }, + }) + const releaseFiles = { + system: '/some/random/directory/cached-release-1.2.3/ot3-system.zip', + releaseNotes: + '/some/random/directory/cached-release-1.2.3/releaseNotes.md', + } + 
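+    // releaseData is what the mocked cleanUpAndGetOrDownloadReleaseFiles resolves with: the cached file paths plus the release-notes text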
const releaseData = { ...releaseFiles, releaseNotesContent: 'hi' } + when(cleanUpAndGetOrDownloadReleaseFiles) + .calledWith( + releaseUrls, + '/some/random/directory/versions', + '1.2.3', + expect.any(Function), + expect.any(Object) + ) + .thenResolve(releaseData) + + return expect(provider.refreshUpdateCache(vi.fn())) + .resolves.toEqual({ + version: '1.2.3', + files: releaseFiles, + releaseNotes: 'hi', + downloadProgress: 100, + }) + .then(() => { + when(getOrDownloadManifest) + .calledWith( + expect.any(String), + expect.any(String), + expect.any(AbortController) + ) + .thenDo( + () => + new Promise(resolve => { + provider.lockUpdateCache() + resolve({ production: { '1.2.3': releaseUrls } }) + }) + ) + const progress = vi.fn() + return expect(provider.refreshUpdateCache(progress)) + .rejects.toThrow() + .then(() => + expect(progress).toHaveBeenCalledWith({ + version: '1.2.3', + files: releaseFiles, + releaseNotes: 'hi', + downloadProgress: 100, + }) + ) + }) + .then(() => + expect(provider.getUpdateDetails()).toEqual({ + version: '1.2.3', + files: releaseFiles, + releaseNotes: 'hi', + downloadProgress: 100, + }) + ) + }) + it('will abort when locked in the file download phase and return the previous update', () => { + const provider = getProvider({ + manifestUrl: 'http://opentrons.com/releases.json', + channel: 'release', + updateCacheDirectory: '/some/random/directory', + currentVersion: '1.0.0', + }) + const releaseUrls = { + system: 'http://opentrons.com/system.zip', + fullImage: 'http://opentrons.com/fullImage.zip', + version: 'http://opentrons.com/version.json', + releaseNotes: 'http://opentrons.com/releaseNotes.md', + } + when(getOrDownloadManifest) + .calledWith( + 'http://opentrons.com/releases.json', + '/some/random/directory', + expect.any(AbortController) + ) + .thenResolve({ + production: { + '1.2.3': releaseUrls, + }, + }) + const releaseFiles = { + system: '/some/random/directory/cached-release-1.2.3/ot3-system.zip', + releaseNotes: + '/some/random/directory/cached-release-1.2.3/releaseNotes.md', + } + const releaseData = { + ...releaseFiles, + releaseNotesContent: 'content', + } + when(cleanUpAndGetOrDownloadReleaseFiles) + .calledWith( + releaseUrls, + '/some/random/directory/versions', + '1.2.3', + expect.any(Function), + expect.any(Object) + ) + .thenResolve(releaseData) + + return expect(provider.refreshUpdateCache(vi.fn())) + .resolves.toEqual({ + version: '1.2.3', + files: releaseFiles, + releaseNotes: 'content', + downloadProgress: 100, + }) + .then(() => { + when(getOrDownloadManifest) + .calledWith( + 'http://opentrons.com/releases.json', + '/some/random/directory', + expect.any(AbortController) + ) + .thenResolve({ + production: { + '1.2.3': releaseUrls, + }, + }) + when(cleanUpAndGetOrDownloadReleaseFiles) + .calledWith( + expect.any(Object), + expect.any(String), + expect.any(String), + expect.any(Function), + expect.any(AbortController) + ) + .thenDo( + ( + _releaseUrls, + _cacheDirectory, + _version, + _progress, + abortController + ) => + new Promise((resolve, reject) => { + abortController.signal.addEventListener( + 'abort', + () => { + reject(new LocalAbortError(abortController.signal.reason)) + }, + { once: true } + ) + provider.lockUpdateCache() + }) + ) + const progress = vi.fn() + return expect(provider.refreshUpdateCache(progress)) + .rejects.toThrow() + .then(() => + expect(progress).toHaveBeenCalledWith({ + version: '1.2.3', + files: releaseFiles, + releaseNotes: 'content', + downloadProgress: 100, + }) + ) + }) + .then(() => { + 
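+        // after the aborted second refresh, getUpdateDetails should still report the previously cached update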
expect(provider.getUpdateDetails()).toEqual({ + version: '1.2.3', + files: releaseFiles, + releaseNotes: 'content', + downloadProgress: 100, + }) + }) + }) + it('will abort when locked in the last-chance phase and return the previous update', () => { + const provider = getProvider({ + manifestUrl: 'http://opentrons.com/releases.json', + channel: 'release', + updateCacheDirectory: '/some/random/directory', + currentVersion: '1.0.0', + }) + const releaseUrls = { + system: 'http://opentrons.com/system.zip', + fullImage: 'http://opentrons.com/fullImage.zip', + version: 'http://opentrons.com/version.json', + releaseNotes: 'http://opentrons.com/releaseNotes.md', + } + when(getOrDownloadManifest) + .calledWith( + 'http://opentrons.com/releases.json', + '/some/random/directory', + expect.any(AbortController) + ) + .thenResolve({ + production: { + '1.2.3': releaseUrls, + }, + }) + const releaseFiles = { + system: '/some/random/directory/cached-release-1.2.3/ot3-system.zip', + releaseNotes: + '/some/random/directory/cached-release-1.2.3/releaseNotes.md', + } + const releaseData = { + ...releaseFiles, + releaseNotesContent: 'there is some', + } + when(cleanUpAndGetOrDownloadReleaseFiles) + .calledWith( + releaseUrls, + '/some/random/directory/versions', + '1.2.3', + expect.any(Function), + expect.any(Object) + ) + .thenResolve(releaseData) + + return expect(provider.refreshUpdateCache(vi.fn())) + .resolves.toEqual({ + version: '1.2.3', + files: releaseFiles, + releaseNotes: 'there is some', + downloadProgress: 100, + }) + .then(() => { + when(getOrDownloadManifest) + .calledWith( + 'http://opentrons.com/releases.json', + '/some/random/directory', + expect.any(AbortController) + ) + .thenResolve({ + production: { + '1.2.3': releaseUrls, + }, + }) + when(cleanUpAndGetOrDownloadReleaseFiles) + .calledWith( + expect.any(Object), + expect.any(String), + expect.any(String), + expect.any(Function), + expect.any(AbortController) + ) + .thenDo( + ( + _releaseUrls, + _cacheDirectory, + _version, + _progress, + _abortController + ) => + new Promise(resolve => { + provider.lockUpdateCache() + resolve(releaseData) + }) + ) + const progress = vi.fn() + return expect(provider.refreshUpdateCache(progress)) + .rejects.toThrow() + .then(() => + expect(progress).toHaveBeenCalledWith({ + version: '1.2.3', + files: releaseFiles, + releaseNotes: 'there is some', + downloadProgress: 100, + }) + ) + }) + .then(() => + expect(provider.getUpdateDetails()).toEqual({ + version: '1.2.3', + files: releaseFiles, + releaseNotes: 'there is some', + downloadProgress: 100, + }) + ) + }) + it('will not run two checks at once', () => { + when(getOrDownloadManifest) + .calledWith( + 'http://opentrons.com/releases.json', + '/some/random/directory', + expect.any(AbortController) + ) + .thenResolve({ + production: { + '1.2.3': { + system: 'http://opentrons.com/system.zip', + fullImage: 'http://opentrons.com/fullImage.zip', + version: 'http://opentrons.com/version.json', + releaseNotes: 'http://opentrons.com/releaseNotes.md', + }, + }, + }) + const progressCallback = vi.fn() + const provider = getProvider({ + manifestUrl: 'http://opentrons.com/releases.json', + channel: 'release', + updateCacheDirectory: '/some/random/directory', + currentVersion: '1.2.3', + }) + const first = provider.refreshUpdateCache(progressCallback) + const second = provider.refreshUpdateCache(progressCallback) + return Promise.all([ + expect(first).resolves.toEqual({ + version: null, + files: null, + releaseNotes: null, + downloadProgress: 0, + }), + 
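+      // the second concurrent refresh rejects rather than piggybacking on the first check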
expect(second).rejects.toThrow(), + ]).then(() => expect(getOrDownloadManifest).toHaveBeenCalledOnce()) + }) + it('will run a second check after the first completes', () => { + when(getOrDownloadManifest) + .calledWith( + 'http://opentrons.com/releases.json', + '/some/random/directory', + expect.any(AbortController) + ) + .thenResolve({ + production: { + '1.2.3': { + system: 'http://opentrons.com/system.zip', + fullImage: 'http://opentrons.com/fullImage.zip', + version: 'http://opentrons.com/version.json', + releaseNotes: 'http://opentrons.com/releaseNotes.md', + }, + }, + }) + const progressCallback = vi.fn() + const provider = getProvider({ + manifestUrl: 'http://opentrons.com/releases.json', + channel: 'release', + updateCacheDirectory: '/some/random/directory', + currentVersion: '1.2.3', + }) + return expect(provider.refreshUpdateCache(progressCallback)) + .resolves.toEqual({ + version: null, + files: null, + releaseNotes: null, + downloadProgress: 0, + }) + .then(() => + expect(provider.refreshUpdateCache(progressCallback)).resolves.toEqual({ + version: null, + files: null, + releaseNotes: null, + downloadProgress: 0, + }) + ) + }) +}) diff --git a/app-shell-odd/src/system-update/from-web/__tests__/release-files.test.ts b/app-shell-odd/src/system-update/from-web/__tests__/release-files.test.ts new file mode 100644 index 00000000000..34df59eaf49 --- /dev/null +++ b/app-shell-odd/src/system-update/from-web/__tests__/release-files.test.ts @@ -0,0 +1,514 @@ +// TODO(mc, 2020-06-11): test all release-files functions +import { vi, describe, it, expect, afterEach } from 'vitest' +import { when } from 'vitest-when' +import path from 'path' +import { promises as fs } from 'fs' + +import { fetchToFile as httpFetchToFile } from '../../../http' +import { + ensureCleanReleaseCacheForVersion, + getReleaseFiles, + downloadReleaseFiles, + getOrDownloadReleaseFiles, +} from '../release-files' + +import { directoryWithCleanup } from '../../utils' +import type { ReleaseSetUrls } from '../../types' + +vi.mock('../../../http') +vi.mock('../../../log') + +const fetchToFile = vi.mocked(httpFetchToFile) + +describe('ensureCleanReleaseCacheForVersion', () => { + it('should create the appropriate directory tree if it does not exist', () => + directoryWithCleanup(directory => + ensureCleanReleaseCacheForVersion( + path.join(directory, 'somerandomdirectory', 'someotherrandomdirectory'), + '1.2.3' + ) + .then(cacheDirectory => { + expect(cacheDirectory).toEqual( + path.join( + directory, + 'somerandomdirectory', + 'someotherrandomdirectory', + 'cached-release-1.2.3' + ) + ) + return fs.stat(cacheDirectory) + }) + .then(stats => expect(stats.isDirectory()).toBeTruthy()) + )) + it('should create the appropriate directory if the base directory entry is occupied by a file', () => + directoryWithCleanup(directory => + fs + .writeFile( + path.join(directory, 'somerandomdirectory'), + 'somerandomdata' + ) + .then(() => + ensureCleanReleaseCacheForVersion( + path.join(directory, 'somerandomdirectory'), + '1.2.3' + ) + ) + .then(cacheDirectory => { + expect(cacheDirectory).toEqual( + path.join(directory, 'somerandomdirectory', 'cached-release-1.2.3') + ) + return fs.stat(cacheDirectory) + }) + .then(stats => expect(stats.isDirectory()).toBeTruthy()) + )) + it('should create the appropriate directory if the version directory entry is occupied by a file', () => + directoryWithCleanup(directory => + fs + .mkdir(path.join(directory, 'somerandomdirectory')) + .then(() => + fs.writeFile( + path.join(directory, 
'somerandomdirectory', 'cached-release-1.2.3'), + 'somerandomdata' + ) + ) + .then(() => + ensureCleanReleaseCacheForVersion( + path.join(directory, 'somerandomdirectory'), + '1.2.3' + ) + ) + .then(baseDirectory => { + expect(baseDirectory).toEqual( + path.join(directory, 'somerandomdirectory', 'cached-release-1.2.3') + ) + return fs.stat(baseDirectory) + }) + .then(stats => expect(stats.isDirectory()).toBeTruthy()) + )) + it('should remove caches for other versions from the cache directory', () => + directoryWithCleanup(directory => + fs + .mkdir(path.join(directory, 'cached-release-0.1.2')) + .then(() => fs.mkdir(path.join(directory, 'cached-release-4.5.6'))) + .then(() => + fs.writeFile( + path.join(directory, 'cached-release-4.5.6', 'test.zip'), + 'asfjohasda' + ) + ) + .then(() => ensureCleanReleaseCacheForVersion(directory, '1.2.3')) + .then(cacheDirectory => { + expect(cacheDirectory).toEqual( + path.join(directory, 'cached-release-1.2.3') + ) + return fs.readdir(directory) + }) + .then(contents => expect(contents).toEqual(['cached-release-1.2.3'])) + )) + it('should leave already-existing correct version cache directories untouched', () => + directoryWithCleanup(directory => + fs + .mkdir(path.join(directory, 'cached-release-1.2.3')) + .then(() => + fs.writeFile( + path.join(directory, 'cached-release-1.2.3', 'system.zip'), + '123123' + ) + ) + .then(() => ensureCleanReleaseCacheForVersion(directory, '1.2.3')) + .then(cacheDirectory => fs.readdir(cacheDirectory)) + .then(contents => { + expect(contents).toEqual(['system.zip']) + return fs.readFile( + path.join(directory, 'cached-release-1.2.3', 'system.zip'), + { encoding: 'utf-8' } + ) + }) + .then(contents => expect(contents).toEqual('123123')) + )) +}) + +describe('getReleaseFiles', () => { + it('should fail if no release files are cached', () => + directoryWithCleanup(directory => + expect( + getReleaseFiles( + { + fullImage: 'http://opentrons.com/fullImage.zip', + system: 'http://opentrons.com/ot3-system.zip', + version: 'http//opentrons.com/VERSION.json', + releaseNotes: 'http://opentrons.com/releaseNotes.md', + }, + directory + ) + ).rejects.toThrow() + )) + it('should fail if system is not present but all others are', () => + directoryWithCleanup(directory => + fs + .writeFile(path.join(directory, 'fullImage.zip'), 'aslkdjasd') + .then(() => fs.writeFile(path.join(directory, 'VERSION.json'), 'asdas')) + .then(() => + fs.writeFile(path.join(directory, 'releaseNotes.md'), 'asdalsda') + ) + .then(() => + expect( + getReleaseFiles( + { + fullImage: 'http://opentrons.com/fullImage.zip', + system: 'http://opentrons.com/ot3-system.zip', + version: 'http//opentrons.com/VERSION.json', + releaseNotes: 'http://opentrons.com/releaseNotes.md', + }, + directory + ) + ).rejects.toThrow() + ) + )) + it('should return available files if system.zip is one of them', () => + directoryWithCleanup(directory => + fs + .writeFile(path.join(directory, 'ot3-system.zip'), 'asdjlhasd') + .then(() => + expect( + getReleaseFiles( + { + fullImage: 'http://opentrons.com/fullImage.zip', + system: 'http://opentrons.com/ot3-system.zip', + version: 'http//opentrons.com/VERSION.json', + releaseNotes: 'http://opentrons.com/releaseNotes.md', + }, + directory + ) + ).resolves.toEqual({ + system: path.join(directory, 'ot3-system.zip'), + releaseNotes: null, + releaseNotesContent: null, + }) + ) + )) + it('should find release notes if available', () => + directoryWithCleanup(directory => + fs + .writeFile(path.join(directory, 'ot3-system.zip'), 'asdjlhasd') + 
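+        // also seed releaseNotes.md so getReleaseFiles can surface its content alongside the system image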
.then(() => + fs.writeFile(path.join(directory, 'releaseNotes.md'), 'asdasda') + ) + .then(() => + expect( + getReleaseFiles( + { + fullImage: 'http://opentrons.com/fullImage.zip', + system: 'http://opentrons.com/ot3-system.zip', + version: 'http//opentrons.com/VERSION.json', + releaseNotes: 'http://opentrons.com/releaseNotes.md', + }, + directory + ) + ).resolves.toEqual({ + system: path.join(directory, 'ot3-system.zip'), + releaseNotes: path.join(directory, 'releaseNotes.md'), + releaseNotesContent: 'asdasda', + }) + ) + )) +}) + +describe('downloadReleaseFiles', () => { + afterEach(() => { + vi.resetAllMocks() + }) + it('should try and fetch both system zip and release notes', () => + directoryWithCleanup(directory => { + let tempSystemPath = '' + when(fetchToFile) + .calledWith( + 'http://opentrons.com/ot3-system.zip', + expect.any(String), + expect.any(Object) + ) + .thenDo((_url, dest, _opts) => { + tempSystemPath = dest + return fs + .writeFile(dest, 'this is the contents of the system.zip') + .then(() => dest) + }) + when(fetchToFile) + .calledWith( + 'http://opentrons.com/releaseNotes.md', + expect.any(String), + expect.any(Object) + ) + .thenDo((_url, dest) => { + return fs + .writeFile(dest, 'this is the contents of the release notes') + .then(() => dest) + }) + const progress = vi.fn() + return downloadReleaseFiles( + { + system: 'http://opentrons.com/ot3-system.zip', + releaseNotes: 'http://opentrons.com/releaseNotes.md', + } as ReleaseSetUrls, + directory, + progress, + new AbortController() + ).then(files => { + expect(files).toEqual({ + system: path.join(directory, 'ot3-system.zip'), + releaseNotes: path.join(directory, 'releaseNotes.md'), + releaseNotesContent: 'this is the contents of the release notes', + }) + return Promise.all([ + fs + .readFile(files.system, { encoding: 'utf-8' }) + .then(contents => + expect(contents).toEqual('this is the contents of the system.zip') + ), + fs + .readFile(files.releaseNotes as string, { encoding: 'utf-8' }) + .then(contents => + expect(contents).toEqual( + 'this is the contents of the release notes' + ) + ), + expect(fs.stat(path.dirname(tempSystemPath))).rejects.toThrow(), + ]) + }) + })) + it('should fetch only system zip if only system is available', () => + directoryWithCleanup(directory => { + when(fetchToFile) + .calledWith( + 'http://opentrons.com/ot3-system.zip', + expect.any(String), + expect.any(Object) + ) + .thenDo((_url, dest, _opts) => { + return fs + .writeFile(dest, 'this is the contents of the system.zip') + .then(() => dest) + }) + const progress = vi.fn() + return downloadReleaseFiles( + { + system: 'http://opentrons.com/ot3-system.zip', + } as ReleaseSetUrls, + directory, + progress, + new AbortController() + ).then(files => { + expect(files).toEqual({ + system: path.join(directory, 'ot3-system.zip'), + releaseNotes: null, + releaseNotesContent: null, + }) + return fs + .readFile(files.system, { encoding: 'utf-8' }) + .then(contents => + expect(contents).toEqual('this is the contents of the system.zip') + ) + }) + })) + it('should tolerate failing to fetch release notes', () => + directoryWithCleanup(directory => { + when(fetchToFile) + .calledWith( + 'http://opentrons.com/ot3-system.zip', + expect.any(String), + expect.any(Object) + ) + .thenDo((_url, dest, _opts) => { + return fs + .writeFile(dest, 'this is the contents of the system.zip') + .then(() => dest) + }) + when(fetchToFile) + .calledWith( + 'http://opentrons.com/releaseNotes.md', + expect.any(String), + expect.any(Object) + ) + .thenReject(new 
Error('oh no!')) + const progress = vi.fn() + return downloadReleaseFiles( + { + system: 'http://opentrons.com/ot3-system.zip', + releaseNotes: 'http://opentrons.com/releaseNotes.md', + } as ReleaseSetUrls, + directory, + progress, + new AbortController() + ).then(files => { + expect(files).toEqual({ + system: path.join(directory, 'ot3-system.zip'), + releaseNotes: null, + releaseNotesContent: null, + }) + return fs + .readFile(files.system, { encoding: 'utf-8' }) + .then(contents => + expect(contents).toEqual('this is the contents of the system.zip') + ) + }) + })) + it('should fail if it cannot fetch system zip', () => + directoryWithCleanup(directory => { + let tempSystemPath = '' + when(fetchToFile) + .calledWith( + 'http://opentrons.com/ot3-system.zip', + expect.any(String), + expect.any(Object) + ) + .thenReject(new Error('oh no')) + when(fetchToFile) + .calledWith( + 'http://opentrons.com/releaseNotes.md', + expect.any(String), + expect.any(Object) + ) + .thenDo((_url, dest) => { + tempSystemPath = dest + return fs + .writeFile(dest, 'this is the contents of the release notes') + .then(() => dest) + }) + const progress = vi.fn() + return expect( + downloadReleaseFiles( + { + system: 'http://opentrons.com/ot3-system.zip', + releaseNotes: 'http://opentrons.com/releaseNotes.md', + } as ReleaseSetUrls, + directory, + progress, + new AbortController() + ) + ) + .rejects.toThrow() + .then(() => + expect(fs.stat(path.dirname(tempSystemPath))).rejects.toThrow() + ) + })) + it('should allow the http requests to be aborted', () => + directoryWithCleanup(directory => { + const aborter = new AbortController() + const progressCallback = vi.fn() + when(fetchToFile) + .calledWith('http://opentrons.com/ot3-system.zip', expect.any(String), { + onProgress: progressCallback, + signal: aborter.signal, + }) + .thenDo( + (_url, dest, options) => + new Promise((resolve, reject) => { + const listener = () => { + reject(options.signal.reason) + } + options.signal.addEventListener('abort', listener, { once: true }) + aborter.abort('oh no!') + return fs + .writeFile(dest, 'this is the contents of the system.zip') + .then(() => dest) + }) + ) + return expect( + downloadReleaseFiles( + { + system: 'http://opentrons.com/ot3-system.zip', + } as ReleaseSetUrls, + directory, + progressCallback, + aborter + ) + ).rejects.toThrow() + })) +}) + +describe('getOrDownloadReleaseFiles', () => { + it('should not download release files if they are cached', () => + directoryWithCleanup(directory => + fs + .writeFile(path.join(directory, 'ot3-system.zip'), 'asdjlhasd') + .then(() => + expect( + getOrDownloadReleaseFiles( + { + system: 'http://opentrons.com/ot3-system.zip', + releaseNotes: 'http://opentrons.com/releaseNotes.md', + } as ReleaseSetUrls, + directory, + vi.fn(), + new AbortController() + ) + ) + .resolves.toEqual({ + system: path.join(directory, 'ot3-system.zip'), + releaseNotes: null, + releaseNotesContent: null, + }) + .then(() => expect(fetchToFile).not.toHaveBeenCalled()) + ) + )) + it('should download release files if they are not cached', () => + directoryWithCleanup(directory => { + when(fetchToFile) + .calledWith( + 'http://opentrons.com/ot3-system.zip', + expect.any(String), + expect.any(Object) + ) + .thenDo((_url, dest, _opts) => { + return fs + .writeFile(dest, 'this is the contents of the system.zip') + .then(() => dest) + }) + + return expect( + getOrDownloadReleaseFiles( + { + system: 'http://opentrons.com/ot3-system.zip', + } as ReleaseSetUrls, + directory, + vi.fn(), + new AbortController() + ) + 
) + .resolves.toEqual({ + system: path.join(directory, 'ot3-system.zip'), + releaseNotes: null, + releaseNotesContent: null, + }) + .then(() => + fs + .readFile(path.join(directory, 'ot3-system.zip'), { + encoding: 'utf-8', + }) + .then(contents => + expect(contents).toEqual('this is the contents of the system.zip') + ) + ) + })) + it('should fail if the file is not cached and can not be downloaded', () => + directoryWithCleanup(directory => { + when(fetchToFile) + .calledWith( + 'http://opentrons.com/ot3-system.zip', + expect.any(String), + expect.any(Object) + ) + .thenReject(new Error('oh no')) + + return expect( + getOrDownloadReleaseFiles( + { + system: 'http://opentrons.com/ot3-system.zip', + } as ReleaseSetUrls, + directory, + vi.fn(), + new AbortController() + ) + ).rejects.toThrow() + })) +}) diff --git a/app-shell-odd/src/system-update/from-web/__tests__/release-manifest.test.ts b/app-shell-odd/src/system-update/from-web/__tests__/release-manifest.test.ts new file mode 100644 index 00000000000..8062cd6b28b --- /dev/null +++ b/app-shell-odd/src/system-update/from-web/__tests__/release-manifest.test.ts @@ -0,0 +1,185 @@ +import { describe, it, vi, expect } from 'vitest' +import { when } from 'vitest-when' +import path from 'path' +import { readdir, writeFile, mkdir, readFile } from 'fs/promises' +import { fetchJson as _fetchJson } from '../../../http' +import { ensureCacheDir, getOrDownloadManifest } from '../release-manifest' +import { directoryWithCleanup } from '../../utils' + +vi.mock('../../../http') +// note: this doesn't look like it's needed but it is because http uses log +vi.mock('../../../log') +const fetchJson = vi.mocked(_fetchJson) + +const MOCK_MANIFEST = { + production: { + '1.2.3': { + fullImage: 'https://opentrons.com/no', + system: 'https://opentrons.com/no2', + version: 'https://opentrons.com/no3', + releaseNotes: 'https://opentrons.com/no4', + }, + }, +} + +describe('ensureCacheDirectory', () => { + it('should create the cache directory if it or its parents do not exist', () => + directoryWithCleanup(directory => + ensureCacheDir( + path.join(directory as string, 'somerandomname', 'someotherrandomname') + ) + .then(ensuredDirectory => { + expect(ensuredDirectory).toEqual( + path.join(directory, 'somerandomname', 'someotherrandomname') + ) + return readdir(path.join(directory, 'somerandomname'), { + withFileTypes: true, + }) + }) + .then(contents => { + expect(contents).toHaveLength(1) + expect(contents[0].isDirectory()).toBeTruthy() + expect(contents[0].name).toEqual('someotherrandomname') + return readdir(path.join(contents[0].path, contents[0].name)) + }) + .then(contents => { + expect(contents).toHaveLength(0) + }) + )) + it('should delete and recreate the cache directory if it is a file', () => + directoryWithCleanup(directory => + writeFile(path.join(directory, 'somerandomname'), 'alsdasda') + .then(() => ensureCacheDir(path.join(directory, 'somerandomname'))) + .then(ensuredDirectory => { + expect(ensuredDirectory).toEqual( + path.join(directory, 'somerandomname') + ) + return readdir(directory, { withFileTypes: true }) + }) + .then(contents => { + expect(contents).toHaveLength(1) + expect(contents[0].isDirectory()).toBeTruthy() + expect(contents[0].name).toEqual('somerandomname') + return readdir(path.join(contents[0].path, contents[0].name)) + }) + .then(contents => { + expect(contents).toHaveLength(0) + }) + )) + + it('should remove a non-file with the same name as the manifest file', () => + directoryWithCleanup(directory => + 
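+      // create manifest.json as a directory so ensureCacheDir has to remove the non-file entry while leaving other files alone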
mkdir(path.join(directory, 'somerandomname', 'manifest.json'), { + recursive: true, + }) + .then(() => + writeFile( + path.join(directory, 'somerandomname', 'testfile'), + 'testdata' + ) + ) + .then(() => ensureCacheDir(path.join(directory, 'somerandomname'))) + .then(ensuredDirectory => readdir(ensuredDirectory)) + .then(contents => { + expect(contents).not.toContain('manifest.json') + return readFile(path.join(directory, 'somerandomname', 'testfile'), { + encoding: 'utf-8', + }) + }) + .then(contents => expect(contents).toEqual('testdata')) + )) + + it('should preserve extra contents of the directory if the directory exists', () => + directoryWithCleanup(directory => + mkdir(path.join(directory, 'somerandomname'), { recursive: true }) + .then(() => + writeFile( + path.join(directory, 'somerandomname', 'somerandomfile'), + 'somerandomdata' + ) + ) + .then(() => ensureCacheDir(path.join(directory, 'somerandomname'))) + .then(ensuredDirectory => { + expect(ensuredDirectory).toEqual( + path.join(directory, 'somerandomname') + ) + return readFile( + path.join(directory, 'somerandomname', 'somerandomfile'), + { encoding: 'utf-8' } + ) + }) + .then(contents => { + expect(contents).toEqual('somerandomdata') + return readdir(directory) + }) + .then(contents => expect(contents).toEqual(['somerandomname'])) + )) +}) + +describe('getOrDownloadManifest', () => { + const localManifest = { + production: { + '4.5.6': { + fullImage: 'https://opentrons.com/no', + system: 'https://opentrons.com/no2', + version: 'https://opentrons.com/no3', + releaseNotes: 'https://opentrons.com/no4', + }, + }, + } + it('should download a new manifest if possible', () => + directoryWithCleanup(directory => + writeFile( + path.join(directory, 'manifest.json'), + JSON.stringify(localManifest) + ) + .then(() => { + when(fetchJson) + .calledWith( + 'http://opentrons.com/releases.json', + expect.any(Object) + ) + .thenResolve(MOCK_MANIFEST) + return getOrDownloadManifest( + 'http://opentrons.com/releases.json', + directory, + new AbortController() + ) + }) + .then(manifest => expect(manifest).toEqual(MOCK_MANIFEST)) + )) + it('should use a cached manifest if the download fails', () => + directoryWithCleanup(directory => + writeFile( + path.join(directory, 'manifest.json'), + JSON.stringify(localManifest) + ) + .then(() => { + when(fetchJson) + .calledWith( + 'http://opentrons.com/releases.json', + expect.any(Object) + ) + .thenReject(new Error('oh no!')) + return getOrDownloadManifest( + 'http://opentrons.com/releases.json', + directory, + new AbortController() + ) + }) + .then(manifest => expect(manifest).toEqual(localManifest)) + )) + it('should reject if no manifest is available', () => + directoryWithCleanup(directory => { + when(fetchJson) + .calledWith('http://opentrons.com/releases.json', expect.any(Object)) + .thenReject(new Error('oh no!')) + return expect( + getOrDownloadManifest( + 'http://opentrons.com/releases.json', + directory, + new AbortController() + ) + ).rejects.toThrow() + })) +}) diff --git a/app-shell-odd/src/system-update/from-web/index.ts b/app-shell-odd/src/system-update/from-web/index.ts new file mode 100644 index 00000000000..0a9c34e3370 --- /dev/null +++ b/app-shell-odd/src/system-update/from-web/index.ts @@ -0,0 +1,2 @@ +export { getProvider } from './provider' +export type { WebUpdateSource } from './provider' diff --git a/app-shell-odd/src/system-update/from-web/latest-update.ts b/app-shell-odd/src/system-update/from-web/latest-update.ts new file mode 100644 index 00000000000..1a270c85ddd --- 
/dev/null +++ b/app-shell-odd/src/system-update/from-web/latest-update.ts @@ -0,0 +1,28 @@ +import semver from 'semver' + +const channelFinder = (version: string, channel: string): boolean => { + // return the latest alpha/beta if a user subscribes to alpha/beta updates + if (['alpha', 'beta'].includes(channel)) { + return version.includes(channel) + } else { + // otherwise get the latest stable version + return !version.includes('alpha') && !version.includes('beta') + } +} + +export const latestVersionForChannel = ( + availableVersions: string[], + channel: string +): string | null => + availableVersions + .filter(version => channelFinder(version, channel)) + .sort((a, b) => (semver.gt(a, b) ? 1 : -1)) + .pop() ?? null + +export const shouldUpdate = ( + currentVersion: string, + availableVersion: string | null +): string | null => + availableVersion != null && currentVersion !== availableVersion + ? availableVersion + : null diff --git a/app-shell-odd/src/system-update/from-web/provider.ts b/app-shell-odd/src/system-update/from-web/provider.ts new file mode 100644 index 00000000000..ca5c8da9fc9 --- /dev/null +++ b/app-shell-odd/src/system-update/from-web/provider.ts @@ -0,0 +1,209 @@ +import path from 'path' +import { rm } from 'fs/promises' + +import { createLogger } from '../../log' +import { LocalAbortError } from '../../http' + +import type { + UpdateProvider, + ResolvedUpdate, + UnresolvedUpdate, + ProgressCallback, + NoUpdate, +} from '../types' + +import { getOrDownloadManifest, getReleaseSet } from './release-manifest' +import { cleanUpAndGetOrDownloadReleaseFiles } from './release-files' +import { latestVersionForChannel, shouldUpdate } from './latest-update' + +import type { DownloadProgress } from '../../http' + +const log = createLogger('systemUpdate/from-web/provider') + +export interface WebUpdateSource { + manifestUrl: string + channel: string + updateCacheDirectory: string + currentVersion: string +} + +export function getProvider( + from: WebUpdateSource +): UpdateProvider { + let locked = false + let canceller = new AbortController() + const lockCache = (): void => { + locked = true + canceller.abort('cache locked') + canceller = new AbortController() + } + const versionCacheDir = path.join(from.updateCacheDirectory, 'versions') + const noUpdate = { + version: null, + files: null, + releaseNotes: null, + downloadProgress: 0, + } as const + let currentUpdate: UnresolvedUpdate = noUpdate + let currentCheck: Promise | null = null + const updater = async ( + progress: ProgressCallback + ): Promise => { + const myCanceller = canceller + // this needs to be an `as`-assertion on the value because we can only guarantee that + // currentUpdate is resolved by the function of the program: we know that this function, + // which is the only thing that can alter currentUpdate, will always end with a resolved update, + // and we know that this function will not be running twice at the same time. + // eslint-disable-next-line @typescript-eslint/consistent-type-assertions + const previousUpdate = { + version: currentUpdate.version, + files: currentUpdate.files == null ? 
null : { ...currentUpdate.files }, + releaseNotes: currentUpdate.releaseNotes, + downloadProgress: currentUpdate.downloadProgress, + } as ResolvedUpdate + if (locked) { + throw new Error('cache locked') + } + const returnNoUpdate = (): NoUpdate => { + currentUpdate = noUpdate + progress(noUpdate) + return noUpdate + } + const manifest = await getOrDownloadManifest( + from.manifestUrl, + from.updateCacheDirectory, + myCanceller + ).catch((error: Error) => { + if (myCanceller.signal.aborted) { + log.info('aborted cache update because cache was locked') + currentUpdate = previousUpdate + progress(previousUpdate) + throw error + } + log.info( + `Failed to get or download update manifest: ${error.name}: ${error.message}` + ) + return null + }) + if (manifest == null) { + log.info(`no manifest found, returning`) + return returnNoUpdate() + } + const latestVersion = latestVersionForChannel( + Object.keys(manifest.production), + from.channel + ) + + const versionToUpdate = shouldUpdate(from.currentVersion, latestVersion) + if (versionToUpdate == null) { + log.debug(`no update found, returning`) + return returnNoUpdate() + } + const releaseUrls = getReleaseSet(manifest, versionToUpdate) + if (releaseUrls == null) { + log.debug(`no release urls found, returning`) + return returnNoUpdate() + } + log.info(`Finding version ${latestVersion}`) + const downloadingUpdate = { + version: latestVersion, + files: null, + releaseNotes: null, + downloadProgress: 0, + } as const + progress(downloadingUpdate) + currentUpdate = downloadingUpdate + + if (myCanceller.signal.aborted) { + log.info('aborted cache update because cache was locked') + currentUpdate = previousUpdate + progress(previousUpdate) + throw new LocalAbortError('cache locked') + } + const localFiles = await cleanUpAndGetOrDownloadReleaseFiles( + releaseUrls, + versionCacheDir, + versionToUpdate, + (downloadProgress: DownloadProgress): void => { + const downloadProgressPercent = + downloadProgress.size == null || downloadProgress.size === 0.0 + ? 
0 + : (downloadProgress.downloaded / downloadProgress.size) * 100 + log.debug( + `Downloading update ${versionToUpdate}: ${downloadProgress.downloaded}/${downloadProgress.size}B (${downloadProgressPercent}%)` + ) + const update = { + version: versionToUpdate, + files: null, + releaseNotes: null, + downloadProgress: downloadProgressPercent, + } + currentUpdate = update + progress(update) + }, + myCanceller + ).catch((err: Error) => { + if (myCanceller.signal.aborted) { + currentUpdate = previousUpdate + progress(previousUpdate) + throw err + } else { + log.warn(`Failed to fetch update data: ${err.name}: ${err.message}`) + } + return null + }) + + if (localFiles == null) { + log.info( + `Download of ${versionToUpdate} failed, no release data is available` + ) + return returnNoUpdate() + } + if (myCanceller.signal.aborted) { + currentUpdate = previousUpdate + progress(previousUpdate) + throw new LocalAbortError('cache locked') + } + + const updateDetails = { + version: versionToUpdate, + files: { + system: localFiles.system, + releaseNotes: localFiles.releaseNotes, + }, + releaseNotes: localFiles.releaseNotesContent, + downloadProgress: 100, + } as const + currentUpdate = updateDetails + progress(updateDetails) + return updateDetails + } + return { + getUpdateDetails: () => currentUpdate, + refreshUpdateCache: (progress: ProgressCallback) => { + if (currentCheck != null) { + return new Promise((resolve, reject) => { + reject(new Error('Check already ongoing')) + }) + } else { + const updaterPromise = updater(progress) + currentCheck = updaterPromise + return updaterPromise.finally(() => { + currentCheck = null + }) + } + }, + + teardown: () => { + lockCache() + return rm(from.updateCacheDirectory, { recursive: true, force: true }) + }, + lockUpdateCache: lockCache, + unlockUpdateCache: () => { + locked = false + }, + name: () => + `WebUpdateProvider from ${from.manifestUrl} channel ${from.channel}`, + source: () => from, + } +} diff --git a/app-shell-odd/src/system-update/from-web/release-files.ts b/app-shell-odd/src/system-update/from-web/release-files.ts new file mode 100644 index 00000000000..a3c45cf5d42 --- /dev/null +++ b/app-shell-odd/src/system-update/from-web/release-files.ts @@ -0,0 +1,243 @@ +// functions for downloading and storing release files + +import path from 'path' +import tempy from 'tempy' +import { move, readdir, rm, mkdirp, readFile } from 'fs-extra' +import { fetchToFile } from '../../http' +import { createLogger } from '../../log' + +import type { DownloadProgress } from '../../http' +import type { ReleaseSetUrls, ReleaseSetFilepaths } from '../types' +import type { Dirent } from 'fs' + +const log = createLogger('systemUpdate/from-web/release-files') +const outPath = (dir: string, url: string): string => { + return path.join(dir, path.basename(url)) +} + +const RELEASE_DIRECTORY_PREFIX = 'cached-release-' + +export const directoryNameForRelease = (version: string): string => + `${RELEASE_DIRECTORY_PREFIX}${version}` + +export const directoryForRelease = ( + baseDirectory: string, + version: string +): string => path.join(baseDirectory, directoryNameForRelease(version)) + +async function ensureReleaseCache(baseDirectory: string): Promise { + try { + return await readdir(baseDirectory, { withFileTypes: true }) + } catch (error: any) { + console.log( + `Could not read download cache base directory: ${error.name}: ${error.message}: remaking` + ) + await rm(baseDirectory, { force: true, recursive: true }) + await mkdirp(baseDirectory) + return [] + } +} + +export const 
ensureCleanReleaseCacheForVersion = ( + baseDirectory: string, + version: string +): Promise => + ensureReleaseCache(baseDirectory) + .then(contents => + Promise.all( + contents.map(contained => + !contained.isDirectory() || + contained.name !== directoryNameForRelease(version) + ? rm(path.join(baseDirectory, contained.name), { + force: true, + recursive: true, + }) + : new Promise(resolve => { + resolve() + }) + ) + ) + ) + .then(() => mkdirp(directoryForRelease(baseDirectory, version))) + .then(() => directoryForRelease(baseDirectory, version)) + +export interface ReleaseSetData extends ReleaseSetFilepaths { + releaseNotesContent: string | null +} + +export const augmentWithReleaseNotesContent = ( + releaseFiles: ReleaseSetFilepaths +): Promise => + releaseFiles.releaseNotes == null + ? new Promise(resolve => { + resolve({ ...releaseFiles, releaseNotesContent: null }) + }) + : readReleaseNotes(releaseFiles.releaseNotes) + .then(releaseNotesContent => ({ ...releaseFiles, releaseNotesContent })) + .catch(err => { + log.error( + `Release notes should be present but cannot be read: ${err.name}: ${err.message}` + ) + return { ...releaseFiles, releaseNotesContent: null } + }) + +// checks `directory` for system update files matching the given `urls`, and +// downloads them if they can't be found +export function getReleaseFiles( + urls: ReleaseSetUrls, + directory: string +): Promise { + return readdir(directory).then((files: string[]) => { + log.info(`Files in system update download directory ${directory}: ${files}`) + const expected = { + system: path.basename(urls.system), + releaseNotes: + urls?.releaseNotes == null ? null : path.basename(urls.releaseNotes), + } + const foundFiles = files.reduce>( + ( + releaseSetFilePaths: Partial, + thisFile: string + ): Partial => { + if (thisFile === expected.system) { + return { ...releaseSetFilePaths, system: thisFile } + } + if ( + expected.releaseNotes != null && + thisFile === expected.releaseNotes + ) { + return { ...releaseSetFilePaths, releaseNotes: thisFile } + } + return releaseSetFilePaths + }, + {} + ) + if (foundFiles?.system != null) { + const files = { + system: outPath(directory, foundFiles.system), + releaseNotes: + foundFiles?.releaseNotes != null + ? outPath(directory, foundFiles.releaseNotes) + : null, + } + log.info( + `Found system file ${foundFiles.system} in cache directory ${directory}` + ) + return augmentWithReleaseNotesContent(files) + } + + throw new Error( + `no release files cached: could not find system file ${outPath( + directory, + urls.system + )} in ${files}` + ) + }) +} + +// downloads the entire release set to a temporary directory, and once they're +// all successfully downloaded, renames the directory to `directory` +export function downloadReleaseFiles( + urls: ReleaseSetUrls, + directory: string, + // `onProgress` will be called with download progress as the files are read + onProgress: (progress: DownloadProgress) => void, + canceller: AbortController +): Promise { + const tempDir: string = tempy.directory() + const tempSystemPath = outPath(tempDir, urls.system) + const tempNotesPath = outPath(tempDir, urls.releaseNotes ?? '') + // downloads are streamed directly to the filesystem to avoid loading them + // all into memory simultaneously + const notesReq = + urls.releaseNotes != null + ? 
fetchToFile(urls.releaseNotes, tempNotesPath, { + signal: canceller.signal, + }).catch(err => { + log.warn( + `release notes not available from ${urls.releaseNotes}: ${err.name}: ${err.message}` + ) + return null + }) + : Promise.resolve(null) + if (urls.releaseNotes != null) { + log.info(`Downloading ${urls.releaseNotes} to ${tempNotesPath}`) + } else { + log.info('No release notes available, not downloading') + } + log.info(`Downloading ${urls.system} to ${tempSystemPath}`) + const systemReq = fetchToFile(urls.system, tempSystemPath, { + onProgress, + signal: canceller.signal, + }) + return Promise.all([systemReq, notesReq]) + .then(results => { + const [systemTemp, releaseNotesTemp] = results + const systemPath = outPath(directory, systemTemp) + const notesPath = releaseNotesTemp + ? outPath(directory, releaseNotesTemp) + : null + + log.info(`Download complete, ${tempDir}=>${directory}`) + + return move(tempDir, directory, { overwrite: true }).then(() => { + log.info(`Move complete`) + return augmentWithReleaseNotesContent({ + system: systemPath, + releaseNotes: notesPath, + }) + }) + }) + .catch(error => { + log.error( + `Failed to download release files: ${error.name}: ${error.message}` + ) + return rm(tempDir, { force: true, recursive: true }).then(() => { + throw error + }) + }) +} + +export async function getOrDownloadReleaseFiles( + urls: ReleaseSetUrls, + releaseCacheDirectory: string, + onProgress: (progress: DownloadProgress) => void, + canceller: AbortController +): Promise { + try { + return await getReleaseFiles(urls, releaseCacheDirectory) + } catch (error: any) { + log.info( + `Could not find cached release files for ${releaseCacheDirectory}: ${error.name}: ${error.message}, attempting to download` + ) + return await downloadReleaseFiles( + urls, + releaseCacheDirectory, + onProgress, + canceller + ) + } +} + +export const cleanUpAndGetOrDownloadReleaseFiles = ( + urls: ReleaseSetUrls, + baseDirectory: string, + version: string, + onProgress: (progress: DownloadProgress) => void, + canceller: AbortController +): Promise => + ensureCleanReleaseCacheForVersion(baseDirectory, version).then(versionCache => + getOrDownloadReleaseFiles(urls, versionCache, onProgress, canceller) + ) + +const readReleaseNotes = (path: string | null): Promise => + path == null + ? new Promise(resolve => { + resolve(null) + }) + : readFile(path, { encoding: 'utf-8' }).catch(err => { + log.warn( + `Could not read release notes from ${path}: ${err.name}: ${err.message}` + ) + return null + }) diff --git a/app-shell-odd/src/system-update/from-web/release-manifest.ts b/app-shell-odd/src/system-update/from-web/release-manifest.ts new file mode 100644 index 00000000000..9433067cb17 --- /dev/null +++ b/app-shell-odd/src/system-update/from-web/release-manifest.ts @@ -0,0 +1,101 @@ +import * as FS from 'fs/promises' +import path from 'path' +import { readJson, outputJson } from 'fs-extra' + +import type { Stats } from 'fs' +import { fetchJson, LocalAbortError } from '../../http' +import type { ReleaseManifest, ReleaseSetUrls } from '../types' +import { createLogger } from '../../log' + +const log = createLogger('systemUpdate/from-web/provider') + +export function getReleaseSet( + manifest: ReleaseManifest, + version: string +): ReleaseSetUrls | null { + return manifest.production[version] ?? 
null +} + +export const getCachedReleaseManifest = ( + cacheDir: string +): Promise => readJson(`${cacheDir}/manifest.json`) + +const removeAndRemake = (directory: string): Promise => + FS.rm(directory, { recursive: true, force: true }) + .then(() => FS.mkdir(directory, { recursive: true })) + .then(() => FS.stat(directory)) + +export const ensureCacheDir = (directory: string): Promise => + FS.stat(directory) + .catch(() => removeAndRemake(directory)) + .then(stats => + stats.isDirectory() + ? new Promise(resolve => { + resolve(stats) + }) + : removeAndRemake(directory) + ) + .then(() => FS.readdir(directory, { withFileTypes: true })) + .then(contents => { + const manifestCandidate = contents.find( + entry => entry.name === 'manifest.json' + ) + if (manifestCandidate == null || manifestCandidate.isFile()) { + return new Promise(resolve => { + resolve(directory) + }) + } + return FS.rm(path.join(directory, 'manifest.json'), { + force: true, + recursive: true, + }).then(() => directory) + }) + +export const downloadManifest = ( + manifestUrl: string, + cacheDir: string, + cancel: AbortController +): Promise => { + log.info(`Attempting to fetch release manifest from ${manifestUrl}`) + return fetchJson(manifestUrl, { + signal: cancel.signal, + }).then(manifest => { + log.info('Fetched release manifest OK') + return outputJson(path.join(cacheDir, 'manifest.json'), manifest).then( + () => manifest + ) + }) +} + +export const ensureCacheDirAndDownloadManifest = ( + manifestUrl: string, + cacheDir: string, + cancel: AbortController +): Promise => + ensureCacheDir(cacheDir).then(ensuredCacheDir => + downloadManifest(manifestUrl, ensuredCacheDir, cancel) + ) + +export async function getOrDownloadManifest( + manifestUrl: string, + cacheDir: string, + cancel: AbortController +): Promise { + try { + return await ensureCacheDirAndDownloadManifest( + manifestUrl, + cacheDir, + cancel + ) + } catch (error: any) { + if (error instanceof LocalAbortError) { + log.info('Aborted during manifest fetch') + throw error + } else { + log.info( + `Could not fetch manifest: ${error.name}: ${error.message}, falling back to cached` + ) + return await getCachedReleaseManifest(cacheDir) + } + } +} diff --git a/app-shell-odd/src/system-update/handler.ts b/app-shell-odd/src/system-update/handler.ts new file mode 100644 index 00000000000..8344578e9fa --- /dev/null +++ b/app-shell-odd/src/system-update/handler.ts @@ -0,0 +1,380 @@ +// system update handler + +import Semver from 'semver' + +import { CONFIG_INITIALIZED, VALUE_UPDATED } from '../constants' +import { createLogger } from '../log' +import { postFile } from '../http' +import { getConfig } from '../config' +import { getSystemUpdateDir } from './directories' +import { SYSTEM_FILENAME, FLEX_MANIFEST_URL } from './constants' +import { getProvider as getWebUpdateProvider } from './from-web' +import { getProvider as getUsbUpdateProvider } from './from-usb' + +import type { Action, Dispatch } from '../types' +import type { UpdateProvider, UnresolvedUpdate, ReadyUpdate } from './types' +import type { USBUpdateSource } from './from-usb' + +export const CURRENT_SYSTEM_VERSION = _PKG_VERSION_ + +const log = createLogger('system-update/handler') + +export interface UpdateDriver { + handleAction: (action: Action) => Promise + reload: () => Promise + shouldReload: () => boolean + teardown: () => Promise +} + +export function createUpdateDriver(dispatch: Dispatch): UpdateDriver { + log.info(`Running robot system updates storing to ${getSystemUpdateDir()}`) + + let webUpdate: 
UnresolvedUpdate = { + version: null, + files: null, + releaseNotes: null, + downloadProgress: 0, + } + let webProvider = getWebUpdateProvider({ + manifestUrl: FLEX_MANIFEST_URL, + channel: getConfig('update').channel, + updateCacheDirectory: getSystemUpdateDir(), + currentVersion: CURRENT_SYSTEM_VERSION, + }) + const usbProviders: Record> = {} + let currentBestUsbUpdate: + | (ReadyUpdate & { providerName: string }) + | null = null + + const updateBestUsbUpdate = (): void => { + currentBestUsbUpdate = null + Object.values(usbProviders).forEach(provider => { + const providerUpdate = provider.getUpdateDetails() + if (providerUpdate.files == null) { + // nothing to do, keep null + } else if (currentBestUsbUpdate == null) { + currentBestUsbUpdate = { + ...(providerUpdate as ReadyUpdate), + providerName: provider.name(), + } + } else if ( + Semver.gt(providerUpdate.version, currentBestUsbUpdate.version) + ) { + currentBestUsbUpdate = { + ...(providerUpdate as ReadyUpdate), + providerName: provider.name(), + } + } + }) + } + + const dispatchStaticUpdateData = (): void => { + if (currentBestUsbUpdate != null) { + dispatchUpdateInfo( + { + version: currentBestUsbUpdate.version, + releaseNotes: currentBestUsbUpdate.releaseNotes, + force: true, + }, + dispatch + ) + } else { + dispatchUpdateInfo( + { + version: webUpdate.version, + releaseNotes: webUpdate.releaseNotes, + force: false, + }, + dispatch + ) + } + } + + return { + handleAction: (action: Action): Promise => { + switch (action.type) { + case 'shell:CHECK_UPDATE': + return webProvider + .refreshUpdateCache(updateStatus => { + webUpdate = updateStatus + if (currentBestUsbUpdate == null) { + if ( + updateStatus.version != null && + updateStatus.files == null && + updateStatus.downloadProgress === 0 + ) { + dispatch({ + type: 'robotUpdate:UPDATE_VERSION', + payload: { + version: updateStatus.version, + force: false, + target: 'flex', + }, + }) + } else if ( + updateStatus.version != null && + updateStatus.files == null && + updateStatus.downloadProgress !== 0 + ) { + dispatch({ + // TODO: change this action type to 'systemUpdate:DOWNLOAD_PROGRESS' + type: 'robotUpdate:DOWNLOAD_PROGRESS', + payload: { + progress: updateStatus.downloadProgress, + target: 'flex', + }, + }) + } else if (updateStatus.files != null) { + dispatchStaticUpdateData() + } + } + }) + .catch(err => { + log.warn( + `Error finding updates with ${webProvider.name()}: ${ + err.name + }: ${err.message}` + ) + return { + version: null, + files: null, + downloadProgress: 0, + releaseNotes: null, + } as const + }) + .then(result => { + webUpdate = result + dispatchStaticUpdateData() + }) + case 'shell:ROBOT_MASS_STORAGE_DEVICE_ENUMERATED': + log.info( + `mass storage device enumerated at ${action.payload.rootPath}` + ) + if (usbProviders[action.payload.rootPath] != null) { + return new Promise(resolve => { + resolve() + }) + } + usbProviders[action.payload.rootPath] = getUsbUpdateProvider({ + currentVersion: CURRENT_SYSTEM_VERSION, + massStorageDeviceRoot: action.payload.rootPath, + massStorageDeviceFiles: action.payload.filePaths, + }) + return usbProviders[action.payload.rootPath] + .refreshUpdateCache(() => {}) + .then(() => { + updateBestUsbUpdate() + dispatchStaticUpdateData() + }) + .catch(err => { + log.error( + `Failed to get updates from ${action.payload.rootPath}: ${err.name}: ${err.message}` + ) + }) + + case 'shell:ROBOT_MASS_STORAGE_DEVICE_REMOVED': + log.info(`mass storage removed at ${action.payload.rootPath}`) + const provider = 
usbProviders[action.payload.rootPath] + if (provider != null) { + return provider + .teardown() + .then(() => { + // eslint-disable-next-line @typescript-eslint/no-dynamic-delete + delete usbProviders[action.payload.rootPath] + updateBestUsbUpdate() + }) + .catch(err => { + log.error( + `Failed to tear down provider ${provider.name()}: ${ + err.name + }: ${err.message}` + ) + }) + .then(() => { + dispatchStaticUpdateData() + }) + } + return new Promise(resolve => { + resolve() + }) + case 'robotUpdate:UPLOAD_FILE': { + const { host, path, systemFile } = action.payload + // eslint-disable-next-line @typescript-eslint/no-floating-promises + return postFile( + `http://${host.ip}:${host.port}${path}`, + SYSTEM_FILENAME, + systemFile + ) + .then(() => ({ + type: 'robotUpdate:FILE_UPLOAD_DONE' as const, + payload: host.name, + })) + .catch((error: Error) => { + log.warn('Error uploading update to robot', { + path, + systemFile, + error, + }) + + return { + type: 'robotUpdate:UNEXPECTED_ERROR' as const, + payload: { + message: `Error uploading update to robot: ${error.message}`, + }, + } + }) + .then(dispatch) + } + case 'robotUpdate:READ_SYSTEM_FILE': { + const getDetails = (): { + systemFile: string + version: string + isManualFile: false + } | null => { + if (currentBestUsbUpdate) { + return { + systemFile: currentBestUsbUpdate.files.system, + version: currentBestUsbUpdate.version, + isManualFile: false, + } + } else if (webUpdate.files?.system != null) { + return { + systemFile: webUpdate.files.system, + version: webUpdate.version as string, // version is string if files is not null + isManualFile: false, + } + } else { + return null + } + } + return new Promise(resolve => { + const details = getDetails() + if (details == null) { + dispatch({ + type: 'robotUpdate:UNEXPECTED_ERROR', + payload: { message: 'System update file not downloaded' }, + }) + resolve() + return + } + + dispatch({ + type: 'robotUpdate:FILE_INFO' as const, + payload: details, + }) + resolve() + }) + } + case 'robotUpdate:READ_USER_FILE': { + return new Promise(resolve => { + dispatch({ + type: 'robotUpdate:UNEXPECTED_ERROR', + payload: { + message: 'Updates of this kind are not implemented for ODD', + }, + }) + resolve() + }) + } + } + return new Promise(resolve => { + resolve() + }) + }, + reload: () => { + webProvider.lockUpdateCache() + return webProvider + .teardown() + .catch(err => { + log.error( + `Failed to tear down web provider ${webProvider.name()}: ${ + err.name + }: ${err.message}` + ) + }) + .then(() => { + webProvider = getWebUpdateProvider({ + manifestUrl: FLEX_MANIFEST_URL, + channel: getConfig('update').channel, + updateCacheDirectory: getSystemUpdateDir(), + currentVersion: CURRENT_SYSTEM_VERSION, + }) + }) + .catch(err => { + const message = `System updates failed to handle config change: ${err.name}: ${err.message}` + log.error(message) + dispatch({ + type: 'robotUpdate:UNEXPECTED_ERROR', + payload: { message: message }, + }) + }) + }, + shouldReload: () => + getConfig('update').channel !== webProvider.source().channel, + teardown: () => { + return Promise.allSettled([ + webProvider.teardown(), + ...Object.values(usbProviders).map(provider => provider.teardown()), + ]) + .catch(errs => { + log.error(`Failed to tear down some providers: ${errs}`) + }) + .then(results => { + log.info('all providers torn down') + }) + }, + } +} + +export interface UpdatableDriver { + getUpdateDriver: () => UpdateDriver | null + handleAction: (action: Action) => Promise +} + +export function manageDriver(dispatch: 
Dispatch): UpdatableDriver { + let updateDriver: UpdateDriver | null = null + return { + handleAction: action => { + if (action.type === CONFIG_INITIALIZED) { + log.info('Initializing update driver') + return new Promise(resolve => { + updateDriver = createUpdateDriver(dispatch) + resolve() + }) + } else if (updateDriver != null) { + if (action.type === VALUE_UPDATED && updateDriver.shouldReload()) { + return updateDriver.reload() + } else { + return updateDriver.handleAction(action) + } + } else { + return new Promise(resolve => { + log.warn( + `update driver manager received action ${action.type} before initialization` + ) + resolve() + }) + } + }, + getUpdateDriver: () => updateDriver, + } +} + +export function registerRobotSystemUpdate(dispatch: Dispatch): Dispatch { + return manageDriver(dispatch).handleAction +} + +const dispatchUpdateInfo = ( + info: { version: string | null; releaseNotes: string | null; force: boolean }, + dispatch: Dispatch +): void => { + const { version, releaseNotes, force } = info + dispatch({ + type: 'robotUpdate:UPDATE_INFO', + payload: { releaseNotes, version, force, target: 'flex' }, + }) + dispatch({ + type: 'robotUpdate:UPDATE_VERSION', + payload: { version, force, target: 'flex' }, + }) +} diff --git a/app-shell-odd/src/system-update/index.ts b/app-shell-odd/src/system-update/index.ts index 7d8e62fb8ac..4ec36b05a57 100644 --- a/app-shell-odd/src/system-update/index.ts +++ b/app-shell-odd/src/system-update/index.ts @@ -1,394 +1,2 @@ // system update files -import path from 'path' -import { ensureDir } from 'fs-extra' -import { readFile } from 'fs/promises' -import StreamZip from 'node-stream-zip' -import Semver from 'semver' -import { UI_INITIALIZED } from '../constants' -import { createLogger } from '../log' -import { - getLatestSystemUpdateUrls, - getLatestVersion, - isUpdateAvailable, - updateLatestVersion, -} from '../update' -import { - getReleaseFiles, - readUserFileInfo, - cleanupReleaseFiles, -} from './release-files' -import { uploadSystemFile } from './update' -import { getSystemUpdateDir } from './directories' - -import type { DownloadProgress } from '../http' -import type { Action, Dispatch } from '../types' -import type { ReleaseSetFilepaths } from './types' - -const log = createLogger('systemUpdate/index') -const REASONABLE_VERSION_FILE_SIZE_B = 4096 - -let isGettingLatestSystemFiles = false -const isGettingMassStorageUpdatesFrom: Set = new Set() -let massStorageUpdateSet: ReleaseSetFilepaths | null = null -let systemUpdateSet: ReleaseSetFilepaths | null = null - -const readFileInfoAndDispatch = ( - dispatch: Dispatch, - fileName: string, - isManualFile: boolean = false -): Promise => - readUserFileInfo(fileName) - .then(fileInfo => ({ - type: 'robotUpdate:FILE_INFO' as const, - payload: { - systemFile: fileInfo.systemFile, - version: fileInfo.versionInfo.opentrons_api_version, - isManualFile, - }, - })) - .catch((error: Error) => ({ - type: 'robotUpdate:UNEXPECTED_ERROR' as const, - payload: { message: error.message }, - })) - .then(dispatch) - -export function registerRobotSystemUpdate(dispatch: Dispatch): Dispatch { - log.info(`Running robot system updates storing to ${getSystemUpdateDir()}`) - return function handleAction(action: Action) { - switch (action.type) { - case UI_INITIALIZED: - case 'shell:CHECK_UPDATE': - // short circuit early if we're already downloading the latest system files - if (isGettingLatestSystemFiles) { - log.info(`system update download already in progress`) - return - } - updateLatestVersion() - .then(() => { - 
if (isUpdateAvailable() && !isGettingLatestSystemFiles) { - isGettingLatestSystemFiles = true - return getLatestSystemUpdateFiles(dispatch) - } - }) - .then(() => { - isGettingLatestSystemFiles = false - }) - .catch((error: Error) => { - log.warn('Error checking for update', { - error, - }) - isGettingLatestSystemFiles = false - }) - - break - - case 'robotUpdate:UPLOAD_FILE': { - const { host, path, systemFile } = action.payload - // eslint-disable-next-line @typescript-eslint/no-floating-promises - uploadSystemFile(host, path, systemFile) - .then(() => ({ - type: 'robotUpdate:FILE_UPLOAD_DONE' as const, - payload: host.name, - })) - .catch((error: Error) => { - log.warn('Error uploading update to robot', { - path, - systemFile, - error, - }) - - return { - type: 'robotUpdate:UNEXPECTED_ERROR' as const, - payload: { - message: `Error uploading update to robot: ${error.message}`, - }, - } - }) - .then(dispatch) - - break - } - - case 'robotUpdate:READ_USER_FILE': { - const { systemFile } = action.payload as { systemFile: string } - // eslint-disable-next-line @typescript-eslint/no-floating-promises - readFileInfoAndDispatch(dispatch, systemFile, true) - break - } - case 'robotUpdate:READ_SYSTEM_FILE': { - const systemFile = - massStorageUpdateSet?.system ?? systemUpdateSet?.system - if (systemFile == null) { - dispatch({ - type: 'robotUpdate:UNEXPECTED_ERROR', - payload: { message: 'System update file not downloaded' }, - }) - return - } - // eslint-disable-next-line @typescript-eslint/no-floating-promises - readFileInfoAndDispatch(dispatch, systemFile) - break - } - case 'shell:ROBOT_MASS_STORAGE_DEVICE_ENUMERATED': - if (isGettingMassStorageUpdatesFrom.has(action.payload.rootPath)) { - return - } - isGettingMassStorageUpdatesFrom.add(action.payload.rootPath) - getLatestMassStorageUpdateFiles(action.payload.filePaths, dispatch) - .then(() => { - isGettingMassStorageUpdatesFrom.delete(action.payload.rootPath) - }) - .catch(() => { - isGettingMassStorageUpdatesFrom.delete(action.payload.rootPath) - }) - break - case 'shell:ROBOT_MASS_STORAGE_DEVICE_REMOVED': - if ( - massStorageUpdateSet !== null && - massStorageUpdateSet.system.startsWith(action.payload.rootPath) - ) { - console.log( - `Mass storage device ${action.payload.rootPath} removed, reverting to non-usb updates` - ) - massStorageUpdateSet = null - getCachedSystemUpdateFiles(dispatch) - } else { - console.log( - `Mass storage device ${action.payload.rootPath} removed but this was not an update source` - ) - } - break - } - } -} - -const getVersionFromOpenedZipIfValid = (zip: StreamZip): Promise => - new Promise((resolve, reject) => { - Object.values(zip.entries()).forEach(entry => { - if ( - entry.isFile && - entry.name === 'VERSION.json' && - entry.size < REASONABLE_VERSION_FILE_SIZE_B - ) { - const contents = zip.entryDataSync(entry.name).toString('ascii') - try { - const parsedContents = JSON.parse(contents) - if (parsedContents?.robot_type !== 'OT-3 Standard') { - reject(new Error('not a Flex release file')) - } - const fileVersion = parsedContents?.opentrons_api_version - const version = Semver.valid(fileVersion as string) - if (version === null) { - reject(new Error(`${fileVersion} is not a valid version`)) - } else { - resolve(version) - } - } catch (error) { - reject(error) - } - } - }) - }) - -interface FileDetails { - path: string - version: string -} - -const getVersionFromZipIfValid = (path: string): Promise => - new Promise((resolve, reject) => { - const zip = new StreamZip({ file: path, storeEntries: true }) - 
zip.on('ready', () => { - getVersionFromOpenedZipIfValid(zip) - .then(version => { - zip.close() - resolve({ version, path }) - }) - .catch(err => { - zip.close() - reject(err) - }) - }) - zip.on('error', err => { - zip.close() - reject(err) - }) - }) - -const fakeReleaseNotesForMassStorage = (version: string): string => ` -# Opentrons Robot Software Version ${version} - -This update is from a USB mass storage device connected to your Flex, and release notes cannot be shown. - -Don't remove the USB mass storage device while the update is in progress. -` - -export const getLatestMassStorageUpdateFiles = ( - filePaths: string[], - dispatch: Dispatch -): Promise => - Promise.all( - filePaths.map(path => - path.endsWith('.zip') - ? getVersionFromZipIfValid(path).catch(() => null) - : new Promise(resolve => { - resolve(null) - }) - ) - ).then(values => { - const update = values.reduce( - (prev, current) => - prev === null - ? current === null - ? prev - : current - : current === null - ? prev - : Semver.gt(current.version, prev.version) - ? current - : prev, - null - ) - if (update === null) { - console.log('no updates found in mass storage device') - } else { - console.log(`found update to version ${update.version} on mass storage`) - const releaseNotes = fakeReleaseNotesForMassStorage(update.version) - massStorageUpdateSet = { system: update.path, releaseNotes } - dispatchUpdateInfo( - { version: update.version, releaseNotes, force: true }, - dispatch - ) - } - }) - -const dispatchUpdateInfo = ( - info: { version: string | null; releaseNotes: string | null; force: boolean }, - dispatch: Dispatch -): void => { - const { version, releaseNotes, force } = info - dispatch({ - type: 'robotUpdate:UPDATE_INFO', - payload: { releaseNotes, version, force, target: 'flex' }, - }) - dispatch({ - type: 'robotUpdate:UPDATE_VERSION', - payload: { version, force, target: 'flex' }, - }) -} - -// Get latest system update version -// 1. Ensure the system update directory exists -// 2. Get the manifest file from the local cache -// 3. Get the release files according to the manifest -// a. If the files need downloading, dispatch progress updates to UI -// 4. Cache the filepaths of the update files in memory -// 5. 
Dispatch info or error to UI -export function getLatestSystemUpdateFiles( - dispatch: Dispatch -): Promise { - const fileDownloadDir = path.join( - getSystemUpdateDir(), - 'robot-system-updates' - ) - - return ensureDir(getSystemUpdateDir()) - .then(() => getLatestSystemUpdateUrls()) - .then(urls => { - if (urls === null) { - const latestVersion = getLatestVersion() - log.warn('No release files in manifest', { - version: latestVersion, - }) - return Promise.reject( - new Error(`No release files in manifest for version ${latestVersion}`) - ) - } - - let prevPercentDone = 0 - - const handleProgress = (progress: DownloadProgress): void => { - const { downloaded, size } = progress - if (size !== null) { - const percentDone = Math.round((downloaded / size) * 100) - if (Math.abs(percentDone - prevPercentDone) > 0) { - if (massStorageUpdateSet === null) { - dispatch({ - // TODO: change this action type to 'systemUpdate:DOWNLOAD_PROGRESS' - type: 'robotUpdate:DOWNLOAD_PROGRESS', - payload: { progress: percentDone, target: 'flex' }, - }) - } - prevPercentDone = percentDone - } - } - } - - return getReleaseFiles(urls, fileDownloadDir, handleProgress) - .then(filepaths => { - return cacheUpdateSet(filepaths) - }) - .then(updateInfo => { - massStorageUpdateSet === null && - dispatchUpdateInfo({ force: false, ...updateInfo }, dispatch) - }) - .catch((error: Error) => { - dispatch({ - type: 'robotUpdate:DOWNLOAD_ERROR', - payload: { error: error.message, target: 'flex' }, - }) - }) - .then(() => - cleanupReleaseFiles(getSystemUpdateDir(), 'robot-system-updates') - ) - .catch((error: Error) => { - log.warn('Unable to cleanup old release files', { error }) - }) - }) -} - -export function getCachedSystemUpdateFiles( - dispatch: Dispatch -): Promise { - if (systemUpdateSet) { - return getInfoFromUpdateSet(systemUpdateSet) - .then(updateInfo => { - dispatchUpdateInfo({ force: false, ...updateInfo }, dispatch) - }) - .catch(err => { - console.log(`Could not get info from update set: ${err}`) - }) - } else { - dispatchUpdateInfo( - { version: null, releaseNotes: null, force: false }, - dispatch - ) - return new Promise(resolve => { - resolve('no files') - }) - } -} - -function getInfoFromUpdateSet( - filepaths: ReleaseSetFilepaths -): Promise<{ version: string; releaseNotes: string | null }> { - const version = getLatestVersion() - const releaseNotesContentPromise = filepaths.releaseNotes - ? 
readFile(filepaths.releaseNotes, 'utf8') - : new Promise(resolve => { - resolve(null) - }) - return releaseNotesContentPromise - .then(releaseNotes => ({ - version: version, - releaseNotes, - })) - .catch(() => ({ version: version, releaseNotes: '' })) -} - -function cacheUpdateSet( - filepaths: ReleaseSetFilepaths -): Promise<{ version: string; releaseNotes: string | null }> { - systemUpdateSet = filepaths - return getInfoFromUpdateSet(systemUpdateSet) -} +export { registerRobotSystemUpdate } from './handler' diff --git a/app-shell-odd/src/system-update/release-files.ts b/app-shell-odd/src/system-update/release-files.ts deleted file mode 100644 index 6ea57648d05..00000000000 --- a/app-shell-odd/src/system-update/release-files.ts +++ /dev/null @@ -1,148 +0,0 @@ -// functions for downloading and storing release files -import assert from 'assert' -import path from 'path' -import { promisify } from 'util' -import tempy from 'tempy' -import { move, readdir, remove } from 'fs-extra' -import StreamZip from 'node-stream-zip' -import getStream from 'get-stream' - -import { createLogger } from '../log' -import { fetchToFile } from '../http' -import type { DownloadProgress } from '../http' -import type { ReleaseSetUrls, ReleaseSetFilepaths, UserFileInfo } from './types' - -const VERSION_FILENAME = 'VERSION.json' - -const log = createLogger('systemUpdate/release-files') -const outPath = (dir: string, url: string): string => { - return path.join(dir, path.basename(url)) -} - -// checks `directory` for system update files matching the given `urls`, and -// downloads them if they can't be found -export function getReleaseFiles( - urls: ReleaseSetUrls, - directory: string, - onProgress: (progress: DownloadProgress) => unknown -): Promise { - return readdir(directory) - .catch(error => { - log.warn('Error retrieving files from filesystem', { error }) - return [] - }) - .then((files: string[]) => { - log.debug('Files in system update download directory', { files }) - const system = outPath(directory, urls.system) - const releaseNotes = outPath(directory, urls.releaseNotes ?? '') - - // TODO: check for release notes when OT-3 manifest points to real release notes - if (files.some(f => f === path.basename(system))) { - return { system, releaseNotes } - } - - return downloadReleaseFiles(urls, directory, onProgress) - }) -} - -// downloads the entire release set to a temporary directory, and once they're -// all successfully downloaded, renames the directory to `directory` -// TODO(mc, 2019-07-09): DRY this up if/when more than 2 files are required -export function downloadReleaseFiles( - urls: ReleaseSetUrls, - directory: string, - // `onProgress` will be called with download progress as the files are read - onProgress: (progress: DownloadProgress) => unknown -): Promise { - const tempDir: string = tempy.directory() - const tempSystemPath = outPath(tempDir, urls.system) - const tempNotesPath = outPath(tempDir, urls.releaseNotes ?? '') - - log.debug('directory created for robot update downloads', { tempDir }) - - // downloads are streamed directly to the filesystem to avoid loading them - // all into memory simultaneously - const systemReq = fetchToFile(urls.system, tempSystemPath, { onProgress }) - const notesReq = urls.releaseNotes - ? fetchToFile(urls.releaseNotes, tempNotesPath) - : Promise.resolve(null) - - return Promise.all([systemReq, notesReq]).then(results => { - const [systemTemp, releaseNotesTemp] = results - const systemPath = outPath(directory, systemTemp) - const notesPath = releaseNotesTemp - ? 
outPath(directory, releaseNotesTemp) - : null - - log.debug('renaming directory', { from: tempDir, to: directory }) - - return move(tempDir, directory, { overwrite: true }).then(() => ({ - system: systemPath, - releaseNotes: notesPath, - })) - }) -} - -export function readUserFileInfo(systemFile: string): Promise { - const openZip = new Promise((resolve, reject) => { - const zip = new StreamZip({ file: systemFile, storeEntries: true }) - .once('ready', handleReady) - .once('error', handleError) - - function handleReady(): void { - cleanup() - resolve(zip) - } - - function handleError(error: Error): void { - cleanup() - zip.close() - reject(error) - } - - function cleanup(): void { - zip.removeListener('ready', handleReady) - zip.removeListener('error', handleError) - } - }) - - return openZip.then(zip => { - const entries = zip.entries() - const streamFromZip = promisify(zip.stream.bind(zip)) - - assert(VERSION_FILENAME in entries, `${VERSION_FILENAME} not in archive`) - - const result = streamFromZip(VERSION_FILENAME) - // @ts-expect-error(mc, 2021-02-17): stream may be undefined - .then(getStream) - .then(JSON.parse) - .then(versionInfo => ({ - systemFile, - versionInfo, - })) - - result.finally(() => { - zip.close() - }) - - return result - }) -} - -export function cleanupReleaseFiles( - downloadsDir: string, - currentRelease: string -): Promise { - log.debug('deleting release files not part of release ', currentRelease) - - return readdir(downloadsDir, { withFileTypes: true }) - .then(files => { - return ( - files - // eslint-disable-next-line @typescript-eslint/strict-boolean-expressions - .filter(f => f.isDirectory() && f.name !== currentRelease) - .map(f => path.join(downloadsDir, f.name)) - ) - }) - .then(removals => Promise.all(removals.map(f => remove(f)))) -} diff --git a/app-shell-odd/src/system-update/release-manifest.ts b/app-shell-odd/src/system-update/release-manifest.ts deleted file mode 100644 index d27c8a04449..00000000000 --- a/app-shell-odd/src/system-update/release-manifest.ts +++ /dev/null @@ -1,29 +0,0 @@ -import { readJson, outputJson } from 'fs-extra' -import { fetchJson } from '../http' -import { createLogger } from '../log' -import { getManifestCacheDir } from './directories' -import type { ReleaseManifest, ReleaseSetUrls } from './types' - -const log = createLogger('systemUpdate/release-manifest') - -export function getReleaseSet( - manifest: ReleaseManifest, - version: string -): ReleaseSetUrls | null { - return manifest.production[version] ?? 
null -} - -export const getCachedReleaseManifest = (): Promise => - readJson(getManifestCacheDir()) - -export const downloadAndCacheReleaseManifest = ( - manifestUrl: string -): Promise => - fetchJson(manifestUrl) - .then(manifest => { - return outputJson(getManifestCacheDir(), manifest).then(() => manifest) - }) - .catch((error: Error) => { - log.error('Error downloading the release manifest', { error }) - return readJson(getManifestCacheDir()) - }) diff --git a/app-shell-odd/src/system-update/types.ts b/app-shell-odd/src/system-update/types.ts index 8555d980791..12c2f5dc674 100644 --- a/app-shell-odd/src/system-update/types.ts +++ b/app-shell-odd/src/system-update/types.ts @@ -16,24 +16,47 @@ export interface ReleaseSetFilepaths { releaseNotes: string | null } -// shape of VERSION.json in update file -export interface VersionInfo { - buildroot_version: string - buildroot_sha: string - buildroot_branch: string - buildroot_buildid: string - build_type: string - opentrons_api_version: string - opentrons_api_sha: string - opentrons_api_branch: string - update_server_version: string - update_server_sha: string - update_server_branch: string +export interface NoUpdate { + version: null + files: null + releaseNotes: null + downloadProgress: 0 } -export interface UserFileInfo { - // filepath of update file - systemFile: string - // parsed contents of VERSION.json - versionInfo: VersionInfo +export interface FoundUpdate { + version: string + files: null + releaseNotes: null + downloadProgress: number +} + +export interface ReadyUpdate { + version: string + files: ReleaseSetFilepaths + releaseNotes: string | null + downloadProgress: 100 +} + +export type ResolvedUpdate = NoUpdate | ReadyUpdate +export type UnresolvedUpdate = ResolvedUpdate | FoundUpdate +export type ProgressCallback = (status: UnresolvedUpdate) => void + +// Interface provided by the web and usb sourced updaters. Type variable is +// specified by the updater implementation. +export interface UpdateProvider { + // Call before disposing to make sure any temporary storage is removed + teardown: () => Promise + // Scan an implementation-defined location for updates + refreshUpdateCache: (progress: ProgressCallback) => Promise + // Get the details of a found update, if any. 
+ getUpdateDetails: () => UnresolvedUpdate + // Lock the update cache, which will prevent anything from accidentally overwriting stuff + // while it's being sent as an update + lockUpdateCache: () => void + // Reverse lockUpdateCache() + unlockUpdateCache: () => void + // get an identifier for logging + name: () => string + // get the current source + source: () => UpdateSourceDetails } diff --git a/app-shell-odd/src/system-update/update.ts b/app-shell-odd/src/system-update/update.ts deleted file mode 100644 index d1adb6e9c3d..00000000000 --- a/app-shell-odd/src/system-update/update.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { postFile } from '../http' -import type { - RobotModel, - ViewableRobot, -} from '@opentrons/app/src/redux/discovery/types' - -const OT2_FILENAME = 'ot2-system.zip' -const SYSTEM_FILENAME = 'system-update.zip' - -const getSystemFileName = (robotModel: RobotModel): string => { - if (robotModel === 'OT-2 Standard' || robotModel === null) { - return OT2_FILENAME - } - return SYSTEM_FILENAME -} - -export function uploadSystemFile( - robot: ViewableRobot, - urlPath: string, - file: string -): Promise { - const url = `http://${robot.ip}:${robot.port}${urlPath}` - - return postFile(url, getSystemFileName(robot.robotModel), file) -} diff --git a/app-shell-odd/src/system-update/utils.ts b/app-shell-odd/src/system-update/utils.ts new file mode 100644 index 00000000000..e0a334ba5d4 --- /dev/null +++ b/app-shell-odd/src/system-update/utils.ts @@ -0,0 +1,18 @@ +import { rm } from 'fs/promises' +import tempy from 'tempy' + +export const directoryWithCleanup = ( + task: (directory: string) => Promise +): Promise => { + const directory = tempy.directory() + return new Promise((resolve, reject) => + task(directory as string) + .then(result => { + resolve(result) + }) + .catch(err => { + reject(err) + }) + .finally(() => rm(directory as string, { recursive: true, force: true })) + ) +} diff --git a/app-shell-odd/src/system.ts b/app-shell-odd/src/system.ts new file mode 100644 index 00000000000..36c427a7e94 --- /dev/null +++ b/app-shell-odd/src/system.ts @@ -0,0 +1,22 @@ +import { UPDATE_BRIGHTNESS } from './constants' +import { createLogger } from './log' +import systemd from './systemd' + +import type { Action } from './types' + +const log = createLogger('system') + +export function registerUpdateBrightness(): (action: Action) => void { + return function handleAction(action: Action) { + switch (action.type) { + case UPDATE_BRIGHTNESS: + console.log('update the brightness') + systemd + .updateBrightness(action.payload.message) + .catch(err => + log.debug('Something wrong when updating the brightness', err) + ) + break + } + } +} diff --git a/app-shell-odd/src/types.ts b/app-shell-odd/src/types.ts index 2899171a08b..5d8f8a9502a 100644 --- a/app-shell-odd/src/types.ts +++ b/app-shell-odd/src/types.ts @@ -112,11 +112,13 @@ export type CLEAR_CACHE_TYPE = 'discovery:CLEAR_CACHE' export interface ConfigInitializedAction { type: CONFIG_INITIALIZED_TYPE payload: { config: Config } + meta: { shell: true } } export interface ConfigValueUpdatedAction { type: CONFIG_VALUE_UPDATED_TYPE payload: { path: string; value: any } + meta: { shell: true } } export interface StartDiscoveryAction { diff --git a/app-shell-odd/src/update.ts b/app-shell-odd/src/update.ts deleted file mode 100644 index d1ea2f154b3..00000000000 --- a/app-shell-odd/src/update.ts +++ /dev/null @@ -1,113 +0,0 @@ -import semver from 'semver' -import { UI_INITIALIZED, UPDATE_BRIGHTNESS } from './constants' -import { createLogger } from 
'./log' -import { getConfig } from './config' -import { - downloadAndCacheReleaseManifest, - getCachedReleaseManifest, - getReleaseSet, -} from './system-update/release-manifest' -import systemd from './systemd' - -import type { Action, Dispatch } from './types' -import type { ReleaseSetUrls } from './system-update/types' - -const log = createLogger('update') - -const OPENTRONS_PROJECT: string = _OPENTRONS_PROJECT_ - -export const FLEX_MANIFEST_URL = - OPENTRONS_PROJECT && OPENTRONS_PROJECT.includes('robot-stack') - ? 'https://builds.opentrons.com/ot3-oe/releases.json' - : 'https://ot3-development.builds.opentrons.com/ot3-oe/releases.json' - -const PKG_VERSION = _PKG_VERSION_ -let LATEST_OT_SYSTEM_VERSION = PKG_VERSION - -const channelFinder = (version: string, channel: string): boolean => { - // return the latest alpha/beta if a user subscribes to alpha/beta updates - if (['alpha', 'beta'].includes(channel)) { - return version.includes(channel) - } else { - // otherwise get the latest stable version - return !version.includes('alpha') && !version.includes('beta') - } -} - -export const getLatestSystemUpdateUrls = (): Promise => { - return getCachedReleaseManifest() - .then(manifest => getReleaseSet(manifest, getLatestVersion())) - .catch((error: Error) => { - log.warn('Error retrieving release manifest', { - version: getLatestVersion(), - error, - }) - return Promise.reject(error) - }) -} - -export const updateLatestVersion = (): Promise => { - const channel = getConfig('update').channel - - return downloadAndCacheReleaseManifest(FLEX_MANIFEST_URL) - .then(response => { - const latestAvailableVersion = Object.keys(response.production) - .sort((a, b) => { - if (semver.lt(a, b)) { - return 1 - } - return -1 - }) - .find(verson => channelFinder(verson, channel)) - const changed = LATEST_OT_SYSTEM_VERSION !== latestAvailableVersion - LATEST_OT_SYSTEM_VERSION = latestAvailableVersion ?? 
PKG_VERSION - if (changed) { - log.info( - `Update: latest version available from ${FLEX_MANIFEST_URL} is ${latestAvailableVersion}` - ) - } - return LATEST_OT_SYSTEM_VERSION - }) - .catch((e: Error) => { - log.warn( - `Update: error fetching latest system version from ${FLEX_MANIFEST_URL}: ${e.message}, keeping latest version at ${LATEST_OT_SYSTEM_VERSION}` - ) - return LATEST_OT_SYSTEM_VERSION - }) -} - -export const getLatestVersion = (): string => { - return LATEST_OT_SYSTEM_VERSION -} - -export const getCurrentVersion = (): string => PKG_VERSION - -export const isUpdateAvailable = (): boolean => - getLatestVersion() !== getCurrentVersion() - -export function registerUpdate( - dispatch: Dispatch -): (action: Action) => unknown { - return function handleAction(action: Action) { - switch (action.type) { - case UI_INITIALIZED: - case 'shell:CHECK_UPDATE': - return updateLatestVersion() - } - } -} - -export function registerUpdateBrightness(): (action: Action) => unknown { - return function handleAction(action: Action) { - switch (action.type) { - case UPDATE_BRIGHTNESS: - console.log('update the brightness') - systemd - .updateBrightness(action.payload.message) - .catch(err => - log.debug('Something wrong when updating the brightness', err) - ) - break - } - } -} diff --git a/app-shell-odd/src/usb.ts b/app-shell-odd/src/usb.ts index 44252c6a339..1c5e6bd14a7 100644 --- a/app-shell-odd/src/usb.ts +++ b/app-shell-odd/src/usb.ts @@ -2,6 +2,7 @@ import * as fs from 'fs' import * as fsPromises from 'fs/promises' import { join } from 'path' import { flatten } from 'lodash' +import { createLogger } from './log' import { robotMassStorageDeviceAdded, robotMassStorageDeviceEnumerated, @@ -16,7 +17,12 @@ import type { Dispatch, Action } from './types' const FLEX_USB_MOUNT_DIR = '/media/' const FLEX_USB_DEVICE_DIR = '/dev/' -const FLEX_USB_MOUNT_FILTER = /sd[a-z]+[0-9]+$/ +// filter matches sda0, sdc9, sdb +const FLEX_USB_DEVICE_FILTER = /sd[a-z]+[0-9]*$/ +// filter matches sda0, sdc9, sdb, VOLUME-sdc10 +const FLEX_USB_MOUNT_FILTER = /([^/]+-)?(sd[a-z]+[0-9]*)$/ + +const log = createLogger('mass-storage') // These are for backoff algorithm // apply the delay from 1 sec 64 sec @@ -48,11 +54,15 @@ const isWeirdDirectoryAndShouldSkip = (dirName: string): boolean => .map(keyword => dirName.includes(keyword)) .reduce((prev, current) => prev || current, false) -const enumerateMassStorage = (path: string): Promise => { +const doEnumerateMassStorage = ( + path: string, + depth: number +): Promise => { + log.info(`Enumerating mass storage path ${path}`) return callWithRetry(() => fsPromises.readdir(path).then(entries => { - if (entries.length === 0) { - throw new Error('No entries found, retrying...') + if (entries.length === 0 && depth === 0) { + throw new Error('No entries found for top level, retrying...') } return entries }) @@ -62,29 +72,44 @@ const enumerateMassStorage = (path: string): Promise => { Promise.all( entries.map(entry => entry.isDirectory() && !isWeirdDirectoryAndShouldSkip(entry.name) - ? enumerateMassStorage(join(path, entry.name)) + ? 
doEnumerateMassStorage(join(path, entry.name), depth + 1) : new Promise(resolve => { resolve([join(path, entry.name)]) }) ) ) ) - .catch(error => { - console.error(`Error enumerating mass storage: ${error}`) + .catch((error: Error) => { + log.error( + `Error enumerating mass storage path ${path}: ${error.name}: ${error.message}` + ) return [] }) .then(flatten) - .then(result => { - return result - }) + .then(result => result) +} + +const enumerateMassStorage = (path: string): Promise => { + log.info(`Beginning scan of mass storage device at ${path}`) + return doEnumerateMassStorage(path, 0).then(results => { + log.info(`Found ${results.length} files in ${path}`) + return results + }) } + export function watchForMassStorage(dispatch: Dispatch): () => void { - console.log('watching for mass storage') + log.info('watching for mass storage') let prevDirs: string[] = [] const handleNewlyPresent = (path: string): Promise => { dispatch(robotMassStorageDeviceAdded(path)) return enumerateMassStorage(path) .then(contents => { + log.debug( + `mass storage device at ${path} enumerated: ${JSON.stringify( + contents + )}` + ) + log.info(`Enumerated ${path} with ${contents.length} results`) dispatch(robotMassStorageDeviceEnumerated(path, contents)) }) .then(() => path) @@ -101,6 +126,9 @@ export function watchForMassStorage(dispatch: Dispatch): () => void { const newlyAbsent = prevDirs.filter( entry => !sortedEntries.includes(entry) ) + log.info( + `rescan: newly present: ${newlyPresent} newly absent: ${newlyAbsent}` + ) return Promise.all([ ...newlyAbsent.map(entry => { if (entry.match(FLEX_USB_MOUNT_FILTER)) { @@ -119,6 +147,7 @@ export function watchForMassStorage(dispatch: Dispatch): () => void { ]) }) .then(present => { + log.info(`now present: ${present}`) prevDirs = present.filter((entry): entry is string => entry !== null) }) @@ -133,6 +162,9 @@ export function watchForMassStorage(dispatch: Dispatch): () => void { return } if (!fileName.match(FLEX_USB_MOUNT_FILTER)) { + log.debug( + `mediaWatcher: filename ${fileName} does not match ${FLEX_USB_MOUNT_FILTER}` + ) return } const fullPath = join(FLEX_USB_MOUNT_DIR, fileName) @@ -140,25 +172,36 @@ export function watchForMassStorage(dispatch: Dispatch): () => void { .stat(fullPath) .then(info => { if (!info.isDirectory) { + log.debug(`mediaWatcher: ${fullPath} is not a directory`) return } if (prevDirs.includes(fullPath)) { + log.debug(`mediaWatcher: ${fullPath} is known`) return } - console.log(`New mass storage device ${fileName} detected`) + log.info(`New mass storage device ${fileName} detected`) prevDirs.push(fullPath) return handleNewlyPresent(fullPath) }) - .catch(() => { + .catch(err => { if (prevDirs.includes(fullPath)) { - console.log(`Mass storage device at ${fileName} removed`) + log.info( + `Mass storage device at ${fileName} removed because its mount point disappeared`, + err + ) prevDirs = prevDirs.filter(entry => entry !== fullPath) dispatch(robotMassStorageDeviceRemoved(fullPath)) + } else { + log.debug( + `Mass storage device candidate mountpoint at ${fileName} disappeared`, + err + ) } }) } ) } catch { + log.error(`Failed to start watcher for ${FLEX_USB_MOUNT_DIR}`) return null } } @@ -170,21 +213,42 @@ export function watchForMassStorage(dispatch: Dispatch): () => void { { persistent: true }, (event, fileName) => { if (!!!fileName) return - if (!fileName.match(FLEX_USB_MOUNT_FILTER)) return - const fullPath = join(FLEX_USB_DEVICE_DIR, fileName) - const mountPath = join(FLEX_USB_MOUNT_DIR, fileName) - 
fsPromises.stat(fullPath).catch(() => { - if (prevDirs.includes(mountPath)) { - console.log(`Mass storage device at ${fileName} removed`) - prevDirs = prevDirs.filter(entry => entry !== mountPath) - dispatch( - robotMassStorageDeviceRemoved(join(FLEX_USB_MOUNT_DIR, fileName)) + if (!fileName.match(FLEX_USB_DEVICE_FILTER)) return + if (event !== 'rename') { + log.debug( + `devWatcher: ignoring ${event} event for ${fileName} (not rename)` + ) + return + } + log.debug(`devWatcher: ${event} event for ${fileName}`) + fsPromises + .readdir(FLEX_USB_DEVICE_DIR) + .then(contents => { + if (contents.includes(fileName)) { + log.debug( + `devWatcher: ${fileName} found in /dev, this is an attach` + ) + // this is an attach + return + } + const prevDir = prevDirs.filter(dir => dir.includes(fileName)).at(0) + log.debug( + `devWatcher: ${fileName} not in /dev, this is a remove, previously mounted at ${prevDir}` ) - // we don't care if this fails because it's racing the system removing - // the mount dir in the common case - fsPromises.unlink(mountPath).catch(() => {}) - } - }) + if (prevDir != null) { + log.info(`Mass storage device at ${fileName} removed`) + prevDirs = prevDirs.filter(entry => entry !== prevDir) + dispatch(robotMassStorageDeviceRemoved(prevDir)) + // we don't care if this fails because it's racing the system removing + // the mount dir in the common case + fsPromises.unlink(prevDir).catch(() => {}) + } + }) + .catch(err => { + log.info( + `Failed to handle mass storage device ${fileName}: ${err.name}: ${err.message}` + ) + }) } ) diff --git a/app-shell/src/config/actions.ts b/app-shell/src/config/actions.ts index eabc9b47a16..5d96e6c1171 100644 --- a/app-shell/src/config/actions.ts +++ b/app-shell/src/config/actions.ts @@ -111,6 +111,7 @@ import type { export const configInitialized = (config: Config): ConfigInitializedAction => ({ type: CONFIG_INITIALIZED, payload: { config }, + meta: { shell: true }, }) // config value has been updated @@ -120,6 +121,7 @@ export const configValueUpdated = ( ): ConfigValueUpdatedAction => ({ type: VALUE_UPDATED, payload: { path, value }, + meta: { shell: true }, }) export const customLabwareList = ( diff --git a/app-shell/src/main.ts b/app-shell/src/main.ts index ef422a455cc..0f4ab41733b 100644 --- a/app-shell/src/main.ts +++ b/app-shell/src/main.ts @@ -18,7 +18,6 @@ import { registerProtocolStorage } from './protocol-storage' import { getConfig, getStore, getOverrides, registerConfig } from './config' import { registerUsb } from './usb' import { registerNotify, closeAllNotifyConnections } from './notifications' - import type { BrowserWindow } from 'electron' import type { Action, Dispatch, Logger } from './types' import type { LogEntry } from 'winston' diff --git a/app-shell/src/types.ts b/app-shell/src/types.ts index 8a1bea51a20..f608b4512af 100644 --- a/app-shell/src/types.ts +++ b/app-shell/src/types.ts @@ -96,9 +96,11 @@ export type CLEAR_CACHE_TYPE = 'discovery:CLEAR_CACHE' export interface ConfigInitializedAction { type: CONFIG_INITIALIZED_TYPE payload: { config: Config } + meta: { shell: true } } export interface ConfigValueUpdatedAction { type: CONFIG_VALUE_UPDATED_TYPE payload: { path: string; value: any } + meta: { shell: true } } diff --git a/app/src/redux/config/actions.ts b/app/src/redux/config/actions.ts index e0a6906b17f..915fce0a8f0 100644 --- a/app/src/redux/config/actions.ts +++ b/app/src/redux/config/actions.ts @@ -55,6 +55,7 @@ export const configInitialized = ( ): Types.ConfigInitializedAction => ({ type: Constants.INITIALIZED, 
payload: { config }, + meta: { shell: true }, }) // config value has been updated @@ -64,6 +65,7 @@ export const configValueUpdated = ( ): Types.ConfigValueUpdatedAction => ({ type: Constants.VALUE_UPDATED, payload: { path, value }, + meta: { shell: true }, }) export function toggleDevtools(): Types.ToggleConfigValueAction { diff --git a/app/src/redux/config/types.ts b/app/src/redux/config/types.ts index b408a2204e2..5d6b4b83ac9 100644 --- a/app/src/redux/config/types.ts +++ b/app/src/redux/config/types.ts @@ -16,11 +16,13 @@ export type ConfigState = Config | null export interface ConfigInitializedAction { type: typeof INITIALIZED payload: { config: Config } + meta: { shell: true } } export interface ConfigValueUpdatedAction { type: typeof VALUE_UPDATED payload: { path: string; value: any } + meta: { shell: true } } export interface UpdateConfigValueAction { diff --git a/app/src/redux/shell/update.ts b/app/src/redux/shell/update.ts index 7c9e3be1f58..aa5fb601840 100644 --- a/app/src/redux/shell/update.ts +++ b/app/src/redux/shell/update.ts @@ -3,11 +3,7 @@ import { createSelector } from 'reselect' import type { State } from '../types' -import type { - ShellUpdateAction, - ShellUpdateState, - RobotMassStorageDeviceEnumerated, -} from './types' +import type { ShellUpdateAction, ShellUpdateState } from './types' // command sent to app-shell via meta.shell === true export function checkShellUpdate(): ShellUpdateAction { @@ -37,16 +33,3 @@ export const getAvailableShellUpdate: ( ) => string | null = createSelector(getShellUpdateState, state => state.available && state.info ? state.info.version : null ) - -export function checkMassStorage( - state: State -): RobotMassStorageDeviceEnumerated { - return { - type: 'shell:ROBOT_MASS_STORAGE_DEVICE_ENUMERATED', - payload: { - rootPath: '', - filePaths: state.shell.filePaths, - }, - meta: { shell: true }, - } -} diff --git a/update-server/oe_upload.py b/update-server/oe_upload.py index 43d8bf47525..9d70dcaf430 100644 --- a/update-server/oe_upload.py +++ b/update-server/oe_upload.py @@ -26,7 +26,7 @@ async def do_update(update_file: str, host: str, timeout = aiohttp.ClientTimeout(total=7200) async with aiohttp.ClientSession(timeout=timeout) as session: root = host + '/server/update' - filename = "ot3-system.zip" + filename = "system-update.zip" print(f"Starting update of {update_file.name} to {host}") begin_resp = await session.post(root + '/begin') if begin_resp.status == 409: From ce46b246cb8c6582dfa6e5c0a21fde004e716259 Mon Sep 17 00:00:00 2001 From: koji Date: Mon, 21 Oct 2024 12:54:25 -0400 Subject: [PATCH 7/9] fix(protocol-designer): fix TC module rendering for OT-2 (#16536) * fix(protocol-designer): fix TC module rendering for OT-2 --- .../ProtocolOverview/DeckThumbnailDetails.tsx | 22 ++++++++++--------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/protocol-designer/src/pages/ProtocolOverview/DeckThumbnailDetails.tsx b/protocol-designer/src/pages/ProtocolOverview/DeckThumbnailDetails.tsx index dda66237feb..36caf29c4ad 100644 --- a/protocol-designer/src/pages/ProtocolOverview/DeckThumbnailDetails.tsx +++ b/protocol-designer/src/pages/ProtocolOverview/DeckThumbnailDetails.tsx @@ -10,6 +10,7 @@ import { inferModuleOrientationFromXCoordinate, isAddressableAreaStandardSlot, THERMOCYCLER_MODULE_TYPE, + SPAN7_8_10_11_SLOT, } from '@opentrons/shared-data' import { LabwareOnDeck } from '../../components/DeckSetup/LabwareOnDeck' import { getStagingAreaAddressableAreas } from '../../utils' @@ -65,22 +66,23 @@ export const 
DeckThumbnailDetails = ( return ( <> {/* all modules */} - {allModules.map(moduleOnDeck => { - const slotId = moduleOnDeck.slot + {allModules.map(({ id, slot, model, type, moduleState }) => { + const slotId = + slot === SPAN7_8_10_11_SLOT && type === THERMOCYCLER_MODULE_TYPE + ? '7' + : slot const slotPosition = getPositionFromSlotId(slotId, deckDef) if (slotPosition == null) { - console.warn(`no slot ${slotId} for module ${moduleOnDeck.id}`) + console.warn(`no slot ${slotId} for module ${id}`) return null } - const moduleDef = getModuleDef2(moduleOnDeck.model) - const labwareLoadedOnModule = allLabware.find( - lw => lw.slot === moduleOnDeck.id - ) + const moduleDef = getModuleDef2(model) + const labwareLoadedOnModule = allLabware.find(lw => lw.slot === id) return ( - + Date: Mon, 21 Oct 2024 14:08:51 -0400 Subject: [PATCH 8/9] feat(opentrons-ai-client): inject mixpanel tokens into build (#16546) --- ...rons-ai-client-staging-continuous-deploy.yaml | 3 +++ ...deploy.yaml => opentrons-ai-client-test.yaml} | 16 ++++++---------- .../opentrons-ai-production-deploy.yaml | 2 ++ 3 files changed, 11 insertions(+), 10 deletions(-) rename .github/workflows/{opentrons-ai-client-test-build-deploy.yaml => opentrons-ai-client-test.yaml} (90%) diff --git a/.github/workflows/opentrons-ai-client-staging-continuous-deploy.yaml b/.github/workflows/opentrons-ai-client-staging-continuous-deploy.yaml index af767b36adc..7a89bfa02dd 100644 --- a/.github/workflows/opentrons-ai-client-staging-continuous-deploy.yaml +++ b/.github/workflows/opentrons-ai-client-staging-continuous-deploy.yaml @@ -52,6 +52,9 @@ jobs: yarn config set cache-folder ${{ github.workspace }}/.yarn-cache make setup-js - name: 'build' + env: + # inject dev id since this is for staging + OT_AI_CLIENT_MIXPANEL_ID: ${{ secrets.OT_AI_CLIENT_MIXPANEL_DEV_ID }} run: | make -C opentrons-ai-client build-staging - name: Configure AWS Credentials diff --git a/.github/workflows/opentrons-ai-client-test-build-deploy.yaml b/.github/workflows/opentrons-ai-client-test.yaml similarity index 90% rename from .github/workflows/opentrons-ai-client-test-build-deploy.yaml rename to .github/workflows/opentrons-ai-client-test.yaml index 2f569d9bf78..2c5cc6cfc64 100644 --- a/.github/workflows/opentrons-ai-client-test-build-deploy.yaml +++ b/.github/workflows/opentrons-ai-client-test.yaml @@ -9,12 +9,9 @@ on: paths: - 'Makefile' - 'opentrons-ai-client/**/*' - - 'components/**/*' - - '*.js' - - '*.json' - - 'yarn.lock' - - '.github/workflows/app-test-build-deploy.yaml' - - '.github/workflows/utils.js' + - 'components/**' + - 'shared-data/**' + - '.github/workflows/opentrons-ai-client-test.yml' branches: - '**' tags: @@ -24,10 +21,9 @@ on: paths: - 'Makefile' - 'opentrons-ai-client/**/*' - - 'components/**/*' - - '*.js' - - '*.json' - - 'yarn.lock' + - 'components/**' + - 'shared-data/**' + - '.github/workflows/opentrons-ai-client-test.yml' workflow_dispatch: concurrency: diff --git a/.github/workflows/opentrons-ai-production-deploy.yaml b/.github/workflows/opentrons-ai-production-deploy.yaml index 825c3561f25..2327b48ecad 100644 --- a/.github/workflows/opentrons-ai-production-deploy.yaml +++ b/.github/workflows/opentrons-ai-production-deploy.yaml @@ -52,6 +52,8 @@ jobs: yarn config set cache-folder ${{ github.workspace }}/.yarn-cache make setup-js - name: 'build' + env: + OT_AI_CLIENT_MIXPANEL_ID: ${{ secrets.OT_AI_CLIENT_MIXPANEL_ID }} run: | make -C opentrons-ai-client build-production - name: Configure AWS Credentials From 1b0a5a52526b4f8ed1d7a69dac0c413b1790a7ff Mon 
Sep 17 00:00:00 2001 From: koji Date: Mon, 21 Oct 2024 15:29:30 -0400 Subject: [PATCH 9/9] feat(protocol-designer): remove multi modules feature flag (#16553) * feat(protocol-designer): remove multi modules feature flag --- .../src/assets/localization/en/feature_flags.json | 4 ---- protocol-designer/src/components/EditModules.tsx | 10 +++++----- .../src/components/__tests__/EditModules.test.tsx | 2 -- .../CreateFileWizard/ModulesAndOtherTile.tsx | 11 ++++++----- .../__tests__/ModulesAndOtherTile.test.tsx | 6 +----- protocol-designer/src/feature-flags/reducers.ts | 1 - protocol-designer/src/feature-flags/selectors.ts | 4 ---- protocol-designer/src/feature-flags/types.ts | 2 -- .../CreateNewProtocolWizard/SelectModules.tsx | 14 ++++++-------- .../__tests__/SelectModules.test.tsx | 6 +----- .../pages/Designer/DeckSetup/DeckSetupTools.tsx | 12 +++--------- .../DeckSetup/__tests__/DeckSetupTools.test.tsx | 6 +----- 12 files changed, 23 insertions(+), 55 deletions(-) diff --git a/protocol-designer/src/assets/localization/en/feature_flags.json b/protocol-designer/src/assets/localization/en/feature_flags.json index f83f09e345c..a70e23931c9 100644 --- a/protocol-designer/src/assets/localization/en/feature_flags.json +++ b/protocol-designer/src/assets/localization/en/feature_flags.json @@ -20,10 +20,6 @@ "title": "Enable redesign", "description": "A whole new world." }, - "OT_PD_ENABLE_MOAM": { - "title": "Enable multiple modules", - "description": "Enable multiple heater-shakers and magnetic blocks for Flex only." - }, "OT_PD_ENABLE_COMMENT": { "title": "Enable comment step", "description": "You can add comments anywhere between timeline steps." diff --git a/protocol-designer/src/components/EditModules.tsx b/protocol-designer/src/components/EditModules.tsx index 6ed7c8a6054..31b392c29a5 100644 --- a/protocol-designer/src/components/EditModules.tsx +++ b/protocol-designer/src/components/EditModules.tsx @@ -12,7 +12,6 @@ import { } from '../step-forms' import { moveDeckItem } from '../labware-ingred/actions/actions' import { getRobotType } from '../file-data/selectors' -import { getEnableMoam } from '../feature-flags/selectors' import { EditMultipleModulesModal } from './modals/EditModulesModal/EditMultipleModulesModal' import { useBlockingHint } from './Hints/useBlockingHint' import { MagneticModuleWarningModalContent } from './modals/EditModulesModal/MagneticModuleWarningModalContent' @@ -34,14 +33,15 @@ export interface ModelModuleInfo { export const EditModules = (props: EditModulesProps): JSX.Element => { const { onCloseClick, moduleToEdit } = props - const enableMoam = useSelector(getEnableMoam) const { moduleId, moduleType } = moduleToEdit const _initialDeckSetup = useSelector(stepFormSelectors.getInitialDeckSetup) const robotType = useSelector(getRobotType) - const MOAM_MODULE_TYPES: ModuleType[] = enableMoam - ? 
[TEMPERATURE_MODULE_TYPE, HEATERSHAKER_MODULE_TYPE, MAGNETIC_BLOCK_TYPE] - : [TEMPERATURE_MODULE_TYPE] + const MOAM_MODULE_TYPES: ModuleType[] = [ + TEMPERATURE_MODULE_TYPE, + HEATERSHAKER_MODULE_TYPE, + MAGNETIC_BLOCK_TYPE, + ] const showMultipleModuleModal = robotType === FLEX_ROBOT_TYPE && MOAM_MODULE_TYPES.includes(moduleType) diff --git a/protocol-designer/src/components/__tests__/EditModules.test.tsx b/protocol-designer/src/components/__tests__/EditModules.test.tsx index 6c980fecd25..06a36b19f06 100644 --- a/protocol-designer/src/components/__tests__/EditModules.test.tsx +++ b/protocol-designer/src/components/__tests__/EditModules.test.tsx @@ -9,7 +9,6 @@ import { import { i18n } from '../../assets/localization' import { getInitialDeckSetup } from '../../step-forms/selectors' import { getDismissedHints } from '../../tutorial/selectors' -import { getEnableMoam } from '../../feature-flags/selectors' import { renderWithProviders } from '../../__testing-utils__' import { getRobotType } from '../../file-data/selectors' import { EditMultipleModulesModal } from '../modals/EditModulesModal/EditMultipleModulesModal' @@ -58,7 +57,6 @@ describe('EditModules', () => { labware: {}, additionalEquipmentOnDeck: {}, }) - vi.mocked(getEnableMoam).mockReturnValue(true) vi.mocked(EditModulesModal).mockReturnValue(

<div>mock EditModulesModal</div>
) diff --git a/protocol-designer/src/components/modals/CreateFileWizard/ModulesAndOtherTile.tsx b/protocol-designer/src/components/modals/CreateFileWizard/ModulesAndOtherTile.tsx index 2f63d6667bb..5409217dfd3 100644 --- a/protocol-designer/src/components/modals/CreateFileWizard/ModulesAndOtherTile.tsx +++ b/protocol-designer/src/components/modals/CreateFileWizard/ModulesAndOtherTile.tsx @@ -37,7 +37,6 @@ import gripperImage from '../../../assets/images/flex_gripper.png' import wasteChuteImage from '../../../assets/images/waste_chute.png' import trashBinImage from '../../../assets/images/flex_trash_bin.png' import { uuid } from '../../../utils' -import { getEnableMoam } from '../../../feature-flags/selectors' import { selectors as featureFlagSelectors } from '../../../feature-flags' import { CrashInfoBox, ModuleDiagram } from '../../modules' import { ModuleFields } from '../FilePipettesModal/ModuleFields' @@ -202,15 +201,17 @@ export function ModulesAndOtherTile(props: WizardTileProps): JSX.Element { function FlexModuleFields(props: WizardTileProps): JSX.Element { const { watch, setValue } = props - const enableMoam = useSelector(getEnableMoam) const modules = watch('modules') const additionalEquipment = watch('additionalEquipment') const enableAbsorbanceReader = useSelector( featureFlagSelectors.getEnableAbsorbanceReader ) - const MOAM_MODULE_TYPES: ModuleType[] = enableMoam - ? [TEMPERATURE_MODULE_TYPE, HEATERSHAKER_MODULE_TYPE, MAGNETIC_BLOCK_TYPE] - : [TEMPERATURE_MODULE_TYPE] + const MOAM_MODULE_TYPES: ModuleType[] = [ + TEMPERATURE_MODULE_TYPE, + HEATERSHAKER_MODULE_TYPE, + MAGNETIC_BLOCK_TYPE, + ] + const moduleTypesOnDeck = modules != null ? Object.values(modules).map(module => module.type) : [] diff --git a/protocol-designer/src/components/modals/CreateFileWizard/__tests__/ModulesAndOtherTile.test.tsx b/protocol-designer/src/components/modals/CreateFileWizard/__tests__/ModulesAndOtherTile.test.tsx index fdc4e9b86e5..0b4e99952a6 100644 --- a/protocol-designer/src/components/modals/CreateFileWizard/__tests__/ModulesAndOtherTile.test.tsx +++ b/protocol-designer/src/components/modals/CreateFileWizard/__tests__/ModulesAndOtherTile.test.tsx @@ -5,10 +5,7 @@ import { fireEvent, screen, cleanup } from '@testing-library/react' import { FLEX_ROBOT_TYPE, OT2_ROBOT_TYPE } from '@opentrons/shared-data' import { renderWithProviders } from '../../../../__testing-utils__' import { i18n } from '../../../../assets/localization' -import { - getDisableModuleRestrictions, - getEnableMoam, -} from '../../../../feature-flags/selectors' +import { getDisableModuleRestrictions } from '../../../../feature-flags/selectors' import { CrashInfoBox } from '../../../modules' import { ModuleFields } from '../../FilePipettesModal/ModuleFields' import { ModulesAndOtherTile } from '../ModulesAndOtherTile' @@ -61,7 +58,6 @@ describe('ModulesAndOtherTile', () => { ...props, ...mockWizardTileProps, } as WizardTileProps - vi.mocked(getEnableMoam).mockReturnValue(true) vi.mocked(CrashInfoBox).mockReturnValue(
<div>mock CrashInfoBox</div>
) vi.mocked(EquipmentOption).mockReturnValue(
<div>mock EquipmentOption</div>
) vi.mocked(getDisableModuleRestrictions).mockReturnValue(false) diff --git a/protocol-designer/src/feature-flags/reducers.ts b/protocol-designer/src/feature-flags/reducers.ts index bcb586acf9a..1f9999be001 100644 --- a/protocol-designer/src/feature-flags/reducers.ts +++ b/protocol-designer/src/feature-flags/reducers.ts @@ -26,7 +26,6 @@ const initialFlags: Flags = { OT_PD_ENABLE_ABSORBANCE_READER: process.env.OT_PD_ENABLE_ABSORBANCE_READER === '1' || false, OT_PD_ENABLE_REDESIGN: process.env.OT_PD_ENABLE_REDESIGN === '1' || false, - OT_PD_ENABLE_MOAM: process.env.OT_PD_ENABLE_MOAM === '1' || false, OT_PD_ENABLE_COMMENT: process.env.OT_PD_ENABLE_COMMENT === '1' || false, OT_PD_ENABLE_RETURN_TIP: process.env.OT_PD_ENABLE_RETURN_TIP === '1' || false, OT_PD_ENABLE_HOT_KEYS_DISPLAY: diff --git a/protocol-designer/src/feature-flags/selectors.ts b/protocol-designer/src/feature-flags/selectors.ts index a4c3baf05be..896eb5db254 100644 --- a/protocol-designer/src/feature-flags/selectors.ts +++ b/protocol-designer/src/feature-flags/selectors.ts @@ -33,10 +33,6 @@ export const getEnableRedesign: Selector = createSelector( getFeatureFlagData, flags => flags.OT_PD_ENABLE_REDESIGN ?? false ) -export const getEnableMoam: Selector = createSelector( - getFeatureFlagData, - flags => flags.OT_PD_ENABLE_MOAM ?? false -) export const getEnableComment: Selector = createSelector( getFeatureFlagData, flags => flags.OT_PD_ENABLE_COMMENT ?? false diff --git a/protocol-designer/src/feature-flags/types.ts b/protocol-designer/src/feature-flags/types.ts index f37d77bc814..774d2c5fa5e 100644 --- a/protocol-designer/src/feature-flags/types.ts +++ b/protocol-designer/src/feature-flags/types.ts @@ -31,7 +31,6 @@ export type FlagTypes = | 'OT_PD_ALLOW_ALL_TIPRACKS' | 'OT_PD_ENABLE_ABSORBANCE_READER' | 'OT_PD_ENABLE_REDESIGN' - | 'OT_PD_ENABLE_MOAM' | 'OT_PD_ENABLE_COMMENT' | 'OT_PD_ENABLE_RETURN_TIP' | 'OT_PD_ENABLE_HOT_KEYS_DISPLAY' @@ -46,7 +45,6 @@ export const allFlags: FlagTypes[] = [ 'PRERELEASE_MODE', 'OT_PD_ENABLE_ABSORBANCE_READER', 'OT_PD_ENABLE_REDESIGN', - 'OT_PD_ENABLE_MOAM', 'OT_PD_ENABLE_COMMENT', 'OT_PD_ENABLE_RETURN_TIP', ] diff --git a/protocol-designer/src/pages/CreateNewProtocolWizard/SelectModules.tsx b/protocol-designer/src/pages/CreateNewProtocolWizard/SelectModules.tsx index 4cf2576c25e..6f46ae2be1a 100644 --- a/protocol-designer/src/pages/CreateNewProtocolWizard/SelectModules.tsx +++ b/protocol-designer/src/pages/CreateNewProtocolWizard/SelectModules.tsx @@ -25,10 +25,7 @@ import { TEMPERATURE_MODULE_TYPE, } from '@opentrons/shared-data' import { uuid } from '../../utils' -import { - getEnableAbsorbanceReader, - getEnableMoam, -} from '../../feature-flags/selectors' +import { getEnableAbsorbanceReader } from '../../feature-flags/selectors' import { useKitchen } from '../../organisms/Kitchen/hooks' import { ModuleDiagram } from '../../components/modules' import { WizardBody } from './WizardBody' @@ -56,7 +53,6 @@ export function SelectModules(props: WizardTileProps): JSX.Element | null { const fields = watch('fields') const modules = watch('modules') const additionalEquipment = watch('additionalEquipment') - const enableMoam = useSelector(getEnableMoam) const enableAbsorbanceReader = useSelector(getEnableAbsorbanceReader) const robotType = fields.robotType const supportedModules = @@ -83,9 +79,11 @@ export function SelectModules(props: WizardTileProps): JSX.Element | null { ) ) ) - const MOAM_MODULE_TYPES: ModuleType[] = enableMoam - ? 
[TEMPERATURE_MODULE_TYPE, HEATERSHAKER_MODULE_TYPE, MAGNETIC_BLOCK_TYPE] - : [TEMPERATURE_MODULE_TYPE] + const MOAM_MODULE_TYPES: ModuleType[] = [ + TEMPERATURE_MODULE_TYPE, + HEATERSHAKER_MODULE_TYPE, + MAGNETIC_BLOCK_TYPE, + ] const handleAddModule = (moduleModel: ModuleModel): void => { if (hasNoAvailableSlots) { diff --git a/protocol-designer/src/pages/CreateNewProtocolWizard/__tests__/SelectModules.test.tsx b/protocol-designer/src/pages/CreateNewProtocolWizard/__tests__/SelectModules.test.tsx index bddd3174b7f..a18f06bf508 100644 --- a/protocol-designer/src/pages/CreateNewProtocolWizard/__tests__/SelectModules.test.tsx +++ b/protocol-designer/src/pages/CreateNewProtocolWizard/__tests__/SelectModules.test.tsx @@ -4,10 +4,7 @@ import '@testing-library/jest-dom/vitest' import { FLEX_ROBOT_TYPE, OT2_ROBOT_TYPE } from '@opentrons/shared-data' import { fireEvent, screen } from '@testing-library/react' import { i18n } from '../../../assets/localization' -import { - getEnableAbsorbanceReader, - getEnableMoam, -} from '../../../feature-flags/selectors' +import { getEnableAbsorbanceReader } from '../../../feature-flags/selectors' import { renderWithProviders } from '../../../__testing-utils__' import { SelectModules } from '../SelectModules' import type { WizardFormState, WizardTileProps } from '../types' @@ -46,7 +43,6 @@ describe('SelectModules', () => { props = { ...mockWizardTileProps, } as WizardTileProps - vi.mocked(getEnableMoam).mockReturnValue(true) vi.mocked(getEnableAbsorbanceReader).mockReturnValue(true) }) diff --git a/protocol-designer/src/pages/Designer/DeckSetup/DeckSetupTools.tsx b/protocol-designer/src/pages/Designer/DeckSetup/DeckSetupTools.tsx index 0df813cf114..e062fa4784d 100644 --- a/protocol-designer/src/pages/Designer/DeckSetup/DeckSetupTools.tsx +++ b/protocol-designer/src/pages/Designer/DeckSetup/DeckSetupTools.tsx @@ -37,14 +37,11 @@ import { selectNestedLabware, selectZoomedIntoSlot, } from '../../../labware-ingred/actions' -import { - getEnableAbsorbanceReader, - getEnableMoam, -} from '../../../feature-flags/selectors' +import { getEnableAbsorbanceReader } from '../../../feature-flags/selectors' import { selectors } from '../../../labware-ingred/selectors' import { useKitchen } from '../../../organisms/Kitchen/hooks' import { createContainerAboveModule } from '../../../step-forms/actions/thunks' -import { FIXTURES, MOAM_MODELS, MOAM_MODELS_WITH_FF } from './constants' +import { FIXTURES, MOAM_MODELS } from './constants' import { getSlotInformation } from '../utils' import { getModuleModelsBySlot, getDeckErrors } from './utils' import { LabwareTools } from './LabwareTools' @@ -70,7 +67,6 @@ export function DeckSetupTools(props: DeckSetupToolsProps): JSX.Element | null { const robotType = useSelector(getRobotType) const dispatch = useDispatch>() const enableAbsorbanceReader = useSelector(getEnableAbsorbanceReader) - const enableMoam = useSelector(getEnableMoam) const deckSetup = useSelector(getDeckSetupForActiveItem) const { selectedLabwareDefUri, @@ -293,9 +289,7 @@ export function DeckSetupTools(props: DeckSetupToolsProps): JSX.Element | null { module.type === getModuleType(model) && module.slot !== slot ) - const moamModels = enableMoam - ? 
MOAM_MODELS - : MOAM_MODELS_WITH_FF + const moamModels = MOAM_MODELS const collisionError = getDeckErrors({ modules: deckSetupModules, diff --git a/protocol-designer/src/pages/Designer/DeckSetup/__tests__/DeckSetupTools.test.tsx b/protocol-designer/src/pages/Designer/DeckSetup/__tests__/DeckSetupTools.test.tsx index a3d63343389..cb85cf12693 100644 --- a/protocol-designer/src/pages/Designer/DeckSetup/__tests__/DeckSetupTools.test.tsx +++ b/protocol-designer/src/pages/Designer/DeckSetup/__tests__/DeckSetupTools.test.tsx @@ -12,10 +12,7 @@ import { renderWithProviders } from '../../../../__testing-utils__' import { deleteContainer } from '../../../../labware-ingred/actions' import { createModule, deleteModule } from '../../../../step-forms/actions' import { getRobotType } from '../../../../file-data/selectors' -import { - getEnableAbsorbanceReader, - getEnableMoam, -} from '../../../../feature-flags/selectors' +import { getEnableAbsorbanceReader } from '../../../../feature-flags/selectors' import { createDeckFixture, deleteDeckFixture, @@ -63,7 +60,6 @@ describe('DeckSetupTools', () => { vi.mocked(LabwareTools).mockReturnValue(
<div>mock labware tools</div>
) vi.mocked(getRobotType).mockReturnValue(FLEX_ROBOT_TYPE) vi.mocked(getEnableAbsorbanceReader).mockReturnValue(true) - vi.mocked(getEnableMoam).mockReturnValue(true) vi.mocked(getDeckSetupForActiveItem).mockReturnValue({ labware: {}, modules: {},