diff --git a/api-client/src/protocols/getProtocolAnalysisAsDocument.ts b/api-client/src/protocols/getProtocolAnalysisAsDocument.ts
new file mode 100644
index 00000000000..bdfc78aa2cd
--- /dev/null
+++ b/api-client/src/protocols/getProtocolAnalysisAsDocument.ts
@@ -0,0 +1,18 @@
+import { GET, request } from '../request'
+
+import type { ResponsePromise } from '../request'
+import type { HostConfig } from '../types'
+import type { CompletedProtocolAnalysis } from '@opentrons/shared-data'
+
+export function getProtocolAnalysisAsDocument(
+  config: HostConfig,
+  protocolId: string,
+  analysisId: string
+): ResponsePromise<CompletedProtocolAnalysis> {
+  return request<CompletedProtocolAnalysis>(
+    GET,
+    `/protocols/${protocolId}/analyses/${analysisId}/asDocument`,
+    null,
+    config
+  )
+}
diff --git a/api-client/src/protocols/index.ts b/api-client/src/protocols/index.ts
index 58b790eadab..6febd0795cf 100644
--- a/api-client/src/protocols/index.ts
+++ b/api-client/src/protocols/index.ts
@@ -1,5 +1,6 @@
 export { getProtocol } from './getProtocol'
 export { getProtocolAnalyses } from './getProtocolAnalyses'
+export { getProtocolAnalysisAsDocument } from './getProtocolAnalysisAsDocument'
 export { deleteProtocol } from './deleteProtocol'
 export { createProtocol } from './createProtocol'
 export { getProtocols } from './getProtocols'
diff --git a/app/src/assets/localization/en/device_details.json b/app/src/assets/localization/en/device_details.json
index 5c6dfe536a8..5c42282b6b9 100644
--- a/app/src/assets/localization/en/device_details.json
+++ b/app/src/assets/localization/en/device_details.json
@@ -118,6 +118,7 @@
   "run_again": "Run again",
   "run_duration": "Run duration",
   "serial_number": "Serial Number",
+  "robot_initializing": "Initializing...",
   "set_block_temp": "Set temperature",
   "set_block_temperature": "Set block temperature",
   "set_engage_height": "Set Engage Height",
diff --git a/app/src/atoms/buttons/__tests__/FloatingActionButton.test.tsx b/app/src/atoms/buttons/__tests__/FloatingActionButton.test.tsx
index 9975d9ba03b..0c14cec1f55 100644
--- a/app/src/atoms/buttons/__tests__/FloatingActionButton.test.tsx
+++ b/app/src/atoms/buttons/__tests__/FloatingActionButton.test.tsx
@@ -8,9 +8,12 @@ import {
 } from '@opentrons/components'
 
 import { FloatingActionButton } from '..'
+import { i18n } from '../../../i18n'
 
 const render = (props: React.ComponentProps<typeof FloatingActionButton>) => {
-  return renderWithProviders(<FloatingActionButton {...props} />)[0]
+  return renderWithProviders(<FloatingActionButton {...props} />, {
+    i18nInstance: i18n,
+  })[0]
 }
 
 describe('FloatingActionButton', () => {
diff --git a/app/src/organisms/Devices/hooks/__tests__/useProtocolAnalysisErrors.test.tsx b/app/src/organisms/Devices/hooks/__tests__/useProtocolAnalysisErrors.test.tsx
index 18f38231814..86f4d6c1180 100644
--- a/app/src/organisms/Devices/hooks/__tests__/useProtocolAnalysisErrors.test.tsx
+++ b/app/src/organisms/Devices/hooks/__tests__/useProtocolAnalysisErrors.test.tsx
@@ -3,7 +3,8 @@ import { UseQueryResult } from 'react-query'
 import { renderHook } from '@testing-library/react-hooks'
 
 import {
-  useProtocolAnalysesQuery,
+  useProtocolAnalysisAsDocumentQuery,
+  useProtocolQuery,
   useRunQuery,
 } from '@opentrons/react-api-client'
 
@@ -11,7 +12,7 @@ import { useProtocolAnalysisErrors } from '..'
import { RUN_ID_2 } from '../../../../organisms/RunTimeControl/__fixtures__' -import type { Run, ProtocolAnalyses } from '@opentrons/api-client' +import type { Run, Protocol } from '@opentrons/api-client' import type { CompletedProtocolAnalysis, PendingProtocolAnalysis, @@ -20,8 +21,12 @@ import type { jest.mock('@opentrons/react-api-client') const mockUseRunQuery = useRunQuery as jest.MockedFunction -const mockUseProtocolAnalysesQuery = useProtocolAnalysesQuery as jest.MockedFunction< - typeof useProtocolAnalysesQuery + +const mockUseProtocolQuery = useProtocolQuery as jest.MockedFunction< + typeof useProtocolQuery +> +const mockUseProtocolAnalysisAsDocumentQuery = useProtocolAnalysisAsDocumentQuery as jest.MockedFunction< + typeof useProtocolAnalysisAsDocumentQuery > describe('useProtocolAnalysisErrors hook', () => { @@ -29,11 +34,14 @@ describe('useProtocolAnalysisErrors hook', () => { when(mockUseRunQuery) .calledWith(null, { staleTime: Infinity }) .mockReturnValue({} as UseQueryResult) - when(mockUseProtocolAnalysesQuery) - .calledWith(null, { staleTime: Infinity }) + when(mockUseProtocolQuery) + .calledWith(null) + .mockReturnValue({} as UseQueryResult) + when(mockUseProtocolAnalysisAsDocumentQuery) + .calledWith(null, null, { enabled: false }) .mockReturnValue({ - data: { data: [] } as any, - } as UseQueryResult) + data: null, + } as UseQueryResult) }) afterEach(() => { @@ -63,12 +71,18 @@ describe('useProtocolAnalysisErrors hook', () => { .mockReturnValue({ data: { data: { protocolId: PROTOCOL_ID } } as any, } as UseQueryResult) - when(mockUseProtocolAnalysesQuery) - .calledWith(PROTOCOL_ID, { staleTime: Infinity }) + when(mockUseProtocolQuery) + .calledWith(PROTOCOL_ID) .mockReturnValue({ - data: { data: [PROTOCOL_ANALYSIS as any] }, - } as UseQueryResult) - + data: { + data: { analysisSummaries: [{ id: PROTOCOL_ANALYSIS.id }] }, + } as any, + } as UseQueryResult) + when(mockUseProtocolAnalysisAsDocumentQuery) + .calledWith(PROTOCOL_ID, PROTOCOL_ANALYSIS.id, { enabled: true }) + .mockReturnValue({ + data: PROTOCOL_ANALYSIS, + } as UseQueryResult) const { result } = renderHook(() => useProtocolAnalysisErrors(RUN_ID_2)) expect(result.current).toStrictEqual({ analysisErrors: null, @@ -86,12 +100,18 @@ describe('useProtocolAnalysisErrors hook', () => { .mockReturnValue({ data: { data: { protocolId: PROTOCOL_ID } } as any, } as UseQueryResult) - when(mockUseProtocolAnalysesQuery) - .calledWith(PROTOCOL_ID, { staleTime: Infinity }) + when(mockUseProtocolQuery) + .calledWith(PROTOCOL_ID) .mockReturnValue({ - data: { data: [PROTOCOL_ANALYSIS as any] }, - } as UseQueryResult) - + data: { + data: { analysisSummaries: [{ id: PROTOCOL_ANALYSIS.id }] }, + } as any, + } as UseQueryResult) + when(mockUseProtocolAnalysisAsDocumentQuery) + .calledWith(PROTOCOL_ID, PROTOCOL_ANALYSIS.id, { enabled: true }) + .mockReturnValue({ + data: PROTOCOL_ANALYSIS, + } as UseQueryResult) const { result } = renderHook(() => useProtocolAnalysisErrors(RUN_ID_2)) expect(result.current).toStrictEqual({ analysisErrors: null, @@ -110,12 +130,22 @@ describe('useProtocolAnalysisErrors hook', () => { .mockReturnValue({ data: { data: { protocolId: PROTOCOL_ID } } as any, } as UseQueryResult) - when(mockUseProtocolAnalysesQuery) - .calledWith(PROTOCOL_ID, { staleTime: Infinity }) + when(mockUseProtocolQuery) + .calledWith(PROTOCOL_ID) .mockReturnValue({ - data: { data: [PROTOCOL_ANALYSIS_WITH_ERRORS as any] }, - } as UseQueryResult) - + data: { + data: { + analysisSummaries: [{ id: PROTOCOL_ANALYSIS_WITH_ERRORS.id }], + }, 
+ } as any, + } as UseQueryResult) + when(mockUseProtocolAnalysisAsDocumentQuery) + .calledWith(PROTOCOL_ID, PROTOCOL_ANALYSIS_WITH_ERRORS.id, { + enabled: true, + }) + .mockReturnValue({ + data: PROTOCOL_ANALYSIS_WITH_ERRORS, + } as UseQueryResult) const { result } = renderHook(() => useProtocolAnalysisErrors(RUN_ID_2)) expect(result.current).toStrictEqual({ analysisErrors: [{ detail: 'fake error' }], diff --git a/app/src/organisms/Devices/hooks/__tests__/useProtocolDetailsForRun.test.tsx b/app/src/organisms/Devices/hooks/__tests__/useProtocolDetailsForRun.test.tsx index 0d3ed0db887..2c94afeedf1 100644 --- a/app/src/organisms/Devices/hooks/__tests__/useProtocolDetailsForRun.test.tsx +++ b/app/src/organisms/Devices/hooks/__tests__/useProtocolDetailsForRun.test.tsx @@ -3,7 +3,7 @@ import { UseQueryResult } from 'react-query' import { renderHook } from '@testing-library/react-hooks' import { - useProtocolAnalysesQuery, + useProtocolAnalysisAsDocumentQuery, useProtocolQuery, useRunQuery, } from '@opentrons/react-api-client' @@ -12,25 +12,32 @@ import { useProtocolDetailsForRun } from '..' import { RUN_ID_2 } from '../../../../organisms/RunTimeControl/__fixtures__' -import type { Protocol, Run, ProtocolAnalyses } from '@opentrons/api-client' +import type { Protocol, Run } from '@opentrons/api-client' +import { CompletedProtocolAnalysis } from '@opentrons/shared-data' jest.mock('@opentrons/react-api-client') const mockUseProtocolQuery = useProtocolQuery as jest.MockedFunction< typeof useProtocolQuery > -const mockUseProtocolAnalysesQuery = useProtocolAnalysesQuery as jest.MockedFunction< - typeof useProtocolAnalysesQuery +const mockUseProtocolAnalysisAsDocumentQuery = useProtocolAnalysisAsDocumentQuery as jest.MockedFunction< + typeof useProtocolAnalysisAsDocumentQuery > const mockUseRunQuery = useRunQuery as jest.MockedFunction +const PROTOCOL_ID = 'fake_protocol_id' +const PROTOCOL_ANALYSIS = { + id: 'fake analysis', + status: 'completed', + labware: [], +} as any const PROTOCOL_RESPONSE = { data: { protocolType: 'json', createdAt: 'now', - id: '1', + id: PROTOCOL_ID, metadata: { protocolName: 'fake protocol' }, - analysisSummaries: [{ id: 'fake analysis', status: 'completed' }], + analysisSummaries: [{ id: PROTOCOL_ANALYSIS.id, status: 'completed' }], key: 'fakeProtocolKey', }, } as Protocol @@ -43,11 +50,11 @@ describe('useProtocolDetailsForRun hook', () => { when(mockUseProtocolQuery) .calledWith(null, { staleTime: Infinity }) .mockReturnValue({} as UseQueryResult) - when(mockUseProtocolAnalysesQuery) - .calledWith(null, { staleTime: Infinity }, true) + when(mockUseProtocolAnalysisAsDocumentQuery) + .calledWith(null, null, { enabled: false, refetchInterval: 5000 }) .mockReturnValue({ - data: { data: [] } as any, - } as UseQueryResult) + data: null, + } as UseQueryResult) }) afterEach(() => { @@ -66,12 +73,6 @@ describe('useProtocolDetailsForRun hook', () => { }) it('returns the protocol file when given a run id', async () => { - const PROTOCOL_ID = 'fake_protocol_id' - const PROTOCOL_ANALYSIS = { - id: 'fake analysis', - status: 'completed', - labware: [], - } as any when(mockUseRunQuery) .calledWith(RUN_ID_2, { staleTime: Infinity }) .mockReturnValue({ @@ -80,11 +81,22 @@ describe('useProtocolDetailsForRun hook', () => { when(mockUseProtocolQuery) .calledWith(PROTOCOL_ID, { staleTime: Infinity }) .mockReturnValue({ data: PROTOCOL_RESPONSE } as UseQueryResult) - when(mockUseProtocolAnalysesQuery) - .calledWith(PROTOCOL_ID, { staleTime: Infinity }, expect.any(Boolean)) + 
when(mockUseProtocolAnalysisAsDocumentQuery) + .calledWith(PROTOCOL_ID, 'fake analysis', { + enabled: true, + refetchInterval: 5000, + }) .mockReturnValue({ - data: { data: [PROTOCOL_ANALYSIS as any] }, - } as UseQueryResult) + data: PROTOCOL_ANALYSIS, + } as UseQueryResult) + when(mockUseProtocolAnalysisAsDocumentQuery) + .calledWith(PROTOCOL_ID, 'fake analysis', { + enabled: false, + refetchInterval: 5000, + }) + .mockReturnValue({ + data: PROTOCOL_ANALYSIS, + } as UseQueryResult) const { result } = renderHook(() => useProtocolDetailsForRun(RUN_ID_2)) diff --git a/app/src/organisms/Devices/hooks/useProtocolAnalysisErrors.ts b/app/src/organisms/Devices/hooks/useProtocolAnalysisErrors.ts index 7e43fa65f85..8e50c6b153c 100644 --- a/app/src/organisms/Devices/hooks/useProtocolAnalysisErrors.ts +++ b/app/src/organisms/Devices/hooks/useProtocolAnalysisErrors.ts @@ -1,6 +1,7 @@ import last from 'lodash/last' import { - useProtocolAnalysesQuery, + useProtocolAnalysisAsDocumentQuery, + useProtocolQuery, useRunQuery, } from '@opentrons/react-api-client' @@ -15,16 +16,19 @@ export function useProtocolAnalysisErrors( ): ProtocolAnalysisErrors { const { data: runRecord } = useRunQuery(runId, { staleTime: Infinity }) const protocolId = runRecord?.data?.protocolId ?? null - const { data: protocolAnalyses } = useProtocolAnalysesQuery(protocolId, { - staleTime: Infinity, - }) + const { data: protocolData } = useProtocolQuery(protocolId) + const { + data: mostRecentAnalysis, + } = useProtocolAnalysisAsDocumentQuery( + protocolId, + last(protocolData?.data.analysisSummaries)?.id ?? null, + { enabled: protocolData != null } + ) if (protocolId === null || runRecord?.data?.current === false) { return { analysisErrors: null } } - const mostRecentAnalysis = last(protocolAnalyses?.data ?? []) ?? null - if (mostRecentAnalysis?.status !== 'completed') { return { analysisErrors: null } } diff --git a/app/src/organisms/Devices/hooks/useProtocolDetailsForRun.ts b/app/src/organisms/Devices/hooks/useProtocolDetailsForRun.ts index f9834364345..ec61b88967f 100644 --- a/app/src/organisms/Devices/hooks/useProtocolDetailsForRun.ts +++ b/app/src/organisms/Devices/hooks/useProtocolDetailsForRun.ts @@ -3,8 +3,8 @@ import last from 'lodash/last' import { getRobotTypeFromLoadedLabware } from '@opentrons/shared-data' import { useProtocolQuery, - useProtocolAnalysesQuery, useRunQuery, + useProtocolAnalysisAsDocumentQuery, } from '@opentrons/react-api-client' import type { @@ -13,6 +13,7 @@ import type { PendingProtocolAnalysis, } from '@opentrons/shared-data' +const ANALYSIS_POLL_MS = 5000 export interface ProtocolDetails { displayName: string | null protocolData: CompletedProtocolAnalysis | PendingProtocolAnalysis | null @@ -34,17 +35,15 @@ export function useProtocolDetailsForRun( const { data: protocolRecord } = useProtocolQuery(protocolId, { staleTime: Infinity, }) - - const { data: protocolAnalyses } = useProtocolAnalysesQuery( + const { data: mostRecentAnalysis } = useProtocolAnalysisAsDocumentQuery( protocolId, + last(protocolRecord?.data.analysisSummaries)?.id ?? null, { - staleTime: Infinity, - }, - isPollingProtocolAnalyses + enabled: protocolRecord != null && isPollingProtocolAnalyses, + refetchInterval: ANALYSIS_POLL_MS, + } ) - const mostRecentAnalysis = last(protocolAnalyses?.data ?? []) ?? null - React.useEffect(() => { if (mostRecentAnalysis?.status === 'completed') { setIsPollingProtocolAnalyses(false) @@ -61,8 +60,7 @@ export function useProtocolDetailsForRun( displayName: displayName ?? 
null, protocolData: mostRecentAnalysis ?? null, protocolKey: protocolRecord?.data.key ?? null, - isProtocolAnalyzing: - mostRecentAnalysis != null && mostRecentAnalysis?.status === 'pending', + isProtocolAnalyzing: protocolRecord != null && mostRecentAnalysis == null, // this should be deleted as soon as analysis tells us intended robot type robotType: mostRecentAnalysis?.status === 'completed' diff --git a/app/src/organisms/LabwarePositionCheck/useMostRecentCompletedAnalysis.ts b/app/src/organisms/LabwarePositionCheck/useMostRecentCompletedAnalysis.ts index 21ee417f638..4d0ece68dc0 100644 --- a/app/src/organisms/LabwarePositionCheck/useMostRecentCompletedAnalysis.ts +++ b/app/src/organisms/LabwarePositionCheck/useMostRecentCompletedAnalysis.ts @@ -1,5 +1,7 @@ +import last from 'lodash/last' import { - useProtocolAnalysesQuery, + useProtocolAnalysisAsDocumentQuery, + useProtocolQuery, useRunQuery, } from '@opentrons/react-api-client' import { CompletedProtocolAnalysis } from '@opentrons/shared-data' @@ -9,14 +11,14 @@ export function useMostRecentCompletedAnalysis( ): CompletedProtocolAnalysis | null { const { data: runRecord } = useRunQuery(runId) const protocolId = runRecord?.data?.protocolId ?? null - const { data: protocolAnalyses } = useProtocolAnalysesQuery(protocolId) - - return ( - (protocolAnalyses?.data ?? []) - .reverse() - .find( - (analysis): analysis is CompletedProtocolAnalysis => - analysis.status === 'completed' - ) ?? null + const { data: protocolData } = useProtocolQuery(protocolId, { + enabled: protocolId != null, + }) + const { data: analysis } = useProtocolAnalysisAsDocumentQuery( + protocolId, + last(protocolData?.data.analysisSummaries)?.id ?? null, + { enabled: protocolData != null } ) + + return analysis ?? null } diff --git a/app/src/organisms/OnDeviceDisplay/RobotDashboard/RecentRunProtocolCard.tsx b/app/src/organisms/OnDeviceDisplay/RobotDashboard/RecentRunProtocolCard.tsx index 9532f3ed507..edd44dbd90a 100644 --- a/app/src/organisms/OnDeviceDisplay/RobotDashboard/RecentRunProtocolCard.tsx +++ b/app/src/organisms/OnDeviceDisplay/RobotDashboard/RecentRunProtocolCard.tsx @@ -23,7 +23,6 @@ import { useTrackEvent } from '../../../redux/analytics' import { Skeleton } from '../../../atoms/Skeleton' import { useMissingProtocolHardware } from '../../../pages/Protocols/hooks' import { useCloneRun } from '../../ProtocolUpload/hooks' -import { useTrackProtocolRunEvent } from '../../Devices/hooks' import { useMissingHardwareText } from './hooks' import { RUN_STATUS_FAILED, @@ -74,7 +73,8 @@ export function ProtocolWithLastRun({ const isReadyToBeReRun = missingProtocolHardware.length === 0 const chipText = useMissingHardwareText(missingProtocolHardware) const trackEvent = useTrackEvent() - const { trackProtocolRunEvent } = useTrackProtocolRunEvent(runData.id) + // TODO(BC, 08/29/23): reintroduce this analytics event when we refactor the hook to fetch data lazily (performance concern) + // const { trackProtocolRunEvent } = useTrackProtocolRunEvent(runData.id) const onResetSuccess = (createRunResponse: Run): void => history.push(`runs/${createRunResponse.data.id}/setup`) const { cloneRun } = useCloneRun(runData.id, onResetSuccess) @@ -121,7 +121,8 @@ export function ProtocolWithLastRun({ name: 'proceedToRun', properties: { sourceLocation: 'RecentRunProtocolCard' }, }) - trackProtocolRunEvent({ name: 'runAgain' }) + // TODO(BC, 08/29/23): reintroduce this analytics event when we refactor the hook to fetch data lazily (performance concern) + // trackProtocolRunEvent({ name: 
'runAgain' }) } const terminationTypeMap: { [runStatus in RunStatus]?: string } = { diff --git a/app/src/organisms/OnDeviceDisplay/RobotDashboard/ServerInitializing.tsx b/app/src/organisms/OnDeviceDisplay/RobotDashboard/ServerInitializing.tsx new file mode 100644 index 00000000000..dc38804d42b --- /dev/null +++ b/app/src/organisms/OnDeviceDisplay/RobotDashboard/ServerInitializing.tsx @@ -0,0 +1,40 @@ +import * as React from 'react' +import { useTranslation } from 'react-i18next' + +import { + Flex, + DIRECTION_COLUMN, + ALIGN_CENTER, + COLORS, + JUSTIFY_CENTER, + TYPOGRAPHY, + BORDERS, + Icon, + SPACING, +} from '@opentrons/components' + +import { StyledText } from '../../../atoms/text' + +export function ServerInitializing(): JSX.Element { + const { t } = useTranslation('device_details') + return ( + + + + {t('robot_initializing')} + + + ) +} diff --git a/app/src/organisms/OnDeviceDisplay/RobotDashboard/__tests__/RecentRunProtocolCard.test.tsx b/app/src/organisms/OnDeviceDisplay/RobotDashboard/__tests__/RecentRunProtocolCard.test.tsx index f7955a89194..b20cb8fddc4 100644 --- a/app/src/organisms/OnDeviceDisplay/RobotDashboard/__tests__/RecentRunProtocolCard.test.tsx +++ b/app/src/organisms/OnDeviceDisplay/RobotDashboard/__tests__/RecentRunProtocolCard.test.tsx @@ -200,7 +200,8 @@ describe('RecentRunProtocolCard', () => { name: 'proceedToRun', properties: { sourceLocation: 'RecentRunProtocolCard' }, }) - expect(mockTrackProtocolRunEvent).toBeCalledWith({ name: 'runAgain' }) + // TODO(BC, 08/30/23): reintroduce check for tracking when tracking is reintroduced lazily + // expect(mockTrackProtocolRunEvent).toBeCalledWith({ name: 'runAgain' }) getByLabelText('icon_ot-spinner') expect(button).toHaveStyle(`background-color: ${COLORS.green3Pressed}`) }) diff --git a/app/src/organisms/TakeoverModal/MaintenanceRunTakeover.tsx b/app/src/organisms/TakeoverModal/MaintenanceRunTakeover.tsx index 9a83c2be7d3..89224ade4e7 100644 --- a/app/src/organisms/TakeoverModal/MaintenanceRunTakeover.tsx +++ b/app/src/organisms/TakeoverModal/MaintenanceRunTakeover.tsx @@ -6,6 +6,7 @@ import { import { TakeoverModal } from './TakeoverModal' import { TakeoverModalContext } from './TakeoverModalContext' +const MAINTENANCE_RUN_POLL_MS = 10000 interface MaintenanceRunTakeoverProps { children: React.ReactNode } @@ -17,7 +18,7 @@ export function MaintenanceRunTakeover( setIsODDMaintenanceInProgress, ] = React.useState(false) const maintenanceRunId = useCurrentMaintenanceRun({ - refetchInterval: 5000, + refetchInterval: MAINTENANCE_RUN_POLL_MS, }).data?.data.id const isMaintenanceRunCurrent = maintenanceRunId != null diff --git a/app/src/pages/OnDeviceDisplay/ProtocolDetails/Deck.tsx b/app/src/pages/OnDeviceDisplay/ProtocolDetails/Deck.tsx index 988b5c53ec1..286213fb915 100644 --- a/app/src/pages/OnDeviceDisplay/ProtocolDetails/Deck.tsx +++ b/app/src/pages/OnDeviceDisplay/ProtocolDetails/Deck.tsx @@ -1,19 +1,23 @@ import * as React from 'react' import last from 'lodash/last' -import { useProtocolAnalysesQuery } from '@opentrons/react-api-client' +import { + useProtocolAnalysisAsDocumentQuery, + useProtocolQuery, +} from '@opentrons/react-api-client' import { DeckThumbnail } from '../../../molecules/DeckThumbnail' import type { CompletedProtocolAnalysis } from '@opentrons/shared-data' export const Deck = (props: { protocolId: string }): JSX.Element => { - const { data: protocolAnalyses } = useProtocolAnalysesQuery( + const { data: protocolData } = useProtocolQuery(props.protocolId) + const { + data: mostRecentAnalysis, + } = 
useProtocolAnalysisAsDocumentQuery( props.protocolId, - { - staleTime: Infinity, - } + last(protocolData?.data.analysisSummaries)?.id ?? null, + { enabled: protocolData != null } ) - const mostRecentAnalysis = last(protocolAnalyses?.data ?? []) ?? null return ( { const { protocolId } = props - const { data: protocolAnalyses } = useProtocolAnalysesQuery(protocolId, { - staleTime: Infinity, - }) - const mostRecentAnalysis = last(protocolAnalyses?.data ?? []) ?? [] + const { data: protocolData } = useProtocolQuery(protocolId) + const { + data: mostRecentAnalysis, + } = useProtocolAnalysisAsDocumentQuery( + protocolId, + last(protocolData?.data.analysisSummaries)?.id ?? null, + { enabled: protocolData != null } + ) const liquidsInOrder = parseLiquidsInLoadOrder( (mostRecentAnalysis as CompletedProtocolAnalysis).liquids ?? [], (mostRecentAnalysis as CompletedProtocolAnalysis).commands ?? [] diff --git a/app/src/pages/OnDeviceDisplay/ProtocolDetails/__tests__/Liquids.test.tsx b/app/src/pages/OnDeviceDisplay/ProtocolDetails/__tests__/Liquids.test.tsx index c430267a5f0..a6c8f434fc0 100644 --- a/app/src/pages/OnDeviceDisplay/ProtocolDetails/__tests__/Liquids.test.tsx +++ b/app/src/pages/OnDeviceDisplay/ProtocolDetails/__tests__/Liquids.test.tsx @@ -1,21 +1,28 @@ import * as React from 'react' import { UseQueryResult } from 'react-query' import { when } from 'jest-when' -import { useProtocolAnalysesQuery } from '@opentrons/react-api-client' +import { + useProtocolAnalysisAsDocumentQuery, + useProtocolQuery, +} from '@opentrons/react-api-client' import { parseLabwareInfoByLiquidId, parseLiquidsInLoadOrder, - ProtocolAnalyses, + Protocol, } from '@opentrons/api-client' import { renderWithProviders } from '@opentrons/components' import { i18n } from '../../../../i18n' import { Liquids } from '../Liquids' +import { CompletedProtocolAnalysis } from '@opentrons/shared-data' jest.mock('@opentrons/api-client') jest.mock('@opentrons/react-api-client') -const mockUseProtocolAnalysesQuery = useProtocolAnalysesQuery as jest.MockedFunction< - typeof useProtocolAnalysesQuery +const mockUseProtocolQuery = useProtocolQuery as jest.MockedFunction< + typeof useProtocolQuery +> +const mockUseProtocolAnalysisAsDocumentQuery = useProtocolAnalysisAsDocumentQuery as jest.MockedFunction< + typeof useProtocolAnalysisAsDocumentQuery > const mockParseLiquidsInLoadOrder = parseLiquidsInLoadOrder as jest.MockedFunction< typeof parseLiquidsInLoadOrder @@ -25,6 +32,7 @@ const mockParseLabwareInfoByLiquidId = parseLabwareInfoByLiquidId as jest.Mocked > const MOCK_PROTOCOL_ID = 'mockProtocolId' const MOCK_PROTOCOL_ANALYSIS = { + id: 'fake_protocol_analysis', commands: [ { id: '97ba49a5-04f6-4f91-986a-04a0eb632882', @@ -188,11 +196,20 @@ describe('Liquids', () => { mockParseLabwareInfoByLiquidId.mockReturnValue( MOCK_LABWARE_INFO_BY_LIQUID_ID ) - when(mockUseProtocolAnalysesQuery) - .calledWith(MOCK_PROTOCOL_ID, { staleTime: Infinity }) + when(mockUseProtocolQuery) + .calledWith(MOCK_PROTOCOL_ID) + .mockReturnValue({ + data: { + data: { analysisSummaries: [{ id: MOCK_PROTOCOL_ANALYSIS.id }] }, + } as any, + } as UseQueryResult) + when(mockUseProtocolAnalysisAsDocumentQuery) + .calledWith(MOCK_PROTOCOL_ID, MOCK_PROTOCOL_ANALYSIS.id, { + enabled: true, + }) .mockReturnValue({ - data: { data: [MOCK_PROTOCOL_ANALYSIS] } as any, - } as UseQueryResult) + data: MOCK_PROTOCOL_ANALYSIS as any, + } as UseQueryResult) }) it('should render the correct headers and liquids', () => { const { getByRole, getByText, getByLabelText } = 
render(props)[0] diff --git a/app/src/pages/OnDeviceDisplay/ProtocolDetails/__tests__/ProtocolDetails.test.tsx b/app/src/pages/OnDeviceDisplay/ProtocolDetails/__tests__/ProtocolDetails.test.tsx index e05db746405..309dc783b72 100644 --- a/app/src/pages/OnDeviceDisplay/ProtocolDetails/__tests__/ProtocolDetails.test.tsx +++ b/app/src/pages/OnDeviceDisplay/ProtocolDetails/__tests__/ProtocolDetails.test.tsx @@ -14,7 +14,7 @@ import { useCreateRunMutation, useHost, useProtocolQuery, - useProtocolAnalysesQuery, + useProtocolAnalysisAsDocumentQuery, } from '@opentrons/react-api-client' import { i18n } from '../../../../i18n' import { useMissingHardwareText } from '../../../../organisms/OnDeviceDisplay/RobotDashboard/hooks' @@ -67,8 +67,8 @@ const mockDeleteRun = deleteRun as jest.MockedFunction const mockUseProtocolQuery = useProtocolQuery as jest.MockedFunction< typeof useProtocolQuery > -const mockUseProtocolAnalysesQuery = useProtocolAnalysesQuery as jest.MockedFunction< - typeof useProtocolAnalysesQuery +const mockUseProtocolAnalysisAsDocumentQuery = useProtocolAnalysisAsDocumentQuery as jest.MockedFunction< + typeof useProtocolAnalysisAsDocumentQuery > const mockUseMissingProtocolHardware = useMissingProtocolHardware as jest.MockedFunction< typeof useMissingProtocolHardware @@ -130,14 +130,10 @@ describe('ODDProtocolDetails', () => { data: MOCK_DATA, isLoading: false, } as any) - mockUseProtocolAnalysesQuery.mockReturnValue({ + mockUseProtocolAnalysisAsDocumentQuery.mockReturnValue({ data: { - data: [ - { - id: 'mockAnalysisId', - status: 'completed', - }, - ], + id: 'mockAnalysisId', + status: 'completed', }, } as any) when(mockuseHost).calledWith().mockReturnValue(MOCK_HOST_CONFIG) diff --git a/app/src/pages/OnDeviceDisplay/ProtocolDetails/index.tsx b/app/src/pages/OnDeviceDisplay/ProtocolDetails/index.tsx index 89cb0d2fc59..259f57a49f9 100644 --- a/app/src/pages/OnDeviceDisplay/ProtocolDetails/index.tsx +++ b/app/src/pages/OnDeviceDisplay/ProtocolDetails/index.tsx @@ -1,4 +1,5 @@ import * as React from 'react' +import last from 'lodash/last' import { useTranslation } from 'react-i18next' import { useQueryClient } from 'react-query' import { deleteProtocol, deleteRun, getProtocol } from '@opentrons/api-client' @@ -22,10 +23,9 @@ import { import { useCreateRunMutation, useHost, - useProtocolAnalysesQuery, + useProtocolAnalysisAsDocumentQuery, useProtocolQuery, } from '@opentrons/react-api-client' -import { CompletedProtocolAnalysis } from '@opentrons/shared-data' import { MAXIMUM_PINNED_PROTOCOLS } from '../../../App/constants' import { MediumButton, SmallButton, TabbedButton } from '../../../atoms/buttons' import { Chip } from '../../../atoms/Chip' @@ -308,20 +308,21 @@ export function ProtocolDetails(): JSX.Element | null { let pinnedProtocolIds = useSelector(getPinnedProtocolIds) ?? [] const pinned = pinnedProtocolIds.includes(protocolId) - const { data: protocolAnalyses } = useProtocolAnalysesQuery(protocolId) - const mostRecentAnalysis = - (protocolAnalyses?.data ?? []) - .reverse() - .find( - (analysis): analysis is CompletedProtocolAnalysis => - analysis.status === 'completed' - ) ?? null + const { data: protocolData } = useProtocolQuery(protocolId) + const { + data: mostRecentAnalysis, + } = useProtocolAnalysisAsDocumentQuery( + protocolId, + last(protocolData?.data.analysisSummaries)?.id ?? 
null, + { enabled: protocolData != null } + ) + const shouldApplyOffsets = useSelector(getApplyHistoricOffsets) // I'd love to skip scraping altogether if we aren't applying // conditional offsets, but React won't let us use hooks conditionally. // So, we'll scrape regardless and just toss them if we don't need them. const scrapedLabwareOffsets = useOffsetCandidatesForAnalysis( - mostRecentAnalysis + mostRecentAnalysis ?? null ).map(({ vector, location, definitionUri }) => ({ vector, location, diff --git a/app/src/pages/OnDeviceDisplay/RobotDashboard/index.tsx b/app/src/pages/OnDeviceDisplay/RobotDashboard/index.tsx index 71c1ed4f8c1..6cf5ed2c773 100644 --- a/app/src/pages/OnDeviceDisplay/RobotDashboard/index.tsx +++ b/app/src/pages/OnDeviceDisplay/RobotDashboard/index.tsx @@ -21,12 +21,13 @@ import { import { getOnDeviceDisplaySettings } from '../../../redux/config' import { WelcomedModal } from './WelcomeModal' import { RunData } from '@opentrons/api-client' +import { ServerInitializing } from '../../../organisms/OnDeviceDisplay/RobotDashboard/ServerInitializing' export const MAXIMUM_RECENT_RUN_PROTOCOLS = 8 export function RobotDashboard(): JSX.Element { const { t } = useTranslation('device_details') - const allRuns = useAllRunsQuery().data?.data ?? [] + const { data: allRunsQueryData, error: allRunsQueryError } = useAllRunsQuery() const { unfinishedUnboxingFlowRoute } = useSelector( getOnDeviceDisplaySettings @@ -35,7 +36,7 @@ export function RobotDashboard(): JSX.Element { unfinishedUnboxingFlowRoute !== null ) - const recentRunsOfUniqueProtocols = allRuns + const recentRunsOfUniqueProtocols = (allRunsQueryData?.data ?? []) .reverse() // newest runs first .reduce((acc, run) => { if ( @@ -48,6 +49,29 @@ export function RobotDashboard(): JSX.Element { }, []) .slice(0, MAXIMUM_RECENT_RUN_PROTOCOLS) + let contents: JSX.Element = + // GET runs query will error with 503 if database is initializing + // this should be momentary, and the type of error to come from this endpoint + // so, all errors will be mapped to an initializing spinner + if (allRunsQueryError != null) { + contents = + } else if (recentRunsOfUniqueProtocols.length > 0) { + contents = ( + <> + + {t('run_again')} + + + + ) + } + return ( @@ -59,22 +83,7 @@ export function RobotDashboard(): JSX.Element { {showWelcomeModal ? ( ) : null} - {recentRunsOfUniqueProtocols.length === 0 ? ( - - ) : ( - <> - - {t('run_again')} - - - - )} + {contents} ) diff --git a/app/src/pages/ProtocolDashboard/ProtocolCard.tsx b/app/src/pages/ProtocolDashboard/ProtocolCard.tsx index f69da042383..2da777c135b 100644 --- a/app/src/pages/ProtocolDashboard/ProtocolCard.tsx +++ b/app/src/pages/ProtocolDashboard/ProtocolCard.tsx @@ -19,7 +19,10 @@ import { TYPOGRAPHY, useLongPress, } from '@opentrons/components' -import { useHost, useProtocolAnalysesQuery } from '@opentrons/react-api-client' +import { + useHost, + useProtocolAnalysisAsDocumentQuery, +} from '@opentrons/react-api-client' import { deleteProtocol, deleteRun, getProtocol } from '@opentrons/api-client' import { StyledText } from '../../atoms/text' @@ -57,10 +60,15 @@ export function ProtocolCard(props: { const longpress = useLongPress() const queryClient = useQueryClient() const host = useHost() - const { data: protocolAnalyses } = useProtocolAnalysesQuery(protocol.id, { - staleTime: Infinity, - }) - const mostRecentAnalysis = last(protocolAnalyses?.data ?? []) ?? 
null + + const { + data: mostRecentAnalysis, + } = useProtocolAnalysisAsDocumentQuery( + protocol.id, + last(protocol.analysisSummaries)?.id ?? null, + { enabled: protocol != null } + ) + const isFailedAnalysis = (mostRecentAnalysis != null && 'result' in mostRecentAnalysis && diff --git a/app/src/pages/ProtocolDashboard/__tests__/ProtocolCard.test.tsx b/app/src/pages/ProtocolDashboard/__tests__/ProtocolCard.test.tsx index 7af543cf50f..2e52520c22a 100644 --- a/app/src/pages/ProtocolDashboard/__tests__/ProtocolCard.test.tsx +++ b/app/src/pages/ProtocolDashboard/__tests__/ProtocolCard.test.tsx @@ -2,14 +2,16 @@ import * as React from 'react' import { fireEvent } from '@testing-library/react' import { MemoryRouter } from 'react-router-dom' import { UseQueryResult } from 'react-query' -import { useProtocolAnalysesQuery } from '@opentrons/react-api-client' +import { useProtocolAnalysisAsDocumentQuery } from '@opentrons/react-api-client' import { renderWithProviders } from '@opentrons/components' import { i18n } from '../../../i18n' import { ProtocolCard } from '../ProtocolCard' -import type { ProtocolResource } from '@opentrons/shared-data' -import type { ProtocolAnalyses } from '@opentrons/api-client' +import type { + CompletedProtocolAnalysis, + ProtocolResource, +} from '@opentrons/shared-data' const mockPush = jest.fn() @@ -22,8 +24,8 @@ jest.mock('react-router-dom', () => { }) jest.mock('@opentrons/react-api-client') -const mockUseProtocolAnalysesQuery = useProtocolAnalysesQuery as jest.MockedFunction< - typeof useProtocolAnalysesQuery +const mockUseProtocolAnalysisAsDocumentQuery = useProtocolAnalysisAsDocumentQuery as jest.MockedFunction< + typeof useProtocolAnalysisAsDocumentQuery > const mockProtocol: ProtocolResource = { @@ -65,9 +67,9 @@ describe('ProtocolCard', () => { jest.useFakeTimers() beforeEach(() => { - mockUseProtocolAnalysesQuery.mockReturnValue({ - data: { data: [{ result: 'ok' }] } as any, - } as UseQueryResult) + mockUseProtocolAnalysisAsDocumentQuery.mockReturnValue({ + data: { result: 'ok' } as any, + } as UseQueryResult) }) it('should redirect to protocol details after short click', () => { const [{ getByText }] = render() @@ -77,9 +79,9 @@ describe('ProtocolCard', () => { }) it('should display the analysis failed error modal when clicking on the protocol', () => { - mockUseProtocolAnalysesQuery.mockReturnValue({ - data: { data: [{ result: 'error' }] } as any, - } as UseQueryResult) + mockUseProtocolAnalysisAsDocumentQuery.mockReturnValue({ + data: { result: 'error' } as any, + } as UseQueryResult) const [{ getByText, getByLabelText, queryByText }] = render() getByLabelText('failedAnalysis_icon') getByText('Failed analysis') @@ -105,9 +107,9 @@ describe('ProtocolCard', () => { }) it('should display the analysis failed error modal when clicking on the protocol when doing a long pressing', async () => { - mockUseProtocolAnalysesQuery.mockReturnValue({ - data: { data: [{ result: 'error' }] } as any, - } as UseQueryResult) + mockUseProtocolAnalysisAsDocumentQuery.mockReturnValue({ + data: { result: 'error' } as any, + } as UseQueryResult) const [{ getByText, getByLabelText }] = render() const name = getByText('yay mock protocol') fireEvent.mouseDown(name) diff --git a/app/src/pages/Protocols/hooks/__tests__/hooks.test.tsx b/app/src/pages/Protocols/hooks/__tests__/hooks.test.tsx index c11ef452557..f0cbf0aa9dd 100644 --- a/app/src/pages/Protocols/hooks/__tests__/hooks.test.tsx +++ b/app/src/pages/Protocols/hooks/__tests__/hooks.test.tsx @@ -4,24 +4,31 @@ import { renderHook 
} from '@testing-library/react-hooks' import { when, resetAllWhenMocks } from 'jest-when' import { - useProtocolAnalysesQuery, + useProtocolQuery, useInstrumentsQuery, useModulesQuery, + useProtocolAnalysisAsDocumentQuery, } from '@opentrons/react-api-client' import { mockHeaterShaker } from '../../../../redux/modules/__fixtures__' import { useRequiredProtocolLabware, useMissingProtocolHardware } from '..' import fixture_tiprack_300_ul from '@opentrons/shared-data/labware/fixtures/2/fixture_tiprack_300_ul.json' -import type { ProtocolAnalyses } from '@opentrons/api-client' -import type { LabwareDefinition2 } from '@opentrons/shared-data' +import type { Protocol } from '@opentrons/api-client' +import type { + CompletedProtocolAnalysis, + LabwareDefinition2, +} from '@opentrons/shared-data' jest.mock('@opentrons/react-api-client') jest.mock('../../../../organisms/Devices/hooks') const PROTOCOL_ID = 'fake_protocol_id' -const mockUseProtocolAnalysesQuery = useProtocolAnalysesQuery as jest.MockedFunction< - typeof useProtocolAnalysesQuery +const mockUseProtocolQuery = useProtocolQuery as jest.MockedFunction< + typeof useProtocolQuery +> +const mockUseProtocolAnalysisAsDocumentQuery = useProtocolAnalysisAsDocumentQuery as jest.MockedFunction< + typeof useProtocolAnalysisAsDocumentQuery > const mockUseModulesQuery = useModulesQuery as jest.MockedFunction< typeof useModulesQuery @@ -87,16 +94,29 @@ const NULL_COMMAND = { } const NULL_PROTOCOL_ANALYSIS = { ...PROTOCOL_ANALYSIS, + id: 'null_analysis', commands: [NULL_COMMAND], } as any describe('useRequiredProtocolLabware', () => { beforeEach(() => { - when(mockUseProtocolAnalysesQuery) - .calledWith(PROTOCOL_ID, { staleTime: Infinity }) + when(mockUseProtocolQuery) + .calledWith(PROTOCOL_ID) + .mockReturnValue({ + data: { + data: { analysisSummaries: [{ id: PROTOCOL_ANALYSIS.id } as any] }, + }, + } as UseQueryResult) + when(mockUseProtocolAnalysisAsDocumentQuery) + .calledWith(PROTOCOL_ID, PROTOCOL_ANALYSIS.id, { enabled: true }) + .mockReturnValue({ + data: PROTOCOL_ANALYSIS, + } as UseQueryResult) + when(mockUseProtocolAnalysisAsDocumentQuery) + .calledWith(PROTOCOL_ID, NULL_PROTOCOL_ANALYSIS.id, { enabled: true }) .mockReturnValue({ - data: { data: [PROTOCOL_ANALYSIS as any] }, - } as UseQueryResult) + data: NULL_PROTOCOL_ANALYSIS, + } as UseQueryResult) }) afterEach(() => { @@ -114,11 +134,15 @@ describe('useRequiredProtocolLabware', () => { }) it('should return empty array when there is no match with protocol id', () => { - when(mockUseProtocolAnalysesQuery) - .calledWith(PROTOCOL_ID, { staleTime: Infinity }) + when(mockUseProtocolQuery) + .calledWith(PROTOCOL_ID) .mockReturnValue({ - data: { data: [NULL_PROTOCOL_ANALYSIS as any] }, - } as UseQueryResult) + data: { + data: { + analysisSummaries: [{ id: NULL_PROTOCOL_ANALYSIS.id } as any], + }, + }, + } as UseQueryResult) const { result } = renderHook(() => useRequiredProtocolLabware(PROTOCOL_ID)) expect(result.current.length).toBe(0) }) @@ -135,9 +159,14 @@ describe('useMissingProtocolHardware', () => { data: { data: [] }, isLoading: false, } as any) - mockUseProtocolAnalysesQuery.mockReturnValue({ - data: { data: [PROTOCOL_ANALYSIS as any] }, - } as UseQueryResult) + mockUseProtocolQuery.mockReturnValue({ + data: { + data: { analysisSummaries: [{ id: PROTOCOL_ANALYSIS.id } as any] }, + }, + } as UseQueryResult) + mockUseProtocolAnalysisAsDocumentQuery.mockReturnValue({ + data: PROTOCOL_ANALYSIS, + } as UseQueryResult) }) afterEach(() => { diff --git a/app/src/pages/Protocols/hooks/index.ts 
b/app/src/pages/Protocols/hooks/index.ts index 24fc92c1548..96a5164ed19 100644 --- a/app/src/pages/Protocols/hooks/index.ts +++ b/app/src/pages/Protocols/hooks/index.ts @@ -2,7 +2,8 @@ import last from 'lodash/last' import { useInstrumentsQuery, useModulesQuery, - useProtocolAnalysesQuery, + useProtocolAnalysisAsDocumentQuery, + useProtocolQuery, } from '@opentrons/react-api-client' import { getLabwareSetupItemGroups } from '../utils' @@ -50,10 +51,12 @@ export type ProtocolHardware = export const useRequiredProtocolHardware = ( protocolId: string ): { requiredProtocolHardware: ProtocolHardware[]; isLoading: boolean } => { - const { data: protocolAnalyses } = useProtocolAnalysesQuery(protocolId, { - staleTime: Infinity, - }) - const mostRecentAnalysis = last(protocolAnalyses?.data ?? []) ?? null + const { data: protocolData } = useProtocolQuery(protocolId) + const { data: analysis } = useProtocolAnalysisAsDocumentQuery( + protocolId, + last(protocolData?.data.analysisSummaries)?.id ?? null, + { enabled: protocolData != null } + ) const { data: attachedModulesData, @@ -67,16 +70,11 @@ export const useRequiredProtocolHardware = ( } = useInstrumentsQuery() const attachedInstruments = attachedInstrumentsData?.data ?? [] - if ( - mostRecentAnalysis == null || - mostRecentAnalysis?.status !== 'completed' - ) { + if (analysis == null || analysis?.status !== 'completed') { return { requiredProtocolHardware: [], isLoading: true } } - const requiredGripper: ProtocolGripper[] = getProtocolUsesGripper( - mostRecentAnalysis - ) + const requiredGripper: ProtocolGripper[] = getProtocolUsesGripper(analysis) ? [ { hardwareType: 'gripper', @@ -87,7 +85,7 @@ export const useRequiredProtocolHardware = ( ] : [] - const requiredModules: ProtocolModule[] = mostRecentAnalysis.modules.map( + const requiredModules: ProtocolModule[] = analysis.modules.map( ({ location, model }) => { return { hardwareType: 'module', @@ -99,7 +97,7 @@ export const useRequiredProtocolHardware = ( } ) - const requiredPipettes: ProtocolPipette[] = mostRecentAnalysis.pipettes.map( + const requiredPipettes: ProtocolPipette[] = analysis.pipettes.map( ({ mount, pipetteName }) => ({ hardwareType: 'pipette', pipetteName: pipetteName, @@ -134,10 +132,14 @@ export const useRequiredProtocolHardware = ( export const useRequiredProtocolLabware = ( protocolId: string ): LabwareSetupItem[] => { - const { data: protocolAnalyses } = useProtocolAnalysesQuery(protocolId, { - staleTime: Infinity, - }) - const mostRecentAnalysis = last(protocolAnalyses?.data ?? []) ?? null + const { data: protocolData } = useProtocolQuery(protocolId) + const { + data: mostRecentAnalysis, + } = useProtocolAnalysisAsDocumentQuery( + protocolId, + last(protocolData?.data.analysisSummaries)?.id ?? null, + { enabled: protocolData != null } + ) const commands = (mostRecentAnalysis as CompletedProtocolAnalysis)?.commands ?? 
[]
   const { onDeckItems, offDeckItems } = getLabwareSetupItemGroups(commands)
diff --git a/react-api-client/src/protocols/index.ts b/react-api-client/src/protocols/index.ts
index 13a4b94a9b0..ddf7c3eeaac 100644
--- a/react-api-client/src/protocols/index.ts
+++ b/react-api-client/src/protocols/index.ts
@@ -2,5 +2,6 @@ export { useAllProtocolsQuery } from './useAllProtocolsQuery'
 export { useAllProtocolIdsQuery } from './useAllProtocolIdsQuery'
 export { useProtocolQuery } from './useProtocolQuery'
 export { useProtocolAnalysesQuery } from './useProtocolAnalysesQuery'
+export { useProtocolAnalysisAsDocumentQuery } from './useProtocolAnalysisAsDocumentQuery'
 export { useCreateProtocolMutation } from './useCreateProtocolMutation'
 export { useDeleteProtocolMutation } from './useDeleteProtocolMutation'
diff --git a/react-api-client/src/protocols/useProtocolAnalysisAsDocumentQuery.ts b/react-api-client/src/protocols/useProtocolAnalysisAsDocumentQuery.ts
new file mode 100644
index 00000000000..6cfebeb2c47
--- /dev/null
+++ b/react-api-client/src/protocols/useProtocolAnalysisAsDocumentQuery.ts
@@ -0,0 +1,26 @@
+import { UseQueryResult, useQuery } from 'react-query'
+import { getProtocolAnalysisAsDocument } from '@opentrons/api-client'
+import { useHost } from '../api'
+import type { HostConfig } from '@opentrons/api-client'
+import type { UseQueryOptions } from 'react-query'
+import { CompletedProtocolAnalysis } from '@opentrons/shared-data'
+
+export function useProtocolAnalysisAsDocumentQuery(
+  protocolId: string | null,
+  analysisId: string | null,
+  options?: UseQueryOptions<CompletedProtocolAnalysis>
+): UseQueryResult<CompletedProtocolAnalysis> {
+  const host = useHost()
+  const query = useQuery(
+    [host, 'protocols', protocolId, 'analyses', analysisId],
+    () =>
+      getProtocolAnalysisAsDocument(
+        host as HostConfig,
+        protocolId as string,
+        analysisId as string
+      ).then(response => response.data),
+    options
+  )
+
+  return query
+}
diff --git a/robot-server/robot_server/hardware.py b/robot-server/robot_server/hardware.py
index 4308df9f354..faaf63ce959 100644
--- a/robot-server/robot_server/hardware.py
+++ b/robot-server/robot_server/hardware.py
@@ -500,6 +500,12 @@ async def _initialize_hardware_api(
         if callback[1]:
             await callback[0](app_state, hardware.wrapped())
 
+    # This ties systemd notification to hardware initialization. We might want to move
+    # systemd notification so it also waits for DB migration+initialization.
+    # If we do that, we need to be careful:
+    # - systemd timeouts might need to be increased to allow for DB migration time
+    # - There might be UI implications for loading states on the Flex's on-device display,
+    #   because it polls for the server's systemd status.
_systemd_notify(systemd_available) if should_use_ot3(): diff --git a/robot-server/robot_server/persistence/_migrations.py b/robot-server/robot_server/persistence/_migrations.py index 8329cb4ba37..295f86ad1ee 100644 --- a/robot-server/robot_server/persistence/_migrations.py +++ b/robot-server/robot_server/persistence/_migrations.py @@ -18,11 +18,14 @@ - Version 0 - Initial schema version + - Version 1 - - `run_table.state_summary` column added - - `run_table.commands` column added - - `run_table.engine_status` column added - - `run_table._updated_at` column added + This migration adds the following nullable columns to the run table: + + - Column("state_summary, sqlalchemy.PickleType, nullable=True) + - Column("commands", sqlalchemy.PickleType, nullable=True) + - Column("engine_status", sqlalchemy.String, nullable=True) + - Column("_updated_at", sqlalchemy.DateTime, nullable=True) """ import logging from datetime import datetime, timezone @@ -31,9 +34,11 @@ import sqlalchemy -from ._tables import migration_table, run_table +from ._tables import analysis_table, migration_table, run_table +from . import legacy_pickle + -_LATEST_SCHEMA_VERSION: Final = 1 +_LATEST_SCHEMA_VERSION: Final = 2 _log = logging.getLogger(__name__) @@ -48,26 +53,52 @@ def migrate(sql_engine: sqlalchemy.engine.Engine) -> None: NOTE: added columns should be nullable. """ with sql_engine.begin() as transaction: - version = _get_schema_version(transaction) + starting_version = _get_schema_version(transaction) - if version is not None: - if version < 1: - _migrate_0_to_1(transaction) + if starting_version is None: + _log.info( + f"Marking fresh database as schema version {_LATEST_SCHEMA_VERSION}." + ) + _stamp_schema_version(transaction) + elif starting_version == _LATEST_SCHEMA_VERSION: _log.info( - f"Migrated database from schema {version}" - f" to version {_LATEST_SCHEMA_VERSION}" + f"Database has schema version {_LATEST_SCHEMA_VERSION}." + " no migrations needed." ) + else: _log.info( - f"Marking fresh database as schema version {_LATEST_SCHEMA_VERSION}" + f"Database has schema version {starting_version}." + f" Migrating to {_LATEST_SCHEMA_VERSION}..." ) - if version != _LATEST_SCHEMA_VERSION: - _insert_migration(transaction) - - -def _insert_migration(transaction: sqlalchemy.engine.Connection) -> None: + if starting_version < 1: + _log.info("Migrating database schema from 0 to 1...") + _migrate_schema_0_to_1(transaction) + if starting_version < 2: + _log.info("Migrating database schema from 1 to 2...") + _migrate_schema_1_to_2(transaction) + + _log.info("Database schema migrations complete.") + _stamp_schema_version(transaction) + + # We migrate data 1->2 unconditionally, even when we haven't just performed a 1->2 schema + # migration. This is to solve the following edge case: + # + # 1) Start on schema 1. + # 2) Update robot software, triggering a migration to schema 2. + # 3) Roll back to older robot software that doesn't understand schema 2. + # Now we've got old software working in a schema 2 database, causing rows to be added + # where schema 2's `completed_analysis_as_document` column is NULL. + # 4) Update robot software again. This won't trigger a schema migration, because the + # database was already migrated to schema 2 once. But we want to get rid of those NULL + # `completed_analysis_as_document` values. 
+ _migrate_data_1_to_2(transaction) + + +def _stamp_schema_version(transaction: sqlalchemy.engine.Connection) -> None: + """Mark the database as having the latest schema version.""" transaction.execute( sqlalchemy.insert(migration_table).values( created_at=datetime.now(tz=timezone.utc), @@ -77,7 +108,7 @@ def _insert_migration(transaction: sqlalchemy.engine.Connection) -> None: def _get_schema_version(transaction: sqlalchemy.engine.Connection) -> Optional[int]: - """Get the starting version of the database. + """Get the current schema version of the given database. Returns: The version found, or None if this is a fresh database that @@ -86,6 +117,12 @@ def _get_schema_version(transaction: sqlalchemy.engine.Connection) -> Optional[i if _is_version_0(transaction): return 0 + # It's important that this takes the highest schema version that we've seen, + # not the most recent one to be added. If you downgrade robot software across + # a schema boundary, the old software will leave the database at its newer schema, + # but stamp it as having "migrated" to the old one. We need to see it as having the newer + # schema, to avoid incorrectly doing a redundant migration when the software is upgraded + # again later. select_latest_version = sqlalchemy.select(migration_table).order_by( sqlalchemy.desc(migration_table.c.version) ) @@ -111,16 +148,8 @@ def _is_version_0(transaction: sqlalchemy.engine.Connection) -> bool: return True -def _migrate_0_to_1(transaction: sqlalchemy.engine.Connection) -> None: - """Migrate to schema version 1. - - This migration adds the following nullable columns to the run table: - - - Column("state_summary, sqlalchemy.PickleType, nullable=True) - - Column("commands", sqlalchemy.PickleType, nullable=True) - - Column("engine_status", sqlalchemy.String, nullable=True) - - Column("_updated_at", sqlalchemy.DateTime, nullable=True) - """ +def _migrate_schema_0_to_1(transaction: sqlalchemy.engine.Connection) -> None: + """Migrate the database from schema 0 to schema 1.""" add_summary_column = sqlalchemy.text("ALTER TABLE run ADD state_summary BLOB") add_commands_column = sqlalchemy.text("ALTER TABLE run ADD commands BLOB") # NOTE: The column type of `STRING` here is mistaken. SQLite won't recognize it, @@ -135,3 +164,66 @@ def _migrate_0_to_1(transaction: sqlalchemy.engine.Connection) -> None: transaction.execute(add_commands_column) transaction.execute(add_status_column) transaction.execute(add_updated_at_column) + + +def _migrate_schema_1_to_2(transaction: sqlalchemy.engine.Connection) -> None: + """Migrate the database from schema 1 to schema 2.""" + add_completed_analysis_as_document_column = sqlalchemy.text( + "ALTER TABLE analysis ADD completed_analysis_as_document VARCHAR" + ) + transaction.execute(add_completed_analysis_as_document_column) + + +def _migrate_data_1_to_2(transaction: sqlalchemy.engine.Connection) -> None: + """Migrate the data that the database contains to take advantage of schema 2. + + Find rows where the `completed_analysis_as_document` column, introduced in schema 2, + is NULL. Populate them with values computed from schema 1's `completed_analysis` column. + + The database is expected to already be at schema 2. This is safe to run again on a database + whose data has already been migrated by this function. 
+ """ + # Local import to work around a circular dependency: + # 1) This module is part of robot_server.persistence + # 2) We're trying to import something from robot_server.protocols + # 3) ...which re-exports stuff from robot_server.protocols.protocol_store + # 4) ...which depends on robot_server.persistence + from robot_server.protocols.analysis_models import CompletedAnalysis + + rows_needing_migration = transaction.execute( + sqlalchemy.select( + analysis_table.c.id, analysis_table.c.completed_analysis + ).where(analysis_table.c.completed_analysis_as_document.is_(None)) + ).all() + + if rows_needing_migration: + _log.info( + f"Migrating {len(rows_needing_migration)} analysis documents." + f" This may take a while..." + ) + + for index, row in enumerate(rows_needing_migration): + _log.info( + f"Migrating analysis {index+1}/{len(rows_needing_migration)}, {row.id}..." + ) + + v1_completed_analysis = CompletedAnalysis.parse_obj( + legacy_pickle.loads(row.completed_analysis) + ) + + v2_completed_analysis_as_document = v1_completed_analysis.json( + # by_alias and exclude_none should match how + # FastAPI + Pydantic + our customizations serialize these objects + # over the `GET /protocols/:id/analyses/:id` endpoint. + by_alias=True, + exclude_none=True, + ) + + transaction.execute( + sqlalchemy.update(analysis_table) + .where(analysis_table.c.id == row.id) + .values(completed_analysis_as_document=v2_completed_analysis_as_document) + ) + + if rows_needing_migration: + _log.info("Done migrating analysis documents.") diff --git a/robot-server/robot_server/persistence/_tables.py b/robot-server/robot_server/persistence/_tables.py index edb10200588..ab30c512e05 100644 --- a/robot-server/robot_server/persistence/_tables.py +++ b/robot-server/robot_server/persistence/_tables.py @@ -56,9 +56,19 @@ ), sqlalchemy.Column( "completed_analysis", + # Stores a pickled dict. See CompletedAnalysisStore. + # TODO(mm, 2023-08-30): Remove this. See https://opentrons.atlassian.net/browse/RSS-98. sqlalchemy.LargeBinary, nullable=False, ), + sqlalchemy.Column( + "completed_analysis_as_document", + # Stores the same data as completed_analysis, but serialized as a JSON string. + sqlalchemy.String, + # This column should never be NULL in practice. + # It needs to be nullable=True because of limitations in SQLite and our migration code. + nullable=True, + ), ) diff --git a/robot-server/robot_server/protocols/analysis_store.py b/robot-server/robot_server/protocols/analysis_store.py index 4bb9d53846a..29ddb0d82ec 100644 --- a/robot-server/robot_server/protocols/analysis_store.py +++ b/robot-server/robot_server/protocols/analysis_store.py @@ -179,7 +179,7 @@ async def get(self, analysis_id: str) -> ProtocolAnalysis: """Get a single protocol analysis by its ID. Raises: - AnalysisNotFoundError + AnalysisNotFoundError: If there is no analysis with the given ID. """ pending_analysis = self._pending_store.get(analysis_id=analysis_id) completed_analysis_resource = await self._completed_store.get_by_id( @@ -193,6 +193,21 @@ async def get(self, analysis_id: str) -> ProtocolAnalysis: else: raise AnalysisNotFoundError(analysis_id=analysis_id) + async def get_as_document(self, analysis_id: str) -> str: + """Get a single completed protocol analysis by its ID, as a pre-serialized JSON document. + + Raises: + AnalysisNotFoundError: If there is no completed analysis with the given ID. + Unlike `get()`, this is raised if the analysis exists, but is pending. 
+ """ + completed_analysis_document = await self._completed_store.get_by_id_as_document( + analysis_id=analysis_id + ) + if completed_analysis_document is not None: + return completed_analysis_document + else: + raise AnalysisNotFoundError(analysis_id=analysis_id) + def get_summaries_by_protocol(self, protocol_id: str) -> List[AnalysisSummary]: """Get summaries of all analyses for a protocol, in order from oldest first. diff --git a/robot-server/robot_server/protocols/completed_analysis_store.py b/robot-server/robot_server/protocols/completed_analysis_store.py index f6db14d0057..77f4eea682f 100644 --- a/robot-server/robot_server/protocols/completed_analysis_store.py +++ b/robot-server/robot_server/protocols/completed_analysis_store.py @@ -2,7 +2,7 @@ from __future__ import annotations import asyncio -from typing import Dict, List, Optional +from typing import Dict, List, Optional, Tuple from logging import getLogger from dataclasses import dataclass @@ -42,10 +42,16 @@ async def to_sql_values(self) -> Dict[str, object]: Avoid calling this from inside a SQL transaction, since it might be slow. """ - def serialize_completed_analysis() -> bytes: - return legacy_pickle.dumps(self.completed_analysis.dict()) + def serialize_completed_analysis() -> Tuple[bytes, str]: + serialized_pickle = _serialize_completed_analysis_to_pickle( + self.completed_analysis + ) + serialized_json = _serialize_completed_analysis_to_json( + self.completed_analysis + ) + return serialized_pickle, serialized_json - serialized_completed_analysis = await anyio.to_thread.run_sync( + serialized_pickle, serialized_json = await anyio.to_thread.run_sync( serialize_completed_analysis, # Cancellation may orphan the worker thread, # but that should be harmless in this case. @@ -56,7 +62,8 @@ def serialize_completed_analysis() -> bytes: "id": self.id, "protocol_id": self.protocol_id, "analyzer_version": self.analyzer_version, - "completed_analysis": serialized_completed_analysis, + "completed_analysis": serialized_pickle, + "completed_analysis_as_document": serialized_json, } @classmethod @@ -167,6 +174,29 @@ async def get_by_id(self, analysis_id: str) -> Optional[CompletedAnalysisResourc return resource + async def get_by_id_as_document(self, analysis_id: str) -> Optional[str]: + """Return the analysis with the given ID, if it exists. + + This is like `get_by_id()`, except it returns the analysis as a pre-serialized JSON + document. + """ + statement = sqlalchemy.select( + analysis_table.c.completed_analysis_as_document + ).where(analysis_table.c.id == analysis_id) + + with self._sql_engine.begin() as transaction: + try: + document: Optional[str] = transaction.execute(statement).scalar_one() + except sqlalchemy.exc.NoResultFound: + # No analysis with this ID. + return None + + # Although the completed_analysis_as_document column is nullable, + # our migration code is supposed to ensure that it's never NULL in practice. 
+ assert document is not None + + return document + async def get_by_protocol( self, protocol_id: str ) -> List[CompletedAnalysisResource]: @@ -254,3 +284,19 @@ async def add(self, completed_analysis_resource: CompletedAnalysisResource) -> N self._memcache.insert( completed_analysis_resource.id, completed_analysis_resource ) + + +def _serialize_completed_analysis_to_pickle( + completed_analysis: CompletedAnalysis, +) -> bytes: + return legacy_pickle.dumps(completed_analysis.dict()) + + +def _serialize_completed_analysis_to_json(completed_analysis: CompletedAnalysis) -> str: + return completed_analysis.json( + # by_alias and exclude_none should match how + # FastAPI + Pydantic + our customizations serialize these objects + # over the `GET /protocols/:id/analyses/:id` endpoint. + by_alias=True, + exclude_none=True, + ) diff --git a/robot-server/robot_server/protocols/router.py b/robot-server/robot_server/protocols/router.py index 6bad69afb33..1ce0337469b 100644 --- a/robot-server/robot_server/protocols/router.py +++ b/robot-server/robot_server/protocols/router.py @@ -3,11 +3,12 @@ from textwrap import dedent from datetime import datetime from pathlib import Path +from typing import List, Optional, Union +from typing_extensions import Literal from fastapi import APIRouter, Depends, File, UploadFile, status, Form +from fastapi.responses import PlainTextResponse from pydantic import BaseModel, Field -from typing import List, Optional, Union -from typing_extensions import Literal from opentrons.protocol_reader import ( ProtocolReader, @@ -307,7 +308,7 @@ async def get_protocols( @protocols_router.get( path="/protocols/ids", - summary="[Internal] Get uploaded protocol ids", + summary="[Internal] Get uploaded protocol IDs", description=( "Get the IDs of all protocols stored on the server." "\n\n" @@ -501,3 +502,58 @@ async def get_protocol_analysis_by_id( ) from error return await PydanticResponse.create(content=SimpleBody.construct(data=analysis)) + + +@protocols_router.get( + path="/protocols/{protocolId}/analyses/{analysisId}/asDocument", + summary="[Experimental] Get one of a protocol's analyses as a raw document", + description=( + "**Warning:** This endpoint is experimental. We may change or remove it without warning." + "\n\n" + "This is a faster alternative to `GET /protocols/{protocolId}/analyses`" + " and `GET /protocols/{protocolId}/analyses/{analysisId}`." + " For large analyses (10k+ commands), those other endpoints can take minutes to respond," + " whereas this one should only take a few seconds." + "\n\n" + "For a completed analysis, this returns the same JSON data as the" + " `GET /protocols/:id/analyses/:id` endpoint, except that it's not wrapped in a top-level" + " `data` object." + "\n\n" + "For a *pending* analysis, this returns a 404 response, unlike those other" + ' endpoints, which return a 200 response with `"status": "pending"`.' + ), + responses={ + status.HTTP_404_NOT_FOUND: { + "model": ErrorBody[Union[ProtocolNotFound, AnalysisNotFound]] + }, + }, +) +async def get_protocol_analysis_as_document( + protocolId: str, + analysisId: str, + protocol_store: ProtocolStore = Depends(get_protocol_store), + analysis_store: AnalysisStore = Depends(get_analysis_store), +) -> PlainTextResponse: + """Get a protocol analysis by analysis ID. + + Arguments: + protocolId: The ID of the protocol, pulled from the URL. + analysisId: The ID of the analysis, pulled from the URL. + protocol_store: Protocol resource storage. + analysis_store: Analysis resource storage. 
+ """ + if not protocol_store.has(protocolId): + raise ProtocolNotFound(detail=f"Protocol {protocolId} not found").as_error( + status.HTTP_404_NOT_FOUND + ) + + try: + # TODO(mm, 2022-04-28): This will erroneously return an analysis even if + # this analysis isn't owned by this protocol. This should be an error. + analysis = await analysis_store.get_as_document(analysisId) + except AnalysisNotFoundError as error: + raise AnalysisNotFound(detail=str(error)).as_error( + status.HTTP_404_NOT_FOUND + ) from error + + return PlainTextResponse(content=analysis, media_type="application/json") diff --git a/robot-server/tests/integration/http_api/persistence/test_compatibility.py b/robot-server/tests/integration/http_api/persistence/test_compatibility.py index ebdf26e9037..6df9a36602f 100644 --- a/robot-server/tests/integration/http_api/persistence/test_compatibility.py +++ b/robot-server/tests/integration/http_api/persistence/test_compatibility.py @@ -13,6 +13,8 @@ from .persistence_snapshots_dir import PERSISTENCE_SNAPSHOTS_DIR +# Allow plenty of time for database migrations, which can take a while in our CI runners. +_STARTUP_TIMEOUT = 60 _POLL_INTERVAL = 0.1 _RUN_TIMEOUT = 5 @@ -93,8 +95,8 @@ async def test_protocols_analyses_and_runs_available_from_older_persistence_dir( ), "Dev Robot is running and must not be." with DevServer(port=_PORT, persistence_directory=snapshot.get_copy()) as server: server.start() - assert ( - await robot_client.wait_until_alive() + assert await robot_client.wait_until_alive( + _STARTUP_TIMEOUT ), "Dev Robot never became available." all_protocols = (await robot_client.get_protocols()).json()["data"] @@ -121,6 +123,12 @@ async def test_protocols_analyses_and_runs_available_from_older_persistence_dir( == analysis_ids_from_all_analyses_endpoint ) + for analysis_id in analysis_ids_from_all_protocols_endpoint: + # Make sure this doesn't 404. + await robot_client.get_analysis_as_document( + protocol_id=protocol_id, analysis_id=analysis_id + ) + number_of_analyses = len(analysis_ids_from_all_protocols_endpoint) if protocol_id in snapshot.protocols_with_no_analyses: assert number_of_analyses == 0 @@ -167,8 +175,9 @@ async def test_rerun_flex_dev_compat() -> None: ), "Dev Robot is running but it should not be." with DevServer(persistence_directory=snapshot.get_copy(), port=_PORT) as server: server.start() - await client.wait_until_alive() - assert await client.wait_until_alive(), "Dev Robot never became available." + assert await client.wait_until_alive( + _STARTUP_TIMEOUT + ), "Dev Robot never became available." 
[protocol] = (await client.get_protocols()).json()["data"] new_run = ( diff --git a/robot-server/tests/integration/http_api/protocols/test_analyses.tavern.yaml b/robot-server/tests/integration/http_api/protocols/test_analyses.tavern.yaml new file mode 100644 index 00000000000..a756ea10e1b --- /dev/null +++ b/robot-server/tests/integration/http_api/protocols/test_analyses.tavern.yaml @@ -0,0 +1,86 @@ +test_name: Test the protocol analysis endpoints + +marks: + - usefixtures: + - ot2_server_base_url + +stages: + - name: Upload a protocol + request: + url: '{ot2_server_base_url}/protocols' + method: POST + files: + files: 'tests/integration/protocols/basic_transfer_standalone.py' + response: + save: + json: + protocol_id: data.id + analysis_id: data.analysisSummaries[0].id + strict: + - json:off + status_code: 201 + json: + data: + analyses: [] + analysisSummaries: + - id: !anystr + status: pending + + - name: Check that the analysis summary is present in /protocols/:id; retry until it says it's completed + max_retries: 5 + delay_after: 1 + request: + url: '{ot2_server_base_url}/protocols/{protocol_id}' + response: + status_code: 200 + json: + data: + analyses: [] + analysisSummaries: + - id: '{analysis_id}' + status: completed + id: !anything + protocolType: !anything + files: !anything + createdAt: !anything + robotType: !anything + metadata: !anything + links: !anything + + - name: Check that the analysis data is present in /protocols/:id/analyses/:id + request: + url: '{ot2_server_base_url}/protocols/{protocol_id}/analyses/{analysis_id}' + response: + save: + json: + analysis_data: data + strict: + - json:off + json: + data: + id: '{analysis_id}' + commands: + # Check for this command's presence as a smoke test that the analysis isn't empty. + - commandType: loadPipette + + + - name: Check that the analysis data is present in /protocols/:id/analyses + request: + url: '{ot2_server_base_url}/protocols/{protocol_id}/analyses' + response: + json: + data: + - !force_format_include '{analysis_data}' + meta: + cursor: 0 + totalLength: 1 + + - name: Check that the analysis data is present in /protocols/:id/analyses/:id/asDocument + request: + url: '{ot2_server_base_url}/protocols/{protocol_id}/analyses/{analysis_id}/asDocument' + response: + headers: + # This endpoint's steps outside our usual FastAPI implementation. + # We need to make sure we get the Content-Type right because FastAPI won't do it for us. 
+ Content-Type: application/json + json: !force_format_include '{analysis_data}' diff --git a/robot-server/tests/integration/http_api/protocols/test_persistence.py b/robot-server/tests/integration/http_api/protocols/test_persistence.py index c5e375ba52f..2c6c62bf930 100644 --- a/robot-server/tests/integration/http_api/protocols/test_persistence.py +++ b/robot-server/tests/integration/http_api/protocols/test_persistence.py @@ -167,7 +167,13 @@ async def _get_all_analyses(robot_client: RobotClient) -> Dict[str, List[object] protocol_id=protocol_id, analysis_id=analysis_id, ) - analyses_on_this_protocol.append(analysis_response.json()["data"]) + analysis_as_document_response = await robot_client.get_analysis_as_document( + protocol_id=protocol_id, + analysis_id=analysis_id, + ) + analyses_on_this_protocol.append( + (analysis_response.json()["data"], analysis_as_document_response.json()) + ) analyses_by_protocol_id[protocol_id] = analyses_on_this_protocol diff --git a/robot-server/tests/integration/robot_client.py b/robot-server/tests/integration/robot_client.py index 23c13bca7c3..32cb87f4201 100644 --- a/robot-server/tests/integration/robot_client.py +++ b/robot-server/tests/integration/robot_client.py @@ -261,13 +261,23 @@ async def get_analyses(self, protocol_id: str) -> Response: return response async def get_analysis(self, protocol_id: str, analysis_id: str) -> Response: - """GET /protocols/{protocol_id}/{analysis_id}.""" + """GET /protocols/{protocol_id}/analyses/{analysis_id}.""" response = await self.httpx_client.get( url=f"{self.base_url}/protocols/{protocol_id}/analyses/{analysis_id}" ) response.raise_for_status() return response + async def get_analysis_as_document( + self, protocol_id: str, analysis_id: str + ) -> Response: + """GET /protocols/{protocol_id}/analyses/{analysis_id}/asDocument.""" + response = await self.httpx_client.get( + url=f"{self.base_url}/protocols/{protocol_id}/analyses/{analysis_id}/asDocument" + ) + response.raise_for_status() + return response + async def delete_run(self, run_id: str) -> Response: """DELETE /runs/{run_id}.""" response = await self.httpx_client.delete(f"{self.base_url}/runs/{run_id}") diff --git a/robot-server/tests/persistence/test_migrations.py b/robot-server/tests/persistence/test_migrations.py deleted file mode 100644 index 3ae8a6eb3a5..00000000000 --- a/robot-server/tests/persistence/test_migrations.py +++ /dev/null @@ -1,82 +0,0 @@ -"""Test SQL database migrations.""" -from pathlib import Path -from typing import Generator - -import pytest -import sqlalchemy -from pytest_lazyfixture import lazy_fixture # type: ignore[import] - -from robot_server.persistence import create_sql_engine -from robot_server.persistence import ( - migration_table, - run_table, - action_table, - protocol_table, - analysis_table, -) - - -TABLES = [run_table, action_table, protocol_table, analysis_table] - - -@pytest.fixture -def database_v0(tmp_path: Path) -> Path: - """Create a database matching schema version 0.""" - db_path = tmp_path / "migration-test-v0.db" - sql_engine = create_sql_engine(db_path) - sql_engine.execute("DROP TABLE migration") - sql_engine.execute("DROP TABLE run") - sql_engine.execute( - """ - CREATE TABLE run ( - id VARCHAR NOT NULL, - created_at DATETIME NOT NULL, - protocol_id VARCHAR, - PRIMARY KEY (id), - FOREIGN KEY(protocol_id) REFERENCES protocol (id) - ) - """ - ) - sql_engine.dispose() - return db_path - - -@pytest.fixture -def database_v1(tmp_path: Path) -> Path: - """Create a database matching schema version 1.""" - db_path = 
tmp_path / "migration-test-v1.db" - sql_engine = create_sql_engine(db_path) - sql_engine.dispose() - return db_path - - -@pytest.fixture -def subject(database_path: Path) -> Generator[sqlalchemy.engine.Engine, None, None]: - """Get a SQLEngine test subject. - - The tests in this suite will use this SQLEngine to test - that migrations happen properly. For other tests, the `sql_engine` - fixture in `conftest.py` should be used, instead. - """ - engine = create_sql_engine(database_path) - yield engine - engine.dispose() - - -@pytest.mark.parametrize( - "database_path", - [ - lazy_fixture("database_v0"), - lazy_fixture("database_v1"), - ], -) -def test_migration(subject: sqlalchemy.engine.Engine) -> None: - """It should migrate a table.""" - migrations = subject.execute(sqlalchemy.select(migration_table)).all() - - assert [m.version for m in migrations] == [1] - - # all table queries work without raising - for table in TABLES: - values = subject.execute(sqlalchemy.select(table)).all() - assert values == [] diff --git a/robot-server/tests/persistence/test_tables.py b/robot-server/tests/persistence/test_tables.py index 1f1ec177175..b50b06d5687 100644 --- a/robot-server/tests/persistence/test_tables.py +++ b/robot-server/tests/persistence/test_tables.py @@ -41,6 +41,7 @@ protocol_id VARCHAR NOT NULL, analyzer_version VARCHAR NOT NULL, completed_analysis BLOB NOT NULL, + completed_analysis_as_document VARCHAR, PRIMARY KEY (id), FOREIGN KEY(protocol_id) REFERENCES protocol (id) ) diff --git a/robot-server/tests/protocols/test_analysis_store.py b/robot-server/tests/protocols/test_analysis_store.py index be78b044a2d..058ec0497d9 100644 --- a/robot-server/tests/protocols/test_analysis_store.py +++ b/robot-server/tests/protocols/test_analysis_store.py @@ -1,10 +1,12 @@ """Tests for the AnalysisStore interface.""" -import pytest +import json from datetime import datetime, timezone from pathlib import Path from typing import List, NamedTuple +import pytest + from sqlalchemy.engine import Engine as SQLEngine from opentrons_shared_data.pipette.dev_types import PipetteNameType @@ -105,9 +107,13 @@ async def test_add_pending( result = subject.add_pending(protocol_id="protocol-id", analysis_id="analysis-id") assert result == expected_summary + assert await subject.get("analysis-id") == expected_analysis assert await subject.get_by_protocol("protocol-id") == [expected_analysis] assert subject.get_summaries_by_protocol("protocol-id") == [expected_summary] + with pytest.raises(AnalysisNotFoundError, match="analysis-id"): + # Unlike get(), get_as_document() should raise if the analysis is pending. 
+ await subject.get_as_document("analysis-id") async def test_returned_in_order_added( @@ -178,6 +184,7 @@ async def test_update_adds_details_and_completes_analysis( ) result = await subject.get("analysis-id") + result_as_document = await subject.get_as_document("analysis-id") assert result == CompletedAnalysis( id="analysis-id", @@ -190,6 +197,26 @@ async def test_update_adds_details_and_completes_analysis( liquids=[], ) assert await subject.get_by_protocol("protocol-id") == [result] + assert json.loads(result_as_document) == { + "id": "analysis-id", + "result": "ok", + "status": "completed", + "labware": [ + { + "id": "labware-id", + "loadName": "load-name", + "definitionUri": "namespace/load-name/42", + "location": {"slotName": "1"}, + } + ], + "pipettes": [ + {"id": "pipette-id", "pipetteName": "p300_single", "mount": "left"} + ], + "commands": [], + "errors": [], + "liquids": [], + "modules": [], + } class AnalysisResultSpec(NamedTuple): diff --git a/robot-server/tests/protocols/test_completed_analysis_store.py b/robot-server/tests/protocols/test_completed_analysis_store.py index eb1a596974b..76a9d32adc6 100644 --- a/robot-server/tests/protocols/test_completed_analysis_store.py +++ b/robot-server/tests/protocols/test_completed_analysis_store.py @@ -1,4 +1,5 @@ """Test the CompletedAnalysisStore.""" +import json from datetime import datetime, timezone from pathlib import Path @@ -92,7 +93,6 @@ def _completed_analysis_resource( async def test_get_by_analysis_id_prefers_cache( subject: CompletedAnalysisStore, - sql_engine: Engine, memcache: MemoryCache[str, CompletedAnalysisResource], protocol_store: ProtocolStore, decoy: Decoy, @@ -106,9 +106,8 @@ async def test_get_by_analysis_id_prefers_cache( assert (await subject.get_by_id("analysis-id")) is resource -async def test_get_by_analysis_falls_back_to_sql( +async def test_get_by_analysis_id_falls_back_to_sql( subject: CompletedAnalysisStore, - sql_engine: Engine, memcache: MemoryCache[str, CompletedAnalysisResource], protocol_store: ProtocolStore, decoy: Decoy, @@ -124,9 +123,8 @@ async def test_get_by_analysis_falls_back_to_sql( assert analysis_from_sql == resource -async def test_get_by_analysis_id_caches_results( +async def test_get_by_analysis_id_stores_results_in_cache( subject: CompletedAnalysisStore, - sql_engine: Engine, memcache: MemoryCache[str, CompletedAnalysisResource], protocol_store: ProtocolStore, decoy: Decoy, @@ -142,8 +140,32 @@ async def test_get_by_analysis_id_caches_results( decoy.verify(memcache.insert("analysis-id", from_sql)) +async def test_get_by_analysis_id_as_document( + subject: CompletedAnalysisStore, + protocol_store: ProtocolStore, +) -> None: + """It should return the analysis serialized as a JSON string.""" + resource = _completed_analysis_resource("analysis-id", "protocol-id") + protocol_store.insert(make_dummy_protocol_resource("protocol-id")) + await subject.add(resource) + result = await subject.get_by_id_as_document("analysis-id") + assert result is not None + assert json.loads(result) == { + "id": "analysis-id", + "result": "ok", + "status": "completed", + "commands": [], + "errors": [], + "labware": [], + "liquids": [], + "modules": [], + "pipettes": [], + "result": "ok", + } + + async def test_get_ids_by_protocol( - subject: CompletedAnalysisStore, sql_engine: Engine, protocol_store: ProtocolStore + subject: CompletedAnalysisStore, protocol_store: ProtocolStore ) -> None: """It should return correct analysis id lists.""" resource_1 = _completed_analysis_resource("analysis-id-1", "protocol-id-1") 
@@ -154,14 +176,14 @@ async def test_get_ids_by_protocol( await subject.add(resource_1) await subject.add(resource_2) await subject.add(resource_3) - assert sorted(subject.get_ids_by_protocol("protocol-id-1")) == sorted( - ["analysis-id-1", "analysis-id-2"] - ) + assert subject.get_ids_by_protocol("protocol-id-1") == [ + "analysis-id-1", + "analysis-id-2", + ] async def test_get_by_protocol( subject: CompletedAnalysisStore, - sql_engine: Engine, memcache: MemoryCache[str, CompletedAnalysisResource], protocol_store: ProtocolStore, decoy: Decoy, diff --git a/robot-server/tests/protocols/test_protocols_router.py b/robot-server/tests/protocols/test_protocols_router.py index 1887d989435..56027ff5970 100644 --- a/robot-server/tests/protocols/test_protocols_router.py +++ b/robot-server/tests/protocols/test_protocols_router.py @@ -56,6 +56,7 @@ delete_protocol_by_id, get_protocol_analyses, get_protocol_analysis_by_id, + get_protocol_analysis_as_document, ) @@ -647,7 +648,7 @@ async def test_get_protocol_analysis_by_id_analysis_not_found( protocol_store: ProtocolStore, analysis_store: AnalysisStore, ) -> None: - """It should get a single full analysis by ID.""" + """It should 404 if the analysis does not exist.""" decoy.when(protocol_store.has("protocol-id")).then_return(True) decoy.when(await analysis_store.get("analysis-id")).then_raise( AnalysisNotFoundError("oh no") @@ -663,3 +664,70 @@ async def test_get_protocol_analysis_by_id_analysis_not_found( assert exc_info.value.status_code == 404 assert exc_info.value.content["errors"][0]["id"] == "AnalysisNotFound" + + +async def test_get_protocol_analysis_as_document( + decoy: Decoy, + protocol_store: ProtocolStore, + analysis_store: AnalysisStore, +) -> None: + """It should get a single full analysis by ID.""" + analysis = "foo" + + decoy.when(protocol_store.has("protocol-id")).then_return(True) + decoy.when(await analysis_store.get_as_document("analysis-id")).then_return( + analysis + ) + + result = await get_protocol_analysis_as_document( + protocolId="protocol-id", + analysisId="analysis-id", + protocol_store=protocol_store, + analysis_store=analysis_store, + ) + + assert result.status_code == 200 + assert result.body.decode(result.charset) == analysis + + +async def test_get_protocol_analysis_as_document_protocol_not_found( + decoy: Decoy, + protocol_store: ProtocolStore, + analysis_store: AnalysisStore, +) -> None: + """It should 404 if the protocol does not exist.""" + decoy.when(protocol_store.has("protocol-id")).then_return(False) + + with pytest.raises(ApiError) as exc_info: + await get_protocol_analysis_as_document( + protocolId="protocol-id", + analysisId="analysis-id", + protocol_store=protocol_store, + analysis_store=analysis_store, + ) + + assert exc_info.value.status_code == 404 + assert exc_info.value.content["errors"][0]["id"] == "ProtocolNotFound" + + +async def test_get_protocol_analysis_as_document_analysis_not_found( + decoy: Decoy, + protocol_store: ProtocolStore, + analysis_store: AnalysisStore, +) -> None: + """It should 404 if the analysis document does not exist.""" + decoy.when(protocol_store.has("protocol-id")).then_return(True) + decoy.when(await analysis_store.get_as_document("analysis-id")).then_raise( + AnalysisNotFoundError("oh no") + ) + + with pytest.raises(ApiError) as exc_info: + await get_protocol_analysis_as_document( + protocolId="protocol-id", + analysisId="analysis-id", + protocol_store=protocol_store, + analysis_store=analysis_store, + ) + + assert exc_info.value.status_code == 404 + assert 
exc_info.value.content["errors"][0]["id"] == "AnalysisNotFound"
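
As a rough usage sketch for the new `/asDocument` endpoint (not part of this diff; `base_url` and `protocol_id` are placeholders, and the httpx client mirrors the one the integration tests use), a caller can list a protocol's analysis summaries and then fetch each completed analysis as a raw document. The body is the bare analysis JSON with no top-level `data` wrapper, and a pending analysis returns 404 instead of a 200 with `"status": "pending"`:

    """Illustrative client sketch for the /asDocument endpoint; not part of this change."""
    from typing import Any, Dict, List

    import httpx


    def fetch_completed_analysis_documents(
        base_url: str, protocol_id: str
    ) -> List[Dict[str, Any]]:
        """Fetch each of a protocol's completed analyses as a raw JSON document."""
        with httpx.Client(base_url=base_url) as client:
            protocol_response = client.get(f"/protocols/{protocol_id}")
            protocol_response.raise_for_status()
            summaries = protocol_response.json()["data"]["analysisSummaries"]

            documents: List[Dict[str, Any]] = []
            for summary in summaries:
                response = client.get(
                    f"/protocols/{protocol_id}/analyses/{summary['id']}/asDocument"
                )
                if response.status_code == 404:
                    # Unlike GET /protocols/:id/analyses/:id, a pending analysis
                    # is a 404 here rather than a 200 with "status": "pending".
                    continue
                response.raise_for_status()
                # The body is the bare analysis document; there is no top-level
                # `data` object to unpack.
                documents.append(response.json())
            return documents

Returning the stored string through a PlainTextResponse with an explicit application/json media type is what lets the server skip re-validating and re-serializing a large CompletedAnalysis model on every request.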
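
The stored `completed_analysis_as_document` string is produced with the same Pydantic options (`by_alias=True`, `exclude_none=True`) that the JSON API uses, which is why it can be returned verbatim and still match what `GET /protocols/:id/analyses/:id` would serialize for the same analysis. A minimal Pydantic v1 sketch of what those options do, using a made-up stand-in model rather than the real `CompletedAnalysis`:

    """Sketch of the serialization options; ExampleCommand is a stand-in, not a real model."""
    from typing import Optional

    from pydantic import BaseModel, Field


    class ExampleCommand(BaseModel):
        """Illustrative stand-in with one aliased field and one optional field."""

        command_type: str = Field(..., alias="commandType")
        error: Optional[str] = None


    command = ExampleCommand(commandType="loadPipette")

    # With by_alias=True and exclude_none=True (the options used when writing
    # completed_analysis_as_document), field names come out in their camelCase
    # API form and unset optional fields are dropped:
    print(command.json(by_alias=True, exclude_none=True))
    # {"commandType": "loadPipette"}

    # With Pydantic v1 defaults, the same model serializes differently, so the
    # stored string would no longer match the /analyses/:id response:
    print(command.json())
    # {"command_type": "loadPipette", "error": null}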
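
Because `completed_analysis_as_document` is nullable only to satisfy SQLite and the migration code, while the backfill is supposed to leave it non-NULL for every completed analysis, one way to sanity-check a migrated database is to count rows still missing the document. This is an illustrative check, not part of the change; it assumes the robot-server environment and a SQL engine like the ones used above:

    """Illustrative post-migration check; sql_engine is assumed to be provided by the caller."""
    import sqlalchemy

    from robot_server.persistence import analysis_table


    def count_unmigrated_analyses(sql_engine: sqlalchemy.engine.Engine) -> int:
        """Return how many analysis rows still lack completed_analysis_as_document."""
        statement = (
            sqlalchemy.select(sqlalchemy.func.count())
            .select_from(analysis_table)
            .where(analysis_table.c.completed_analysis_as_document.is_(None))
        )
        with sql_engine.begin() as transaction:
            count: int = transaction.execute(statement).scalar_one()
        return count

    # After the startup migration in this diff has run, this should be 0.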