diff --git a/opentrons-ai-client/Makefile b/opentrons-ai-client/Makefile
new file mode 100644
index 00000000000..9c15fa32e41
--- /dev/null
+++ b/opentrons-ai-client/Makefile
@@ -0,0 +1,59 @@
+# opentrons ai client makefile
+
+# using bash instead of /bin/bash in SHELL prevents macOS optimizing away our PATH update
+SHELL := bash
+
+# add node_modules/.bin to PATH
+PATH := $(shell cd .. && yarn bin):$(PATH)
+
+benchmark_output := $(shell node -e 'console.log(new Date());')
+
+# These variables can be overridden when make is invoked to customize the
+# behavior of jest
+tests ?=
+cov_opts ?= --coverage=true
+test_opts ?=
+
+# standard targets
+#####################################################################
+
+.PHONY: all
+all: clean build
+
+.PHONY: setup
+setup:
+	yarn
+
+.PHONY: clean
+clean:
+	shx rm -rf dist
+
+# artifacts
+#####################################################################
+
+.PHONY: build
+build: export NODE_ENV := production
+build:
+	vite build
+	git rev-parse HEAD > dist/.commit
+
+# development
+#####################################################################
+
+.PHONY: dev
+dev: export NODE_ENV := development
+dev:
+	vite serve
+
+# production assets server
+.PHONY: serve
+serve: all
+	node ../scripts/serve-static dist
+
+.PHONY: test
+test:
+	$(MAKE) -C .. test-js-ai-client tests="$(tests)" test_opts="$(test_opts)"
+
+.PHONY: test-cov
+test-cov:
+	$(MAKE) -C .. test-js-ai-client tests="$(tests)" test_opts="$(test_opts)" cov_opts="$(cov_opts)"
diff --git a/opentrons-ai-client/README.md b/opentrons-ai-client/README.md
new file mode 100644
index 00000000000..c2ff2908418
--- /dev/null
+++ b/opentrons-ai-client/README.md
@@ -0,0 +1,64 @@
+# Opentrons AI Frontend
+
+[![JavaScript Style Guide][style-guide-badge]][style-guide]
+
+[Download][] | [Support][]
+
+## Overview
+
+The Opentrons AI application helps you create a protocol using natural language.
+
+## Developing
+
+To get started: clone the `Opentrons/opentrons` repository, set up your computer for development as specified in the [contributing guide][contributing-guide-setup], and then:
+
+```shell
+# change into the cloned directory
+cd opentrons
+# prerequisite: install dependencies as specified in project setup
+make setup
+# launch the dev server
+make -C opentrons-ai-client dev
+```
+
+## Stack and structure
+
+The UI stack is built using:
+
+- [React][]
+- [Babel][]
+- [Vite][]
+
+Some important directories:
+
+- `opentrons-ai-server` — Opentrons AI application's server
+
+## Copy management
+
+We use [i18next](https://www.i18next.com) for copy management and internationalization.
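+
+For example, a component can read a copy string from the `shared` namespace through the `useTranslation` hook (an illustrative sketch; `SendButton` is not a component in this change):
+
+```tsx
+import React from 'react'
+import { useTranslation } from 'react-i18next'
+
+export function SendButton(): JSX.Element {
+  // 'send' is defined in src/assets/localization/en/shared.json
+  const { t } = useTranslation('shared')
+  return <button>{t('send')}</button>
+}
+```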
+
+## Testing
+
+Tests for the Opentrons AI client are run from the top level along with all other JS project tests.
+
+- `make test-js` - Run all JavaScript tests
+
+Test tasks can also be run with the following arguments:
+
+| Argument | Default  | Description             | Example                           |
+| -------- | -------- | ----------------------- | --------------------------------- |
+| watch    | `false`  | Run tests in watch mode | `make test-unit watch=true`       |
+| cover    | `!watch` | Calculate code coverage | `make test watch=true cover=true` |
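+
+To scope a run to just this project, the targets in this project's Makefile can also be invoked from the repository root (an illustrative invocation, not taken from the change itself):
+
+```shell
+# run only the AI client's tests, with coverage
+make -C opentrons-ai-client test-cov tests=src/App.test.tsx
+```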
+
+## Building
+
+TBD
+
+[style-guide]: https://standardjs.com
+[style-guide-badge]: https://img.shields.io/badge/code_style-standard-brightgreen.svg?style=flat-square&maxAge=3600
+[contributing-guide-setup]: ../CONTRIBUTING.md#development-setup
+[contributing-guide-running-the-api]: ../CONTRIBUTING.md#opentrons-api
+[react]: https://react.dev/
+[babel]: https://babeljs.io/
+[vite]: https://vitejs.dev/
+[bundle-analyzer]: https://github.com/webpack-contrib/webpack-bundle-analyzer
diff --git a/opentrons-ai-client/babel.config.cjs b/opentrons-ai-client/babel.config.cjs
new file mode 100644
index 00000000000..11739e6bf00
--- /dev/null
+++ b/opentrons-ai-client/babel.config.cjs
@@ -0,0 +1,21 @@
+'use strict'
+
+module.exports = {
+  env: {
+    // Must have babel-plugin-styled-components in each env,
+    // see here for further details: https://styled-components.com/docs/tooling#babel-plugin
+    production: {
+      plugins: ['babel-plugin-styled-components', 'babel-plugin-unassert'],
+    },
+    development: {
+      plugins: ['babel-plugin-styled-components'],
+    },
+    test: {
+      plugins: [
+        // disable ssr, displayName to fix toHaveStyleRule
+        // https://github.com/styled-components/jest-styled-components/issues/294
+        ['babel-plugin-styled-components', { ssr: false, displayName: false }],
+      ],
+    },
+  },
+}
diff --git a/opentrons-ai-client/index.html b/opentrons-ai-client/index.html
new file mode 100644
index 00000000000..57e7f83f591
--- /dev/null
+++ b/opentrons-ai-client/index.html
@@ -0,0 +1,13 @@
+<!DOCTYPE html>
+<html lang="en">
+  <head>
+    <meta charset="UTF-8" />
+    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
+    <title>Opentrons AI</title>
+  </head>
+  <body>
+    <div id="root"></div>
+    <script type="module" src="/src/main.tsx"></script>
+  </body>
+</html>
diff --git a/opentrons-ai-client/package.json b/opentrons-ai-client/package.json
new file mode 100644
index 00000000000..e3c056e8bfe
--- /dev/null
+++ b/opentrons-ai-client/package.json
@@ -0,0 +1,38 @@
+{
+  "name": "opentrons-ai-client",
+  "type": "module",
+  "version": "0.0.0-dev",
+  "description": "Opentrons AI application UI",
+  "source": "src/index.tsx",
+  "types": "lib/index.d.ts",
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/Opentrons/opentrons.git"
+  },
+  "author": {
+    "name": "Opentrons Labworks",
+    "email": "engineering@opentrons.com"
+  },
+  "license": "Apache-2.0",
+  "bugs": {
+    "url": "https://github.com/Opentrons/opentrons/issues"
+  },
+  "homepage": "https://github.com/Opentrons/opentrons",
+  "dependencies": {
+    "@fontsource/dejavu-sans": "5.0.3",
+    "@fontsource/public-sans": "5.0.3",
+    "@opentrons/components": "link:../components",
+    "i18next": "^19.8.3",
+    "react": "18.2.0",
+    "react-dom": "18.2.0",
+    "react-error-boundary": "^4.0.10",
+    "react-i18next": "13.5.0",
+    "styled-components": "5.3.6"
+  },
+  "engines": {
+    "node": ">=18.19.0"
+  },
+  "devDependencies": {
+    "@types/styled-components": "^5.1.26"
+  }
+}
diff --git a/opentrons-ai-client/src/App.test.tsx b/opentrons-ai-client/src/App.test.tsx
new file mode 100644
index 00000000000..03b731311c0
--- /dev/null
+++ b/opentrons-ai-client/src/App.test.tsx
@@ -0,0 +1,18 @@
+import React from 'react'
+import { screen } from '@testing-library/react'
+import { describe, it } from 'vitest'
+
+import { renderWithProviders } from './__testing-utils__'
+
+import { App } from './App'
+
+const render = (): ReturnType<typeof renderWithProviders> => {
+  return renderWithProviders(<App />)
+}
+
+describe('App', () => {
+  it('should render text', () => {
+    render()
+    screen.getByText('Opentrons AI')
+  })
+})
diff --git a/opentrons-ai-client/src/App.tsx b/opentrons-ai-client/src/App.tsx
new file mode 100644
index 00000000000..f31fbd35940
--- /dev/null
+++ b/opentrons-ai-client/src/App.tsx
@@ -0,0 +1,9 @@
+import React from 'react'
+import { Flex, StyledText } from '@opentrons/components'
+export function App(): JSX.Element {
+  return (
+    <Flex>
+      <StyledText>Opentrons AI</StyledText>
+    </Flex>
+  )
+}
diff --git a/opentrons-ai-client/src/__testing-utils__/index.ts b/opentrons-ai-client/src/__testing-utils__/index.ts
new file mode 100644
index 00000000000..e17c0ffbc31
--- /dev/null
+++ b/opentrons-ai-client/src/__testing-utils__/index.ts
@@ -0,0 +1,2 @@
+export * from './renderWithProviders'
+export * from './matchers'
diff --git a/opentrons-ai-client/src/__testing-utils__/matchers.ts b/opentrons-ai-client/src/__testing-utils__/matchers.ts
new file mode 100644
index 00000000000..66234dbc915
--- /dev/null
+++ b/opentrons-ai-client/src/__testing-utils__/matchers.ts
@@ -0,0 +1,24 @@
+import type { Matcher } from '@testing-library/react'
+
+// Match things like <div><span>Some nested text</span></div>
+// Use with either string match: getByText(nestedTextMatcher("Some nested text"))
+// or regexp: getByText(nestedTextMatcher(/Some nested text/))
+export const nestedTextMatcher = (textMatch: string | RegExp): Matcher => (
+  content,
+  node
+) => {
+  const hasText = (n: typeof node): boolean => {
+    if (n == null || n.textContent === null) return false
+    return typeof textMatch === 'string'
+      ? Boolean(n?.textContent.match(textMatch))
+      : textMatch.test(n.textContent)
+  }
+  const nodeHasText = hasText(node)
+  const childrenDontHaveText =
+    node != null && Array.from(node.children).every(child => !hasText(child))
+
+  return nodeHasText && childrenDontHaveText
+}
+
+// need componentPropsMatcher
+// need partialComponentPropsMatcher
diff --git a/opentrons-ai-client/src/__testing-utils__/renderWithProviders.tsx b/opentrons-ai-client/src/__testing-utils__/renderWithProviders.tsx
new file mode 100644
index 00000000000..65a2e01855e
--- /dev/null
+++ b/opentrons-ai-client/src/__testing-utils__/renderWithProviders.tsx
@@ -0,0 +1,53 @@
+// render the targeted component using @testing-library/react
+// with wrapping providers for i18next and redux
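+// example usage (illustrative):
+//   const [{ getByText }, store] = renderWithProviders(<App />, { i18nInstance: i18n })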
+import * as React from 'react'
+import { QueryClient, QueryClientProvider } from 'react-query'
+import { I18nextProvider } from 'react-i18next'
+import { Provider } from 'react-redux'
+import { vi } from 'vitest'
+import { render } from '@testing-library/react'
+import { createStore } from 'redux'
+
+import type { PreloadedState, Store } from 'redux'
+import type { RenderOptions, RenderResult } from '@testing-library/react'
+
+export interface RenderWithProvidersOptions<State> extends RenderOptions {
+  initialState?: State
+  i18nInstance: React.ComponentProps<typeof I18nextProvider>['i18n']
+}
+
+export function renderWithProviders<State>(
+  Component: React.ReactElement,
+  options?: RenderWithProvidersOptions<State>
+): [RenderResult, Store<State>] {
+  // eslint-disable-next-line @typescript-eslint/consistent-type-assertions
+  const { initialState = {}, i18nInstance = null } = options || {}
+
+  const store: Store<State> = createStore(
+    vi.fn(),
+    initialState as PreloadedState<State>
+  )
+  store.dispatch = vi.fn()
+  store.getState = vi.fn(() => initialState) as () => State
+
+  const queryClient = new QueryClient()
+
+  const ProviderWrapper: React.ComponentType<React.PropsWithChildren<{}>> = ({
+    children,
+  }) => {
+    const BaseWrapper = (
+      <QueryClientProvider client={queryClient}>
+        <Provider store={store}>{children}</Provider>
+      </QueryClientProvider>
+    )
+    if (i18nInstance != null) {
+      return (
+        <I18nextProvider i18n={i18nInstance}>{BaseWrapper}</I18nextProvider>
+      )
+    } else {
+      return BaseWrapper
+    }
+  }
+
+  return [render(Component, { wrapper: ProviderWrapper }), store]
+}
diff --git a/opentrons-ai-client/src/assets/localization/en/index.ts b/opentrons-ai-client/src/assets/localization/en/index.ts
new file mode 100644
index 00000000000..b5aa26621dd
--- /dev/null
+++ b/opentrons-ai-client/src/assets/localization/en/index.ts
@@ -0,0 +1,7 @@
+import shared from './shared.json'
+import protocol_generator from './protocol_generator.json'
+
+export const en = {
+  shared,
+  protocol_generator,
+}
diff --git a/opentrons-ai-client/src/assets/localization/en/protocol_generator.json b/opentrons-ai-client/src/assets/localization/en/protocol_generator.json
new file mode 100644
index 00000000000..c8ac35504bb
--- /dev/null
+++ b/opentrons-ai-client/src/assets/localization/en/protocol_generator.json
@@ -0,0 +1,23 @@
+{
+  "api": "API: The API level is 2.15.",
+  "application": "Application: Your protocol's name, describing what it does.",
+  "commands": "Commands: List the protocol's steps, specifying quantities in microliters and giving exact source and destination locations.",
+  "make_sure_your_prompt": "Make sure your prompt includes the following:",
+  "metadata": "Metadata: Three pieces of information.",
+  "modules": "Modules: Thermocycler or Temperature Module.",
+  "opentronsai_asks_you": "OpentronsAI asks you to provide it!",
+  "ot2_pipettes": "OT-2 pipettes: Include volume, number of channels, and generation.",
+  "prc_flex": "PCR (Flex)",
+  "prc": "PCR",
+  "reagent_transfer_flex": "Reagent Transfer (Flex)",
+  "reagent_transfer": "Reagent Transfer",
+  "robot": "Robot: OT-2.",
+  "sidebar_body": "Write a prompt in natural language to generate a Reagent Transfer or a PCR protocol for the OT-2 or Opentrons Flex using the Opentrons Python Protocol API.",
+  "sidebar_header": "Use natural language to generate protocols with OpentronsAI powered by OpenAI",
+  "stuck": "Stuck? Try these example prompts to get started.",
+  "tipracks_and_labware": "Tip racks and labware: Use names from the Opentrons Labware Library.",
+  "type_your_prompt": "Type your prompt...",
+  "well_allocations": "Well allocations: Describe where liquids should go in labware.",
+  "what_if_you": "What if you don’t provide all of those pieces of information?",
+  "what_typeof_protocol": "What type of protocol do you need?"
+}
diff --git a/opentrons-ai-client/src/assets/localization/en/shared.json b/opentrons-ai-client/src/assets/localization/en/shared.json
new file mode 100644
index 00000000000..46cb365873f
--- /dev/null
+++ b/opentrons-ai-client/src/assets/localization/en/shared.json
@@ -0,0 +1,3 @@
+{
+  "send": "Send"
+}
diff --git a/opentrons-ai-client/src/assets/localization/index.ts b/opentrons-ai-client/src/assets/localization/index.ts
new file mode 100644
index 00000000000..e92a7077ed9
--- /dev/null
+++ b/opentrons-ai-client/src/assets/localization/index.ts
@@ -0,0 +1,5 @@
+import { en } from './en'
+
+export const resources = {
+  en,
+}
diff --git a/opentrons-ai-client/src/i18n.ts b/opentrons-ai-client/src/i18n.ts
new file mode 100644
index 00000000000..0f7ef3bf6df
--- /dev/null
+++ b/opentrons-ai-client/src/i18n.ts
@@ -0,0 +1,45 @@
+import i18n from 'i18next'
+import capitalize from 'lodash/capitalize'
+import startCase from 'lodash/startCase'
+import { initReactI18next } from 'react-i18next'
+import { resources } from './assets/localization'
+import { titleCase } from '@opentrons/shared-data'
+
+i18n.use(initReactI18next).init(
+  {
+    resources,
+    lng: 'en',
+    fallbackLng: 'en',
+    debug: process.env.NODE_ENV === 'development',
+    ns: ['shared'],
+    defaultNS: 'shared',
+    interpolation: {
+      escapeValue: false, // not needed for react as it escapes by default
+      format: function (value, format, lng) {
+        if (format === 'upperCase') return value.toUpperCase()
+        if (format === 'lowerCase') return value.toLowerCase()
+        if (format === 'capitalize') return capitalize(value)
+        if (format === 'sentenceCase') return startCase(value)
+        if (format === 'titleCase') return titleCase(value)
+        return value
+      },
+    },
+    keySeparator: false, // use namespaces and context instead
+    saveMissing: true,
+    missingKeyHandler: (lng, ns, key) => {
+      process.env.NODE_ENV === 'test'
+        ? console.error(`Missing ${lng} Translation: key={${key}} ns={${ns}}`)
+        : console.warn(`Missing ${lng} Translation: key={${key}} ns={${ns}}`)
+    },
+  },
+  err => {
+    if (err) {
+      console.error(
+        'Internationalization was not initialized properly. error: ',
+        err
+      )
+    }
+  }
+)
+
+export { i18n }
diff --git a/opentrons-ai-client/src/main.tsx b/opentrons-ai-client/src/main.tsx
new file mode 100644
index 00000000000..466bd35e081
--- /dev/null
+++ b/opentrons-ai-client/src/main.tsx
@@ -0,0 +1,14 @@
+import React from 'react'
+import ReactDOM from 'react-dom/client'
+import { App } from './App'
+
+const rootElement = document.getElementById('root')
+if (rootElement) {
+  ReactDOM.createRoot(rootElement).render(
+    <React.StrictMode>
+      <App />
+    </React.StrictMode>
+  )
+} else {
+  console.error('Root element not found')
+}
diff --git a/opentrons-ai-client/tsconfig-data.json b/opentrons-ai-client/tsconfig-data.json
new file mode 100644
index 00000000000..79a9673faa9
--- /dev/null
+++ b/opentrons-ai-client/tsconfig-data.json
@@ -0,0 +1,12 @@
+{
+  "extends": "../tsconfig-base.json",
+  "references": [],
+  "compilerOptions": {
+    "composite": true,
+    "emitDeclarationOnly": false,
+    "rootDir": ".",
+    "outDir": "lib"
+  },
+  "include": ["src/**/*.json", "fixtures/**/*.json", "vite.config.ts"],
+  "exclude": ["**/*.ts", "**/*.tsx"]
+}
diff --git a/opentrons-ai-client/tsconfig.json b/opentrons-ai-client/tsconfig.json
new file mode 100644
index 00000000000..b3c6dc275a8
--- /dev/null
+++ b/opentrons-ai-client/tsconfig.json
@@ -0,0 +1,16 @@
+{
+  "extends": "../tsconfig-base.json",
+  "references": [
+    {
+      "path": "./tsconfig-data.json"
+    },
+    {
+      "path": "../components"
+    }
+  ],
+  "compilerOptions": {
+    "rootDir": "src",
+    "outDir": "lib"
+  },
+  "include": ["typings", "src"]
+}
diff --git a/opentrons-ai-client/typings/images.d.ts b/opentrons-ai-client/typings/images.d.ts
new file mode 100644
index 00000000000..9dcd2f68792
--- /dev/null
+++ b/opentrons-ai-client/typings/images.d.ts
@@ -0,0 +1,15 @@
+declare module '*.png' {
+  const image: string
+  // eslint-disable-next-line import/no-default-export
+  export default image
+}
+declare module '*.svg' {
+  const image: string
+  // eslint-disable-next-line import/no-default-export
+  export default image
+}
+declare module '*.webm' {
+  const image: string
+  // eslint-disable-next-line import/no-default-export
+  export default image
+}
diff --git a/opentrons-ai-client/typings/styled-components.d.ts b/opentrons-ai-client/typings/styled-components.d.ts
new file mode 100644
index 00000000000..5d6296f94be
--- /dev/null
+++ b/opentrons-ai-client/typings/styled-components.d.ts
@@ -0,0 +1 @@
+import 'styled-components/cssprop'
diff --git a/opentrons-ai-client/vite.config.ts b/opentrons-ai-client/vite.config.ts
new file mode 100644
index 00000000000..ee557f68d62
--- /dev/null
+++ b/opentrons-ai-client/vite.config.ts
@@ -0,0 +1,43 @@
+import path from 'path'
+import { defineConfig } from 'vite'
+import react from '@vitejs/plugin-react'
+
+export default defineConfig({
+  // this makes imports relative rather than absolute
+  base: '',
+  build: {
+    // Relative to the root
+    outDir: 'dist',
+  },
+  plugins: [
+    react({
+      include: '**/*.tsx',
+      babel: {
+        // Use babel.config.js files
+        configFile: true,
+      },
+    }),
+  ],
+  optimizeDeps: {
+    esbuildOptions: {
+      target: 'es2020',
+    },
+  },
+  css: {
+    postcss: {
+      plugins: [],
+    },
+  },
+  define: {
+    'process.env': process.env,
+    global: 'globalThis',
+  },
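+  // the aliases below point @opentrons/components at the package's TypeScript
+  // source, so the client compiles against local component code directly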
+  resolve: {
+    alias: {
+      '@opentrons/components/styles': path.resolve(
+        '../components/src/index.module.css'
+      ),
+      '@opentrons/components': path.resolve('../components/src/index.ts'),
+    },
+  },
+})
diff --git a/opentrons-ai-server/Makefile b/opentrons-ai-server/Makefile
new file mode 100644
index 00000000000..9de2141f6a0
--- /dev/null
+++ b/opentrons-ai-server/Makefile
@@ -0,0 +1,2 @@
+# opentrons ai server makefile
+# TBD
\ No newline at end of file
diff --git a/opentrons-ai-server/README.md b/opentrons-ai-server/README.md
new file mode 100644
index 00000000000..e00cdc1af3d
--- /dev/null
+++ b/opentrons-ai-server/README.md
@@ -0,0 +1,39 @@
+# Opentrons AI Backend
+
+## Overview
+
+The Opentrons AI application's server.
+
+## Developing
+
+To get started: clone the `Opentrons/opentrons` repository, set up your computer for development as specified in the [contributing guide][contributing-guide-setup], and then:
+
+```shell
+# change into the cloned directory
+cd opentrons
+# prerequisite: install dependencies as specified in project setup
+make setup
+# launch the dev server
+make -C opentrons-ai-server dev
+```
+
+## Stack and structure
+
+The server stack is built using:
+
+- [OpenAI Python API library][]
+
+Some important directories:
+
+- `opentrons-ai-client` — Opentrons AI application's client-side
+
+## Testing
+
+TBD
+
+## Building
+
+TBD
+
+[contributing-guide-setup]: ../CONTRIBUTING.md#development-setup
+[pytest]: https://docs.pytest.org/en/
+[openai python api library]: https://pypi.org/project/openai/
diff --git a/tsconfig-eslint.json b/tsconfig-eslint.json
index 4468d4f6fd4..541feb786c0 100644
--- a/tsconfig-eslint.json
+++ b/tsconfig-eslint.json
@@ -19,6 +19,7 @@
     "labware-designer/typings",
     "labware-library/src",
     "labware-library/typings",
+    "opentrons-ai-client/src",
     "shared-data/deck",
     "shared-data/js",
     "shared-data/protocol",