diff --git a/opentrons-ai-client/src/molecules/InputPrompt/index.tsx b/opentrons-ai-client/src/molecules/InputPrompt/index.tsx
index 3b5254a799c..7c3f8e482ca 100644
--- a/opentrons-ai-client/src/molecules/InputPrompt/index.tsx
+++ b/opentrons-ai-client/src/molecules/InputPrompt/index.tsx
@@ -3,7 +3,6 @@ import { useTranslation } from 'react-i18next'
 import styled, { css } from 'styled-components'
 import { useForm } from 'react-hook-form'
 import { useAtom } from 'jotai'
-import axios from 'axios'
 import { useAuth0 } from '@auth0/auth0-react'
 
 import {
@@ -18,12 +17,13 @@ import {
 } from '@opentrons/components'
 
 import { SendButton } from '../../atoms/SendButton'
 import { preparedPromptAtom, chatDataAtom } from '../../resources/atoms'
+import { useApiCall } from '../../resources/hooks/useApiCall'
+import { calcTextAreaHeight } from '../../resources/utils/utils'
+import { END_POINT } from '../../resources/constants'
+import type { AxiosRequestConfig } from 'axios'
 import type { ChatData } from '../../resources/types'
 
-const url =
-  'https://fk0py9eu3e.execute-api.us-east-2.amazonaws.com/sandbox/chat/completion'
-
 interface InputType {
   userPrompt: string
 }
@@ -39,8 +39,9 @@ export function InputPrompt(): JSX.Element {
   const [, setChatData] = useAtom(chatDataAtom)
   const [submitted, setSubmitted] = React.useState(false)
 
-  const [data, setData] = React.useState(null)
-  const [loading, setLoading] = React.useState(false)
+  // const [data, setData] = React.useState(null)
+  // const [loading, setLoading] = React.useState(false)
+
   // ToDo (kk:05/15/2024) this will be used in the future
   // const [error, setError] = React.useState('')
 
@@ -48,49 +49,75 @@
 
   const userPrompt = watch('userPrompt') ?? ''
 
-  const calcTextAreaHeight = (): number => {
-    const rowsNum = userPrompt.split('\n').length
-    return rowsNum
-  }
-
-  // ToDo (kk:05/15/2024) This will be moved to a better place
-  const fetchData = async (prompt: string): Promise<void> => {
-    if (prompt !== '') {
-      setLoading(true)
-      try {
-        const accessToken = await getAccessTokenSilently({
-          authorizationParams: {
-            audience: 'sandbox-ai-api',
-          },
-        })
-        const postData = {
-          message: prompt,
-          fake: false,
-        }
-        const headers = {
-          Authorization: `Bearer ${accessToken}`,
-          'Content-Type': 'application/json',
-        }
-        const response = await axios.post(url, postData, { headers })
-        setData(response.data)
-      } catch (err) {
-        // setError('Error fetching data from the API.')
-        console.error(`error: ${err}`)
-      } finally {
-        setLoading(false)
-      }
-    }
-  }
-
-  const handleClick = (): void => {
+  const { data, error, isLoading, fetchData } = useApiCall<ChatData>()
+
+  // // ToDo (kk:05/15/2024) This will be moved to a better place
+  // const fetchData = async (prompt: string): Promise<void> => {
+  //   if (prompt !== '') {
+  //     setLoading(true)
+  //     try {
+  //       const accessToken = await getAccessTokenSilently({
+  //         authorizationParams: {
+  //           audience: 'sandbox-ai-api',
+  //         },
+  //       })
+  //       const postData = {
+  //         message: prompt,
+  //         fake: false,
+  //       }
+  //       const headers = {
+  //         Authorization: `Bearer ${accessToken}`,
+  //         'Content-Type': 'application/json',
+  //       }
+  //       const response = await axios.post(END_POINT, postData, { headers })
+  //       setData(response.data)
+  //     } catch (err) {
+  //       // setError('Error fetching data from the API.')
+  //       console.error(`error: ${err}`)
+  //     } finally {
+  //       setLoading(false)
+  //     }
+  //   }
+  // }
+
+  const handleClick = async (): Promise<void> => {
     const userInput: ChatData = {
       role: 'user',
       reply: userPrompt,
     }
     setChatData(chatData => [...chatData, userInput])
-    void fetchData(userPrompt)
-    setSubmitted(true)
-    reset()
+
+    try {
+      const accessToken = await getAccessTokenSilently({
+        authorizationParams: {
+          audience: 'sandbox-ai-api',
+        },
+      })
+
+      const headers = {
+        Authorization: `Bearer ${accessToken}`,
+        'Content-Type': 'application/json',
+      }
+
+      const config = {
+        url: END_POINT,
+        method: 'POST',
+        headers,
+        data: {
+          message: userPrompt,
+          fake: false,
+        },
+        withCredentials: true,
+      }
+      console.log('called')
+      await fetchData(config as AxiosRequestConfig)
+      console.log('fetched')
+      console.log('data', data)
+      setSubmitted(true)
+      reset()
+    } catch (err) {
+      console.error(`error: ${err}`)
+    }
   }
 
   React.useEffect(() => {
@@ -98,7 +125,7 @@
   }, [preparedPrompt, setValue])
 
   React.useEffect(() => {
-    if (submitted && data != null && !loading) {
+    if (submitted && data != null && !isLoading) {
       const { role, reply } = data
       const assistantResponse: ChatData = {
         role,
@@ -107,7 +134,7 @@
       setChatData(chatData => [...chatData, assistantResponse])
       setSubmitted(false)
     }
-  }, [data, loading, submitted])
+  }, [data, isLoading, submitted])
 
   // ToDo (kk:05/02/2024) This is also temp. Asking the design about error.
   // console.error('error', error)
@@ -116,13 +143,13 @@
diff --git a/opentrons-ai-client/src/resources/constants.ts b/opentrons-ai-client/src/resources/constants.ts
new file mode 100644
index 00000000000..054a1d66d07
--- /dev/null
+++ b/opentrons-ai-client/src/resources/constants.ts
@@ -0,0 +1,2 @@
+export const END_POINT =
+  'https://fk0py9eu3e.execute-api.us-east-2.amazonaws.com/sandbox/chat/completion'
diff --git a/opentrons-ai-client/src/resources/hooks/__tests__/useApiCall.test.ts b/opentrons-ai-client/src/resources/hooks/__tests__/useApiCall.test.ts
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/opentrons-ai-client/src/resources/hooks/useApiCall.ts b/opentrons-ai-client/src/resources/hooks/useApiCall.ts
new file mode 100644
index 00000000000..2c338d5d8dc
--- /dev/null
+++ b/opentrons-ai-client/src/resources/hooks/useApiCall.ts
@@ -0,0 +1,40 @@
+import { useState } from 'react'
+import axios from 'axios'
+
+import type { AxiosRequestConfig } from 'axios'
+
+interface UseApiCallResult<T> {
+  data: T | null
+  error: string | null
+  isLoading: boolean
+  fetchData: (config: AxiosRequestConfig) => Promise<void>
+}
+
+export const useApiCall = <T>(): UseApiCallResult<T> => {
+  const [data, setData] = useState<T | null>(null)
+  const [error, setError] = useState<string | null>(null)
+  const [isLoading, setIsLoading] = useState(false)
+
+  const fetchData = async (config: AxiosRequestConfig): Promise<void> => {
+    console.log('useApiCall - fetchData')
+    setIsLoading(true)
+    setError(null)
+
+    try {
+      const response = await axios.request({
+        ...config,
+        withCredentials: true,
+      })
+      console.log(response)
+      setData(response.data)
+    } catch (err: any) {
+      // ToDo (kk:05/15/2024) remove any
+      console.log(err)
+      setError(err.message as string)
+    } finally {
+      setIsLoading(false)
+    }
+  }
+
+  return { data, error, isLoading, fetchData }
+}
diff --git a/opentrons-ai-client/src/resources/hooks/useGetAuth0Token.ts b/opentrons-ai-client/src/resources/hooks/useGetAuth0Token.ts
new file mode 100644
index 00000000000..6dca4506657
--- /dev/null
+++ b/opentrons-ai-client/src/resources/hooks/useGetAuth0Token.ts
@@ -0,0 +1,17 @@
+import { useAuth0 } from '@auth0/auth0-react'
+
+const audience = 'sandbox-ai-api'
+export const useGetAuth0Token = async (): Promise<string | null> => {
+  const { getAccessTokenSilently } = useAuth0()
+  try {
+    const accessToken = await getAccessTokenSilently({
+      authorizationParams: {
+        audience,
+      },
+    })
+    return accessToken
+  } catch (err) {
+    console.error(`cannot get token: ${err}`)
+  }
+  return null
+}
diff --git a/opentrons-ai-client/src/resources/utils/__tests__/utils.test.ts b/opentrons-ai-client/src/resources/utils/__tests__/utils.test.ts
new file mode 100644
index 00000000000..fb2e7b94630
--- /dev/null
+++ b/opentrons-ai-client/src/resources/utils/__tests__/utils.test.ts
@@ -0,0 +1,11 @@
+import { describe, it, expect } from 'vitest'
+import { calcTextAreaHeight } from '../utils'
+
+describe('calcTextAreaHeight', () => {
+  it('should return the correct number of lines', () => {
+    const input = 'Hello\nWorld\nThis is testing data.'
+    const expectedOutput = 3
+    const result = calcTextAreaHeight(input)
+    expect(result).toEqual(expectedOutput)
+  })
+})
diff --git a/opentrons-ai-client/src/resources/utils/utils.ts b/opentrons-ai-client/src/resources/utils/utils.ts
new file mode 100644
index 00000000000..ffd8e3408a9
--- /dev/null
+++ b/opentrons-ai-client/src/resources/utils/utils.ts
@@ -0,0 +1,10 @@
+/**
+ * Calculates the number of lines in a given string.
+ * @param input - The string to calculate the number of lines for.
+ * @returns The number of lines in the input string.
+ */
+
+export const calcTextAreaHeight = (input: string): number => {
+  const rowsNum = input.split('\n').length
+  return rowsNum
+}
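
Note: `useApiCall.test.ts` is added by this diff as an empty placeholder. A minimal sketch of what such a test could look like is below; it is not part of the patch, and it assumes the client's existing vitest setup plus `renderHook`/`act` from `@testing-library/react`, stubbing `axios.request` with `vi.spyOn` so no network call is made.

```ts
import { describe, it, expect, vi, afterEach } from 'vitest'
import { renderHook, act } from '@testing-library/react'
import axios from 'axios'

import { useApiCall } from '../useApiCall'

import type { AxiosResponse } from 'axios'

describe('useApiCall', () => {
  afterEach(() => {
    vi.restoreAllMocks()
  })

  it('stores the response data and clears the loading flag', async () => {
    // stub the underlying axios call used by the hook
    vi.spyOn(axios, 'request').mockResolvedValue({
      data: { reply: 'ok' },
    } as AxiosResponse)

    const { result } = renderHook(() => useApiCall<{ reply: string }>())

    await act(async () => {
      await result.current.fetchData({ url: '/chat/completion', method: 'POST' })
    })

    expect(result.current.data).toEqual({ reply: 'ok' })
    expect(result.current.error).toBeNull()
    expect(result.current.isLoading).toBe(false)
  })

  it('records the error message when the request fails', async () => {
    vi.spyOn(axios, 'request').mockRejectedValue(new Error('network down'))

    const { result } = renderHook(() => useApiCall())

    await act(async () => {
      await result.current.fetchData({ url: '/chat/completion', method: 'POST' })
    })

    expect(result.current.data).toBeNull()
    expect(result.current.error).toBe('network down')
  })
})
```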