From 45a584f40e63ba1b828a7ad8a9fb8d83483621de Mon Sep 17 00:00:00 2001
From: Katerina Skroumpelou
Date: Mon, 28 Aug 2023 18:26:16 +0300
Subject: [PATCH] feat(nx-dev): fixed error handling

---
 .../data-access-ai/src/lib/data-access-ai.ts |  7 +-
 nx-dev/data-access-ai/src/lib/utils.ts       | 14 ---
 nx-dev/feature-ai/src/lib/error-message.tsx  |  2 +-
 nx-dev/feature-ai/src/lib/feed-container.tsx | 20 +----
 nx-dev/nx-dev/pages/api/ai-analytics.ts      | 42 ---------
 nx-dev/nx-dev/pages/api/query-ai-handler.ts  | 85 +++++++++++--------
 nx-dev/util-ai/src/lib/moderation.ts         |  4 +-
 nx-dev/util-ai/src/lib/utils.ts              | 46 +++++-----
 8 files changed, 84 insertions(+), 136 deletions(-)
 delete mode 100644 nx-dev/nx-dev/pages/api/ai-analytics.ts

diff --git a/nx-dev/data-access-ai/src/lib/data-access-ai.ts b/nx-dev/data-access-ai/src/lib/data-access-ai.ts
index 4ab974360045c8..0e9ce7fccb33ea 100644
--- a/nx-dev/data-access-ai/src/lib/data-access-ai.ts
+++ b/nx-dev/data-access-ai/src/lib/data-access-ai.ts
@@ -27,7 +27,7 @@ export async function queryAi(
     );
 
     if (!responseObj.ok) {
-      throw new Error(responseObj.statusText);
+      throw await responseObj.json();
     }
 
     const response: {
@@ -43,9 +43,8 @@ export async function queryAi(
     chatFullHistory = response.chatHistory;
 
     return response;
-  } catch (e) {
-    // TODO(katerina): Fix this to show the actual error
-    console.error('Error in fetch', e);
+  } catch (e: any) {
+    console.error('Error: ', e?.['message'] || e);
     throw e;
   }
 }
diff --git a/nx-dev/data-access-ai/src/lib/utils.ts b/nx-dev/data-access-ai/src/lib/utils.ts
index 445022fbbe0d1e..caa7516b7c5f92 100644
--- a/nx-dev/data-access-ai/src/lib/utils.ts
+++ b/nx-dev/data-access-ai/src/lib/utils.ts
@@ -40,17 +40,3 @@ export function getChatResponse(
     }),
   });
 }
-
-export function sendAnalytics(
-  table: string,
-  analyticsData: { [key: string]: string | number | null }
-): Promise<Response> {
-  return fetch('/api/ai-analytics', {
-    method: 'POST',
-    headers: { 'Content-Type': 'application/json' },
-    body: JSON.stringify({
-      table,
-      analyticsData,
-    }),
-  });
-}
diff --git a/nx-dev/feature-ai/src/lib/error-message.tsx b/nx-dev/feature-ai/src/lib/error-message.tsx
index 233c3610aaaf6e..9e46d6aeef3adc 100644
--- a/nx-dev/feature-ai/src/lib/error-message.tsx
+++ b/nx-dev/feature-ai/src/lib/error-message.tsx
@@ -4,7 +4,7 @@ import {
 } from '@heroicons/react/24/outline';
 
 export function ErrorMessage({ error }: { error: any }): JSX.Element {
-  if (error.data.no_results) {
+  if (error?.data?.no_results) {
     return (
       <div
diff --git a/nx-dev/feature-ai/src/lib/feed-container.tsx b/nx-dev/feature-ai/src/lib/feed-container.tsx
index e262b16a46d341..2aa1dfe9cfb662 100644
--- a/nx-dev/feature-ai/src/lib/feed-container.tsx
+++ b/nx-dev/feature-ai/src/lib/feed-container.tsx
@@ -1,8 +1,4 @@
-import {
-  getProcessedHistory,
-  queryAi,
-  sendAnalytics,
-} from '@nx/nx-dev/data-access-ai';
+import { getProcessedHistory, queryAi } from '@nx/nx-dev/data-access-ai';
 import { sendCustomEvent } from '@nx/nx-dev/feature-analytics';
 import { RefObject, useEffect, useRef, useState } from 'react';
 import { ErrorMessage } from './error-message';
@@ -85,11 +81,6 @@ export function FeedContainer(): JSX.Element {
         query,
         ...aiResponse.usage,
       });
-      sendAnalytics('user_queries', {
-        action: 'ai_query',
-        query,
-        ...aiResponse.usage,
-      });
     } catch (error: any) {
       setQueryError(error);
     }
@@ -108,15 +99,6 @@ export function FeedContainer(): JSX.Element {
         ? JSON.stringify(lastQueryMetadata.sources)
         : 'Could not retrieve last answer sources',
     });
-    sendAnalytics('feedback', {
-      action: 'evaluation',
-      result: answer ? answer.content : 'Could not retrieve the answer',
-      query: question ? question.content : 'Could not retrieve the question',
-      response: null, // TODO: Use query metadata here
-      sources: lastQueryMetadata
-        ? JSON.stringify(lastQueryMetadata.sources)
-        : 'Could not retrieve last answer sources',
-    });
   };
 
   return (
diff --git a/nx-dev/nx-dev/pages/api/ai-analytics.ts b/nx-dev/nx-dev/pages/api/ai-analytics.ts
deleted file mode 100644
index e0a6f1178fad49..00000000000000
--- a/nx-dev/nx-dev/pages/api/ai-analytics.ts
+++ /dev/null
@@ -1,42 +0,0 @@
-import { checkEnvVariables } from '@nx/nx-dev/util-ai';
-import { SupabaseClient, createClient } from '@supabase/supabase-js';
-import { NextRequest } from 'next/server';
-
-const supabaseUrl = process.env['NX_NEXT_PUBLIC_SUPABASE_URL'];
-const supabaseServiceKey = process.env['NX_SUPABASE_SERVICE_ROLE_KEY_ACTUAL'];
-export const config = {
-  runtime: 'edge',
-};
-
-export default async function handler(request: NextRequest) {
-  checkEnvVariables('not-needed', supabaseUrl, supabaseServiceKey);
-  const { table, analyticsData } = await request.json();
-
-  const supabaseClient: SupabaseClient = createClient(
-    supabaseUrl as string,
-    supabaseServiceKey as string
-  );
-
-  try {
-    const result = await supabaseClient.from(table).insert(analyticsData);
-
-    return new Response(JSON.stringify(result), {
-      status: 200,
-      headers: {
-        'content-type': 'application/json',
-      },
-    });
-  } catch (e) {
-    return new Response(
-      JSON.stringify({
-        error: 'Error saving feedback in Supabase.',
-      }),
-      {
-        status: 500,
-        headers: {
-          'content-type': 'application/json',
-        },
-      }
-    );
-  }
-}
diff --git a/nx-dev/nx-dev/pages/api/query-ai-handler.ts b/nx-dev/nx-dev/pages/api/query-ai-handler.ts
index 577534725a0bd0..de801d5cde7beb 100644
--- a/nx-dev/nx-dev/pages/api/query-ai-handler.ts
+++ b/nx-dev/nx-dev/pages/api/query-ai-handler.ts
@@ -3,13 +3,12 @@ import { NextRequest } from 'next/server';
 
 import {
-  ApplicationError,
+  CustomError,
   DEFAULT_MATCH_COUNT,
   DEFAULT_MATCH_THRESHOLD,
   MIN_CONTENT_LENGTH,
   PROMPT,
   PageSection,
-  UserError,
   checkEnvVariables,
   getListOfSources,
   getMessageFromResponse,
@@ -32,17 +31,19 @@ export const config = {
 };
 
 export default async function handler(request: NextRequest) {
-  checkEnvVariables(openAiKey, supabaseUrl, supabaseServiceKey);
-  const { query, aiResponse, chatFullHistory } = await request.json();
+  try {
+    checkEnvVariables(openAiKey, supabaseUrl, supabaseServiceKey);
+    const { query, aiResponse, chatFullHistory } = await request.json();
 
-  const supabaseClient: SupabaseClient = createClient(
-    supabaseUrl as string,
-    supabaseServiceKey as string
-  );
+    const supabaseClient: SupabaseClient = createClient(
+      supabaseUrl as string,
+      supabaseServiceKey as string
+    );
 
-  try {
     if (!query) {
-      throw new UserError('Missing query in request data');
+      throw new CustomError('user_error', 'Missing query in request data', {
+        missing_query: true,
+      });
     }
 
     // Moderate the content to comply with OpenAI T&C
@@ -75,13 +76,18 @@ export default async function handler(request: NextRequest) {
       openAiKey as string
     );
 
+    const embeddingResponse = await embeddingResponseObj.json();
+
     if (!embeddingResponseObj.ok) {
-      throw new ApplicationError('Failed to create embedding for question', {
-        data: embeddingResponseObj.status,
-      });
+      throw new CustomError(
+        'application_error',
+        'Failed to create embedding for question',
+        {
+          data: embeddingResponse,
+        }
+      );
     }
 
-    const embeddingResponse = await embeddingResponseObj.json();
     const {
       data: [{ embedding }],
     }: CreateEmbeddingResponse = embeddingResponse;
@@ -97,13 +103,19 @@ export default async function handler(request: NextRequest) {
     );
 
     if (matchError) {
-      throw new ApplicationError('Failed to match page sections', matchError);
+      throw new CustomError(
+        'application_error',
+        'Failed to match page sections',
+        matchError
+      );
     }
 
     // Note: this is experimental. I think it should work
     // mainly because we're testing previous response + query.
     if (!pageSections || pageSections.length === 0) {
-      throw new UserError('No results found.', { no_results: true });
+      throw new CustomError('user_error', 'No results found.', {
+        no_results: true,
+      });
     }
 
     const tokenizer = new GPT3Tokenizer({ type: 'gpt3' });
@@ -142,14 +154,17 @@ export default async function handler(request: NextRequest) {
       openAiKey as string
     );
 
-    if (!responseObj.ok) {
-      throw new ApplicationError('Failed to generate completion', {
-        data: responseObj.status,
-      });
-    }
-
     const response = await responseObj.json();
 
+    if (!responseObj.ok) {
+      throw new CustomError(
+        'application_error',
+        'Failed to generate completion',
+        {
+          data: response,
+        }
+      );
+    }
+
     // Message asking to double-check
     const callout: string = '{% callout type="warning" title="Always double-check!" %}The results may not be accurate, so please always double check with our documentation.{% /callout %}\n';
@@ -175,19 +190,19 @@ export default async function handler(request: NextRequest) {
       },
     });
   } catch (err: unknown) {
-    if (err instanceof UserError) {
-      console.error(err.message);
-    } else if (err instanceof ApplicationError) {
-      // Print out application errors with their additional data
-      console.error(`${err.message}: ${JSON.stringify(err.data)}`);
-    } else {
-      // Print out unexpected errors as is to help with debugging
-      console.error(err);
-    }
+    console.error('Error: ', err);
 
-    // TODO: include more response info in debug environments
-    // OR RETURN RESPONSE WITH DIFFERENT ERROR STATUS
-    console.error(err);
-    throw err;
+    return new Response(
+      JSON.stringify({
+        ...JSON.parse(JSON.stringify(err)),
+        message: err?.['message'],
+      }),
+      {
+        status: 500,
+        headers: {
+          'content-type': 'application/json',
+        },
+      }
+    );
   }
 }
diff --git a/nx-dev/util-ai/src/lib/moderation.ts b/nx-dev/util-ai/src/lib/moderation.ts
index 381506a167911c..73240ddb607209 100644
--- a/nx-dev/util-ai/src/lib/moderation.ts
+++ b/nx-dev/util-ai/src/lib/moderation.ts
@@ -1,5 +1,5 @@
 import { openAiAPICall } from './openai-call';
-import { UserError } from './utils';
+import { CustomError } from './utils';
 
 export async function moderateContent(
   sanitizedQuery: string,
@@ -15,7 +15,7 @@ export async function moderateContent(
   const [results] = moderationResponse.results;
 
   if (results.flagged) {
-    throw new UserError('Flagged content', {
+    throw new CustomError('user_error', 'Flagged content', {
       flagged: true,
       categories: results.categories,
     });
diff --git a/nx-dev/util-ai/src/lib/utils.ts b/nx-dev/util-ai/src/lib/utils.ts
index 3c3f632dc492de..6a8a6b5ba58363 100644
--- a/nx-dev/util-ai/src/lib/utils.ts
+++ b/nx-dev/util-ai/src/lib/utils.ts
@@ -1,7 +1,4 @@
-import {
-  ChatCompletionRequestMessageRoleEnum,
-  CreateChatCompletionResponse,
-} from 'openai';
+import { ChatCompletionRequestMessageRoleEnum } from 'openai';
 
 export function checkEnvVariables(
   openAiKey?: string,
@@ -9,32 +6,43 @@ export function checkEnvVariables(
   supabaseServiceKey?: string
 ) {
   if (!openAiKey) {
-    throw new ApplicationError('Missing environment variable NX_OPENAI_KEY');
+    throw new CustomError(
+      'application_error',
+      'Missing environment variable NX_OPENAI_KEY',
+      {
+        missing_key: true,
+      }
+    );
   }
   if (!supabaseUrl) {
-    throw new ApplicationError(
-      'Missing environment variable NX_NEXT_PUBLIC_SUPABASE_URL'
+    throw new CustomError(
+      'application_error',
+      'Missing environment variable NX_NEXT_PUBLIC_SUPABASE_URL',
+      { missing_key: true }
     );
   }
   if (!supabaseServiceKey) {
-    throw new ApplicationError(
-      'Missing environment variable NX_SUPABASE_SERVICE_ROLE_KEY'
+    throw new CustomError(
+      'application_error',
+      'Missing environment variable NX_SUPABASE_SERVICE_ROLE_KEY',
+      { missing_key: true }
     );
   }
 }
 
-export class ApplicationError extends Error {
-  public type: string = 'application_error';
-  constructor(message: string, public data: Record<string, any> = {}) {
-    super(message);
-  }
-}
 
-export class UserError extends ApplicationError {
-  public override type: string = 'user_error';
-  constructor(message: string, data: Record<string, any> = {}) {
-    super(message, data);
+export class CustomError extends Error {
+  public type: string;
+  public data: Record<string, any>;
+
+  constructor(
+    type: string = 'application_error',
+    message: string,
+    data: Record<string, any> = {}
+  ) {
+    super(message);
+    this.type = type;
+    this.data = data;
   }
 }