From dea30f8fefae54cffcf3febec73a041fc94f46bc Mon Sep 17 00:00:00 2001
From: SuZhou-Joe
Date: Wed, 1 Nov 2023 17:39:50 +0800
Subject: [PATCH 1/6] feat: use agent framework API to generate answer

Signed-off-by: SuZhou-Joe
---
 server/services/chat/olly_chat_service.ts | 29 ++++++++++++++---------
 1 file changed, 18 insertions(+), 11 deletions(-)

diff --git a/server/services/chat/olly_chat_service.ts b/server/services/chat/olly_chat_service.ts
index 6c2f09c2..b77d50fc 100644
--- a/server/services/chat/olly_chat_service.ts
+++ b/server/services/chat/olly_chat_service.ts
@@ -5,6 +5,7 @@
 import { Run } from 'langchain/callbacks';
 import { v4 as uuid } from 'uuid';
+import { ApiResponse } from '@opensearch-project/opensearch';
 import { OpenSearchDashboardsRequest, RequestHandlerContext } from '../../../../../src/core/server';
 import { IMessage, IInput } from '../../../common/types/chat_saved_object_attributes';
 import { convertToTraces } from '../../../common/utils/llm_chat/traces';
@@ -51,16 +52,22 @@
         callbacks
       );
       const memory = memoryInit(payload.messages);
-      const chatAgent = chatAgentInit(
-        model,
-        pluginTools.flatMap((tool) => tool.toolsList),
-        callbacks,
-        memory
-      );
-      const agentResponse = await chatAgent.run(
-        payload.input.content,
-        payload.sessionId ? OllyChatService.abortControllers.get(payload.sessionId) : undefined
-      );
+
+      const agentFrameworkResponse = (await opensearchClient.transport.request({
+        method: 'POST',
+        path: '/_plugins/_ml/agents/usjqiYsBC_Oyjc6-Rhpq/_execute',
+        body: {
+          parameters: {
+            question: payload.input.content,
+          },
+        },
+      })) as ApiResponse<{
+        inference_results: Array<{ output: Array<{ name: string; result: string }> }>;
+      }>;
+      const agentFrameworkAnswer =
+        agentFrameworkResponse.body.inference_results[0].output[0].result;
+      await memory.chatHistory.addUserMessage(payload.input.content);
+      await memory.chatHistory.addAIChatMessage(agentFrameworkAnswer);
 
       const suggestions = await requestSuggestionsChain(
         model,
@@ -71,7 +78,7 @@

       return buildOutputs(
         payload.input.content,
-        agentResponse,
+        agentFrameworkAnswer,
         traceId,
         suggestions,
         convertToTraces(runs)

From 4ce0a1522adb125d1ae9f09625e054badda87e45 Mon Sep 17 00:00:00 2001
From: SuZhou-Joe
Date: Wed, 8 Nov 2023 17:37:00 +0800
Subject: [PATCH 2/6] feat: comply with multi type of agent

Signed-off-by: SuZhou-Joe
---
 server/services/chat/olly_chat_service.ts | 12 +++++++++---
 1 file changed, 9 insertions(+), 3 deletions(-)

diff --git a/server/services/chat/olly_chat_service.ts b/server/services/chat/olly_chat_service.ts
index b77d50fc..0f970c81 100644
--- a/server/services/chat/olly_chat_service.ts
+++ b/server/services/chat/olly_chat_service.ts
@@ -53,19 +53,25 @@
       );
       const memory = memoryInit(payload.messages);

+      /**
+       * Wait for an API to fetch root agent id.
+       */
       const agentFrameworkResponse = (await opensearchClient.transport.request({
         method: 'POST',
-        path: '/_plugins/_ml/agents/usjqiYsBC_Oyjc6-Rhpq/_execute',
+        path: '/_plugins/_ml/agents/_UoprosBZFp32K9Rsfqe/_execute',
         body: {
           parameters: {
             question: payload.input.content,
           },
         },
       })) as ApiResponse<{
-        inference_results: Array<{ output: Array<{ name: string; result: string }> }>;
+        inference_results: Array<{
+          output: Array<{ name: string; result?: string; dataAsMap?: { response: string } }>;
+        }>;
       }>;
+      const outputBody = agentFrameworkResponse.body.inference_results?.[0]?.output?.[0];
       const agentFrameworkAnswer =
-        agentFrameworkResponse.body.inference_results[0].output[0].result;
+        agentFrameworkResponse.body.inference_results[0].output[0].result || "";
       await memory.chatHistory.addUserMessage(payload.input.content);
       await memory.chatHistory.addAIChatMessage(agentFrameworkAnswer);

From 91dce23d7eb8940734f70bf6fb917149427f8420 Mon Sep 17 00:00:00 2001
From: SuZhou-Joe
Date: Mon, 20 Nov 2023 10:58:00 +0800
Subject: [PATCH 3/6] feat: integrate with memory APIs

Signed-off-by: SuZhou-Joe
---
 server/routes/chat_routes.ts               |  36 ++---
 server/services/chat/chat_service.ts       |   5 +-
 server/services/chat/olly_chat_service.ts  |  97 ++++++------
 .../agent_framework_storage_service.ts     | 148 ++++++++++++++++++
 server/services/storage/storage_service.ts |   2 +
 5 files changed, 213 insertions(+), 75 deletions(-)
 create mode 100644 server/services/storage/agent_framework_storage_service.ts

diff --git a/server/routes/chat_routes.ts b/server/routes/chat_routes.ts
index 7e63804b..e857b645 100644
--- a/server/routes/chat_routes.ts
+++ b/server/routes/chat_routes.ts
@@ -15,6 +15,7 @@ import { ASSISTANT_API } from '../../common/constants/llm';
 import { OllyChatService } from '../services/chat/olly_chat_service';
 import { SavedObjectsStorageService } from '../services/storage/saved_objects_storage_service';
 import { IMessage, IInput } from '../../common/types/chat_saved_object_attributes';
+import { AgentFrameworkStorageService } from '../services/storage/agent_framework_storage_service';

 const llmRequestRoute = {
   path: ASSISTANT_API.SEND_MESSAGE,
@@ -104,7 +105,7 @@ const updateSessionRoute = {

 export function registerChatRoutes(router: IRouter) {
   const createStorageService = (context: RequestHandlerContext) =>
-    new SavedObjectsStorageService(context.core.savedObjects.client);
+    new AgentFrameworkStorageService(context.core.opensearch.client.asCurrentUser);
   const createChatService = () => new OllyChatService();

   router.post(
@@ -114,34 +115,25 @@
       request,
       response
     ): Promise> => {
-      const { sessionId, input, messages = [] } = request.body;
+      const { messages = [], input, sessionId: sessionIdInRequestBody } = request.body;
       const storageService = createStorageService(context);
       const chatService = createChatService();

-      // get history from the chat object for existing chats
-      if (sessionId && messages.length === 0) {
-        try {
-          const session = await storageService.getSession(sessionId);
-          messages.push(...session.messages);
-        } catch (error) {
-          return response.custom({ statusCode: error.statusCode || 500, body: error.message });
-        }
-      }
-
       try {
         const outputs = await chatService.requestLLM(
-          { messages, input, sessionId },
+          { messages, input, sessionId: sessionIdInRequestBody },
           context,
           request
         );
-        const title = input.content.substring(0, 50);
-        const saveMessagesResponse = await storageService.saveMessages(
-          title,
-          sessionId,
-          [...messages, input, ...outputs].filter((message) => message.content !== 'AbortError')
-        );
+        const sessionId = outputs.memoryId;
+        const finalMessage = await storageService.getSession(sessionId);
+
         return response.ok({
-          body: { ...saveMessagesResponse, title },
+          body: {
+            messages: finalMessage.messages,
+            sessionId: outputs.memoryId,
+            title: finalMessage.title
+          },
         });
       } catch (error) {
         context.assistant_plugin.logger.warn(error);
@@ -278,13 +270,13 @@
         const outputs = await chatService.requestLLM(
           { messages, input, sessionId },
           context,
-          request
+          request as any
         );
         const title = input.content.substring(0, 50);
         const saveMessagesResponse = await storageService.saveMessages(
           title,
           sessionId,
-          [...messages, input, ...outputs].filter((message) => message.content !== 'AbortError')
+          [...messages, input, ...outputs.messages].filter((message) => message.content !== 'AbortError')
         );
         return response.ok({
           body: { ...saveMessagesResponse, title },
diff --git a/server/services/chat/chat_service.ts b/server/services/chat/chat_service.ts
index 92d2ec89..e1e81b9a 100644
--- a/server/services/chat/chat_service.ts
+++ b/server/services/chat/chat_service.ts
@@ -13,7 +13,10 @@ export interface ChatService {
     payload: { messages: IMessage[]; input: IInput; sessionId?: string },
     context: RequestHandlerContext,
     request: OpenSearchDashboardsRequest
-  ): Promise<IMessage[]>;
+  ): Promise<{
+    messages: IMessage[];
+    memoryId: string;
+  }>;
   generatePPL(
     context: RequestHandlerContext,
     request: OpenSearchDashboardsRequest
diff --git a/server/services/chat/olly_chat_service.ts b/server/services/chat/olly_chat_service.ts
index 0f970c81..d7dd4abe 100644
--- a/server/services/chat/olly_chat_service.ts
+++ b/server/services/chat/olly_chat_service.ts
@@ -9,30 +9,30 @@
 import { ApiResponse } from '@opensearch-project/opensearch';
 import { OpenSearchDashboardsRequest, RequestHandlerContext } from '../../../../../src/core/server';
 import { IMessage, IInput } from '../../../common/types/chat_saved_object_attributes';
 import { convertToTraces } from '../../../common/utils/llm_chat/traces';
-import { chatAgentInit } from '../../olly/agents/agent_helpers';
 import { OpenSearchTracer } from '../../olly/callbacks/opensearch_tracer';
-import { requestSuggestionsChain } from '../../olly/chains/suggestions_generator';
-import { memoryInit } from '../../olly/memory/chat_agent_memory';
 import { LLMModelFactory } from '../../olly/models/llm_model_factory';
-import { initTools } from '../../olly/tools/tools_helper';
 import { PPLTools } from '../../olly/tools/tool_sets/ppl';
 import { buildOutputs } from '../../olly/utils/output_builders/build_outputs';
 import { AbortAgentExecutionSchema, LLMRequestSchema } from '../../routes/chat_routes';
 import { PPLGenerationRequestSchema } from '../../routes/langchain_routes';
 import { ChatService } from './chat_service';

+const MEMORY_ID_FIELD = 'memory_id';
+const RESPONSE_FIELD = 'response';
+
 export class OllyChatService implements ChatService {
   static abortControllers: Map<string, AbortController> = new Map();

   public async requestLLM(
     payload: { messages: IMessage[]; input: IInput; sessionId?: string },
     context: RequestHandlerContext,
-    request: OpenSearchDashboardsRequest
-  ): Promise<IMessage[]> {
-    const traceId = uuid();
-    const observabilityClient = context.assistant_plugin.observabilityClient.asScoped(request);
+    request: OpenSearchDashboardsRequest
+  ): Promise<{
+    messages: IMessage[];
+    memoryId: string;
+  }> {
+    const { input, sessionId } = payload;
     const opensearchClient = context.core.opensearch.client.asCurrentUser;
-    const savedObjectsClient = context.core.savedObjects.client;

     if (payload.sessionId) {
       OllyChatService.abortControllers.set(payload.sessionId, new AbortController());
@@ -40,65 +40,58 @@
     try {
       const runs: Run[] = [];
-      const callbacks = [new OpenSearchTracer(opensearchClient, traceId, runs)];
-      const model = LLMModelFactory.createModel({ client: opensearchClient });
-      const embeddings = LLMModelFactory.createEmbeddings({ client: opensearchClient });
-      const pluginTools = initTools(
-        model,
-        embeddings,
-        opensearchClient,
-        observabilityClient,
-        savedObjectsClient,
-        callbacks
-      );
-      const memory = memoryInit(payload.messages);

       /**
        * Wait for an API to fetch root agent id.
        */
+      const parametersPayload: {
+        question: string;
+        verbose?: boolean;
+        memory_id?: string;
+      } = {
+        question: input.content,
+        verbose: true,
+      };
+      if (sessionId) {
+        parametersPayload.memory_id = sessionId;
+      }
       const agentFrameworkResponse = (await opensearchClient.transport.request({
         method: 'POST',
-        path: '/_plugins/_ml/agents/_UoprosBZFp32K9Rsfqe/_execute',
+        path: '/_plugins/_ml/agents/-jld3IsBXlmiPBu-5dDC/_execute',
         body: {
-          parameters: {
-            question: payload.input.content,
-          },
+          parameters: parametersPayload,
         },
       })) as ApiResponse<{
         inference_results: Array<{
-          output: Array<{ name: string; result?: string; dataAsMap?: { response: string } }>;
+          output: Array<{ name: string; result?: string }>;
         }>;
       }>;
-      const outputBody = agentFrameworkResponse.body.inference_results?.[0]?.output?.[0];
-      const agentFrameworkAnswer =
-        agentFrameworkResponse.body.inference_results[0].output[0].result || "";
-      await memory.chatHistory.addUserMessage(payload.input.content);
-      await memory.chatHistory.addAIChatMessage(agentFrameworkAnswer);
+      const outputBody =
+        agentFrameworkResponse.body.inference_results?.[0]?.output ||
+        agentFrameworkResponse.body.inference_results?.[0]?.output;
+      const memoryIdItem = outputBody?.find((item) => item.name === MEMORY_ID_FIELD);
+      const reversedOutputBody = [...outputBody].reverse();
+      const finalAnswerItem = reversedOutputBody.find((item) => item.name === RESPONSE_FIELD);
+
+      const agentFrameworkAnswer = finalAnswerItem?.result || '';

-      const suggestions = await requestSuggestionsChain(
-        model,
-        pluginTools.flatMap((tool) => tool.toolsList),
-        memory,
-        callbacks
-      );
-      return buildOutputs(
-        payload.input.content,
-        agentFrameworkAnswer,
-        traceId,
-        suggestions,
-        convertToTraces(runs)
-      );
+      return {
+        messages: buildOutputs(input.content, agentFrameworkAnswer, '', {}, convertToTraces(runs)),
+        memoryId: memoryIdItem?.result || '',
+      };
     } catch (error) {
       context.assistant_plugin.logger.error(error);
-      return [
-        {
-          type: 'output',
-          traceId,
-          contentType: 'error',
-          content: error.message,
-        },
-      ];
+      return {
+        messages: [
+          {
+            type: 'output',
+            traceId: '',
+            contentType: 'error',
+            content: error.message,
+          },
+        ],
+        memoryId: '',
+      };
     } finally {
       if (payload.sessionId) {
         OllyChatService.abortControllers.delete(payload.sessionId);
diff --git a/server/services/storage/agent_framework_storage_service.ts b/server/services/storage/agent_framework_storage_service.ts
new file mode 100644
index 00000000..7731301e
--- /dev/null
+++ b/server/services/storage/agent_framework_storage_service.ts
@@ -0,0 +1,148 @@
+/*
+ * Copyright OpenSearch Contributors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+import { ApiResponse } from '@opensearch-project/opensearch/.';
+import { OpenSearchClient } from '../../../../../src/core/server';
+import { LLM_INDEX } from '../../../common/constants/llm';
+import {
+  IInput,
+  IMessage,
+  IOutput,
+  ISession,
+  ISessionFindResponse,
+} from '../../../common/types/chat_saved_object_attributes';
+import { GetSessionsSchema } from '../../routes/chat_routes';
+import { StorageService } from './storage_service';
+
+export class AgentFrameworkStorageService implements StorageService {
+  constructor(private readonly client: OpenSearchClient) {}
+  async getSession(sessionId: string): Promise<ISession> {
+    const session = (await this.client.transport.request({
+      method: 'GET',
+      path: `/_plugins/_ml/memory/conversation/${sessionId}`,
+    })) as ApiResponse<{
+      interactions: Array<{
+        input: string;
+        response: string;
+        parent_interaction_id: string;
+        interaction_id: string;
+      }>;
+    }>;
+    return {
+      title: 'test',
+      version: 1,
+      createdTimeMs: Date.now(),
+      updatedTimeMs: Date.now(),
+      messages: session.body.interactions
+        .filter((item) => !item.parent_interaction_id)
+        .reduce((total, current) => {
+          const inputItem: IInput = {
+            type: 'input',
+            contentType: 'text',
+            content: current.input,
+          };
+          const outputItems: IOutput[] = [
+            {
+              type: 'output',
+              contentType: 'markdown',
+              content: current.response,
+              traceId: current.interaction_id,
+            },
+          ];
+          return [...total, inputItem, ...outputItems];
+        }, [] as IMessage[]),
+    };
+  }
+
+  async getSessions(query: GetSessionsSchema): Promise<ISessionFindResponse> {
+    await this.createIndex();
+    const sessions = await this.client.search({
+      index: LLM_INDEX.SESSIONS,
+      body: {
+        from: (query.page - 1) * query.perPage,
+        size: query.perPage,
+        ...(query.sortField &&
+          query.sortOrder && { sort: [{ [query.sortField]: query.sortOrder }] }),
+      },
+    });
+
+    return {
+      objects: sessions.body.hits.hits
+        .filter(
+          (hit): hit is RequiredKey =>
+            hit._source !== null && hit._source !== undefined
+        )
+        .map((session) => ({ ...session._source, id: session._id })),
+      total:
+        typeof sessions.body.hits.total === 'number'
+          ? sessions.body.hits.total
+          : sessions.body.hits.total.value,
+    };
+  }
+
+  async saveMessages(
+    title: string,
+    sessionId: string | undefined,
+    messages: IMessage[]
+  ): Promise<{ sessionId: string; messages: IMessage[] }> {
+    await this.createIndex();
+    const timestamp = new Date().getTime();
+    if (!sessionId) {
+      const createResponse = await this.client.index({
+        index: LLM_INDEX.SESSIONS,
+        body: {
+          title,
+          version: 1,
+          createdTimeMs: timestamp,
+          updatedTimeMs: timestamp,
+          messages,
+        },
+      });
+      return { sessionId: createResponse.body._id, messages };
+    }
+    const updateResponse = await this.client.update>({
+      index: LLM_INDEX.SESSIONS,
+      id: sessionId,
+      body: {
+        doc: {
+          messages,
+          updatedTimeMs: timestamp,
+        },
+      },
+    });
+    return { sessionId, messages };
+  }
+
+  private async createIndex() {
+    const existsResponse = await this.client.indices.exists({ index: LLM_INDEX.SESSIONS });
+    if (!existsResponse.body) {
+      return this.client.indices.create({
+        index: LLM_INDEX.SESSIONS,
+        body: {
+          settings: {
+            index: {
+              number_of_shards: '1',
+              auto_expand_replicas: '0-2',
+              mapping: { ignore_malformed: true },
+            },
+          },
+          mappings: {
+            properties: {
+              title: { type: 'keyword' },
+              createdTimeMs: { type: 'date' },
+              updatedTimeMs: { type: 'date' },
+            },
+          },
+        },
+      });
+    }
+  }
+  deleteSession(sessionId: string): Promise<{}> {
+    throw new Error('Method not implemented.');
+  }
+  updateSession(sessionId: string, title: string): Promise<{}> {
+    throw new Error('Method not implemented.');
+  }
+}
diff --git a/server/services/storage/storage_service.ts b/server/services/storage/storage_service.ts
index 8d676c5e..0fe27df6 100644
--- a/server/services/storage/storage_service.ts
+++ b/server/services/storage/storage_service.ts
@@ -18,4 +18,6 @@ export interface StorageService {
     sessionId: string | undefined,
     messages: IMessage[]
   ): Promise<{ sessionId: string; messages: IMessage[] }>;
+  deleteSession(sessionId: string): Promise<{}>;
+  updateSession(sessionId: string, title: string): Promise<{}>;
 }

From 7a335b6ee8eac6979330c75e106cacf0ca6de8a0 Mon Sep 17 00:00:00 2001
From: SuZhou-Joe
Date: Mon, 20 Nov 2023 11:15:15 +0800
Subject: [PATCH 4/6] feat: use the agent id from request body

Signed-off-by: SuZhou-Joe
---
 public/chat_header_button.tsx             | 4 ++++
 public/contexts/chat_context.tsx          | 1 +
 public/hooks/use_chat_actions.tsx         | 1 +
 server/routes/chat_routes.ts              | 1 +
 server/services/chat/olly_chat_service.ts | 4 ++--
 5 files changed, 9 insertions(+), 2 deletions(-)

diff --git a/public/chat_header_button.tsx b/public/chat_header_button.tsx
index ce21f666..26e76373 100644
--- a/public/chat_header_button.tsx
+++ b/public/chat_header_button.tsx
@@ -38,6 +38,9 @@ export const HeaderChatButton: React.FC = (props) => {
   const [traceId, setTraceId] = useState(undefined);
   const [chatSize, setChatSize] = useState('dock-right');
   const flyoutFullScreen = chatSize === 'fullscreen';
+  const [rootAgentId, setRootAgentId] = useState(
+    new URL(window.location.href).searchParams.get('agent_id') || ''
+  );

   if (!flyoutLoaded && flyoutVisible) flyoutLoaded = true;

@@ -73,6 +76,7 @@
       setTitle,
       traceId,
       setTraceId,
+      rootAgentId,
     }),
     [
       appId,
diff --git a/public/contexts/chat_context.tsx b/public/contexts/chat_context.tsx
index c0807be3..24d3a0f7 100644
--- a/public/contexts/chat_context.tsx
+++ b/public/contexts/chat_context.tsx
@@ -25,6 +25,7 @@ export interface IChatContext {
   setTitle: React.Dispatch>;
   traceId?: string;
   setTraceId: React.Dispatch>;
+  rootAgentId?: string;
 }

 export const ChatContext = React.createContext(null);
diff --git a/public/hooks/use_chat_actions.tsx b/public/hooks/use_chat_actions.tsx
index c71836b3..6781cc0b 100644
--- a/public/hooks/use_chat_actions.tsx
+++ b/public/hooks/use_chat_actions.tsx
@@ -36,6 +36,7 @@ export const useChatActions = (): AssistantActions => {
       // do not send abort signal to http client to allow LLM call run in background
       body: JSON.stringify({
         sessionId: chatContext.sessionId,
+        rootAgentId: chatContext.rootAgentId,
         ...(!chatContext.sessionId && { messages: chatState.messages }), // include all previous messages for new chats
         input,
       }),
diff --git a/server/routes/chat_routes.ts b/server/routes/chat_routes.ts
index e857b645..01329e1f 100644
--- a/server/routes/chat_routes.ts
+++ b/server/routes/chat_routes.ts
@@ -23,6 +23,7 @@ const llmRequestRoute = {
   body: schema.object({
     sessionId: schema.maybe(schema.string()),
     messages: schema.maybe(schema.arrayOf(schema.any())),
+    rootAgentId: schema.string(),
     input: schema.object({
       type: schema.literal('input'),
       context: schema.object({
diff --git a/server/services/chat/olly_chat_service.ts b/server/services/chat/olly_chat_service.ts
index d7dd4abe..68a92ed4 100644
--- a/server/services/chat/olly_chat_service.ts
+++ b/server/services/chat/olly_chat_service.ts
@@ -31,7 +31,7 @@
     messages: IMessage[];
     memoryId: string;
   }> {
-    const { input, sessionId } = payload;
+    const { input, sessionId, rootAgentId } = request.body;
     const opensearchClient = context.core.opensearch.client.asCurrentUser;

     if (payload.sessionId) {
@@ -57,7 +57,7 @@
       }
       const agentFrameworkResponse = (await opensearchClient.transport.request({
         method: 'POST',
-        path: '/_plugins/_ml/agents/-jld3IsBXlmiPBu-5dDC/_execute',
+        path: `/_plugins/_ml/agents/${rootAgentId}/_execute`,
         body: {
           parameters: parametersPayload,
         },

From 2ed0d486045e87fd975df122b32d3443d2258121 Mon Sep 17 00:00:00 2001
From: SuZhou-Joe
Date: Mon, 20 Nov 2023 14:50:32 +0800
Subject: [PATCH 5/6] feat: remove useless code

Signed-off-by: SuZhou-Joe
---
 .../agent_framework_storage_service.ts | 76 +------------------
 1 file changed, 2 insertions(+), 74 deletions(-)

diff --git a/server/services/storage/agent_framework_storage_service.ts b/server/services/storage/agent_framework_storage_service.ts
index 7731301e..64f2bd79 100644
--- a/server/services/storage/agent_framework_storage_service.ts
+++ b/server/services/storage/agent_framework_storage_service.ts
@@ -57,29 +57,7 @@
   }

   async getSessions(query: GetSessionsSchema): Promise<ISessionFindResponse> {
-    await this.createIndex();
-    const sessions = await this.client.search({
-      index: LLM_INDEX.SESSIONS,
-      body: {
-        from: (query.page - 1) * query.perPage,
-        size: query.perPage,
-        ...(query.sortField &&
-          query.sortOrder && { sort: [{ [query.sortField]: query.sortOrder }] }),
-      },
-    });
-
-    return {
-      objects: sessions.body.hits.hits
-        .filter(
-          (hit): hit is RequiredKey =>
-            hit._source !== null && hit._source !== undefined
-        )
-        .map((session) => ({ ...session._source, id: session._id })),
-      total:
-        typeof sessions.body.hits.total === 'number'
-          ? sessions.body.hits.total
-          : sessions.body.hits.total.value,
-    };
+    throw new Error('Method not implemented.');
   }

   async saveMessages(
     title: string,
     sessionId: string | undefined,
     messages: IMessage[]
   ): Promise<{ sessionId: string; messages: IMessage[] }> {
-    await this.createIndex();
-    const timestamp = new Date().getTime();
-    if (!sessionId) {
-      const createResponse = await this.client.index({
-        index: LLM_INDEX.SESSIONS,
-        body: {
-          title,
-          version: 1,
-          createdTimeMs: timestamp,
-          updatedTimeMs: timestamp,
-          messages,
-        },
-      });
-      return { sessionId: createResponse.body._id, messages };
-    }
-    const updateResponse = await this.client.update>({
-      index: LLM_INDEX.SESSIONS,
-      id: sessionId,
-      body: {
-        doc: {
-          messages,
-          updatedTimeMs: timestamp,
-        },
-      },
-    });
-    return { sessionId, messages };
-  }
-
-  private async createIndex() {
-    const existsResponse = await this.client.indices.exists({ index: LLM_INDEX.SESSIONS });
-    if (!existsResponse.body) {
-      return this.client.indices.create({
-        index: LLM_INDEX.SESSIONS,
-        body: {
-          settings: {
-            index: {
-              number_of_shards: '1',
-              auto_expand_replicas: '0-2',
-              mapping: { ignore_malformed: true },
-            },
-          },
-          mappings: {
-            properties: {
-              title: { type: 'keyword' },
-              createdTimeMs: { type: 'date' },
-              updatedTimeMs: { type: 'date' },
-            },
-          },
-        },
-      });
-    }
+    throw new Error('Method not implemented.');
   }
   deleteSession(sessionId: string): Promise<{}> {
     throw new Error('Method not implemented.');
   }

From c6dbfb1347d2fd231d90b401a07a00cdab4aece0 Mon Sep 17 00:00:00 2001
From: SuZhou-Joe
Date: Mon, 20 Nov 2023 15:09:23 +0800
Subject: [PATCH 6/6] feat: update babel.config.ts

Signed-off-by: SuZhou-Joe
---
 babel.config.js | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/babel.config.js b/babel.config.js
index a0f8a3ad..3805f7cf 100644
--- a/babel.config.js
+++ b/babel.config.js
@@ -16,8 +16,8 @@ module.exports = function (api) {
     ],
     plugins: [
       [require('@babel/plugin-transform-runtime'), { regenerator: true }],
-      require('@babel/plugin-proposal-class-properties'),
-      require('@babel/plugin-proposal-object-rest-spread'),
+      require('@babel/plugin-transform-class-properties'),
+      require('@babel/plugin-transform-object-rest-spread'),
      [require('@babel/plugin-transform-modules-commonjs'), { allowTopLevelThis: true }],
     ],
   };
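Taken together, the series replaces the LangChain chat agent with calls to the ML Commons agent framework: the chat route forwards a rootAgentId, OllyChatService executes that agent through the transport layer, and conversation history is read back through the memory API. The following standalone sketch only illustrates that request/response shape under assumptions: the cluster URL, the agent id value, and the runAgent helper name are placeholders and are not part of these patches.

import { Client, ApiResponse } from '@opensearch-project/opensearch';

// Placeholder connection details for illustration only.
const client = new Client({ node: 'https://localhost:9200' });

interface AgentExecuteResponse {
  inference_results: Array<{
    output: Array<{ name: string; result?: string }>;
  }>;
}

// Hypothetical helper mirroring what the patched OllyChatService.requestLLM does:
// execute the root agent, then read `memory_id` and the final `response` from the output list.
async function runAgent(rootAgentId: string, question: string, memoryId?: string) {
  const response = (await client.transport.request({
    method: 'POST',
    path: `/_plugins/_ml/agents/${rootAgentId}/_execute`,
    body: {
      parameters: {
        question,
        verbose: true,
        ...(memoryId ? { memory_id: memoryId } : {}),
      },
    },
  })) as ApiResponse<AgentExecuteResponse>;

  const output = response.body.inference_results?.[0]?.output ?? [];
  const memoryIdItem = output.find((item) => item.name === 'memory_id');
  // The answer is taken from the last output item named 'response', as in the patched service.
  const answerItem = [...output].reverse().find((item) => item.name === 'response');

  return {
    answer: answerItem?.result ?? '',
    memoryId: memoryIdItem?.result ?? '',
  };
}

The returned memoryId plays the role of the session id in the new flow; the conversation can then be read back with GET /_plugins/_ml/memory/conversation/{memoryId}, which is what AgentFrameworkStorageService.getSession does in patch 3.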