From 3dcae5144034a146068566e920ade2e57d9abd08 Mon Sep 17 00:00:00 2001 From: Pierre Gayvallet Date: Mon, 23 Dec 2024 10:20:42 +0100 Subject: [PATCH 01/21] [inference] Add support for inference connectors (#204541) ## Summary ~Depends on~ https://github.com/elastic/kibana/pull/200249 merged! Fix https://github.com/elastic/kibana/issues/199082 - Add support for the `inference` stack connectors to the `inference` plugin (everything is inference) - Adapt the o11y assistant to use the `inference-common` utilities for connector filtering / compat checking ## How to test **1. Starts ES with the unified completion feature flag** ```sh yarn es snapshot --license trial ES_JAVA_OPTS="-Des.inference_unified_feature_flag_enabled=true" ``` **2. Enable the inference connector for Kibana** In the Kibana config file: ```yaml xpack.stack_connectors.enableExperimental: ['inferenceConnectorOn'] ``` **3. Start Dev Kibana** ```sh node scripts/kibana --dev --no-base-path ``` **4. Create an inference connector** Go to `http://localhost:5601/app/management/insightsAndAlerting/triggersActionsConnectors/connectors`, create an inference connector - Type: `AI connector` then - Service: `OpenAI` - API Key: Gwzk... Kidding, please ping someone - Model ID: `gpt-4o` - Task type: `completion` -> save **5. test the o11y assistant** Use the assistant as you would do for any other connector (just make sure the inference connector is selected as the one being used) and do your testing. 
--------- Co-authored-by: kibanamachine <42973632+kibanamachine@users.noreply.github.com> --- .../src/chat/welcome_message.tsx | 2 +- .../packages/kbn-ai-assistant/tsconfig.json | 1 + .../shared/ai-infra/inference-common/index.ts | 6 + .../inference-common/src/connectors.test.ts | 91 +++++++++ .../inference-common/src/connectors.ts | 76 +++++++ .../shared/inference/common/connectors.ts | 24 --- .../shared/inference/common/http_apis.ts | 8 +- .../plugins/shared/inference/public/types.ts | 3 +- .../inference/scripts/util/kibana_client.ts | 2 +- .../adapters/get_inference_adapter.test.ts | 7 +- .../adapters/get_inference_adapter.ts | 6 +- .../chat_complete/adapters/inference/index.ts | 8 + .../inference/inference_adapter.test.ts | 148 ++++++++++++++ .../adapters/inference/inference_adapter.ts | 85 ++++++++ .../adapters/openai/from_openai.ts | 46 +++++ .../chat_complete/adapters/openai/index.ts | 2 + .../adapters/openai/openai_adapter.test.ts | 2 +- .../adapters/openai/openai_adapter.ts | 184 +---------------- .../adapters/openai/process_openai_stream.ts | 52 +++++ .../adapters/openai/to_openai.test.ts | 187 ++++++++++++++++++ .../adapters/openai/to_openai.ts | 107 ++++++++++ .../utils/inference_executor.test.ts | 2 +- .../chat_complete/utils/inference_executor.ts | 6 +- .../server/inference_client/types.ts | 2 +- .../inference/server/routes/connectors.ts | 2 +- .../server/test_utils/inference_connector.ts | 2 +- .../server/test_utils/inference_executor.ts | 2 +- .../server/util/get_connector_by_id.test.ts | 4 +- .../server/util/get_connector_by_id.ts | 15 +- .../common/connectors.ts | 22 --- .../common/index.ts | 2 - .../public/index.ts | 1 - .../server/routes/connectors/route.ts | 5 +- .../common/inference/schema.ts | 2 +- .../inference/inference.test.ts | 33 +++- .../connector_types/inference/inference.ts | 13 +- .../scripts/evaluation/kibana_client.ts | 7 +- 37 files changed, 894 insertions(+), 273 deletions(-) create mode 100644 
x-pack/platform/packages/shared/ai-infra/inference-common/src/connectors.test.ts create mode 100644 x-pack/platform/packages/shared/ai-infra/inference-common/src/connectors.ts delete mode 100644 x-pack/platform/plugins/shared/inference/common/connectors.ts create mode 100644 x-pack/platform/plugins/shared/inference/server/chat_complete/adapters/inference/index.ts create mode 100644 x-pack/platform/plugins/shared/inference/server/chat_complete/adapters/inference/inference_adapter.test.ts create mode 100644 x-pack/platform/plugins/shared/inference/server/chat_complete/adapters/inference/inference_adapter.ts create mode 100644 x-pack/platform/plugins/shared/inference/server/chat_complete/adapters/openai/from_openai.ts create mode 100644 x-pack/platform/plugins/shared/inference/server/chat_complete/adapters/openai/process_openai_stream.ts create mode 100644 x-pack/platform/plugins/shared/inference/server/chat_complete/adapters/openai/to_openai.test.ts create mode 100644 x-pack/platform/plugins/shared/inference/server/chat_complete/adapters/openai/to_openai.ts delete mode 100644 x-pack/platform/plugins/shared/observability_solution/observability_ai_assistant/common/connectors.ts diff --git a/x-pack/packages/kbn-ai-assistant/src/chat/welcome_message.tsx b/x-pack/packages/kbn-ai-assistant/src/chat/welcome_message.tsx index 0783c7f64620a..6133df55c57e0 100644 --- a/x-pack/packages/kbn-ai-assistant/src/chat/welcome_message.tsx +++ b/x-pack/packages/kbn-ai-assistant/src/chat/welcome_message.tsx @@ -10,7 +10,7 @@ import { css } from '@emotion/css'; import { EuiFlexGroup, EuiFlexItem, EuiSpacer, useCurrentEuiBreakpoint } from '@elastic/eui'; import type { ActionConnector } from '@kbn/triggers-actions-ui-plugin/public'; import { GenerativeAIForObservabilityConnectorFeatureId } from '@kbn/actions-plugin/common'; -import { isSupportedConnectorType } from '@kbn/observability-ai-assistant-plugin/public'; +import { isSupportedConnectorType } from '@kbn/inference-common'; import { 
AssistantBeacon } from '@kbn/ai-assistant-icon'; import type { UseKnowledgeBaseResult } from '../hooks/use_knowledge_base'; import type { UseGenAIConnectorsResult } from '../hooks/use_genai_connectors'; diff --git a/x-pack/packages/kbn-ai-assistant/tsconfig.json b/x-pack/packages/kbn-ai-assistant/tsconfig.json index c23f92085c28d..d33b8642561eb 100644 --- a/x-pack/packages/kbn-ai-assistant/tsconfig.json +++ b/x-pack/packages/kbn-ai-assistant/tsconfig.json @@ -37,6 +37,7 @@ "@kbn/ml-plugin", "@kbn/share-plugin", "@kbn/ai-assistant-common", + "@kbn/inference-common", "@kbn/storybook", "@kbn/ai-assistant-icon", ] diff --git a/x-pack/platform/packages/shared/ai-infra/inference-common/index.ts b/x-pack/platform/packages/shared/ai-infra/inference-common/index.ts index 134b0f02811fe..0c6d254c0f527 100644 --- a/x-pack/platform/packages/shared/ai-infra/inference-common/index.ts +++ b/x-pack/platform/packages/shared/ai-infra/inference-common/index.ts @@ -95,3 +95,9 @@ export { } from './src/errors'; export { truncateList } from './src/truncate_list'; +export { + InferenceConnectorType, + isSupportedConnectorType, + isSupportedConnector, + type InferenceConnector, +} from './src/connectors'; diff --git a/x-pack/platform/packages/shared/ai-infra/inference-common/src/connectors.test.ts b/x-pack/platform/packages/shared/ai-infra/inference-common/src/connectors.test.ts new file mode 100644 index 0000000000000..a4729aa8a8578 --- /dev/null +++ b/x-pack/platform/packages/shared/ai-infra/inference-common/src/connectors.test.ts @@ -0,0 +1,91 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import { + InferenceConnectorType, + isSupportedConnectorType, + isSupportedConnector, + RawConnector, + COMPLETION_TASK_TYPE, +} from './connectors'; + +const createRawConnector = (parts: Partial): RawConnector => { + return { + id: 'id', + actionTypeId: 'connector-type', + name: 'some connector', + config: {}, + ...parts, + }; +}; + +describe('isSupportedConnectorType', () => { + it('returns true for supported connector types', () => { + expect(isSupportedConnectorType(InferenceConnectorType.OpenAI)).toBe(true); + expect(isSupportedConnectorType(InferenceConnectorType.Bedrock)).toBe(true); + expect(isSupportedConnectorType(InferenceConnectorType.Gemini)).toBe(true); + expect(isSupportedConnectorType(InferenceConnectorType.Inference)).toBe(true); + }); + it('returns false for unsupported connector types', () => { + expect(isSupportedConnectorType('anything-else')).toBe(false); + }); +}); + +describe('isSupportedConnector', () => { + // checks both the connector type and, for inference connectors, the taskType + + it('returns true for OpenAI connectors', () => { + expect( + isSupportedConnector(createRawConnector({ actionTypeId: InferenceConnectorType.OpenAI })) + ).toBe(true); + }); + + it('returns true for Bedrock connectors', () => { + expect( + isSupportedConnector(createRawConnector({ actionTypeId: InferenceConnectorType.Bedrock })) + ).toBe(true); + }); + + it('returns true for Gemini connectors', () => { + expect( + isSupportedConnector(createRawConnector({ actionTypeId: InferenceConnectorType.Gemini })) + ).toBe(true); + }); + + it('returns true for inference connectors with the right taskType', () => { + expect( + isSupportedConnector( + createRawConnector({ + actionTypeId: InferenceConnectorType.Inference, + config: { taskType: COMPLETION_TASK_TYPE }, + }) + ) + ).toBe(true); + }); + + it('returns false for inference connectors with a bad taskType', () => { + expect( + isSupportedConnector( + createRawConnector({ + actionTypeId: InferenceConnectorType.Inference, + config: { taskType: 'embeddings' }, + }) + ) + 
).toBe(false); + }); + + it('returns false for inference connectors without taskType', () => { + expect( + isSupportedConnector( + createRawConnector({ + actionTypeId: InferenceConnectorType.Inference, + config: {}, + }) + ) + ).toBe(false); + }); +}); diff --git a/x-pack/platform/packages/shared/ai-infra/inference-common/src/connectors.ts b/x-pack/platform/packages/shared/ai-infra/inference-common/src/connectors.ts new file mode 100644 index 0000000000000..da77d973614b5 --- /dev/null +++ b/x-pack/platform/packages/shared/ai-infra/inference-common/src/connectors.ts @@ -0,0 +1,76 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +/** + * The list of connector types that can be used with the inference APIs + */ +export enum InferenceConnectorType { + OpenAI = '.gen-ai', + Bedrock = '.bedrock', + Gemini = '.gemini', + Inference = '.inference', +} + +export const COMPLETION_TASK_TYPE = 'completion'; + +const allSupportedConnectorTypes = Object.values(InferenceConnectorType); + +export interface InferenceConnector { + type: InferenceConnectorType; + name: string; + connectorId: string; +} + +/** + * Checks if a given connector type is compatible for inference. + * + * Note: this check is not sufficient to assert if a given connector can be + * used for inference, as `.inference` connectors need additional check logic. + * Please use `isSupportedConnector` instead when possible. + */ +export function isSupportedConnectorType(id: string): id is InferenceConnectorType { + return allSupportedConnectorTypes.includes(id as InferenceConnectorType); +} + +/** + * Checks if a given connector is compatible for inference. + * + * A connector is compatible if: + * 1. its type is in the list of allowed types + * 2. 
for inference connectors, if its taskType is "completion" + */ +export function isSupportedConnector(connector: RawConnector): connector is RawInferenceConnector { + if (!isSupportedConnectorType(connector.actionTypeId)) { + return false; + } + if (connector.actionTypeId === InferenceConnectorType.Inference) { + const config = connector.config ?? {}; + if (config.taskType !== COMPLETION_TASK_TYPE) { + return false; + } + } + return true; +} + +/** + * Connector types are living in the actions plugin and we can't afford + * having dependencies from this package to some mid-level plugin, + * so we're just using our own connector mixin type. + */ +export interface RawConnector { + id: string; + actionTypeId: string; + name: string; + config?: Record; +} + +interface RawInferenceConnector { + id: string; + actionTypeId: InferenceConnectorType; + name: string; + config?: Record; +} diff --git a/x-pack/platform/plugins/shared/inference/common/connectors.ts b/x-pack/platform/plugins/shared/inference/common/connectors.ts deleted file mode 100644 index ee628f520feff..0000000000000 --- a/x-pack/platform/plugins/shared/inference/common/connectors.ts +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -export enum InferenceConnectorType { - OpenAI = '.gen-ai', - Bedrock = '.bedrock', - Gemini = '.gemini', -} - -const allSupportedConnectorTypes = Object.values(InferenceConnectorType); - -export interface InferenceConnector { - type: InferenceConnectorType; - name: string; - connectorId: string; -} - -export function isSupportedConnectorType(id: string): id is InferenceConnectorType { - return allSupportedConnectorTypes.includes(id as InferenceConnectorType); -} diff --git a/x-pack/platform/plugins/shared/inference/common/http_apis.ts b/x-pack/platform/plugins/shared/inference/common/http_apis.ts index c07fcd29b2211..f6a60051e84fb 100644 --- a/x-pack/platform/plugins/shared/inference/common/http_apis.ts +++ b/x-pack/platform/plugins/shared/inference/common/http_apis.ts @@ -5,8 +5,12 @@ * 2.0. */ -import type { FunctionCallingMode, Message, ToolOptions } from '@kbn/inference-common'; -import { InferenceConnector } from './connectors'; +import type { + FunctionCallingMode, + Message, + ToolOptions, + InferenceConnector, +} from '@kbn/inference-common'; export type ChatCompleteRequestBody = { connectorId: string; diff --git a/x-pack/platform/plugins/shared/inference/public/types.ts b/x-pack/platform/plugins/shared/inference/public/types.ts index 735abfb5459a0..f07fe1e636836 100644 --- a/x-pack/platform/plugins/shared/inference/public/types.ts +++ b/x-pack/platform/plugins/shared/inference/public/types.ts @@ -5,8 +5,7 @@ * 2.0. 
*/ -import type { ChatCompleteAPI, OutputAPI } from '@kbn/inference-common'; -import type { InferenceConnector } from '../common/connectors'; +import type { ChatCompleteAPI, OutputAPI, InferenceConnector } from '@kbn/inference-common'; /* eslint-disable @typescript-eslint/no-empty-interface*/ diff --git a/x-pack/platform/plugins/shared/inference/scripts/util/kibana_client.ts b/x-pack/platform/plugins/shared/inference/scripts/util/kibana_client.ts index ef6f1c4fdcdce..a3a75ea980523 100644 --- a/x-pack/platform/plugins/shared/inference/scripts/util/kibana_client.ts +++ b/x-pack/platform/plugins/shared/inference/scripts/util/kibana_client.ts @@ -25,9 +25,9 @@ import { withoutOutputUpdateEvents, type ToolOptions, ChatCompleteOptions, + type InferenceConnector, } from '@kbn/inference-common'; import type { ChatCompleteRequestBody } from '../../common/http_apis'; -import type { InferenceConnector } from '../../common/connectors'; import { createOutputApi } from '../../common/output/create_output_api'; import { eventSourceStreamIntoObservable } from '../../server/util/event_source_stream_into_observable'; diff --git a/x-pack/platform/plugins/shared/inference/server/chat_complete/adapters/get_inference_adapter.test.ts b/x-pack/platform/plugins/shared/inference/server/chat_complete/adapters/get_inference_adapter.test.ts index 558e0cd06ef91..f6613152f9f00 100644 --- a/x-pack/platform/plugins/shared/inference/server/chat_complete/adapters/get_inference_adapter.test.ts +++ b/x-pack/platform/plugins/shared/inference/server/chat_complete/adapters/get_inference_adapter.test.ts @@ -5,11 +5,12 @@ * 2.0. 
*/ -import { InferenceConnectorType } from '../../../common/connectors'; +import { InferenceConnectorType } from '@kbn/inference-common'; import { getInferenceAdapter } from './get_inference_adapter'; import { openAIAdapter } from './openai'; import { geminiAdapter } from './gemini'; import { bedrockClaudeAdapter } from './bedrock'; +import { inferenceAdapter } from './inference'; describe('getInferenceAdapter', () => { it('returns the openAI adapter for OpenAI type', () => { @@ -23,4 +24,8 @@ describe('getInferenceAdapter', () => { it('returns the bedrock adapter for Bedrock type', () => { expect(getInferenceAdapter(InferenceConnectorType.Bedrock)).toBe(bedrockClaudeAdapter); }); + + it('returns the inference adapter for Inference type', () => { + expect(getInferenceAdapter(InferenceConnectorType.Inference)).toBe(inferenceAdapter); + }); }); diff --git a/x-pack/platform/plugins/shared/inference/server/chat_complete/adapters/get_inference_adapter.ts b/x-pack/platform/plugins/shared/inference/server/chat_complete/adapters/get_inference_adapter.ts index f34b0c27a339f..ec5e6803ab86d 100644 --- a/x-pack/platform/plugins/shared/inference/server/chat_complete/adapters/get_inference_adapter.ts +++ b/x-pack/platform/plugins/shared/inference/server/chat_complete/adapters/get_inference_adapter.ts @@ -5,11 +5,12 @@ * 2.0. 
*/ -import { InferenceConnectorType } from '../../../common/connectors'; +import { InferenceConnectorType } from '@kbn/inference-common'; import type { InferenceConnectorAdapter } from '../types'; import { openAIAdapter } from './openai'; import { geminiAdapter } from './gemini'; import { bedrockClaudeAdapter } from './bedrock'; +import { inferenceAdapter } from './inference'; export const getInferenceAdapter = ( connectorType: InferenceConnectorType @@ -23,6 +24,9 @@ export const getInferenceAdapter = ( case InferenceConnectorType.Bedrock: return bedrockClaudeAdapter; + + case InferenceConnectorType.Inference: + return inferenceAdapter; } return undefined; diff --git a/x-pack/platform/plugins/shared/inference/server/chat_complete/adapters/inference/index.ts b/x-pack/platform/plugins/shared/inference/server/chat_complete/adapters/inference/index.ts new file mode 100644 index 0000000000000..040b4103dae80 --- /dev/null +++ b/x-pack/platform/plugins/shared/inference/server/chat_complete/adapters/inference/index.ts @@ -0,0 +1,8 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +export { inferenceAdapter } from './inference_adapter'; diff --git a/x-pack/platform/plugins/shared/inference/server/chat_complete/adapters/inference/inference_adapter.test.ts b/x-pack/platform/plugins/shared/inference/server/chat_complete/adapters/inference/inference_adapter.test.ts new file mode 100644 index 0000000000000..7cf5fc7bdfb8a --- /dev/null +++ b/x-pack/platform/plugins/shared/inference/server/chat_complete/adapters/inference/inference_adapter.test.ts @@ -0,0 +1,148 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import OpenAI from 'openai'; +import { v4 } from 'uuid'; +import { PassThrough } from 'stream'; +import { lastValueFrom, Subject, toArray } from 'rxjs'; +import type { Logger } from '@kbn/logging'; +import { loggerMock } from '@kbn/logging-mocks'; +import { ChatCompletionEventType, MessageRole } from '@kbn/inference-common'; +import { observableIntoEventSourceStream } from '../../../util/observable_into_event_source_stream'; +import { InferenceExecutor } from '../../utils/inference_executor'; +import { inferenceAdapter } from './inference_adapter'; + +function createOpenAIChunk({ + delta, + usage, +}: { + delta?: OpenAI.ChatCompletionChunk['choices'][number]['delta']; + usage?: OpenAI.ChatCompletionChunk['usage']; +}): OpenAI.ChatCompletionChunk { + return { + choices: delta + ? [ + { + finish_reason: null, + index: 0, + delta, + }, + ] + : [], + created: new Date().getTime(), + id: v4(), + model: 'gpt-4o', + object: 'chat.completion.chunk', + usage, + }; +} + +describe('inferenceAdapter', () => { + const executorMock = { + invoke: jest.fn(), + } as InferenceExecutor & { invoke: jest.MockedFn }; + + const logger = { + debug: jest.fn(), + error: jest.fn(), + } as unknown as Logger; + + beforeEach(() => { + executorMock.invoke.mockReset(); + }); + + const defaultArgs = { + executor: executorMock, + logger: loggerMock.create(), + }; + + describe('when creating the request', () => { + beforeEach(() => { + executorMock.invoke.mockImplementation(async () => { + return { + actionId: '', + status: 'ok', + data: new PassThrough(), + }; + }); + }); + + it('emits chunk events', async () => { + const source$ = new Subject>(); + + executorMock.invoke.mockImplementation(async () => { + return { + actionId: '', + status: 'ok', + data: observableIntoEventSourceStream(source$, logger), + }; + }); + + const response$ = inferenceAdapter.chatComplete({ + 
...defaultArgs, + messages: [ + { + role: MessageRole.User, + content: 'Hello', + }, + ], + }); + + source$.next( + createOpenAIChunk({ + delta: { + content: 'First', + }, + }) + ); + + source$.next( + createOpenAIChunk({ + delta: { + content: ', second', + }, + }) + ); + + source$.complete(); + + const allChunks = await lastValueFrom(response$.pipe(toArray())); + + expect(allChunks).toEqual([ + { + content: 'First', + tool_calls: [], + type: ChatCompletionEventType.ChatCompletionChunk, + }, + { + content: ', second', + tool_calls: [], + type: ChatCompletionEventType.ChatCompletionChunk, + }, + ]); + }); + + it('propagates the abort signal when provided', () => { + const abortController = new AbortController(); + + inferenceAdapter.chatComplete({ + logger, + executor: executorMock, + messages: [{ role: MessageRole.User, content: 'question' }], + abortSignal: abortController.signal, + }); + + expect(executorMock.invoke).toHaveBeenCalledTimes(1); + expect(executorMock.invoke).toHaveBeenCalledWith({ + subAction: 'unified_completion_stream', + subActionParams: expect.objectContaining({ + signal: abortController.signal, + }), + }); + }); + }); +}); diff --git a/x-pack/platform/plugins/shared/inference/server/chat_complete/adapters/inference/inference_adapter.ts b/x-pack/platform/plugins/shared/inference/server/chat_complete/adapters/inference/inference_adapter.ts new file mode 100644 index 0000000000000..323dec4f5789d --- /dev/null +++ b/x-pack/platform/plugins/shared/inference/server/chat_complete/adapters/inference/inference_adapter.ts @@ -0,0 +1,85 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import type OpenAI from 'openai'; +import { from, identity, switchMap, throwError } from 'rxjs'; +import { isReadable, Readable } from 'stream'; +import { createInferenceInternalError } from '@kbn/inference-common'; +import { eventSourceStreamIntoObservable } from '../../../util/event_source_stream_into_observable'; +import type { InferenceConnectorAdapter } from '../../types'; +import { + parseInlineFunctionCalls, + wrapWithSimulatedFunctionCalling, +} from '../../simulated_function_calling'; +import { + toolsToOpenAI, + toolChoiceToOpenAI, + messagesToOpenAI, + processOpenAIStream, +} from '../openai'; + +export const inferenceAdapter: InferenceConnectorAdapter = { + chatComplete: ({ + executor, + system, + messages, + toolChoice, + tools, + functionCalling, + logger, + abortSignal, + }) => { + const simulatedFunctionCalling = functionCalling === 'simulated'; + + let request: Omit & { model?: string }; + if (simulatedFunctionCalling) { + const wrapped = wrapWithSimulatedFunctionCalling({ + system, + messages, + toolChoice, + tools, + }); + request = { + messages: messagesToOpenAI({ system: wrapped.system, messages: wrapped.messages }), + }; + } else { + request = { + messages: messagesToOpenAI({ system, messages }), + tool_choice: toolChoiceToOpenAI(toolChoice), + tools: toolsToOpenAI(tools), + }; + } + + return from( + executor.invoke({ + subAction: 'unified_completion_stream', + subActionParams: { + body: request, + signal: abortSignal, + }, + }) + ).pipe( + switchMap((response) => { + if (response.status === 'error') { + return throwError(() => + createInferenceInternalError('Error calling the inference API', { + rootError: response.serviceMessage, + }) + ); + } + if (isReadable(response.data as any)) { + return eventSourceStreamIntoObservable(response.data as Readable); + } + return throwError(() => + createInferenceInternalError('Unexpected error', response.data as Record) + ); + }), + processOpenAIStream(), + simulatedFunctionCalling ? 
parseInlineFunctionCalls({ logger }) : identity + ); + }, +}; diff --git a/x-pack/platform/plugins/shared/inference/server/chat_complete/adapters/openai/from_openai.ts b/x-pack/platform/plugins/shared/inference/server/chat_complete/adapters/openai/from_openai.ts new file mode 100644 index 0000000000000..750ae4710104a --- /dev/null +++ b/x-pack/platform/plugins/shared/inference/server/chat_complete/adapters/openai/from_openai.ts @@ -0,0 +1,46 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import type OpenAI from 'openai'; +import { + ChatCompletionChunkEvent, + ChatCompletionEventType, + ChatCompletionTokenCountEvent, +} from '@kbn/inference-common'; + +export function chunkFromOpenAI(chunk: OpenAI.ChatCompletionChunk): ChatCompletionChunkEvent { + const delta = chunk.choices[0].delta; + + return { + type: ChatCompletionEventType.ChatCompletionChunk, + content: delta.content ?? '', + tool_calls: + delta.tool_calls?.map((toolCall) => { + return { + function: { + name: toolCall.function?.name ?? '', + arguments: toolCall.function?.arguments ?? '', + }, + toolCallId: toolCall.id ?? '', + index: toolCall.index, + }; + }) ?? 
[], + }; +} + +export function tokenCountFromOpenAI( + completionUsage: OpenAI.CompletionUsage +): ChatCompletionTokenCountEvent { + return { + type: ChatCompletionEventType.ChatCompletionTokenCount, + tokens: { + completion: completionUsage.completion_tokens, + prompt: completionUsage.prompt_tokens, + total: completionUsage.total_tokens, + }, + }; +} diff --git a/x-pack/platform/plugins/shared/inference/server/chat_complete/adapters/openai/index.ts b/x-pack/platform/plugins/shared/inference/server/chat_complete/adapters/openai/index.ts index 9aa1d94e01a52..ddf8441756cba 100644 --- a/x-pack/platform/plugins/shared/inference/server/chat_complete/adapters/openai/index.ts +++ b/x-pack/platform/plugins/shared/inference/server/chat_complete/adapters/openai/index.ts @@ -6,3 +6,5 @@ */ export { openAIAdapter } from './openai_adapter'; +export { toolChoiceToOpenAI, messagesToOpenAI, toolsToOpenAI } from './to_openai'; +export { processOpenAIStream } from './process_openai_stream'; diff --git a/x-pack/platform/plugins/shared/inference/server/chat_complete/adapters/openai/openai_adapter.test.ts b/x-pack/platform/plugins/shared/inference/server/chat_complete/adapters/openai/openai_adapter.test.ts index 9b7fbc388024f..d93dee627ec18 100644 --- a/x-pack/platform/plugins/shared/inference/server/chat_complete/adapters/openai/openai_adapter.test.ts +++ b/x-pack/platform/plugins/shared/inference/server/chat_complete/adapters/openai/openai_adapter.test.ts @@ -15,7 +15,7 @@ import { loggerMock } from '@kbn/logging-mocks'; import { ChatCompletionEventType, MessageRole } from '@kbn/inference-common'; import { observableIntoEventSourceStream } from '../../../util/observable_into_event_source_stream'; import { InferenceExecutor } from '../../utils/inference_executor'; -import { openAIAdapter } from '.'; +import { openAIAdapter } from './openai_adapter'; function createOpenAIChunk({ delta, diff --git 
a/x-pack/platform/plugins/shared/inference/server/chat_complete/adapters/openai/openai_adapter.ts b/x-pack/platform/plugins/shared/inference/server/chat_complete/adapters/openai/openai_adapter.ts index 0529820b1bfbf..8806429882e3f 100644 --- a/x-pack/platform/plugins/shared/inference/server/chat_complete/adapters/openai/openai_adapter.ts +++ b/x-pack/platform/plugins/shared/inference/server/chat_complete/adapters/openai/openai_adapter.ts @@ -6,41 +6,17 @@ */ import type OpenAI from 'openai'; -import type { - ChatCompletionAssistantMessageParam, - ChatCompletionMessageParam, - ChatCompletionSystemMessageParam, - ChatCompletionToolMessageParam, - ChatCompletionUserMessageParam, -} from 'openai/resources'; -import { - filter, - from, - identity, - map, - mergeMap, - Observable, - switchMap, - tap, - throwError, -} from 'rxjs'; +import { from, identity, switchMap, throwError } from 'rxjs'; import { isReadable, Readable } from 'stream'; -import { - ChatCompletionChunkEvent, - ChatCompletionEventType, - ChatCompletionTokenCountEvent, - createInferenceInternalError, - Message, - MessageRole, - ToolOptions, -} from '@kbn/inference-common'; -import { createTokenLimitReachedError } from '../../errors'; +import { createInferenceInternalError } from '@kbn/inference-common'; import { eventSourceStreamIntoObservable } from '../../../util/event_source_stream_into_observable'; import type { InferenceConnectorAdapter } from '../../types'; import { parseInlineFunctionCalls, wrapWithSimulatedFunctionCalling, } from '../../simulated_function_calling'; +import { messagesToOpenAI, toolsToOpenAI, toolChoiceToOpenAI } from './to_openai'; +import { processOpenAIStream } from './process_openai_stream'; export const openAIAdapter: InferenceConnectorAdapter = { chatComplete: ({ @@ -95,158 +71,8 @@ export const openAIAdapter: InferenceConnectorAdapter = { createInferenceInternalError('Unexpected error', response.data as Record) ); }), - filter((line) => !!line && line !== '[DONE]'), - map( - 
(line) => JSON.parse(line) as OpenAI.ChatCompletionChunk | { error: { message: string } } - ), - tap((line) => { - if ('error' in line) { - throw createInferenceInternalError(line.error.message); - } - if ( - 'choices' in line && - line.choices.length && - line.choices[0].finish_reason === 'length' - ) { - throw createTokenLimitReachedError(); - } - }), - filter((line): line is OpenAI.ChatCompletionChunk => { - return 'object' in line && line.object === 'chat.completion.chunk'; - }), - mergeMap((chunk): Observable => { - const events: Array = []; - if (chunk.usage) { - events.push(tokenCountFromOpenAI(chunk.usage)); - } - if (chunk.choices?.length) { - events.push(chunkFromOpenAI(chunk)); - } - return from(events); - }), + processOpenAIStream(), simulatedFunctionCalling ? parseInlineFunctionCalls({ logger }) : identity ); }, }; - -function chunkFromOpenAI(chunk: OpenAI.ChatCompletionChunk): ChatCompletionChunkEvent { - const delta = chunk.choices[0].delta; - - return { - type: ChatCompletionEventType.ChatCompletionChunk, - content: delta.content ?? '', - tool_calls: - delta.tool_calls?.map((toolCall) => { - return { - function: { - name: toolCall.function?.name ?? '', - arguments: toolCall.function?.arguments ?? '', - }, - toolCallId: toolCall.id ?? '', - index: toolCall.index, - }; - }) ?? [], - }; -} - -function tokenCountFromOpenAI( - completionUsage: OpenAI.CompletionUsage -): ChatCompletionTokenCountEvent { - return { - type: ChatCompletionEventType.ChatCompletionTokenCount, - tokens: { - completion: completionUsage.completion_tokens, - prompt: completionUsage.prompt_tokens, - total: completionUsage.total_tokens, - }, - }; -} - -function toolsToOpenAI(tools: ToolOptions['tools']): OpenAI.ChatCompletionCreateParams['tools'] { - return tools - ? Object.entries(tools).map(([toolName, { description, schema }]) => { - return { - type: 'function', - function: { - name: toolName, - description, - parameters: (schema ?? 
{ - type: 'object' as const, - properties: {}, - }) as unknown as Record, - }, - }; - }) - : undefined; -} - -function toolChoiceToOpenAI( - toolChoice: ToolOptions['toolChoice'] -): OpenAI.ChatCompletionCreateParams['tool_choice'] { - return typeof toolChoice === 'string' - ? toolChoice - : toolChoice - ? { - function: { - name: toolChoice.function, - }, - type: 'function' as const, - } - : undefined; -} - -function messagesToOpenAI({ - system, - messages, -}: { - system?: string; - messages: Message[]; -}): OpenAI.ChatCompletionMessageParam[] { - const systemMessage: ChatCompletionSystemMessageParam | undefined = system - ? { role: 'system', content: system } - : undefined; - - return [ - ...(systemMessage ? [systemMessage] : []), - ...messages.map((message): ChatCompletionMessageParam => { - const role = message.role; - - switch (role) { - case MessageRole.Assistant: - const assistantMessage: ChatCompletionAssistantMessageParam = { - role: 'assistant', - content: message.content, - tool_calls: message.toolCalls?.map((toolCall) => { - return { - function: { - name: toolCall.function.name, - arguments: - 'arguments' in toolCall.function - ? 
JSON.stringify(toolCall.function.arguments) - : '{}', - }, - id: toolCall.toolCallId, - type: 'function', - }; - }), - }; - return assistantMessage; - - case MessageRole.User: - const userMessage: ChatCompletionUserMessageParam = { - role: 'user', - content: message.content, - }; - return userMessage; - - case MessageRole.Tool: - const toolMessage: ChatCompletionToolMessageParam = { - role: 'tool', - content: JSON.stringify(message.response), - tool_call_id: message.toolCallId, - }; - return toolMessage; - } - }), - ]; -} diff --git a/x-pack/platform/plugins/shared/inference/server/chat_complete/adapters/openai/process_openai_stream.ts b/x-pack/platform/plugins/shared/inference/server/chat_complete/adapters/openai/process_openai_stream.ts new file mode 100644 index 0000000000000..65384ed52e5ff --- /dev/null +++ b/x-pack/platform/plugins/shared/inference/server/chat_complete/adapters/openai/process_openai_stream.ts @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import type OpenAI from 'openai'; +import { filter, from, map, mergeMap, Observable, tap } from 'rxjs'; +import { + ChatCompletionChunkEvent, + ChatCompletionTokenCountEvent, + createInferenceInternalError, +} from '@kbn/inference-common'; +import { createTokenLimitReachedError } from '../../errors'; +import { tokenCountFromOpenAI, chunkFromOpenAI } from './from_openai'; + +export function processOpenAIStream() { + return (source: Observable) => { + return source.pipe( + filter((line) => !!line && line !== '[DONE]'), + map( + (line) => JSON.parse(line) as OpenAI.ChatCompletionChunk | { error: { message: string } } + ), + tap((line) => { + if ('error' in line) { + throw createInferenceInternalError(line.error.message); + } + if ( + 'choices' in line && + line.choices.length && + line.choices[0].finish_reason === 'length' + ) { + throw createTokenLimitReachedError(); + } + }), + filter((line): line is OpenAI.ChatCompletionChunk => { + return 'object' in line && line.object === 'chat.completion.chunk'; + }), + mergeMap((chunk): Observable => { + const events: Array = []; + if (chunk.usage) { + events.push(tokenCountFromOpenAI(chunk.usage)); + } + if (chunk.choices?.length) { + events.push(chunkFromOpenAI(chunk)); + } + return from(events); + }) + ); + }; +} diff --git a/x-pack/platform/plugins/shared/inference/server/chat_complete/adapters/openai/to_openai.test.ts b/x-pack/platform/plugins/shared/inference/server/chat_complete/adapters/openai/to_openai.test.ts new file mode 100644 index 0000000000000..978f775c5d3dd --- /dev/null +++ b/x-pack/platform/plugins/shared/inference/server/chat_complete/adapters/openai/to_openai.test.ts @@ -0,0 +1,187 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import { MessageRole, ToolChoiceType } from '@kbn/inference-common'; +import { messagesToOpenAI, toolChoiceToOpenAI, toolsToOpenAI } from './to_openai'; + +describe('toolChoiceToOpenAI', () => { + it('returns the right value for tool choice types', () => { + expect(toolChoiceToOpenAI(ToolChoiceType.none)).toEqual('none'); + expect(toolChoiceToOpenAI(ToolChoiceType.auto)).toEqual('auto'); + expect(toolChoiceToOpenAI(ToolChoiceType.required)).toEqual('required'); + }); + + it('returns the right value for undefined', () => { + expect(toolChoiceToOpenAI(undefined)).toBeUndefined(); + }); + + it('returns the right value for named functions', () => { + expect(toolChoiceToOpenAI({ function: 'foo' })).toEqual({ + type: 'function', + function: { name: 'foo' }, + }); + }); +}); + +describe('toolsToOpenAI', () => { + it('converts tools to the expected format', () => { + expect( + toolsToOpenAI({ + myTool: { + description: 'my tool', + schema: { + type: 'object', + description: 'my tool schema', + properties: { + foo: { + type: 'string', + }, + }, + }, + }, + }) + ).toMatchInlineSnapshot(` + Array [ + Object { + "function": Object { + "description": "my tool", + "name": "myTool", + "parameters": Object { + "description": "my tool schema", + "properties": Object { + "foo": Object { + "type": "string", + }, + }, + "type": "object", + }, + }, + "type": "function", + }, + ] + `); + }); +}); + +describe('messagesToOpenAI', () => { + it('converts a user message', () => { + expect( + messagesToOpenAI({ + messages: [ + { + role: MessageRole.User, + content: 'question', + }, + ], + }) + ).toEqual([ + { + content: 'question', + role: 'user', + }, + ]); + }); + + it('converts single message and system', () => { + expect( + messagesToOpenAI({ + system: 'system message', + messages: [ + { + role: MessageRole.User, + content: 'question', + }, + ], + }) + ).toEqual([ + { + content: 'system message', + role: 'system', + }, + { + content: 'question', + role: 'user', + }, + ]); + }); + 
+ it('converts a tool call', () => { + expect( + messagesToOpenAI({ + messages: [ + { + role: MessageRole.Tool, + name: 'tool', + response: {}, + toolCallId: 'callId', + }, + ], + }) + ).toEqual([ + { + content: '{}', + role: 'tool', + tool_call_id: 'callId', + }, + ]); + }); + + it('converts an assistant message', () => { + expect( + messagesToOpenAI({ + messages: [ + { + role: MessageRole.Assistant, + content: 'response', + }, + ], + }) + ).toEqual([ + { + role: 'assistant', + content: 'response', + }, + ]); + }); + + it('converts an assistant tool call', () => { + expect( + messagesToOpenAI({ + messages: [ + { + role: MessageRole.Assistant, + content: null, + toolCalls: [ + { + toolCallId: 'id', + function: { + name: 'function', + arguments: {}, + }, + }, + ], + }, + ], + }) + ).toEqual([ + { + role: 'assistant', + content: '', + tool_calls: [ + { + function: { + arguments: '{}', + name: 'function', + }, + id: 'id', + type: 'function', + }, + ], + }, + ]); + }); +}); diff --git a/x-pack/platform/plugins/shared/inference/server/chat_complete/adapters/openai/to_openai.ts b/x-pack/platform/plugins/shared/inference/server/chat_complete/adapters/openai/to_openai.ts new file mode 100644 index 0000000000000..709b1fd4c6bfe --- /dev/null +++ b/x-pack/platform/plugins/shared/inference/server/chat_complete/adapters/openai/to_openai.ts @@ -0,0 +1,107 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import type OpenAI from 'openai'; +import type { + ChatCompletionAssistantMessageParam, + ChatCompletionMessageParam, + ChatCompletionSystemMessageParam, + ChatCompletionToolMessageParam, + ChatCompletionUserMessageParam, +} from 'openai/resources'; +import { Message, MessageRole, ToolOptions } from '@kbn/inference-common'; + +export function toolsToOpenAI( + tools: ToolOptions['tools'] +): OpenAI.ChatCompletionCreateParams['tools'] { + return tools + ? Object.entries(tools).map(([toolName, { description, schema }]) => { + return { + type: 'function', + function: { + name: toolName, + description, + parameters: (schema ?? { + type: 'object' as const, + properties: {}, + }) as unknown as Record, + }, + }; + }) + : undefined; +} + +export function toolChoiceToOpenAI( + toolChoice: ToolOptions['toolChoice'] +): OpenAI.ChatCompletionCreateParams['tool_choice'] { + return typeof toolChoice === 'string' + ? toolChoice + : toolChoice + ? { + function: { + name: toolChoice.function, + }, + type: 'function' as const, + } + : undefined; +} + +export function messagesToOpenAI({ + system, + messages, +}: { + system?: string; + messages: Message[]; +}): OpenAI.ChatCompletionMessageParam[] { + const systemMessage: ChatCompletionSystemMessageParam | undefined = system + ? { role: 'system', content: system } + : undefined; + + return [ + ...(systemMessage ? [systemMessage] : []), + ...messages.map((message): ChatCompletionMessageParam => { + const role = message.role; + + switch (role) { + case MessageRole.Assistant: + const assistantMessage: ChatCompletionAssistantMessageParam = { + role: 'assistant', + content: message.content ?? '', + tool_calls: message.toolCalls?.map((toolCall) => { + return { + function: { + name: toolCall.function.name, + arguments: + 'arguments' in toolCall.function + ? 
JSON.stringify(toolCall.function.arguments) + : '{}', + }, + id: toolCall.toolCallId, + type: 'function', + }; + }), + }; + return assistantMessage; + + case MessageRole.User: + const userMessage: ChatCompletionUserMessageParam = { + role: 'user', + content: message.content, + }; + return userMessage; + + case MessageRole.Tool: + const toolMessage: ChatCompletionToolMessageParam = { + role: 'tool', + content: JSON.stringify(message.response), + tool_call_id: message.toolCallId, + }; + return toolMessage; + } + }), + ]; +} diff --git a/x-pack/platform/plugins/shared/inference/server/chat_complete/utils/inference_executor.test.ts b/x-pack/platform/plugins/shared/inference/server/chat_complete/utils/inference_executor.test.ts index 1821b553dd6a9..1965d731885af 100644 --- a/x-pack/platform/plugins/shared/inference/server/chat_complete/utils/inference_executor.test.ts +++ b/x-pack/platform/plugins/shared/inference/server/chat_complete/utils/inference_executor.test.ts @@ -6,7 +6,7 @@ */ import { actionsClientMock } from '@kbn/actions-plugin/server/mocks'; -import { InferenceConnector, InferenceConnectorType } from '../../../common/connectors'; +import { InferenceConnector, InferenceConnectorType } from '@kbn/inference-common'; import { createInferenceExecutor, type InferenceExecutor } from './inference_executor'; describe('createInferenceExecutor', () => { diff --git a/x-pack/platform/plugins/shared/inference/server/chat_complete/utils/inference_executor.ts b/x-pack/platform/plugins/shared/inference/server/chat_complete/utils/inference_executor.ts index c461e6b6cdfb7..0849e71ccf975 100644 --- a/x-pack/platform/plugins/shared/inference/server/chat_complete/utils/inference_executor.ts +++ b/x-pack/platform/plugins/shared/inference/server/chat_complete/utils/inference_executor.ts @@ -11,7 +11,7 @@ import type { ActionsClient, PluginStartContract as ActionsPluginStart, } from '@kbn/actions-plugin/server'; -import type { InferenceConnector } from '../../../common/connectors'; 
+import type { InferenceConnector } from '@kbn/inference-common'; import { getConnectorById } from '../../util/get_connector_by_id'; export interface InferenceInvokeOptions { @@ -28,7 +28,7 @@ export type InferenceInvokeResult = ActionTypeExecutorResult InferenceConnector; - invoke(params: InferenceInvokeOptions): Promise; + invoke(params: InferenceInvokeOptions): Promise>; } export const createInferenceExecutor = ({ @@ -40,7 +40,7 @@ export const createInferenceExecutor = ({ }): InferenceExecutor => { return { getConnector: () => connector, - async invoke({ subAction, subActionParams }): Promise { + async invoke({ subAction, subActionParams }): Promise> { return await actionsClient.execute({ actionId: connector.connectorId, params: { diff --git a/x-pack/platform/plugins/shared/inference/server/inference_client/types.ts b/x-pack/platform/plugins/shared/inference/server/inference_client/types.ts index 193ce83f6d7b6..4037eac3fb7ce 100644 --- a/x-pack/platform/plugins/shared/inference/server/inference_client/types.ts +++ b/x-pack/platform/plugins/shared/inference/server/inference_client/types.ts @@ -10,8 +10,8 @@ import type { ChatCompleteAPI, BoundOutputAPI, OutputAPI, + InferenceConnector, } from '@kbn/inference-common'; -import type { InferenceConnector } from '../../common/connectors'; /** * An inference client, scoped to a request, that can be used to interact with LLMs. 
diff --git a/x-pack/platform/plugins/shared/inference/server/routes/connectors.ts b/x-pack/platform/plugins/shared/inference/server/routes/connectors.ts index 240e11a37f20e..d28dfc6780af4 100644 --- a/x-pack/platform/plugins/shared/inference/server/routes/connectors.ts +++ b/x-pack/platform/plugins/shared/inference/server/routes/connectors.ts @@ -10,7 +10,7 @@ import { InferenceConnector, InferenceConnectorType, isSupportedConnectorType, -} from '../../common/connectors'; +} from '@kbn/inference-common'; import type { InferenceServerStart, InferenceStartDependencies } from '../types'; export function registerConnectorsRoute({ diff --git a/x-pack/platform/plugins/shared/inference/server/test_utils/inference_connector.ts b/x-pack/platform/plugins/shared/inference/server/test_utils/inference_connector.ts index af7f35115325d..2ef7d05bdbd50 100644 --- a/x-pack/platform/plugins/shared/inference/server/test_utils/inference_connector.ts +++ b/x-pack/platform/plugins/shared/inference/server/test_utils/inference_connector.ts @@ -5,7 +5,7 @@ * 2.0. */ -import { InferenceConnector, InferenceConnectorType } from '../../common/connectors'; +import { InferenceConnector, InferenceConnectorType } from '@kbn/inference-common'; export const createInferenceConnectorMock = ( parts: Partial = {} diff --git a/x-pack/platform/plugins/shared/inference/server/test_utils/inference_executor.ts b/x-pack/platform/plugins/shared/inference/server/test_utils/inference_executor.ts index 64b5100a9db3d..9203f5eacf0de 100644 --- a/x-pack/platform/plugins/shared/inference/server/test_utils/inference_executor.ts +++ b/x-pack/platform/plugins/shared/inference/server/test_utils/inference_executor.ts @@ -5,7 +5,7 @@ * 2.0. 
*/ -import type { InferenceConnector } from '../../common/connectors'; +import type { InferenceConnector } from '@kbn/inference-common'; import { InferenceExecutor } from '../chat_complete/utils'; import { createInferenceConnectorMock } from './inference_connector'; diff --git a/x-pack/platform/plugins/shared/inference/server/util/get_connector_by_id.test.ts b/x-pack/platform/plugins/shared/inference/server/util/get_connector_by_id.test.ts index 7387944950f4a..17b5cbe86d7f4 100644 --- a/x-pack/platform/plugins/shared/inference/server/util/get_connector_by_id.test.ts +++ b/x-pack/platform/plugins/shared/inference/server/util/get_connector_by_id.test.ts @@ -7,7 +7,7 @@ import type { ActionResult as ActionConnector } from '@kbn/actions-plugin/server'; import { actionsClientMock } from '@kbn/actions-plugin/server/mocks'; -import { InferenceConnectorType } from '../../common/connectors'; +import { InferenceConnectorType } from '@kbn/inference-common'; import { getConnectorById } from './get_connector_by_id'; describe('getConnectorById', () => { @@ -68,7 +68,7 @@ describe('getConnectorById', () => { await expect(() => getConnectorById({ actionsClient, connectorId }) ).rejects.toThrowErrorMatchingInlineSnapshot( - `"Type '.tcp-pigeon' not recognized as a supported connector type"` + `"Connector 'tcp-pigeon-3-0' of type '.tcp-pigeon' not recognized as a supported connector"` ); }); diff --git a/x-pack/platform/plugins/shared/inference/server/util/get_connector_by_id.ts b/x-pack/platform/plugins/shared/inference/server/util/get_connector_by_id.ts index 1dbf9a6f0d75e..4bdbff0e1fecf 100644 --- a/x-pack/platform/plugins/shared/inference/server/util/get_connector_by_id.ts +++ b/x-pack/platform/plugins/shared/inference/server/util/get_connector_by_id.ts @@ -6,8 +6,11 @@ */ import type { ActionsClient, ActionResult as ActionConnector } from '@kbn/actions-plugin/server'; -import { createInferenceRequestError } from '@kbn/inference-common'; -import { isSupportedConnectorType, type 
InferenceConnector } from '../../common/connectors'; +import { + createInferenceRequestError, + isSupportedConnector, + type InferenceConnector, +} from '@kbn/inference-common'; /** * Retrieves a connector given the provided `connectorId` and asserts it's an inference connector @@ -29,11 +32,9 @@ export const getConnectorById = async ({ throw createInferenceRequestError(`No connector found for id '${connectorId}'`, 400); } - const actionTypeId = connector.actionTypeId; - - if (!isSupportedConnectorType(actionTypeId)) { + if (!isSupportedConnector(connector)) { throw createInferenceRequestError( - `Type '${actionTypeId}' not recognized as a supported connector type`, + `Connector '${connector.id}' of type '${connector.actionTypeId}' not recognized as a supported connector`, 400 ); } @@ -41,6 +42,6 @@ export const getConnectorById = async ({ return { connectorId: connector.id, name: connector.name, - type: actionTypeId, + type: connector.actionTypeId, }; }; diff --git a/x-pack/platform/plugins/shared/observability_solution/observability_ai_assistant/common/connectors.ts b/x-pack/platform/plugins/shared/observability_solution/observability_ai_assistant/common/connectors.ts deleted file mode 100644 index f176f4009ac84..0000000000000 --- a/x-pack/platform/plugins/shared/observability_solution/observability_ai_assistant/common/connectors.ts +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -export enum ObservabilityAIAssistantConnectorType { - Bedrock = '.bedrock', - OpenAI = '.gen-ai', - Gemini = '.gemini', -} - -export function isSupportedConnectorType( - type: string -): type is ObservabilityAIAssistantConnectorType { - return ( - type === ObservabilityAIAssistantConnectorType.Bedrock || - type === ObservabilityAIAssistantConnectorType.OpenAI || - type === ObservabilityAIAssistantConnectorType.Gemini - ); -} diff --git a/x-pack/platform/plugins/shared/observability_solution/observability_ai_assistant/common/index.ts b/x-pack/platform/plugins/shared/observability_solution/observability_ai_assistant/common/index.ts index 52afdf95d4a43..0157a6a2b0aae 100644 --- a/x-pack/platform/plugins/shared/observability_solution/observability_ai_assistant/common/index.ts +++ b/x-pack/platform/plugins/shared/observability_solution/observability_ai_assistant/common/index.ts @@ -47,8 +47,6 @@ export { export { concatenateChatCompletionChunks } from './utils/concatenate_chat_completion_chunks'; -export { isSupportedConnectorType } from './connectors'; - export { ShortIdTable } from './utils/short_id_table'; export { KnowledgeBaseType } from './types'; diff --git a/x-pack/platform/plugins/shared/observability_solution/observability_ai_assistant/public/index.ts b/x-pack/platform/plugins/shared/observability_solution/observability_ai_assistant/public/index.ts index 76e643c6ae0d5..f8ca9709a6e20 100644 --- a/x-pack/platform/plugins/shared/observability_solution/observability_ai_assistant/public/index.ts +++ b/x-pack/platform/plugins/shared/observability_solution/observability_ai_assistant/public/index.ts @@ -62,7 +62,6 @@ export { } from '../common/functions/visualize_esql'; export { - isSupportedConnectorType, FunctionVisibility, MessageRole, KnowledgeBaseEntryRole, diff --git a/x-pack/platform/plugins/shared/observability_solution/observability_ai_assistant/server/routes/connectors/route.ts 
b/x-pack/platform/plugins/shared/observability_solution/observability_ai_assistant/server/routes/connectors/route.ts index 80bc877e6f5f9..78e713b42e9f0 100644 --- a/x-pack/platform/plugins/shared/observability_solution/observability_ai_assistant/server/routes/connectors/route.ts +++ b/x-pack/platform/plugins/shared/observability_solution/observability_ai_assistant/server/routes/connectors/route.ts @@ -5,7 +5,7 @@ * 2.0. */ import { FindActionResult } from '@kbn/actions-plugin/server'; -import { isSupportedConnectorType } from '../../../common/connectors'; +import { isSupportedConnector } from '@kbn/inference-common'; import { createObservabilityAIAssistantServerRoute } from '../create_observability_ai_assistant_server_route'; const listConnectorsRoute = createObservabilityAIAssistantServerRoute({ @@ -37,8 +37,7 @@ const listConnectorsRoute = createObservabilityAIAssistantServerRoute({ return connectors.filter( (connector) => - availableTypes.includes(connector.actionTypeId) && - isSupportedConnectorType(connector.actionTypeId) + availableTypes.includes(connector.actionTypeId) && isSupportedConnector(connector) ); }, }); diff --git a/x-pack/plugins/stack_connectors/common/inference/schema.ts b/x-pack/plugins/stack_connectors/common/inference/schema.ts index c62e9782bb517..2213efef1d6e8 100644 --- a/x-pack/plugins/stack_connectors/common/inference/schema.ts +++ b/x-pack/plugins/stack_connectors/common/inference/schema.ts @@ -26,7 +26,7 @@ export const ChatCompleteParamsSchema = schema.object({ // subset of OpenAI.ChatCompletionMessageParam https://github.com/openai/openai-node/blob/master/src/resources/chat/completions.ts const AIMessage = schema.object({ role: schema.string(), - content: schema.maybe(schema.string()), + content: schema.maybe(schema.nullable(schema.string())), name: schema.maybe(schema.string()), tool_calls: schema.maybe( schema.arrayOf( diff --git a/x-pack/plugins/stack_connectors/server/connector_types/inference/inference.test.ts 
b/x-pack/plugins/stack_connectors/server/connector_types/inference/inference.test.ts index 4aa28d2952dba..febec4d27ff5e 100644 --- a/x-pack/plugins/stack_connectors/server/connector_types/inference/inference.test.ts +++ b/x-pack/plugins/stack_connectors/server/connector_types/inference/inference.test.ts @@ -60,11 +60,13 @@ describe('InferenceConnector', () => { }); it('uses the completion task_type is supplied', async () => { - const stream = Readable.from([ - `data: {"id":"chatcmpl-AbLKRuRMZCAcMMQdl96KMTUgAfZNg","choices":[{"delta":{"content":" you"},"index":0}],"model":"gpt-4o-2024-08-06","object":"chat.completion.chunk"}\n\n`, - `data: [DONE]\n\n`, - ]); - mockEsClient.transport.request.mockResolvedValue(stream); + mockEsClient.transport.request.mockResolvedValue({ + body: Readable.from([ + `data: {"id":"chatcmpl-AbLKRuRMZCAcMMQdl96KMTUgAfZNg","choices":[{"delta":{"content":" you"},"index":0}],"model":"gpt-4o-2024-08-06","object":"chat.completion.chunk"}\n\n`, + `data: [DONE]\n\n`, + ]), + statusCode: 200, + }); const response = await connector.performApiUnifiedCompletion({ body: { messages: [{ content: 'What is Elastic?', role: 'user' }] }, @@ -84,7 +86,7 @@ describe('InferenceConnector', () => { method: 'POST', path: '_inference/completion/test/_unified', }, - { asStream: true } + { asStream: true, meta: true } ); expect(response.choices[0].message.content).toEqual(' you'); }); @@ -264,6 +266,11 @@ describe('InferenceConnector', () => { }); it('the API call is successful with correct request parameters', async () => { + mockEsClient.transport.request.mockResolvedValue({ + body: Readable.from([`data: [DONE]\n\n`]), + statusCode: 200, + }); + await connector.performApiUnifiedCompletionStream({ body: { messages: [{ content: 'Hello world', role: 'user' }] }, }); @@ -282,11 +289,16 @@ describe('InferenceConnector', () => { method: 'POST', path: '_inference/completion/test/_unified', }, - { asStream: true } + { asStream: true, meta: true } ); }); it('signal is 
properly passed to streamApi', async () => { + mockEsClient.transport.request.mockResolvedValue({ + body: Readable.from([`data: [DONE]\n\n`]), + statusCode: 200, + }); + const signal = jest.fn() as unknown as AbortSignal; await connector.performApiUnifiedCompletionStream({ body: { messages: [{ content: 'Hello world', role: 'user' }] }, @@ -299,7 +311,7 @@ describe('InferenceConnector', () => { method: 'POST', path: '_inference/completion/test/_unified', }, - { asStream: true } + { asStream: true, meta: true, signal } ); }); @@ -319,7 +331,10 @@ describe('InferenceConnector', () => { `data: {"id":"chatcmpl-AbLKRuRMZCAcMMQdl96KMTUgAfZNg","choices":[{"delta":{"content":" you"},"index":0}],"model":"gpt-4o-2024-08-06","object":"chat.completion.chunk"}\n\n`, `data: [DONE]\n\n`, ]); - mockEsClient.transport.request.mockResolvedValue(stream); + mockEsClient.transport.request.mockResolvedValue({ + body: stream, + statusCode: 200, + }); const response = await connector.performApiUnifiedCompletionStream({ body: { messages: [{ content: 'What is Elastic?', role: 'user' }] }, }); diff --git a/x-pack/plugins/stack_connectors/server/connector_types/inference/inference.ts b/x-pack/plugins/stack_connectors/server/connector_types/inference/inference.ts index d6c9af0e1365e..63d8904a6af8a 100644 --- a/x-pack/plugins/stack_connectors/server/connector_types/inference/inference.ts +++ b/x-pack/plugins/stack_connectors/server/connector_types/inference/inference.ts @@ -5,6 +5,7 @@ * 2.0. 
*/ +import { text as streamToString } from 'node:stream/consumers'; import { ServiceParams, SubActionConnector } from '@kbn/actions-plugin/server'; import { Stream } from 'openai/streaming'; import { Readable } from 'stream'; @@ -181,7 +182,7 @@ export class InferenceConnector extends SubActionConnector { * @signal abort signal */ public async performApiUnifiedCompletionStream(params: UnifiedChatCompleteParams) { - return await this.esClient.transport.request( + const response = await this.esClient.transport.request( { method: 'POST', path: `_inference/completion/${this.inferenceId}/_unified`, @@ -189,8 +190,18 @@ export class InferenceConnector extends SubActionConnector { }, { asStream: true, + meta: true, + signal: params.signal, } ); + + // errors should be thrown as it will not be a stream response + if (response.statusCode >= 400) { + const error = await streamToString(response.body as unknown as Readable); + throw new Error(error); + } + + return response.body; } /** diff --git a/x-pack/solutions/observability/plugins/observability_ai_assistant_app/scripts/evaluation/kibana_client.ts b/x-pack/solutions/observability/plugins/observability_ai_assistant_app/scripts/evaluation/kibana_client.ts index f3b5ca357231b..69f6715da2dbe 100644 --- a/x-pack/solutions/observability/plugins/observability_ai_assistant_app/scripts/evaluation/kibana_client.ts +++ b/x-pack/solutions/observability/plugins/observability_ai_assistant_app/scripts/evaluation/kibana_client.ts @@ -5,6 +5,7 @@ * 2.0. 
*/ +import { isSupportedConnectorType } from '@kbn/inference-common'; import { BufferFlushEvent, ChatCompletionChunkEvent, @@ -21,11 +22,7 @@ import { import type { ObservabilityAIAssistantScreenContext } from '@kbn/observability-ai-assistant-plugin/common/types'; import type { AssistantScope } from '@kbn/ai-assistant-common'; import { throwSerializedChatCompletionErrors } from '@kbn/observability-ai-assistant-plugin/common/utils/throw_serialized_chat_completion_errors'; -import { - isSupportedConnectorType, - Message, - MessageRole, -} from '@kbn/observability-ai-assistant-plugin/common'; +import { Message, MessageRole } from '@kbn/observability-ai-assistant-plugin/common'; import { streamIntoObservable } from '@kbn/observability-ai-assistant-plugin/server'; import { ToolingLog } from '@kbn/tooling-log'; import axios, { AxiosInstance, AxiosResponse, isAxiosError } from 'axios'; From 0cc887be92df434671ad37fe89cb4ebcc0d5af3d Mon Sep 17 00:00:00 2001 From: Robert Jaszczurek <92210485+rbrtj@users.noreply.github.com> Date: Mon, 23 Dec 2024 11:56:12 +0100 Subject: [PATCH 02/21] [ML][UX]: Consistent Layout and UI Enhancements for ML Pages (#203813) ## Summary * Updated alignment for `Add to` action buttons across various ML pages - see: #184109 * Fixed the overflowing date picker on `Anomaly Detection` pages - see: [#204394](https://github.com/elastic/kibana/issues/204394) * Standardized gaps around items on pages to maintain consistent values of `8px` (`gutterSize = 's'`) * Fixed the header on the Data Visualizer page - see: [#204393](https://github.com/elastic/kibana/issues/204393) * Adjusted the layout for Change Point Detection * Updated toast messages & toast action button - see: #184109 * Added icons for attachments actions Exploration around new `Add to` actions buttons - the right column is the most recent one, see: #184109 : | Before | After (add_to button) | After (icon button) - current | | ------------- | ------------- | ------------- | | 
![Screenshot 2024-12-12 at 11 45 14](https://github.com/user-attachments/assets/08dc0be5-0b98-481d-9906-d3434f03f634) | ![Screenshot 2024-12-12 at 11 37 38](https://github.com/user-attachments/assets/0b2cbdcd-cad0-49aa-842f-123eebec1716) | ![Screenshot 2024-12-12 at 12 42 58](https://github.com/user-attachments/assets/c0a0c732-bbc0-4007-998e-df413fae612b) | | ![Screenshot 2024-12-12 at 11 45 49](https://github.com/user-attachments/assets/9ff45cf8-1c24-4ef4-ab59-2b54f1569c6e) | ![Screenshot 2024-12-12 at 11 39 34](https://github.com/user-attachments/assets/293255eb-eba5-4d90-a10b-0f41de0cc195) | ![Screenshot 2024-12-12 at 12 44 58](https://github.com/user-attachments/assets/740da2fb-ceed-4e6a-add6-9a8d695776a6) | | ![Screenshot 2024-12-12 at 11 46 30](https://github.com/user-attachments/assets/71cea9f4-7658-4776-865d-0f7c5682e67a) | ![Screenshot 2024-12-12 at 11 40 18](https://github.com/user-attachments/assets/b03e8a75-68d3-4c26-942c-1d41072a62ee) | ![image](https://github.com/user-attachments/assets/6a259924-7081-426c-8bd2-346e4f0ae152) | | ![Screenshot 2024-12-12 at 11 48 07](https://github.com/user-attachments/assets/2b340d38-26a5-45bc-851e-8b1956503500) | ![Screenshot 2024-12-12 at 11 42 03](https://github.com/user-attachments/assets/ecef0b37-a43c-42a3-911f-31d4acf9ac7b) | ![Screenshot 2024-12-12 at 12 46 14](https://github.com/user-attachments/assets/f9dddfe0-7296-4394-bb2f-94d702361f49) | | ![Screenshot 2024-12-12 at 11 49 05](https://github.com/user-attachments/assets/d670ad40-58d4-40fb-a88d-7ac5e6c1fbbd) | ![Screenshot 2024-12-12 at 11 43 40](https://github.com/user-attachments/assets/856f9476-c6ff-4405-8865-fb8784f3d818) | ![image](https://github.com/user-attachments/assets/b18f624b-e648-403f-9595-442b2723bdde) | Toasts: | Before | After | | ------ | ------ | | image | 
![image](https://github.com/user-attachments/assets/36955456-026a-4abe-b872-c72c115a2dbe) | Other changes: | Before | After | | ------ | ------ | | ![Screenshot 2024-12-13 at 17 57 36](https://github.com/user-attachments/assets/263940ea-9396-4f82-b14e-c9086c6d36e8) | ![Screenshot 2024-12-13 at 18 00 26](https://github.com/user-attachments/assets/49430be4-356b-4902-b855-7fc1b252fbdb) | | ![Screenshot 2024-12-13 at 18 06 59](https://github.com/user-attachments/assets/67ad0faf-42f7-44e1-9290-857e28a9d5e4) | ![Screenshot 2024-12-13 at 18 02 04](https://github.com/user-attachments/assets/357d7296-7b5f-4df5-b664-8bd99c93205b) | | ![Screenshot 2024-12-13 at 18 08 20](https://github.com/user-attachments/assets/819a7c33-9c7a-4423-be1b-cbec30dd8a97) | ![Screenshot 2024-12-13 at 18 09 30](https://github.com/user-attachments/assets/c4b3cb40-f572-4828-888b-4cfff6b565b9) | | ![Screenshot 2024-12-13 at 18 11 52](https://github.com/user-attachments/assets/c63ccdf3-aeaa-4047-a3b5-f67c11690020) | ![Screenshot 2024-12-13 at 18 10 34](https://github.com/user-attachments/assets/6a6343d5-a7f7-45da-bf40-46b14b257e41) | | ![Screenshot 2024-12-13 at 18 30 32](https://github.com/user-attachments/assets/7aa13ad8-ba6f-4801-b0fe-ff90dd9038c1) | ![Screenshot 2024-12-13 at 18 32 59](https://github.com/user-attachments/assets/17774c78-003d-46fd-b7bb-d21cdee7df47) | | ![Screenshot 2024-12-13 at 18 35 56](https://github.com/user-attachments/assets/b7b003c6-11a6-4a1d-97c2-c1b920c0fd1a) | ![Screenshot 2024-12-13 at 18 34 25](https://github.com/user-attachments/assets/5af49323-cb9c-433d-aa6f-91af21dfa5bf) | | image | image | - [ ] Any text added follows [EUI's writing guidelines](https://elastic.github.io/eui/#/guidelines/writing), uses sentence case text and includes [i18n support](https://github.com/elastic/kibana/blob/main/packages/kbn-i18n/README.md) --------- Co-authored-by: 
kibanamachine <42973632+kibanamachine@users.noreply.github.com> --- .../src/components/date_picker_wrapper.tsx | 7 +- .../components/full_time_range_selector.tsx | 2 +- .../data_drift/data_drift_page.tsx | 1 - .../index_data_visualizer_view.tsx | 79 ++++---- .../shared/aiops/public/cases/constants.ts | 32 ++++ .../change_point_detection/fields_config.tsx | 81 ++++---- .../log_categorization/attachments_menu.tsx | 13 +- .../log_categorization_page.tsx | 88 +++++---- .../sampling_menu/sampling_menu.tsx | 10 +- .../log_rate_analysis_attachments_menu.tsx | 13 +- .../components/page_header/page_header.tsx | 1 - .../aiops/public/hooks/use_cases_modal.ts | 22 ++- .../contexts/kibana/use_cases_modal.ts | 23 ++- .../explorer/anomaly_context_menu.tsx | 9 +- .../application/explorer/anomaly_timeline.tsx | 10 +- .../forecasting_modal/forecast_button.tsx | 2 + .../series_controls/series_controls.tsx | 4 +- .../timeseriesexplorer_controls.tsx | 179 ++++++++++++------ .../timeseriesexplorer/timeseriesexplorer.js | 60 +++--- .../shared/ml/public/cases/constants.ts | 27 +++ .../cases/public/common/translations.ts | 2 +- .../public/common/use_cases_toast.test.tsx | 2 +- .../cases/public/common/use_cases_toast.tsx | 28 ++- .../apps/cases/group2/attachment_framework.ts | 8 +- .../cases/attachment_framework.ts | 4 +- 25 files changed, 447 insertions(+), 260 deletions(-) create mode 100644 x-pack/platform/plugins/shared/aiops/public/cases/constants.ts create mode 100644 x-pack/platform/plugins/shared/ml/public/cases/constants.ts diff --git a/x-pack/platform/packages/private/ml/date_picker/src/components/date_picker_wrapper.tsx b/x-pack/platform/packages/private/ml/date_picker/src/components/date_picker_wrapper.tsx index 3925b51b6c38e..060b0ee997d00 100644 --- a/x-pack/platform/packages/private/ml/date_picker/src/components/date_picker_wrapper.tsx +++ b/x-pack/platform/packages/private/ml/date_picker/src/components/date_picker_wrapper.tsx @@ -87,10 +87,6 @@ interface 
DatePickerWrapperProps { * Width setting to be passed on to `EuiSuperDatePicker` */ width?: EuiSuperDatePickerProps['width']; - /** - * Boolean flag to set use of flex group wrapper - */ - flexGroup?: boolean; /** * Boolean flag to disable the date picker */ @@ -123,7 +119,6 @@ export const DatePickerWrapper: FC = (props) => { isLoading = false, showRefresh, width, - flexGroup = true, isDisabled = false, needsUpdate, onRefresh, @@ -363,6 +358,8 @@ export const DatePickerWrapper: FC = (props) => { ); + const flexGroup = !isTimeRangeSelectorEnabled || isAutoRefreshOnly === true; + const wrapped = flexGroup ? ( {flexItems} diff --git a/x-pack/platform/packages/private/ml/date_picker/src/components/full_time_range_selector.tsx b/x-pack/platform/packages/private/ml/date_picker/src/components/full_time_range_selector.tsx index da0740f884ea9..f6e0aa1850cb9 100644 --- a/x-pack/platform/packages/private/ml/date_picker/src/components/full_time_range_selector.tsx +++ b/x-pack/platform/packages/private/ml/date_picker/src/components/full_time_range_selector.tsx @@ -220,7 +220,7 @@ export const FullTimeRangeSelector: FC = (props) => }, [frozenDataPreference, showFrozenDataTierChoice]); return ( - + = ({ onRefresh, needsUpdate }) => { isAutoRefreshOnly={!hasValidTimeField} showRefresh={!hasValidTimeField} width="full" - flexGroup={!hasValidTimeField} onRefresh={onRefresh} needsUpdate={needsUpdate} />, diff --git a/x-pack/platform/plugins/private/data_visualizer/public/application/index_data_visualizer/components/index_data_visualizer_view/index_data_visualizer_view.tsx b/x-pack/platform/plugins/private/data_visualizer/public/application/index_data_visualizer/components/index_data_visualizer_view/index_data_visualizer_view.tsx index 5f1f78af90073..b6ab99583cf49 100644 --- a/x-pack/platform/plugins/private/data_visualizer/public/application/index_data_visualizer/components/index_data_visualizer_view/index_data_visualizer_view.tsx +++ 
b/x-pack/platform/plugins/private/data_visualizer/public/application/index_data_visualizer/components/index_data_visualizer_view/index_data_visualizer_view.tsx @@ -12,7 +12,6 @@ import type { Required } from 'utility-types'; import { getEsQueryConfig } from '@kbn/data-plugin/common'; import { - useEuiTheme, useEuiBreakpoint, useIsWithinMaxBreakpoint, EuiFlexGroup, @@ -21,7 +20,6 @@ import { EuiPanel, EuiProgress, EuiSpacer, - EuiTitle, } from '@elastic/eui'; import { type Filter, FilterStateStore, type Query, buildEsQuery } from '@kbn/es-query'; @@ -108,8 +106,6 @@ export interface IndexDataVisualizerViewProps { } export const IndexDataVisualizerView: FC = (dataVisualizerProps) => { - const { euiTheme } = useEuiTheme(); - const [savedRandomSamplerPreference, saveRandomSamplerPreference] = useStorage< DVKey, DVStorageMapped @@ -515,49 +511,40 @@ export const IndexDataVisualizerView: FC = (dataVi paddingSize="none" > - - - -

{currentDataView.getName()}

-
- -
- - {isWithinLargeBreakpoint ? : null} - - {hasValidTimeField ? ( - - - - ) : null} - - + {currentDataView.getName()} + {/* TODO: This management section shouldn't live inside the header */} + + + } + rightSideGroupProps={{ + gutterSize: 's', + 'data-test-subj': 'dataVisualizerTimeRangeSelectorSection', + }} + rightSideItems={[ + , + hasValidTimeField && ( + - - -
+ ), + ]} + /> diff --git a/x-pack/platform/plugins/shared/aiops/public/cases/constants.ts b/x-pack/platform/plugins/shared/aiops/public/cases/constants.ts new file mode 100644 index 0000000000000..547734955cbbb --- /dev/null +++ b/x-pack/platform/plugins/shared/aiops/public/cases/constants.ts @@ -0,0 +1,32 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import type { ChangePointDetectionViewType } from '@kbn/aiops-change-point-detection/constants'; +import { i18n } from '@kbn/i18n'; + +/** + * Titles for the cases toast messages + */ +export const CASES_TOAST_MESSAGES_TITLES = { + CHANGE_POINT_DETECTION: (viewType: ChangePointDetectionViewType, chartsCount: number) => + viewType === 'table' + ? i18n.translate('xpack.aiops.cases.changePointDetectionTableTitle', { + defaultMessage: 'Change point table', + }) + : i18n.translate('xpack.aiops.cases.changePointDetectionChartsTitle', { + defaultMessage: 'Change point {chartsCount, plural, one {chart} other {charts}}', + values: { + chartsCount, + }, + }), + LOG_RATE_ANALYSIS: i18n.translate('xpack.aiops.cases.logRateAnalysisTitle', { + defaultMessage: 'Log rate analysis', + }), + PATTERN_ANALYSIS: i18n.translate('xpack.aiops.cases.logPatternAnalysisTitle', { + defaultMessage: 'Log pattern analysis', + }), +}; diff --git a/x-pack/platform/plugins/shared/aiops/public/components/change_point_detection/fields_config.tsx b/x-pack/platform/plugins/shared/aiops/public/components/change_point_detection/fields_config.tsx index fb1a51c311668..226eb6fbbc2fc 100644 --- a/x-pack/platform/plugins/shared/aiops/public/components/change_point_detection/fields_config.tsx +++ b/x-pack/platform/plugins/shared/aiops/public/components/change_point_detection/fields_config.tsx @@ -56,6 +56,7 @@ import { import { 
useChangePointResults } from './use_change_point_agg_request'; import { useSplitFieldCardinality } from './use_split_field_cardinality'; import { ViewTypeSelector } from './view_type_selector'; +import { CASES_TOAST_MESSAGES_TITLES } from '../../cases/constants'; const selectControlCss = { width: '350px' }; @@ -215,12 +216,18 @@ const FieldPanel: FC = ({ progress, } = useChangePointResults(fieldConfig, requestParams, combinedQuery, splitFieldCardinality); - const openCasesModalCallback = useCasesModal(EMBEDDABLE_CHANGE_POINT_CHART_TYPE); - const selectedPartitions = useMemo(() => { return (selectedChangePoints[panelIndex] ?? []).map((v) => v.group?.value as string); }, [selectedChangePoints, panelIndex]); + const openCasesModalCallback = useCasesModal( + EMBEDDABLE_CHANGE_POINT_CHART_TYPE, + CASES_TOAST_MESSAGES_TITLES.CHANGE_POINT_DETECTION( + caseAttachment.viewType, + selectedPartitions.length + ) + ); + const caseAttachmentButtonDisabled = isDefined(fieldConfig.splitField) && selectedPartitions.length === 0; @@ -283,6 +290,7 @@ const FieldPanel: FC = ({ defaultMessage: 'To dashboard', }), panel: 'attachToDashboardPanel', + icon: 'dashboardApp', 'data-test-subj': 'aiopsChangePointDetectionAttachToDashboardButton', }, ] @@ -307,6 +315,7 @@ const FieldPanel: FC = ({ : {}), 'data-test-subj': 'aiopsChangePointDetectionAttachToCaseButton', panel: 'attachToCasePanel', + icon: 'casesApp', }, ] : []), @@ -513,42 +522,37 @@ const FieldPanel: FC = ({ return ( - + - - - !prevState)} - aria-label={i18n.translate('xpack.aiops.changePointDetection.expandConfigLabel', { - defaultMessage: 'Expand configuration', - })} - /> - - - - - - } - value={progress ?? 0} - max={100} - valueText - size="m" + !prevState)} + aria-label={i18n.translate('xpack.aiops.changePointDetection.expandConfigLabel', { + defaultMessage: 'Expand configuration', + })} + size="s" + /> + + + + + + - - - + } + value={progress ?? 
0} + max={100} + valueText + size="m" + /> + - + @@ -565,8 +569,11 @@ const FieldPanel: FC = ({ defaultMessage: 'Context menu', } )} - iconType="boxesHorizontal" color="text" + display="base" + size="s" + isSelected={isActionMenuOpen} + iconType="boxesHorizontal" onClick={setIsActionMenuOpen.bind(null, !isActionMenuOpen)} /> } @@ -678,7 +685,7 @@ export const FieldsControls: FC> = ({ : undefined } > - + onChangeFn('fn', v)} /> diff --git a/x-pack/platform/plugins/shared/aiops/public/components/log_categorization/attachments_menu.tsx b/x-pack/platform/plugins/shared/aiops/public/components/log_categorization/attachments_menu.tsx index 409b489ff4510..a82e1a4b087ed 100644 --- a/x-pack/platform/plugins/shared/aiops/public/components/log_categorization/attachments_menu.tsx +++ b/x-pack/platform/plugins/shared/aiops/public/components/log_categorization/attachments_menu.tsx @@ -34,6 +34,7 @@ import type { PatternAnalysisEmbeddableState } from '../../embeddables/pattern_a import type { RandomSamplerOption, RandomSamplerProbability } from './sampling_menu/random_sampler'; import { useCasesModal } from '../../hooks/use_cases_modal'; import { useAiopsAppContext } from '../../hooks/use_aiops_app_context'; +import { CASES_TOAST_MESSAGES_TITLES } from '../../cases/constants'; const SavedObjectSaveModalDashboard = withSuspense(LazySavedObjectSaveModalDashboard); @@ -66,7 +67,10 @@ export const AttachmentsMenu = ({ update: false, }; - const openCasesModalCallback = useCasesModal(EMBEDDABLE_PATTERN_ANALYSIS_TYPE); + const openCasesModalCallback = useCasesModal( + EMBEDDABLE_PATTERN_ANALYSIS_TYPE, + CASES_TOAST_MESSAGES_TITLES.PATTERN_ANALYSIS + ); const timeRange = useTimeRangeUpdates(); @@ -123,6 +127,7 @@ export const AttachmentsMenu = ({ defaultMessage: 'Add to dashboard', }), panel: 'attachToDashboardPanel', + icon: 'dashboardApp', 'data-test-subj': 'aiopsLogPatternAnalysisAttachToDashboardButton', }, ] @@ -133,6 +138,7 @@ export const AttachmentsMenu = ({ name: 
i18n.translate('xpack.aiops.logCategorization.attachToCaseLabel', { defaultMessage: 'Add to case', }), + icon: 'casesApp', 'data-test-subj': 'aiopsLogPatternAnalysisAttachToCaseButton', onClick: () => { setIsActionMenuOpen(false); @@ -218,8 +224,11 @@ export const AttachmentsMenu = ({ defaultMessage: 'Attachments', } )} - iconType="boxesHorizontal" + size="m" color="text" + display="base" + isSelected={isActionMenuOpen} + iconType="boxesHorizontal" onClick={() => setIsActionMenuOpen(!isActionMenuOpen)} /> } diff --git a/x-pack/platform/plugins/shared/aiops/public/components/log_categorization/log_categorization_page.tsx b/x-pack/platform/plugins/shared/aiops/public/components/log_categorization/log_categorization_page.tsx index 9bebf4d9b731a..482ef5801183d 100644 --- a/x-pack/platform/plugins/shared/aiops/public/components/log_categorization/log_categorization_page.tsx +++ b/x-pack/platform/plugins/shared/aiops/public/components/log_categorization/log_categorization_page.tsx @@ -357,48 +357,56 @@ export const LogCategorizationPage: FC = () => { - - - - - - - - {loading === false ? ( - { - loadCategories(); - }} - data-test-subj="aiopsLogPatternAnalysisRunButton" + + + + - - - ) : ( - cancelRequest()} - > - Cancel - - )} - - - + + + + {loading === false ? 
( + { + loadCategories(); + }} + data-test-subj="aiopsLogPatternAnalysisRunButton" + > + + + ) : ( + cancelRequest()} + > + Cancel + + )} + + + loadCategories()} /> diff --git a/x-pack/platform/plugins/shared/aiops/public/components/log_categorization/sampling_menu/sampling_menu.tsx b/x-pack/platform/plugins/shared/aiops/public/components/log_categorization/sampling_menu/sampling_menu.tsx index d2dd9591e76f7..7da2736fdd453 100644 --- a/x-pack/platform/plugins/shared/aiops/public/components/log_categorization/sampling_menu/sampling_menu.tsx +++ b/x-pack/platform/plugins/shared/aiops/public/components/log_categorization/sampling_menu/sampling_menu.tsx @@ -8,7 +8,7 @@ import type { FC } from 'react'; import { useMemo } from 'react'; import React, { useState } from 'react'; -import { EuiPopover, EuiButtonEmpty, EuiPanel } from '@elastic/eui'; +import { EuiPopover, EuiPanel, EuiButton } from '@elastic/eui'; import useObservable from 'react-use/lib/useObservable'; import type { RandomSampler } from './random_sampler'; @@ -34,14 +34,16 @@ export const SamplingMenu: FC = ({ randomSampler, reload }) => { data-test-subj="aiopsRandomSamplerOptionsPopover" id="aiopsSamplingOptions" button={ - setShowSamplingOptionsPopover(!showSamplingOptionsPopover)} + color="text" iconSide="right" - iconType="arrowDown" + isSelected={showSamplingOptionsPopover} + iconType={showSamplingOptionsPopover ? 'arrowUp' : 'arrowDown'} > {buttonText} - +
} isOpen={showSamplingOptionsPopover} closePopover={() => setShowSamplingOptionsPopover(false)} diff --git a/x-pack/platform/plugins/shared/aiops/public/components/log_rate_analysis/log_rate_analysis_content/log_rate_analysis_attachments_menu.tsx b/x-pack/platform/plugins/shared/aiops/public/components/log_rate_analysis/log_rate_analysis_content/log_rate_analysis_attachments_menu.tsx index d7e68ae42799c..def6721c2adb3 100644 --- a/x-pack/platform/plugins/shared/aiops/public/components/log_rate_analysis/log_rate_analysis_content/log_rate_analysis_attachments_menu.tsx +++ b/x-pack/platform/plugins/shared/aiops/public/components/log_rate_analysis/log_rate_analysis_content/log_rate_analysis_attachments_menu.tsx @@ -28,6 +28,7 @@ import { } from '@elastic/eui'; import type { WindowParameters } from '@kbn/aiops-log-rate-analysis/window_parameters'; import type { SignificantItem } from '@kbn/ml-agg-utils'; +import { CASES_TOAST_MESSAGES_TITLES } from '../../../cases/constants'; import { useCasesModal } from '../../../hooks/use_cases_modal'; import { useDataSource } from '../../../hooks/use_data_source'; import type { LogRateAnalysisEmbeddableState } from '../../../embeddables/log_rate_analysis/types'; @@ -60,7 +61,10 @@ export const LogRateAnalysisAttachmentsMenu = ({ const timeRange = useTimeRangeUpdates(); const absoluteTimeRange = useTimeRangeUpdates(true); - const openCasesModalCallback = useCasesModal(EMBEDDABLE_LOG_RATE_ANALYSIS_TYPE); + const openCasesModalCallback = useCasesModal( + EMBEDDABLE_LOG_RATE_ANALYSIS_TYPE, + CASES_TOAST_MESSAGES_TITLES.LOG_RATE_ANALYSIS + ); const canEditDashboards = capabilities.dashboard.createNew; @@ -120,6 +124,7 @@ export const LogRateAnalysisAttachmentsMenu = ({ name: i18n.translate('xpack.aiops.logRateAnalysis.addToDashboardTitle', { defaultMessage: 'Add to dashboard', }), + icon: 'dashboardApp', panel: 'attachToDashboardPanel', 'data-test-subj': 'aiopsLogRateAnalysisAttachToDashboardButton', }, @@ -131,6 +136,7 @@ export const 
LogRateAnalysisAttachmentsMenu = ({ name: i18n.translate('xpack.aiops.logRateAnalysis.attachToCaseLabel', { defaultMessage: 'Add to case', }), + icon: 'casesApp', 'data-test-subj': 'aiopsLogRateAnalysisAttachToCaseButton', disabled: !isCasesAttachmentEnabled, ...(!isCasesAttachmentEnabled @@ -217,8 +223,11 @@ export const LogRateAnalysisAttachmentsMenu = ({ aria-label={i18n.translate('xpack.aiops.logRateAnalysis.attachmentsMenuAriaLabel', { defaultMessage: 'Attachments', })} - iconType="boxesHorizontal" color="text" + display="base" + size="s" + isSelected={isActionMenuOpen} + iconType="boxesHorizontal" onClick={() => setIsActionMenuOpen(!isActionMenuOpen)} /> } diff --git a/x-pack/platform/plugins/shared/aiops/public/components/page_header/page_header.tsx b/x-pack/platform/plugins/shared/aiops/public/components/page_header/page_header.tsx index a01e715e86272..3efed2b74093e 100644 --- a/x-pack/platform/plugins/shared/aiops/public/components/page_header/page_header.tsx +++ b/x-pack/platform/plugins/shared/aiops/public/components/page_header/page_header.tsx @@ -80,7 +80,6 @@ export const PageHeader: FC = () => { isAutoRefreshOnly={!hasValidTimeField} showRefresh={!hasValidTimeField} width="full" - flexGroup={!hasValidTimeField} />, hasValidTimeField && ( = * Returns a callback for opening the cases modal with provided attachment state. 
*/ export const useCasesModal = ( - embeddableType: EmbeddableType + embeddableType: EmbeddableType, + title: string ) => { const { cases } = useAiopsAppContext(); - const selectCaseModal = cases?.hooks.useCasesAddToExistingCaseModal(); + const successMessage = useMemo(() => { + return i18n.translate('xpack.aiops.useCasesModal.successMessage', { + defaultMessage: '{title} added to case.', + values: { title }, + }); + }, [title]); + + const selectCaseModal = cases?.hooks.useCasesAddToExistingCaseModal({ + successToaster: { + content: successMessage, + }, + }); return useCallback( (persistableState: Partial, 'id'>>) => { @@ -64,7 +77,6 @@ export const useCasesModal = ( ], }); }, - // eslint-disable-next-line react-hooks/exhaustive-deps - [embeddableType] + [embeddableType, selectCaseModal] ); }; diff --git a/x-pack/platform/plugins/shared/ml/public/application/contexts/kibana/use_cases_modal.ts b/x-pack/platform/plugins/shared/ml/public/application/contexts/kibana/use_cases_modal.ts index c6142e715bad7..f24a541884555 100644 --- a/x-pack/platform/plugins/shared/ml/public/application/contexts/kibana/use_cases_modal.ts +++ b/x-pack/platform/plugins/shared/ml/public/application/contexts/kibana/use_cases_modal.ts @@ -5,9 +5,10 @@ * 2.0. */ -import { useCallback } from 'react'; +import { useCallback, useMemo } from 'react'; import { stringHash } from '@kbn/ml-string-hash'; import { AttachmentType } from '@kbn/cases-plugin/common'; +import { i18n } from '@kbn/i18n'; import { useMlKibana } from './kibana_context'; import type { MappedEmbeddableTypeOf, MlEmbeddableTypes } from '../../../embeddables'; @@ -15,13 +16,25 @@ import type { MappedEmbeddableTypeOf, MlEmbeddableTypes } from '../../../embedda * Returns a callback for opening the cases modal with provided attachment state. 
*/ export const useCasesModal = ( - embeddableType: EmbeddableType + embeddableType: EmbeddableType, + title: string ) => { const { services: { cases }, } = useMlKibana(); - const selectCaseModal = cases?.hooks.useCasesAddToExistingCaseModal(); + const successMessage = useMemo(() => { + return i18n.translate('xpack.ml.useCasesModal.successMessage', { + defaultMessage: '{title} added to case.', + values: { title }, + }); + }, [title]); + + const selectCaseModal = cases?.hooks.useCasesAddToExistingCaseModal({ + successToaster: { + content: successMessage, + }, + }); return useCallback( (persistableState: Partial, 'id'>>) => { @@ -48,7 +61,7 @@ export const useCasesModal = ( ], }); }, - // eslint-disable-next-line react-hooks/exhaustive-deps - [embeddableType] + + [embeddableType, selectCaseModal] ); }; diff --git a/x-pack/platform/plugins/shared/ml/public/application/explorer/anomaly_context_menu.tsx b/x-pack/platform/plugins/shared/ml/public/application/explorer/anomaly_context_menu.tsx index 7904a55264d08..ccee7f8fa1be5 100644 --- a/x-pack/platform/plugins/shared/ml/public/application/explorer/anomaly_context_menu.tsx +++ b/x-pack/platform/plugins/shared/ml/public/application/explorer/anomaly_context_menu.tsx @@ -52,6 +52,7 @@ import { useMlKibana } from '../contexts/kibana'; import type { AppStateSelectedCells, ExplorerJob } from './explorer_utils'; import { getSelectionInfluencers, getSelectionTimeRange } from './explorer_utils'; import { getDefaultExplorerChartsPanelTitle } from '../../embeddables/anomaly_charts/utils'; +import { CASES_TOAST_MESSAGES_TITLES } from '../../cases/constants'; interface AnomalyContextMenuProps { selectedJobs: ExplorerJob[]; @@ -99,7 +100,10 @@ export const AnomalyContextMenu: FC = ({ [setIsMenuOpen] ); - const openCasesModal = useCasesModal(ANOMALY_EXPLORER_CHARTS_EMBEDDABLE_TYPE); + const openCasesModal = useCasesModal( + ANOMALY_EXPLORER_CHARTS_EMBEDDABLE_TYPE, + CASES_TOAST_MESSAGES_TITLES.ANOMALY_CHARTS(maxSeriesToPlot) + ); 
const canEditDashboards = capabilities.dashboard?.createNew ?? false; const casesPrivileges = cases?.helpers.canUseCases(); @@ -266,6 +270,7 @@ export const AnomalyContextMenu: FC = ({ /> ), panel: 'addToDashboardPanel', + icon: 'dashboardApp', 'data-test-subj': 'mlAnomalyAddChartsToDashboardButton', }); @@ -286,6 +291,7 @@ export const AnomalyContextMenu: FC = ({ name: ( ), + icon: 'casesApp', panel: 'addToCasePanel', 'data-test-subj': 'mlAnomalyAttachChartsToCasesButton', }); @@ -329,6 +335,7 @@ export const AnomalyContextMenu: FC = ({ defaultMessage: 'Actions', })} color="text" + display="base" iconType="boxesHorizontal" onClick={setIsMenuOpen.bind(null, !isMenuOpen)} data-test-subj="mlExplorerAnomalyPanelMenu" diff --git a/x-pack/platform/plugins/shared/ml/public/application/explorer/anomaly_timeline.tsx b/x-pack/platform/plugins/shared/ml/public/application/explorer/anomaly_timeline.tsx index cad2ef9376890..e70ca44772ed8 100644 --- a/x-pack/platform/plugins/shared/ml/public/application/explorer/anomaly_timeline.tsx +++ b/x-pack/platform/plugins/shared/ml/public/application/explorer/anomaly_timeline.tsx @@ -66,6 +66,7 @@ import { useAnomalyExplorerContext } from './anomaly_explorer_context'; import { getTimeBoundsFromSelection } from './hooks/use_selected_cells'; import { SwimLaneWrapper } from './alerts'; import { Y_AXIS_LABEL_WIDTH } from './constants'; +import { CASES_TOAST_MESSAGES_TITLES } from '../../cases/constants'; import type { ExplorerState } from './explorer_data'; import { useJobSelection } from './hooks/use_job_selection'; @@ -187,7 +188,10 @@ export const AnomalyTimeline: FC = React.memo( [severityUpdate, swimLaneSeverity] ); - const openCasesModalCallback = useCasesModal(ANOMALY_SWIMLANE_EMBEDDABLE_TYPE); + const openCasesModalCallback = useCasesModal( + ANOMALY_SWIMLANE_EMBEDDABLE_TYPE, + CASES_TOAST_MESSAGES_TITLES.ANOMALY_TIMELINE + ); const openCasesModal = useCallback( (swimLaneType: SwimlaneType) => { @@ -235,6 +239,7 @@ export const 
AnomalyTimeline: FC = React.memo( /> ), panel: 'addToDashboardPanel', + icon: 'dashboardApp', 'data-test-subj': 'mlAnomalyTimelinePanelAddToDashboardButton', }); @@ -280,6 +285,7 @@ export const AnomalyTimeline: FC = React.memo( defaultMessage="Add to case" /> ), + icon: 'casesApp', 'data-test-subj': 'mlAnomalyTimelinePanelAttachToCaseButton', }); @@ -428,6 +434,8 @@ export const AnomalyTimeline: FC = React.memo( defaultMessage: 'Actions', })} color="text" + display="base" + isSelected={isMenuOpen} iconType="boxesHorizontal" onClick={setIsMenuOpen.bind(null, !isMenuOpen)} data-test-subj="mlAnomalyTimelinePanelMenu" diff --git a/x-pack/platform/plugins/shared/ml/public/application/timeseriesexplorer/components/forecasting_modal/forecast_button.tsx b/x-pack/platform/plugins/shared/ml/public/application/timeseriesexplorer/components/forecasting_modal/forecast_button.tsx index c989bb6ebd38d..cf94ada9c35c9 100644 --- a/x-pack/platform/plugins/shared/ml/public/application/timeseriesexplorer/components/forecasting_modal/forecast_button.tsx +++ b/x-pack/platform/plugins/shared/ml/public/application/timeseriesexplorer/components/forecasting_modal/forecast_button.tsx @@ -20,6 +20,8 @@ export const ForecastButton: FC = ({ isDisabled, onClick, mode = 'full' } const Button = mode === 'full' ? EuiButton : EuiButtonEmpty; return (