Skip to content

Commit

Permalink
Merge branch '8.x' into backport/8.x/pr-198735
Browse files Browse the repository at this point in the history
  • Loading branch information
opauloh authored Nov 6, 2024
2 parents 70ba927 + 7fed185 commit 3af9c8e
Show file tree
Hide file tree
Showing 96 changed files with 1,128 additions and 545 deletions.
7 changes: 4 additions & 3 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -571,6 +571,7 @@
"@kbn/index-management-plugin": "link:x-pack/plugins/index_management",
"@kbn/index-management-shared-types": "link:x-pack/packages/index-management/index_management_shared_types",
"@kbn/index-patterns-test-plugin": "link:test/plugin_functional/plugins/index_patterns",
"@kbn/inference-common": "link:x-pack/packages/ai-infra/inference-common",
"@kbn/inference-plugin": "link:x-pack/plugins/inference",
"@kbn/inference_integration_flyout": "link:x-pack/packages/ml/inference_integration_flyout",
"@kbn/infra-forge": "link:x-pack/packages/kbn-infra-forge",
Expand Down Expand Up @@ -1025,10 +1026,10 @@
"@mapbox/mapbox-gl-rtl-text": "0.2.3",
"@mapbox/mapbox-gl-supported": "2.0.1",
"@mapbox/vector-tile": "1.3.1",
"@openfeature/core": "^1.4.0",
"@openfeature/core": "^1.5.0",
"@openfeature/launchdarkly-client-provider": "^0.3.0",
"@openfeature/server-sdk": "^1.15.1",
"@openfeature/web-sdk": "^1.2.4",
"@openfeature/server-sdk": "^1.16.1",
"@openfeature/web-sdk": "^1.3.1",
"@opentelemetry/api": "^1.1.0",
"@opentelemetry/api-metrics": "^0.31.0",
"@opentelemetry/exporter-metrics-otlp-grpc": "^0.34.0",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,8 @@ function createRawEventLoopDelaysDailyDocs() {
return { rawEventLoopDelaysDaily, outdatedRawEventLoopDelaysDaily };
}

describe(`daily rollups integration test`, () => {
// Failing: See https://github.com/elastic/kibana/issues/111821
describe.skip(`daily rollups integration test`, () => {
let esServer: TestElasticsearchUtils;
let root: TestKibanaUtils['root'];
let internalRepository: ISavedObjectsRepository;
Expand Down
2 changes: 2 additions & 0 deletions tsconfig.base.json
Original file line number Diff line number Diff line change
Expand Up @@ -1044,6 +1044,8 @@
"@kbn/index-patterns-test-plugin/*": ["test/plugin_functional/plugins/index_patterns/*"],
"@kbn/inference_integration_flyout": ["x-pack/packages/ml/inference_integration_flyout"],
"@kbn/inference_integration_flyout/*": ["x-pack/packages/ml/inference_integration_flyout/*"],
"@kbn/inference-common": ["x-pack/packages/ai-infra/inference-common"],
"@kbn/inference-common/*": ["x-pack/packages/ai-infra/inference-common/*"],
"@kbn/inference-plugin": ["x-pack/plugins/inference"],
"@kbn/inference-plugin/*": ["x-pack/plugins/inference/*"],
"@kbn/infra-forge": ["x-pack/packages/kbn-infra-forge"],
Expand Down
7 changes: 7 additions & 0 deletions x-pack/packages/ai-infra/inference-common/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
# @kbn/inference-common

Common types and utilities for the inference APIs and features.

The main purpose of this package is to maintain a clean separation between the inference
plugin's implementation and its underlying types, so that other packages and plugins can
use those types without depending directly on the plugin.
77 changes: 77 additions & 0 deletions x-pack/packages/ai-infra/inference-common/index.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,77 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

// Public entry point of @kbn/inference-common: re-exports the inference
// types and helpers so consumers never import from the internal src/ layout.

// Chat completion API: message/tool types, streamed events, event helpers,
// and the chat-completion-specific error codes and guards.
export {
  MessageRole,
  ChatCompletionEventType,
  ToolChoiceType,
  type Message,
  type AssistantMessage,
  type ToolMessage,
  type UserMessage,
  type ToolSchemaType,
  type FromToolSchema,
  type ToolSchema,
  type UnvalidatedToolCall,
  type ToolCallsOf,
  type ToolCall,
  type ToolDefinition,
  type ToolOptions,
  type FunctionCallingMode,
  type ToolChoice,
  type ChatCompleteAPI,
  type ChatCompleteOptions,
  type ChatCompletionResponse,
  type ChatCompletionTokenCountEvent,
  type ChatCompletionEvent,
  type ChatCompletionChunkEvent,
  type ChatCompletionChunkToolCall,
  type ChatCompletionMessageEvent,
  withoutTokenCountEvents,
  withoutChunkEvents,
  isChatCompletionMessageEvent,
  isChatCompletionEvent,
  isChatCompletionChunkEvent,
  isChatCompletionTokenCountEvent,
  ChatCompletionErrorCode,
  type ChatCompletionToolNotFoundError,
  type ChatCompletionToolValidationError,
  type ChatCompletionTokenLimitReachedError,
  isToolValidationError,
  isTokenLimitReachedError,
  isToolNotFoundError,
} from './src/chat_complete';
// Output API: structured-output events and the associated guards/operators.
export {
  OutputEventType,
  type OutputAPI,
  type OutputResponse,
  type OutputCompleteEvent,
  type OutputUpdateEvent,
  type Output,
  type OutputEvent,
  isOutputCompleteEvent,
  isOutputUpdateEvent,
  isOutputEvent,
  withoutOutputUpdateEvents,
} from './src/output';
// Base event types shared by all inference tasks.
export {
  InferenceTaskEventType,
  type InferenceTaskEvent,
  type InferenceTaskEventBase,
} from './src/inference_task';
// Inference error class, error codes, factory functions and type guards.
export {
  InferenceTaskError,
  InferenceTaskErrorCode,
  type InferenceTaskErrorEvent,
  type InferenceTaskInternalError,
  type InferenceTaskRequestError,
  createInferenceInternalError,
  createInferenceRequestError,
  isInferenceError,
  isInferenceInternalError,
  isInferenceRequestError,
} from './src/errors';
Original file line number Diff line number Diff line change
Expand Up @@ -5,10 +5,8 @@
* 2.0.
*/

import { OutputEvent, OutputEventType, OutputUpdateEvent } from '.';

/**
 * Type guard checking whether the given event is an {@link OutputUpdateEvent},
 * i.e. an incremental update emitted while the output is still being generated.
 *
 * Fix: the predicate previously compared against `OutputEventType.OutputComplete`,
 * so it matched *complete* events, contradicting the declared
 * `event is OutputUpdateEvent` return type.
 */
export function isOutputUpdateEvent<TId extends string>(
  event: OutputEvent
): event is OutputUpdateEvent<TId> {
  return event.type === OutputEventType.OutputUpdate;
}
// Jest configuration for the @kbn/inference-common package, using the shared
// @kbn/test preset; rootDir points back to the Kibana repository root.
module.exports = {
  preset: '@kbn/test',
  rootDir: '../../../..',
  roots: ['<rootDir>/x-pack/packages/ai-infra/inference-common'],
};
5 changes: 5 additions & 0 deletions x-pack/packages/ai-infra/inference-common/kibana.jsonc
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
{
"type": "shared-common",
"id": "@kbn/inference-common",
"owner": "@elastic/appex-ai-infra"
}
7 changes: 7 additions & 0 deletions x-pack/packages/ai-infra/inference-common/package.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
{
"name": "@kbn/inference-common",
"private": true,
"version": "1.0.0",
"license": "Elastic License 2.0",
"sideEffects": false
}
69 changes: 69 additions & 0 deletions x-pack/packages/ai-infra/inference-common/src/chat_complete/api.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,69 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import type { Observable } from 'rxjs';
import type { ToolOptions } from './tools';
import type { Message } from './messages';
import type { ChatCompletionEvent } from './events';

/**
 * Request a completion from the LLM based on a prompt or conversation.
 *
 * @example using the API to get an event observable.
 * ```ts
 * const events$ = chatComplete({
 *   connectorId: 'my-connector',
 *   system: "You are a helpful assistant",
 *   messages: [
 *     { role: MessageRole.User, content: "First question?"},
 *     { role: MessageRole.Assistant, content: "Some answer"},
 *     { role: MessageRole.User, content: "Another question?"},
 *   ]
 * });
 * ```
 */
export type ChatCompleteAPI = <TToolOptions extends ToolOptions = ToolOptions>(
  options: ChatCompleteOptions<TToolOptions>
) => ChatCompletionResponse<TToolOptions>;

/**
 * Options used to call the {@link ChatCompleteAPI}
 */
export type ChatCompleteOptions<TToolOptions extends ToolOptions = ToolOptions> = {
  /**
   * The ID of the connector to use.
   * Must be a genAI compatible connector, or an error will be thrown.
   */
  connectorId: string;
  /**
   * Optional system message for the LLM.
   */
  system?: string;
  /**
   * The list of messages for the current conversation
   */
  messages: Message[];
  /**
   * Function calling mode, defaults to "native".
   */
  functionCalling?: FunctionCallingMode;
} & TToolOptions;

/**
 * Response from the {@link ChatCompleteAPI}.
 *
 * Observable of {@link ChatCompletionEvent}
 */
export type ChatCompletionResponse<TToolOptions extends ToolOptions = ToolOptions> = Observable<
  ChatCompletionEvent<TToolOptions>
>;

/**
 * Define the function calling mode when using inference APIs.
 * - native: will use the LLM's native function calling (requires the LLM to have native support)
 * - simulated: will emulate function calling with function calling instructions
 */
export type FunctionCallingMode = 'native' | 'simulated';
Original file line number Diff line number Diff line change
Expand Up @@ -5,16 +5,22 @@
* 2.0.
*/

import { i18n } from '@kbn/i18n';
import { InferenceTaskError } from '../errors';
import type { UnvalidatedToolCall } from './tools';

/**
 * List of error codes that are specific to the {@link ChatCompleteAPI}
 */
export enum ChatCompletionErrorCode {
  // The request exceeded the model's token / context-window limit.
  TokenLimitReachedError = 'tokenLimitReachedError',
  // The LLM called a tool that was not in the list of available tools.
  ToolNotFoundError = 'toolNotFoundError',
  // The LLM called a tool with arguments that do not match its schema.
  ToolValidationError = 'toolValidationError',
}

/**
* Error thrown if the completion call fails because of a token limit
* error, e.g. when the context window is higher than the limit
*/
export type ChatCompletionTokenLimitReachedError = InferenceTaskError<
ChatCompletionErrorCode.TokenLimitReachedError,
{
Expand All @@ -23,13 +29,24 @@ export type ChatCompletionTokenLimitReachedError = InferenceTaskError<
}
>;

/**
 * Error thrown if the LLM called a tool that was not provided
 * in the list of available tools.
 */
export type ChatCompletionToolNotFoundError = InferenceTaskError<
  ChatCompletionErrorCode.ToolNotFoundError,
  {
    /** The name of the tool that the LLM attempted to call */
    name: string;
  }
>;

/**
* Error thrown when the LLM called a tool with parameters that
* don't match the tool's schema.
*
* The level of details on the error vary depending on the underlying LLM.
*/
export type ChatCompletionToolValidationError = InferenceTaskError<
ChatCompletionErrorCode.ToolValidationError,
{
Expand All @@ -40,49 +57,19 @@ export type ChatCompletionToolValidationError = InferenceTaskError<
}
>;

/**
 * Build a {@link ChatCompletionTokenLimitReachedError} with a translated,
 * user-facing message. Both the limit and the current count are optional,
 * as the underlying LLM may not report them.
 *
 * @param tokenLimit - the model's token limit, when known
 * @param tokenCount - the token count of the current conversation, when known
 * @returns the constructed error, carrying `{ tokenLimit, tokenCount }` as metadata
 */
export function createTokenLimitReachedError(
  tokenLimit?: number,
  tokenCount?: number
): ChatCompletionTokenLimitReachedError {
  const meta = { tokenLimit, tokenCount };
  const message = i18n.translate('xpack.inference.chatCompletionError.tokenLimitReachedError', {
    defaultMessage: `Token limit reached. Token limit is {tokenLimit}, but the current conversation has {tokenCount} tokens.`,
    values: meta,
  });
  return new InferenceTaskError(ChatCompletionErrorCode.TokenLimitReachedError, message, meta);
}

/**
 * Build a {@link ChatCompletionToolNotFoundError} for a tool the LLM
 * attempted to call but which was not in the list of available tools.
 *
 * @param name - the name of the tool that was called
 */
export function createToolNotFoundError(name: string): ChatCompletionToolNotFoundError {
  const message = `Tool ${name} called but was not available`;
  return new InferenceTaskError(ChatCompletionErrorCode.ToolNotFoundError, message, { name });
}

/**
 * Build a {@link ChatCompletionToolValidationError}.
 *
 * @param message - the validation failure description; level of detail
 *   depends on the underlying LLM
 * @param meta - contextual details about the failed tool call
 */
export function createToolValidationError(
  message: string,
  meta: {
    name?: string;
    arguments?: string;
    errorsText?: string;
    toolCalls?: UnvalidatedToolCall[];
  }
): ChatCompletionToolValidationError {
  return new InferenceTaskError(
    ChatCompletionErrorCode.ToolValidationError,
    message,
    meta
  );
}

/**
 * Check if an error is a {@link ChatCompletionToolValidationError}
 */
export function isToolValidationError(error?: Error): error is ChatCompletionToolValidationError {
  // Only InferenceTaskError instances carry a `code`; anything else
  // (including undefined) is not a tool validation error.
  if (!(error instanceof InferenceTaskError)) {
    return false;
  }
  return error.code === ChatCompletionErrorCode.ToolValidationError;
}

/**
* Check if an error is a {@link ChatCompletionTokenLimitReachedError}
*/
export function isTokenLimitReachedError(
error: Error
): error is ChatCompletionTokenLimitReachedError {
Expand All @@ -92,6 +79,9 @@ export function isTokenLimitReachedError(
);
}

/**
* Check if an error is a {@link ChatCompletionToolNotFoundError}
*/
export function isToolNotFoundError(error: Error): error is ChatCompletionToolNotFoundError {
return (
error instanceof InferenceTaskError && error.code === ChatCompletionErrorCode.ToolNotFoundError
Expand Down
Loading

0 comments on commit 3af9c8e

Please sign in to comment.