[Obs AI Assistant] Add API test to the alerts function to make sure that the query is correct (#203458)
viduni94 committed Dec 11, 2024
1 parent 71b81e4 commit f9c5e6d
Showing 2 changed files with 74 additions and 51 deletions.
@@ -137,57 +137,6 @@ export default function ApiTest({ getService }: FtrProviderContext) {
      ]);
    });

    it.skip('returns a useful error if the request fails', async () => {
      const interceptor = proxy.intercept('conversation', () => true);

      const passThrough = new PassThrough();

      supertest
        .post(CHAT_API_URL)
        .set('kbn-xsrf', 'foo')
        .send({
          name: 'my_api_call',
          messages,
          connectorId,
          functions: [],
          scopes: ['all'],
        })
        .expect(200)
        .pipe(passThrough);

      let data: string = '';

      passThrough.on('data', (chunk) => {
        data += chunk.toString('utf-8');
      });

      const simulator = await interceptor.waitForIntercept();

      await simulator.status(400);

      await simulator.rawWrite(
        JSON.stringify({
          error: {
            code: 'context_length_exceeded',
            message:
              "This model's maximum context length is 8192 tokens. However, your messages resulted in 11036 tokens. Please reduce the length of the messages.",
            param: 'messages',
            type: 'invalid_request_error',
          },
        })
      );

      await simulator.rawEnd();

      await new Promise<void>((resolve) => passThrough.on('end', () => resolve()));

      const response = JSON.parse(data.trim());

      expect(response.error.message).to.be(
        `Token limit reached. Token limit is 8192, but the current conversation has 11036 tokens.`
      );
    });

    describe('security roles and access privileges', () => {
      it('should deny access for users without the ai_assistant privilege', async () => {
        try {
@@ -0,0 +1,74 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { MessageRole, MessageAddEvent } from '@kbn/observability-ai-assistant-plugin/common';
import expect from '@kbn/expect';
import { LlmProxy, createLlmProxy } from '../../../common/create_llm_proxy';
import { FtrProviderContext } from '../../../common/ftr_provider_context';
import { getMessageAddedEvents, invokeChatCompleteWithFunctionRequest } from './helpers';
import {
  createProxyActionConnector,
  deleteActionConnector,
} from '../../../common/action_connectors';

export default function ApiTest({ getService }: FtrProviderContext) {
  const supertest = getService('supertest');
  const log = getService('log');
  const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantAPIClient');

  describe('when calling the alerts function', () => {
    let proxy: LlmProxy;
    let connectorId: string;
    let alertsEvents: MessageAddEvent[];

    const start = 'now-100h';
    const end = 'now';

    before(async () => {
      proxy = await createLlmProxy(log);
      connectorId = await createProxyActionConnector({ supertest, log, port: proxy.getPort() });

      void proxy
        .intercept('conversation', () => true, 'Hello from LLM Proxy')
        .completeAfterIntercept();

      const alertsResponseBody = await invokeChatCompleteWithFunctionRequest({
        connectorId,
        observabilityAIAssistantAPIClient,
        functionCall: {
          name: 'alerts',
          trigger: MessageRole.Assistant,
          arguments: JSON.stringify({ start, end }),
        },
      });

      await proxy.waitForAllInterceptorsSettled();

      alertsEvents = getMessageAddedEvents(alertsResponseBody);
    });

    after(async () => {
      proxy.close();
      await deleteActionConnector({ supertest, connectorId, log });
    });

    // This test ensures that invoking the alerts function does not result in an error.
    it('should execute the function without any errors', async () => {
      const alertsFunctionResponse = alertsEvents[0];
      expect(alertsFunctionResponse.message.message.name).to.be('alerts');

      const parsedAlertsResponse = JSON.parse(alertsFunctionResponse.message.message.content!);

      expect(parsedAlertsResponse).not.to.have.property('error');
      expect(parsedAlertsResponse).to.have.property('total');
      expect(parsedAlertsResponse).to.have.property('alerts');
      expect(parsedAlertsResponse.alerts).to.be.an('array');
      expect(parsedAlertsResponse.total).to.be(0);
      expect(parsedAlertsResponse.alerts.length).to.be(0);
    });
  });
}
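
For quick reference when reading the assertions above: the test parses message.message.content as JSON and checks only for a total count, an alerts array, and the absence of an error property. Below is a minimal TypeScript sketch of that implied shape; the interface name, the optional error field's structure, and the parse helper are illustrative assumptions, not the plugin's actual types.

// Sketch of the response shape implied by the assertions in the new test.
// Field names beyond `total` and `alerts` (and the error shape) are assumptions.
interface AlertsFunctionResponse {
  total: number; // expected to be 0 in this FTR setup, since no alerts are indexed
  alerts: unknown[]; // expected to be an empty array
  error?: { message: string }; // asserted to be absent on success; exact shape assumed
}

// Hypothetical helper mirroring the test's parsing step.
function parseAlertsContent(content: string): AlertsFunctionResponse {
  return JSON.parse(content) as AlertsFunctionResponse;
}

// Example: a payload matching the test's expectations.
const example = parseAlertsContent(JSON.stringify({ total: 0, alerts: [] }));
console.log(example.total, example.alerts.length); // prints: 0 0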
