diff --git a/.backportrc.json b/.backportrc.json index 87bc3a1be583..8f458343c51a 100644 --- a/.backportrc.json +++ b/.backportrc.json @@ -25,6 +25,7 @@ ], "targetPRLabels": ["backport"], "branchLabelMapping": { + "^v8.0.0$": "master", "^v7.9.0$": "7.x", "^v(\\d+).(\\d+).\\d+$": "$1.$2" } diff --git a/docs/development/core/public/kibana-plugin-core-public.app.exactroute.md b/docs/development/core/public/kibana-plugin-core-public.app.exactroute.md new file mode 100644 index 000000000000..d1e0be17a92b --- /dev/null +++ b/docs/development/core/public/kibana-plugin-core-public.app.exactroute.md @@ -0,0 +1,30 @@ + + +[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [App](./kibana-plugin-core-public.app.md) > [exactRoute](./kibana-plugin-core-public.app.exactroute.md) + +## App.exactRoute property + +If set to true, the application's route will only be checked against an exact match. Defaults to `false`. + +Signature: + +```typescript +exactRoute?: boolean; +``` + +## Example + + +```ts +core.application.register({ + id: 'my_app', + title: 'My App' + exactRoute: true, + mount: () => { ... }, +}) + +// '[basePath]/app/my_app' will be matched +// '[basePath]/app/my_app/some/path' will not be matched + +``` + diff --git a/docs/development/core/public/kibana-plugin-core-public.app.md b/docs/development/core/public/kibana-plugin-core-public.app.md index 90737d241f54..8dd60972549f 100644 --- a/docs/development/core/public/kibana-plugin-core-public.app.md +++ b/docs/development/core/public/kibana-plugin-core-public.app.md @@ -18,5 +18,6 @@ export interface App extends AppBase | --- | --- | --- | | [appRoute](./kibana-plugin-core-public.app.approute.md) | string | Override the application's routing path from /app/${id}. Must be unique across registered applications. Should not include the base path from HTTP. | | [chromeless](./kibana-plugin-core-public.app.chromeless.md) | boolean | Hide the UI chrome when the application is mounted. Defaults to false. Takes precedence over chrome service visibility settings. | +| [exactRoute](./kibana-plugin-core-public.app.exactroute.md) | boolean | If set to true, the application's route will only be checked against an exact match. Defaults to false. | | [mount](./kibana-plugin-core-public.app.mount.md) | AppMount<HistoryLocationState> | AppMountDeprecated<HistoryLocationState> | A mount function called when the user navigates to this app's route. May have signature of [AppMount](./kibana-plugin-core-public.appmount.md) or [AppMountDeprecated](./kibana-plugin-core-public.appmountdeprecated.md). | diff --git a/docs/images/data-viz-homepage.jpg b/docs/images/data-viz-homepage.jpg new file mode 100644 index 000000000000..f7a952b65d41 Binary files /dev/null and b/docs/images/data-viz-homepage.jpg differ diff --git a/docs/setup/connect-to-elasticsearch.asciidoc b/docs/setup/connect-to-elasticsearch.asciidoc index 8c04167de123..0575b8532508 100644 --- a/docs/setup/connect-to-elasticsearch.asciidoc +++ b/docs/setup/connect-to-elasticsearch.asciidoc @@ -18,10 +18,16 @@ to see all that you can do in {kib}. [[upload-data-kibana]] === Upload a CSV, JSON, or log file -To visualize data in a CSV, JSON, or log file, you can -upload it using the File Data Visualizer. On the home page, -click *Import a CSV, NDSON, or log file*, and then drag your file into the -File Data Visualizer. +experimental[] + +To visualize data in a CSV, JSON, or log file, you can upload it using the File +Data Visualizer. 
On the home page, click *Import a CSV, NDSON, or log file*, and +then drag your file into the File Data Visualizer. Alternatively, you can open +it by navigating to the Machine Learning app page from the sidebar menu and +selecting the Data Visualizer from the top navigation bar on the opening page. + +[role="screenshot"] +image::images/data-viz-homepage.jpg[File Data Visualizer on the home page] You can upload a file up to 100 MB. This value is configurable up to 1 GB in <>. diff --git a/package.json b/package.json index 3eaa1fb05e90..10eaef8ed5dc 100644 --- a/package.json +++ b/package.json @@ -406,7 +406,7 @@ "babel-eslint": "^10.0.3", "babel-jest": "^25.5.1", "babel-plugin-istanbul": "^6.0.0", - "backport": "5.4.1", + "backport": "5.4.6", "chai": "3.5.0", "chance": "1.0.18", "cheerio": "0.22.0", diff --git a/src/core/public/application/application_service.tsx b/src/core/public/application/application_service.tsx index 95361d8287c7..d7f15decb255 100644 --- a/src/core/public/application/application_service.tsx +++ b/src/core/public/application/application_service.tsx @@ -201,6 +201,7 @@ export class ApplicationService { this.mounters.set(app.id, { appRoute: app.appRoute!, appBasePath: basePath.prepend(app.appRoute!), + exactRoute: app.exactRoute ?? false, mount: wrapMount(plugin, app), unmountBeforeMounting: false, legacy: false, @@ -236,6 +237,7 @@ export class ApplicationService { this.mounters.set(app.id, { appRoute, appBasePath, + exactRoute: false, mount, unmountBeforeMounting: true, legacy: true, diff --git a/src/core/public/application/integration_tests/router.test.tsx b/src/core/public/application/integration_tests/router.test.tsx index 2827b93f6d17..f992e121437a 100644 --- a/src/core/public/application/integration_tests/router.test.tsx +++ b/src/core/public/application/integration_tests/router.test.tsx @@ -30,7 +30,6 @@ import { ScopedHistory } from '../scoped_history'; describe('AppRouter', () => { let mounters: MockedMounterMap; let globalHistory: History; - let appStatuses$: BehaviorSubject>; let update: ReturnType; let scopedAppHistory: History; @@ -53,6 +52,17 @@ describe('AppRouter', () => { ); }; + const createMountersRenderer = () => + createRenderer( + + ); + beforeEach(() => { mounters = new Map([ createAppMounter({ appId: 'app1', html: 'App 1' }), @@ -90,16 +100,7 @@ describe('AppRouter', () => { }), ] as Array>); globalHistory = createMemoryHistory(); - appStatuses$ = mountersToAppStatus$(); - update = createRenderer( - - ); + update = createMountersRenderer(); }); it('calls mount handler and returned unmount function when navigating between apps', async () => { @@ -220,15 +221,7 @@ describe('AppRouter', () => { }) ); globalHistory = createMemoryHistory(); - update = createRenderer( - - ); + update = createMountersRenderer(); await navigate('/fake-login'); @@ -252,15 +245,7 @@ describe('AppRouter', () => { }) ); globalHistory = createMemoryHistory(); - update = createRenderer( - - ); + update = createMountersRenderer(); await navigate('/spaces/fake-login'); @@ -268,6 +253,53 @@ describe('AppRouter', () => { expect(mounters.get('login')!.mounter.mount).not.toHaveBeenCalled(); }); + it('should mount an exact route app only when the path is an exact match', async () => { + mounters.set( + ...createAppMounter({ + appId: 'exactApp', + html: '
<div>exact app</div>
', + exactRoute: true, + appRoute: '/app/exact-app', + }) + ); + + globalHistory = createMemoryHistory(); + update = createMountersRenderer(); + + await navigate('/app/exact-app/some-path'); + + expect(mounters.get('exactApp')!.mounter.mount).not.toHaveBeenCalled(); + + await navigate('/app/exact-app'); + + expect(mounters.get('exactApp')!.mounter.mount).toHaveBeenCalledTimes(1); + }); + + it('should mount an app with a route nested in an exact route app', async () => { + mounters.set( + ...createAppMounter({ + appId: 'exactApp', + html: '
<div>exact app</div>
', + exactRoute: true, + appRoute: '/app/exact-app', + }) + ); + mounters.set( + ...createAppMounter({ + appId: 'nestedApp', + html: '
<div>nested app</div>
', + appRoute: '/app/exact-app/another-app', + }) + ); + globalHistory = createMemoryHistory(); + update = createMountersRenderer(); + + await navigate('/app/exact-app/another-app'); + + expect(mounters.get('exactApp')!.mounter.mount).not.toHaveBeenCalled(); + expect(mounters.get('nestedApp')!.mounter.mount).toHaveBeenCalledTimes(1); + }); + it('should not remount when changing pages within app', async () => { const { mounter, unmount } = mounters.get('app1')!; await navigate('/app/app1/page1'); @@ -304,15 +336,7 @@ describe('AppRouter', () => { it('should not remount when when changing pages within app using hash history', async () => { globalHistory = createHashHistory(); - update = createRenderer( - - ); + update = createMountersRenderer(); const { mounter, unmount } = mounters.get('app1')!; await navigate('/app/app1/page1'); diff --git a/src/core/public/application/integration_tests/utils.tsx b/src/core/public/application/integration_tests/utils.tsx index 8590fb3c820e..80a7fc2c2cad 100644 --- a/src/core/public/application/integration_tests/utils.tsx +++ b/src/core/public/application/integration_tests/utils.tsx @@ -47,11 +47,13 @@ export const createAppMounter = ({ appId, html = `
<div>App ${appId}</div>
`, appRoute = `/app/${appId}`, + exactRoute = false, extraMountHook, }: { appId: string; html?: string; appRoute?: string; + exactRoute?: boolean; extraMountHook?: (params: AppMountParameters) => void; }): MockedMounterTuple => { const unmount = jest.fn(); @@ -62,6 +64,7 @@ export const createAppMounter = ({ appRoute, appBasePath: appRoute, legacy: false, + exactRoute, mount: jest.fn(async (params: AppMountParameters) => { const { appBasePath: basename, element } = params; Object.assign(element, { @@ -90,6 +93,7 @@ export const createLegacyAppMounter = ( appBasePath: `/app/${appId.split(':')[0]}`, unmountBeforeMounting: true, legacy: true, + exactRoute: false, mount: legacyMount, }, unmount: jest.fn(), diff --git a/src/core/public/application/types.ts b/src/core/public/application/types.ts index 44b095bd9e6d..6926b6acf241 100644 --- a/src/core/public/application/types.ts +++ b/src/core/public/application/types.ts @@ -234,6 +234,24 @@ export interface App extends AppBase { * base path from HTTP. */ appRoute?: string; + + /** + * If set to true, the application's route will only be checked against an exact match. Defaults to `false`. + * + * @example + * ```ts + * core.application.register({ + * id: 'my_app', + * title: 'My App' + * exactRoute: true, + * mount: () => { ... }, + * }) + * + * // '[basePath]/app/my_app' will be matched + * // '[basePath]/app/my_app/some/path' will not be matched + * ``` + */ + exactRoute?: boolean; } /** @public */ @@ -569,6 +587,7 @@ export type Mounter = SelectivePartial< appBasePath: string; mount: T extends LegacyApp ? LegacyAppMounter : AppMounter; legacy: boolean; + exactRoute: boolean; unmountBeforeMounting: T extends LegacyApp ? true : boolean; }, T extends LegacyApp ? never : 'unmountBeforeMounting' diff --git a/src/core/public/application/ui/app_container.test.tsx b/src/core/public/application/ui/app_container.test.tsx index 229354a01410..a94313dd53ab 100644 --- a/src/core/public/application/ui/app_container.test.tsx +++ b/src/core/public/application/ui/app_container.test.tsx @@ -55,6 +55,7 @@ describe('AppContainer', () => { appRoute: '/some-route', unmountBeforeMounting: false, legacy: false, + exactRoute: false, mount: async ({ element }: AppMountParameters) => { await promise; const container = document.createElement('div'); @@ -143,6 +144,7 @@ describe('AppContainer', () => { appRoute: '/some-route', unmountBeforeMounting: false, legacy: false, + exactRoute: false, mount: async ({ element }: AppMountParameters) => { await waitPromise; throw new Error(`Mounting failed!`); diff --git a/src/core/public/application/ui/app_router.tsx b/src/core/public/application/ui/app_router.tsx index 5d02f96134b2..f2d2d1e6587a 100644 --- a/src/core/public/application/ui/app_router.tsx +++ b/src/core/public/application/ui/app_router.tsx @@ -63,6 +63,7 @@ export const AppRouter: FunctionComponent = ({ ( { setHelpSupportUrl: jest.fn(), getIsNavDrawerLocked$: jest.fn(), getNavType$: jest.fn(), + getCustomNavLink$: jest.fn(), + setCustomNavLink: jest.fn(), }; startContract.navLinks.getAll.mockReturnValue([]); startContract.getBrand$.mockReturnValue(new BehaviorSubject({} as ChromeBrand)); @@ -81,6 +83,7 @@ const createStartContractMock = () => { startContract.getApplicationClasses$.mockReturnValue(new BehaviorSubject(['class-name'])); startContract.getBadge$.mockReturnValue(new BehaviorSubject({} as ChromeBadge)); startContract.getBreadcrumbs$.mockReturnValue(new BehaviorSubject([{} as ChromeBreadcrumb])); + startContract.getCustomNavLink$.mockReturnValue(new 
BehaviorSubject(undefined)); startContract.getHelpExtension$.mockReturnValue(new BehaviorSubject(undefined)); startContract.getIsNavDrawerLocked$.mockReturnValue(new BehaviorSubject(false)); startContract.getNavType$.mockReturnValue(new BehaviorSubject('modern' as NavType)); diff --git a/src/core/public/chrome/chrome_service.test.ts b/src/core/public/chrome/chrome_service.test.ts index e39733cc10de..8dc81dceaccd 100644 --- a/src/core/public/chrome/chrome_service.test.ts +++ b/src/core/public/chrome/chrome_service.test.ts @@ -363,6 +363,27 @@ describe('start', () => { }); }); + describe('custom nav link', () => { + it('updates/emits the current custom nav link', async () => { + const { chrome, service } = await start(); + const promise = chrome.getCustomNavLink$().pipe(toArray()).toPromise(); + + chrome.setCustomNavLink({ title: 'Manage cloud deployment' }); + chrome.setCustomNavLink(undefined); + service.stop(); + + await expect(promise).resolves.toMatchInlineSnapshot(` + Array [ + undefined, + Object { + "title": "Manage cloud deployment", + }, + undefined, + ] + `); + }); + }); + describe('help extension', () => { it('updates/emits the current help extension', async () => { const { chrome, service } = await start(); diff --git a/src/core/public/chrome/chrome_service.tsx b/src/core/public/chrome/chrome_service.tsx index 67cd43f0647e..0fe3c1f083cf 100644 --- a/src/core/public/chrome/chrome_service.tsx +++ b/src/core/public/chrome/chrome_service.tsx @@ -34,7 +34,7 @@ import { IUiSettingsClient } from '../ui_settings'; import { KIBANA_ASK_ELASTIC_LINK } from './constants'; import { ChromeDocTitle, DocTitleService } from './doc_title'; import { ChromeNavControls, NavControlsService } from './nav_controls'; -import { ChromeNavLinks, NavLinksService } from './nav_links'; +import { ChromeNavLinks, NavLinksService, ChromeNavLink } from './nav_links'; import { ChromeRecentlyAccessed, RecentlyAccessedService } from './recently_accessed'; import { Header } from './ui'; import { NavType } from './ui/header'; @@ -148,6 +148,7 @@ export class ChromeService { const helpExtension$ = new BehaviorSubject(undefined); const breadcrumbs$ = new BehaviorSubject([]); const badge$ = new BehaviorSubject(undefined); + const customNavLink$ = new BehaviorSubject(undefined); const helpSupportUrl$ = new BehaviorSubject(KIBANA_ASK_ELASTIC_LINK); const isNavDrawerLocked$ = new BehaviorSubject(localStorage.getItem(IS_LOCKED_KEY) === 'true'); @@ -221,6 +222,7 @@ export class ChromeService { badge$={badge$.pipe(takeUntil(this.stop$))} basePath={http.basePath} breadcrumbs$={breadcrumbs$.pipe(takeUntil(this.stop$))} + customNavLink$={customNavLink$.pipe(takeUntil(this.stop$))} kibanaDocLink={docLinks.links.kibana} forceAppSwitcherNavigation$={navLinks.getForceAppSwitcherNavigation$()} helpExtension$={helpExtension$.pipe(takeUntil(this.stop$))} @@ -297,6 +299,12 @@ export class ChromeService { getIsNavDrawerLocked$: () => getIsNavDrawerLocked$, getNavType$: () => getNavType$, + + getCustomNavLink$: () => customNavLink$.pipe(takeUntil(this.stop$)), + + setCustomNavLink: (customNavLink?: ChromeNavLink) => { + customNavLink$.next(customNavLink); + }, }; } @@ -423,6 +431,16 @@ export interface ChromeStart { */ setBreadcrumbs(newBreadcrumbs: ChromeBreadcrumb[]): void; + /** + * Get an observable of the current custom nav link + */ + getCustomNavLink$(): Observable | undefined>; + + /** + * Override the current set of custom nav link + */ + setCustomNavLink(newCustomNavLink?: Partial): void; + /** * Get an observable of the current 
custom help conttent */ diff --git a/src/core/public/chrome/ui/header/__snapshots__/collapsible_nav.test.tsx.snap b/src/core/public/chrome/ui/header/__snapshots__/collapsible_nav.test.tsx.snap index 9239811df206..9fee7b50f371 100644 --- a/src/core/public/chrome/ui/header/__snapshots__/collapsible_nav.test.tsx.snap +++ b/src/core/public/chrome/ui/header/__snapshots__/collapsible_nav.test.tsx.snap @@ -61,6 +61,64 @@ exports[`CollapsibleNav renders links grouped by category 1`] = ` } } closeNav={[Function]} + customNavLink$={ + BehaviorSubject { + "_isScalar": false, + "_value": Object { + "baseUrl": "/", + "category": undefined, + "data-test-subj": "Custom link", + "href": "Custom link", + "id": "Custom link", + "isActive": true, + "legacy": false, + "title": "Custom link", + }, + "closed": false, + "hasError": false, + "isStopped": false, + "observers": Array [ + Subscriber { + "_parentOrParents": null, + "_subscriptions": Array [ + SubjectSubscription { + "_parentOrParents": [Circular], + "_subscriptions": null, + "closed": false, + "subject": [Circular], + "subscriber": [Circular], + }, + ], + "closed": false, + "destination": SafeSubscriber { + "_complete": undefined, + "_context": [Circular], + "_error": undefined, + "_next": [Function], + "_parentOrParents": null, + "_parentSubscriber": [Circular], + "_subscriptions": null, + "closed": false, + "destination": Object { + "closed": true, + "complete": [Function], + "error": [Function], + "next": [Function], + }, + "isStopped": false, + "syncErrorThrowable": false, + "syncErrorThrown": false, + "syncErrorValue": null, + }, + "isStopped": false, + "syncErrorThrowable": true, + "syncErrorThrown": false, + "syncErrorValue": null, + }, + ], + "thrownError": null, + } + } homeHref="/" id="collapsibe-nav" isLocked={false} @@ -408,6 +466,46 @@ exports[`CollapsibleNav renders links grouped by category 1`] = ` data-test-subj="collapsibleNav" id="collapsibe-nav" > +
+      [collapsible nav snapshot: added custom nav link ("Custom link") markup and surrounding nav group markup omitted]
{}, closeNav: () => {}, navigateToApp: () => Promise.resolve(), + customNavLink$: new BehaviorSubject(undefined), }; } @@ -120,12 +121,14 @@ describe('CollapsibleNav', () => { mockRecentNavLink({ label: 'recent 1' }), mockRecentNavLink({ label: 'recent 2' }), ]; + const customNavLink = mockLink({ title: 'Custom link' }); const component = mount( ); expect(component).toMatchSnapshot(); diff --git a/src/core/public/chrome/ui/header/collapsible_nav.tsx b/src/core/public/chrome/ui/header/collapsible_nav.tsx index 9494e22920de..07541b1adff1 100644 --- a/src/core/public/chrome/ui/header/collapsible_nav.tsx +++ b/src/core/public/chrome/ui/header/collapsible_nav.tsx @@ -30,7 +30,7 @@ import { } from '@elastic/eui'; import { i18n } from '@kbn/i18n'; import { groupBy, sortBy } from 'lodash'; -import React, { useRef } from 'react'; +import React, { Fragment, useRef } from 'react'; import { useObservable } from 'react-use'; import * as Rx from 'rxjs'; import { ChromeNavLink, ChromeRecentlyAccessedHistoryItem } from '../..'; @@ -88,6 +88,7 @@ interface Props { onIsLockedUpdate: OnIsLockedUpdate; closeNav: () => void; navigateToApp: InternalApplicationStart['navigateToApp']; + customNavLink$: Rx.Observable; } export function CollapsibleNav({ @@ -105,6 +106,7 @@ export function CollapsibleNav({ }: Props) { const navLinks = useObservable(observables.navLinks$, []).filter((link) => !link.hidden); const recentlyAccessed = useObservable(observables.recentlyAccessed$, []); + const customNavLink = useObservable(observables.customNavLink$, undefined); const appId = useObservable(observables.appId$, ''); const lockRef = useRef(null); const groupedNavLinks = groupBy(navLinks, (link) => link?.category?.id); @@ -134,6 +136,38 @@ export function CollapsibleNav({ isDocked={isLocked} onClose={closeNav} > + {customNavLink && ( + + + + + + + + + + )} + {/* Pinned items */} { const navLinks$ = new BehaviorSubject([ { id: 'kibana', title: 'kibana', baseUrl: '', legacy: false }, ]); + const customNavLink$ = new BehaviorSubject({ + id: 'cloud-deployment-link', + title: 'Manage cloud deployment', + baseUrl: '', + legacy: false, + }); const recentlyAccessed$ = new BehaviorSubject([ { link: '', label: 'dashboard', id: 'dashboard' }, ]); @@ -87,6 +94,7 @@ describe('Header', () => { recentlyAccessed$={recentlyAccessed$} isLocked$={isLocked$} navType$={navType$} + customNavLink$={customNavLink$} /> ); expect(component).toMatchSnapshot(); diff --git a/src/core/public/chrome/ui/header/header.tsx b/src/core/public/chrome/ui/header/header.tsx index d24b342e0386..3da3caaaa4a4 100644 --- a/src/core/public/chrome/ui/header/header.tsx +++ b/src/core/public/chrome/ui/header/header.tsx @@ -58,6 +58,7 @@ export interface HeaderProps { appTitle$: Observable; badge$: Observable; breadcrumbs$: Observable; + customNavLink$: Observable; homeHref: string; isVisible$: Observable; kibanaDocLink: string; @@ -203,6 +204,7 @@ export function Header({ toggleCollapsibleNavRef.current.focus(); } }} + customNavLink$={observables.customNavLink$} /> ) : ( // TODO #64541 diff --git a/src/core/public/chrome/ui/header/nav_link.tsx b/src/core/public/chrome/ui/header/nav_link.tsx index 969b6728e026..6b5cecd13837 100644 --- a/src/core/public/chrome/ui/header/nav_link.tsx +++ b/src/core/public/chrome/ui/header/nav_link.tsx @@ -35,11 +35,12 @@ function LinkIcon({ url }: { url: string }) { interface Props { link: ChromeNavLink; legacyMode: boolean; - appId: string | undefined; + appId?: string; basePath?: HttpStart['basePath']; dataTestSubj: string; onClick?: 
Function; navigateToApp: CoreStart['application']['navigateToApp']; + externalLink?: boolean; } // TODO #64541 @@ -54,6 +55,7 @@ export function createEuiListItem({ onClick = () => {}, navigateToApp, dataTestSubj, + externalLink = false, }: Props) { const { legacy, active, id, title, disabled, euiIconType, icon, tooltip } = link; let { href } = link; @@ -69,6 +71,7 @@ export function createEuiListItem({ onClick(event: React.MouseEvent) { onClick(); if ( + !externalLink && // ignore external links !legacyMode && // ignore when in legacy mode !legacy && // ignore links to legacy apps !event.defaultPrevented && // onClick prevented default diff --git a/src/core/public/public.api.md b/src/core/public/public.api.md index 9a79576b14d1..d10e351f4d13 100644 --- a/src/core/public/public.api.md +++ b/src/core/public/public.api.md @@ -154,6 +154,7 @@ export function __kbnBootstrap__(): void; export interface App extends AppBase { appRoute?: string; chromeless?: boolean; + exactRoute?: boolean; mount: AppMount | AppMountDeprecated; } @@ -466,6 +467,7 @@ export interface ChromeStart { getBadge$(): Observable; getBrand$(): Observable; getBreadcrumbs$(): Observable; + getCustomNavLink$(): Observable | undefined>; getHelpExtension$(): Observable; getIsNavDrawerLocked$(): Observable; getIsVisible$(): Observable; @@ -478,6 +480,7 @@ export interface ChromeStart { setBadge(badge?: ChromeBadge): void; setBrand(brand: ChromeBrand): void; setBreadcrumbs(newBreadcrumbs: ChromeBreadcrumb[]): void; + setCustomNavLink(newCustomNavLink?: Partial): void; setHelpExtension(helpExtension?: ChromeHelpExtension): void; setHelpSupportUrl(url: string): void; setIsVisible(isVisible: boolean): void; diff --git a/src/dev/typescript/projects.ts b/src/dev/typescript/projects.ts index 1e0b631308d9..065321e35525 100644 --- a/src/dev/typescript/projects.ts +++ b/src/dev/typescript/projects.ts @@ -34,6 +34,10 @@ export const PROJECTS = [ name: 'apm/cypress', disableTypeCheck: true, }), + new Project(resolve(REPO_ROOT, 'x-pack/plugins/apm/scripts/tsconfig.json'), { + name: 'apm/scripts', + disableTypeCheck: true, + }), // NOTE: using glob.sync rather than glob-all or globby // because it takes less than 10 ms, while the other modules diff --git a/src/plugins/share/server/saved_objects/url.ts b/src/plugins/share/server/saved_objects/url.ts index c76c21993a13..3ea64ad4719f 100644 --- a/src/plugins/share/server/saved_objects/url.ts +++ b/src/plugins/share/server/saved_objects/url.ts @@ -46,6 +46,7 @@ export const url: SavedObjectsType = { fields: { keyword: { type: 'keyword', + ignore_above: 2048, }, }, }, diff --git a/src/plugins/usage_collection/public/plugin.ts b/src/plugins/usage_collection/public/plugin.ts index cf2f6af1507c..40f27f826992 100644 --- a/src/plugins/usage_collection/public/plugin.ts +++ b/src/plugins/usage_collection/public/plugin.ts @@ -52,12 +52,17 @@ export interface UsageCollectionSetup { }; } +export interface UsageCollectionStart { + reportUiStats: Reporter['reportUiStats']; + METRIC_TYPE: typeof METRIC_TYPE; +} + export function isUnauthenticated(http: HttpSetup) { const { anonymousPaths } = http; return anonymousPaths.isAnonymous(window.location.pathname); } -export class UsageCollectionPlugin implements Plugin { +export class UsageCollectionPlugin implements Plugin { private readonly legacyAppId$ = new Subject(); private trackUserAgent: boolean = true; private reporter?: Reporter; @@ -90,7 +95,7 @@ export class UsageCollectionPlugin implements Plugin { public start({ http, application }: CoreStart) { if 
(!this.reporter) { - return; + throw new Error('Usage collection reporter not set up correctly'); } if (this.config.uiMetric.enabled && !isUnauthenticated(http)) { @@ -100,7 +105,13 @@ export class UsageCollectionPlugin implements Plugin { if (this.trackUserAgent) { this.reporter.reportUserAgent('kibana'); } + reportApplicationUsage(merge(application.currentAppId$, this.legacyAppId$), this.reporter); + + return { + reportUiStats: this.reporter.reportUiStats, + METRIC_TYPE, + }; } public stop() {} diff --git a/src/plugins/vis_type_timeseries/common/types.ts b/src/plugins/vis_type_timeseries/common/types.ts new file mode 100644 index 000000000000..452006924452 --- /dev/null +++ b/src/plugins/vis_type_timeseries/common/types.ts @@ -0,0 +1,25 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { TypeOf } from '@kbn/config-schema'; +import { metricsItems, panel, seriesItems } from './vis_schema'; + +export type SeriesItemsSchema = TypeOf; +export type MetricsItemsSchema = TypeOf; +export type PanelSchema = TypeOf; diff --git a/src/plugins/vis_type_timeseries/common/ui_restrictions.js b/src/plugins/vis_type_timeseries/common/ui_restrictions.ts similarity index 73% rename from src/plugins/vis_type_timeseries/common/ui_restrictions.js rename to src/plugins/vis_type_timeseries/common/ui_restrictions.ts index 96726d51e4a7..4508735f39ff 100644 --- a/src/plugins/vis_type_timeseries/common/ui_restrictions.js +++ b/src/plugins/vis_type_timeseries/common/ui_restrictions.ts @@ -22,21 +22,30 @@ * @constant * @public */ -export const RESTRICTIONS_KEYS = { +export enum RESTRICTIONS_KEYS { /** * Key for getting the white listed group by fields from the UIRestrictions object. */ - WHITE_LISTED_GROUP_BY_FIELDS: 'whiteListedGroupByFields', + WHITE_LISTED_GROUP_BY_FIELDS = 'whiteListedGroupByFields', /** * Key for getting the white listed metrics from the UIRestrictions object. */ - WHITE_LISTED_METRICS: 'whiteListedMetrics', + WHITE_LISTED_METRICS = 'whiteListedMetrics', /** * Key for getting the white listed Time Range modes from the UIRestrictions object. 
*/ - WHITE_LISTED_TIMERANGE_MODES: 'whiteListedTimerangeModes', + WHITE_LISTED_TIMERANGE_MODES = 'whiteListedTimerangeModes', +} + +export interface UIRestrictions { + '*': boolean; + [restriction: string]: boolean; +} + +export type TimeseriesUIRestrictions = { + [key in RESTRICTIONS_KEYS]: Record; }; /** @@ -44,6 +53,6 @@ export const RESTRICTIONS_KEYS = { * @constant * @public */ -export const DEFAULT_UI_RESTRICTION = { +export const DEFAULT_UI_RESTRICTION: UIRestrictions = { '*': true, }; diff --git a/src/plugins/vis_type_timeseries/server/routes/post_vis_schema.ts b/src/plugins/vis_type_timeseries/common/vis_schema.ts similarity index 73% rename from src/plugins/vis_type_timeseries/server/routes/post_vis_schema.ts rename to src/plugins/vis_type_timeseries/common/vis_schema.ts index bf2ea8651c5a..7161c197b694 100644 --- a/src/plugins/vis_type_timeseries/server/routes/post_vis_schema.ts +++ b/src/plugins/vis_type_timeseries/common/vis_schema.ts @@ -76,7 +76,7 @@ const gaugeColorRulesItems = schema.object({ operator: stringOptionalNullable, value: schema.maybe(schema.nullable(schema.number())), }); -const metricsItems = schema.object({ +export const metricsItems = schema.object({ field: stringOptionalNullable, id: stringRequired, metric_agg: stringOptionalNullable, @@ -133,7 +133,7 @@ const splitFiltersItems = schema.object({ label: stringOptionalNullable, }); -const seriesItems = schema.object({ +export const seriesItems = schema.object({ aggregate_by: stringOptionalNullable, aggregate_function: stringOptionalNullable, axis_position: stringRequired, @@ -195,66 +195,66 @@ const seriesItems = schema.object({ var_name: stringOptionalNullable, }); +export const panel = schema.object({ + annotations: schema.maybe(schema.arrayOf(annotationsItems)), + axis_formatter: stringRequired, + axis_position: stringRequired, + axis_scale: stringRequired, + axis_min: stringOrNumberOptionalNullable, + axis_max: stringOrNumberOptionalNullable, + bar_color_rules: schema.maybe(arrayNullable), + background_color: stringOptionalNullable, + background_color_rules: schema.maybe(schema.arrayOf(backgroundColorRulesItems)), + default_index_pattern: stringOptionalNullable, + default_timefield: stringOptionalNullable, + drilldown_url: stringOptionalNullable, + drop_last_bucket: numberIntegerOptional, + filter: schema.nullable( + schema.oneOf([ + stringOptionalNullable, + schema.object({ + language: stringOptionalNullable, + query: stringOptionalNullable, + }), + ]) + ), + gauge_color_rules: schema.maybe(schema.arrayOf(gaugeColorRulesItems)), + gauge_width: schema.nullable(schema.oneOf([stringOptionalNullable, numberOptional])), + gauge_inner_color: stringOptionalNullable, + gauge_inner_width: stringOrNumberOptionalNullable, + gauge_style: stringOptionalNullable, + gauge_max: stringOrNumberOptionalNullable, + id: stringRequired, + ignore_global_filters: numberOptional, + ignore_global_filter: numberOptional, + index_pattern: stringRequired, + interval: stringRequired, + isModelInvalid: schema.maybe(schema.boolean()), + legend_position: stringOptionalNullable, + markdown: stringOptionalNullable, + markdown_scrollbars: numberIntegerOptional, + markdown_openLinksInNewTab: numberIntegerOptional, + markdown_vertical_align: stringOptionalNullable, + markdown_less: stringOptionalNullable, + markdown_css: stringOptionalNullable, + pivot_id: stringOptionalNullable, + pivot_label: stringOptionalNullable, + pivot_type: stringOptionalNullable, + pivot_rows: stringOptionalNullable, + series: schema.arrayOf(seriesItems), + 
show_grid: numberIntegerRequired, + show_legend: numberIntegerRequired, + tooltip_mode: schema.maybe( + schema.oneOf([schema.literal('show_all'), schema.literal('show_focused')]) + ), + time_field: stringOptionalNullable, + time_range_mode: stringOptionalNullable, + type: stringRequired, +}); + export const visPayloadSchema = schema.object({ filters: arrayNullable, - panels: schema.arrayOf( - schema.object({ - annotations: schema.maybe(schema.arrayOf(annotationsItems)), - axis_formatter: stringRequired, - axis_position: stringRequired, - axis_scale: stringRequired, - axis_min: stringOrNumberOptionalNullable, - axis_max: stringOrNumberOptionalNullable, - bar_color_rules: schema.maybe(arrayNullable), - background_color: stringOptionalNullable, - background_color_rules: schema.maybe(schema.arrayOf(backgroundColorRulesItems)), - default_index_pattern: stringOptionalNullable, - default_timefield: stringOptionalNullable, - drilldown_url: stringOptionalNullable, - drop_last_bucket: numberIntegerOptional, - filter: schema.nullable( - schema.oneOf([ - stringOptionalNullable, - schema.object({ - language: stringOptionalNullable, - query: stringOptionalNullable, - }), - ]) - ), - gauge_color_rules: schema.maybe(schema.arrayOf(gaugeColorRulesItems)), - gauge_width: schema.nullable(schema.oneOf([stringOptionalNullable, numberOptional])), - gauge_inner_color: stringOptionalNullable, - gauge_inner_width: stringOrNumberOptionalNullable, - gauge_style: stringOptionalNullable, - gauge_max: stringOrNumberOptionalNullable, - id: stringRequired, - ignore_global_filters: numberOptional, - ignore_global_filter: numberOptional, - index_pattern: stringRequired, - interval: stringRequired, - isModelInvalid: schema.maybe(schema.boolean()), - legend_position: stringOptionalNullable, - markdown: stringOptionalNullable, - markdown_scrollbars: numberIntegerOptional, - markdown_openLinksInNewTab: numberIntegerOptional, - markdown_vertical_align: stringOptionalNullable, - markdown_less: stringOptionalNullable, - markdown_css: stringOptionalNullable, - pivot_id: stringOptionalNullable, - pivot_label: stringOptionalNullable, - pivot_type: stringOptionalNullable, - pivot_rows: stringOptionalNullable, - series: schema.arrayOf(seriesItems), - show_grid: numberIntegerRequired, - show_legend: numberIntegerRequired, - tooltip_mode: schema.maybe( - schema.oneOf([schema.literal('show_all'), schema.literal('show_focused')]) - ), - time_field: stringOptionalNullable, - time_range_mode: stringOptionalNullable, - type: stringRequired, - }) - ), + panels: schema.arrayOf(panel), // general query: schema.nullable(schema.arrayOf(queryObject)), state: schema.object({ diff --git a/src/plugins/vis_type_timeseries/public/application/components/add_delete_buttons.test.js b/src/plugins/vis_type_timeseries/public/application/components/add_delete_buttons.test.tsx similarity index 77% rename from src/plugins/vis_type_timeseries/public/application/components/add_delete_buttons.test.js rename to src/plugins/vis_type_timeseries/public/application/components/add_delete_buttons.test.tsx index 7afa71d6ba38..0fb3e80344e2 100644 --- a/src/plugins/vis_type_timeseries/public/application/components/add_delete_buttons.test.js +++ b/src/plugins/vis_type_timeseries/public/application/components/add_delete_buttons.test.tsx @@ -18,51 +18,49 @@ */ import React from 'react'; -import { expect } from 'chai'; import { shallowWithIntl } from 'test_utils/enzyme_helpers'; -import sinon from 'sinon'; import { AddDeleteButtons } from './add_delete_buttons'; 
describe('AddDeleteButtons', () => { it('calls onAdd={handleAdd}', () => { - const handleAdd = sinon.spy(); + const handleAdd = jest.fn(); const wrapper = shallowWithIntl(); wrapper.find('EuiButtonIcon').at(0).simulate('click'); - expect(handleAdd.calledOnce).to.equal(true); + expect(handleAdd).toHaveBeenCalled(); }); it('calls onDelete={handleDelete}', () => { - const handleDelete = sinon.spy(); + const handleDelete = jest.fn(); const wrapper = shallowWithIntl(); wrapper.find('EuiButtonIcon').at(1).simulate('click'); - expect(handleDelete.calledOnce).to.equal(true); + expect(handleDelete).toHaveBeenCalled(); }); it('calls onClone={handleClone}', () => { - const handleClone = sinon.spy(); + const handleClone = jest.fn(); const wrapper = shallowWithIntl(); wrapper.find('EuiButtonIcon').at(0).simulate('click'); - expect(handleClone.calledOnce).to.equal(true); + expect(handleClone).toHaveBeenCalled(); }); it('disableDelete={true}', () => { const wrapper = shallowWithIntl(); - expect(wrapper.find({ text: 'Delete' })).to.have.length(0); + expect(wrapper.find({ text: 'Delete' })).toHaveLength(0); }); it('disableAdd={true}', () => { const wrapper = shallowWithIntl(); - expect(wrapper.find({ text: 'Add' })).to.have.length(0); + expect(wrapper.find({ text: 'Add' })).toHaveLength(0); }); it('should not display clone by default', () => { const wrapper = shallowWithIntl(); - expect(wrapper.find({ text: 'Clone' })).to.have.length(0); + expect(wrapper.find({ text: 'Clone' })).toHaveLength(0); }); it('should not display clone when disableAdd={true}', () => { - const fn = sinon.spy(); + const fn = jest.fn(); const wrapper = shallowWithIntl(); - expect(wrapper.find({ text: 'Clone' })).to.have.length(0); + expect(wrapper.find({ text: 'Clone' })).toHaveLength(0); }); }); diff --git a/src/plugins/vis_type_timeseries/public/application/components/add_delete_buttons.js b/src/plugins/vis_type_timeseries/public/application/components/add_delete_buttons.tsx similarity index 87% rename from src/plugins/vis_type_timeseries/public/application/components/add_delete_buttons.js rename to src/plugins/vis_type_timeseries/public/application/components/add_delete_buttons.tsx index 798d16947c3d..7502de1cb1aa 100644 --- a/src/plugins/vis_type_timeseries/public/application/components/add_delete_buttons.js +++ b/src/plugins/vis_type_timeseries/public/application/components/add_delete_buttons.tsx @@ -17,13 +17,29 @@ * under the License. 
*/ -import PropTypes from 'prop-types'; -import React from 'react'; -import { EuiToolTip, EuiButtonIcon, EuiFlexGroup, EuiFlexItem } from '@elastic/eui'; +import React, { MouseEvent } from 'react'; +import { EuiButtonIcon, EuiFlexGroup, EuiFlexItem, EuiToolTip } from '@elastic/eui'; import { i18n } from '@kbn/i18n'; import { isBoolean } from 'lodash'; -export function AddDeleteButtons(props) { +interface AddDeleteButtonsProps { + addTooltip: string; + deleteTooltip: string; + cloneTooltip: string; + activatePanelTooltip: string; + deactivatePanelTooltip: string; + isPanelActive?: boolean; + disableAdd?: boolean; + disableDelete?: boolean; + responsive?: boolean; + testSubj: string; + togglePanelActivation?: () => void; + onClone?: () => void; + onAdd?: () => void; + onDelete?: (event: MouseEvent) => void; +} + +export function AddDeleteButtons(props: AddDeleteButtonsProps) { const { testSubj } = props; const createDelete = () => { if (props.disableDelete) { @@ -147,19 +163,3 @@ AddDeleteButtons.defaultProps = { } ), }; - -AddDeleteButtons.propTypes = { - addTooltip: PropTypes.string, - deleteTooltip: PropTypes.string, - cloneTooltip: PropTypes.string, - activatePanelTooltip: PropTypes.string, - deactivatePanelTooltip: PropTypes.string, - togglePanelActivation: PropTypes.func, - isPanelActive: PropTypes.bool, - disableAdd: PropTypes.bool, - disableDelete: PropTypes.bool, - onClone: PropTypes.func, - onAdd: PropTypes.func, - onDelete: PropTypes.func, - responsive: PropTypes.bool, -}; diff --git a/src/plugins/vis_type_timeseries/public/application/components/aggs/agg.js b/src/plugins/vis_type_timeseries/public/application/components/aggs/agg.tsx similarity index 70% rename from src/plugins/vis_type_timeseries/public/application/components/aggs/agg.js rename to src/plugins/vis_type_timeseries/public/application/components/aggs/agg.tsx index d547f64f13f6..e5236c3833b1 100644 --- a/src/plugins/vis_type_timeseries/public/application/components/aggs/agg.js +++ b/src/plugins/vis_type_timeseries/public/application/components/aggs/agg.tsx @@ -17,15 +17,33 @@ * under the License. */ -import PropTypes from 'prop-types'; -import React from 'react'; +import React, { HTMLAttributes } from 'react'; +// @ts-ignore import { aggToComponent } from '../lib/agg_to_component'; +// @ts-ignore +import { isMetricEnabled } from '../../lib/check_ui_restrictions'; import { UnsupportedAgg } from './unsupported_agg'; import { TemporaryUnsupportedAgg } from './temporary_unsupported_agg'; +import { MetricsItemsSchema, PanelSchema, SeriesItemsSchema } from '../../../../common/types'; +import { DragHandleProps } from '../../../types'; +import { TimeseriesUIRestrictions } from '../../../../common/ui_restrictions'; +import { IFieldType } from '../../../../../data/common/index_patterns/fields'; -import { isMetricEnabled } from '../../lib/check_ui_restrictions'; +interface AggProps extends HTMLAttributes { + disableDelete: boolean; + fields: IFieldType[]; + model: MetricsItemsSchema; + panel: PanelSchema; + series: SeriesItemsSchema; + siblings: MetricsItemsSchema[]; + uiRestrictions: TimeseriesUIRestrictions; + dragHandleProps: DragHandleProps; + onAdd: () => void; + onChange: () => void; + onDelete: () => void; +} -export function Agg(props) { +export function Agg(props: AggProps) { const { model, uiRestrictions } = props; let Component = aggToComponent[model.type]; @@ -59,17 +77,3 @@ export function Agg(props) {
); } - -Agg.propTypes = { - disableDelete: PropTypes.bool, - fields: PropTypes.object, - model: PropTypes.object, - onAdd: PropTypes.func, - onChange: PropTypes.func, - onDelete: PropTypes.func, - panel: PropTypes.object, - series: PropTypes.object, - siblings: PropTypes.array, - uiRestrictions: PropTypes.object, - dragHandleProps: PropTypes.object, -}; diff --git a/src/plugins/vis_type_timeseries/public/application/components/aggs/agg_row.js b/src/plugins/vis_type_timeseries/public/application/components/aggs/agg_row.tsx similarity index 86% rename from src/plugins/vis_type_timeseries/public/application/components/aggs/agg_row.js rename to src/plugins/vis_type_timeseries/public/application/components/aggs/agg_row.tsx index a2f1640904dd..0363ba486a77 100644 --- a/src/plugins/vis_type_timeseries/public/application/components/aggs/agg_row.js +++ b/src/plugins/vis_type_timeseries/public/application/components/aggs/agg_row.tsx @@ -17,15 +17,26 @@ * under the License. */ -import PropTypes from 'prop-types'; import React from 'react'; import { last } from 'lodash'; -import { AddDeleteButtons } from '../add_delete_buttons'; import { EuiIcon, EuiFlexGroup, EuiFlexItem } from '@elastic/eui'; -import { SeriesDragHandler } from '../series_drag_handler'; import { i18n } from '@kbn/i18n'; +import { AddDeleteButtons } from '../add_delete_buttons'; +import { SeriesDragHandler } from '../series_drag_handler'; +import { MetricsItemsSchema } from '../../../../common/types'; +import { DragHandleProps } from '../../../types'; -export function AggRow(props) { +interface AggRowProps { + disableDelete: boolean; + model: MetricsItemsSchema; + siblings: MetricsItemsSchema[]; + dragHandleProps: DragHandleProps; + children: React.ReactNode; + onAdd: () => void; + onDelete: () => void; +} + +export function AggRow(props: AggRowProps) { let iconType = 'eyeClosed'; let iconColor = 'subdued'; const lastSibling = last(props.siblings); @@ -71,12 +82,3 @@ export function AggRow(props) {
); } - -AggRow.propTypes = { - disableDelete: PropTypes.bool, - model: PropTypes.object, - onAdd: PropTypes.func, - onDelete: PropTypes.func, - siblings: PropTypes.array, - dragHandleProps: PropTypes.object, -}; diff --git a/src/plugins/vis_type_timeseries/public/application/components/aggs/agg_select.js b/src/plugins/vis_type_timeseries/public/application/components/aggs/agg_select.tsx similarity index 88% rename from src/plugins/vis_type_timeseries/public/application/components/aggs/agg_select.js rename to src/plugins/vis_type_timeseries/public/application/components/aggs/agg_select.tsx index 7ff6b6eb5669..6fa1a2adaa08 100644 --- a/src/plugins/vis_type_timeseries/public/application/components/aggs/agg_select.js +++ b/src/plugins/vis_type_timeseries/public/application/components/aggs/agg_select.tsx @@ -17,14 +17,17 @@ * under the License. */ -import PropTypes from 'prop-types'; import React from 'react'; -import { EuiComboBox } from '@elastic/eui'; +import { EuiComboBox, EuiComboBoxOptionOption } from '@elastic/eui'; import { i18n } from '@kbn/i18n'; -import { injectI18n } from '@kbn/i18n/react'; +// @ts-ignore import { isMetricEnabled } from '../../lib/check_ui_restrictions'; +import { MetricsItemsSchema } from '../../../../common/types'; +import { TimeseriesUIRestrictions } from '../../../../common/ui_restrictions'; -const metricAggs = [ +type AggSelectOption = EuiComboBoxOptionOption; + +const metricAggs: AggSelectOption[] = [ { label: i18n.translate('visTypeTimeseries.aggSelect.metricsAggs.averageLabel', { defaultMessage: 'Average', @@ -123,7 +126,7 @@ const metricAggs = [ }, ]; -const pipelineAggs = [ +const pipelineAggs: AggSelectOption[] = [ { label: i18n.translate('visTypeTimeseries.aggSelect.pipelineAggs.bucketScriptLabel', { defaultMessage: 'Bucket Script', @@ -162,7 +165,7 @@ const pipelineAggs = [ }, ]; -const siblingAggs = [ +const siblingAggs: AggSelectOption[] = [ { label: i18n.translate('visTypeTimeseries.aggSelect.siblingAggs.overallAverageLabel', { defaultMessage: 'Overall Average', @@ -207,7 +210,7 @@ const siblingAggs = [ }, ]; -const specialAggs = [ +const specialAggs: AggSelectOption[] = [ { label: i18n.translate('visTypeTimeseries.aggSelect.specialAggs.seriesAggLabel', { defaultMessage: 'Series Agg', @@ -224,14 +227,23 @@ const specialAggs = [ const allAggOptions = [...metricAggs, ...pipelineAggs, ...siblingAggs, ...specialAggs]; -function filterByPanelType(panelType) { - return (agg) => { +function filterByPanelType(panelType: string) { + return (agg: AggSelectOption) => { if (panelType === 'table') return agg.value !== 'series_agg'; return true; }; } -function AggSelectUi(props) { +interface AggSelectUiProps { + id: string; + panelType: string; + siblings: MetricsItemsSchema[]; + value: string; + uiRestrictions?: TimeseriesUIRestrictions; + onChange: (currentlySelectedOptions: AggSelectOption[]) => void; +} + +export function AggSelect(props: AggSelectUiProps) { const { siblings, panelType, value, onChange, uiRestrictions, ...rest } = props; const selectedOptions = allAggOptions.filter((option) => { @@ -242,11 +254,11 @@ function AggSelectUi(props) { if (siblings.length <= 1) enablePipelines = false; - let options; + let options: EuiComboBoxOptionOption[]; if (panelType === 'metrics') { options = metricAggs; } else { - const disableSiblingAggs = (agg) => ({ + const disableSiblingAggs = (agg: AggSelectOption) => ({ ...agg, disabled: !enablePipelines || !isMetricEnabled(agg.value, uiRestrictions), }); @@ -282,9 +294,9 @@ function AggSelectUi(props) { ]; } - const 
handleChange = (selectedOptions) => { - if (!selectedOptions || selectedOptions.length <= 0) return; - onChange(selectedOptions); + const handleChange = (currentlySelectedOptions: AggSelectOption[]) => { + if (!currentlySelectedOptions || currentlySelectedOptions.length <= 0) return; + onChange(currentlySelectedOptions); }; return ( @@ -303,13 +315,3 @@ function AggSelectUi(props) {
); } - -AggSelectUi.propTypes = { - onChange: PropTypes.func, - panelType: PropTypes.string, - siblings: PropTypes.array, - value: PropTypes.string, - uiRestrictions: PropTypes.object, -}; - -export const AggSelect = injectI18n(AggSelectUi); diff --git a/src/plugins/vis_type_timeseries/public/application/components/aggs/aggs.js b/src/plugins/vis_type_timeseries/public/application/components/aggs/aggs.tsx similarity index 83% rename from src/plugins/vis_type_timeseries/public/application/components/aggs/aggs.js rename to src/plugins/vis_type_timeseries/public/application/components/aggs/aggs.tsx index 772b62b14f81..af3e42a59612 100644 --- a/src/plugins/vis_type_timeseries/public/application/components/aggs/aggs.js +++ b/src/plugins/vis_type_timeseries/public/application/components/aggs/aggs.tsx @@ -18,18 +18,29 @@ */ import React, { PureComponent } from 'react'; -import PropTypes from 'prop-types'; import { EuiDraggable, EuiDroppable } from '@elastic/eui'; import { Agg } from './agg'; -import { newMetricAggFn } from '../lib/new_metric_agg_fn'; +// @ts-ignore import { seriesChangeHandler } from '../lib/series_change_handler'; +// @ts-ignore import { handleAdd, handleDelete } from '../lib/collection_actions'; +import { newMetricAggFn } from '../lib/new_metric_agg_fn'; +import { PanelSchema, SeriesItemsSchema } from '../../../../common/types'; +import { TimeseriesUIRestrictions } from '../../../../common/ui_restrictions'; +import { IFieldType } from '../../../../../data/common/index_patterns/fields'; const DROPPABLE_ID = 'aggs_dnd'; -export class Aggs extends PureComponent { +export interface AggsProps { + panel: PanelSchema; + model: SeriesItemsSchema; + fields: IFieldType[]; + uiRestrictions: TimeseriesUIRestrictions; +} + +export class Aggs extends PureComponent { render() { const { panel, model, fields, uiRestrictions } = this.props; const list = model.metrics; @@ -68,12 +79,3 @@ export class Aggs extends PureComponent { ); } } - -Aggs.propTypes = { - name: PropTypes.string.isRequired, - fields: PropTypes.object.isRequired, - model: PropTypes.object.isRequired, - onChange: PropTypes.func.isRequired, - panel: PropTypes.object.isRequired, - dragHandleProps: PropTypes.object, -}; diff --git a/src/plugins/vis_type_timeseries/public/application/components/aggs/percentile_rank/multi_value_row.js b/src/plugins/vis_type_timeseries/public/application/components/aggs/percentile_rank/multi_value_row.tsx similarity index 79% rename from src/plugins/vis_type_timeseries/public/application/components/aggs/percentile_rank/multi_value_row.js rename to src/plugins/vis_type_timeseries/public/application/components/aggs/percentile_rank/multi_value_row.tsx index fd64559cc1ec..ef8876a19b1a 100644 --- a/src/plugins/vis_type_timeseries/public/application/components/aggs/percentile_rank/multi_value_row.js +++ b/src/plugins/vis_type_timeseries/public/application/components/aggs/percentile_rank/multi_value_row.tsx @@ -16,8 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ -import PropTypes from 'prop-types'; -import React from 'react'; +import React, { ChangeEvent } from 'react'; import { get } from 'lodash'; import { FormattedMessage } from '@kbn/i18n/react'; import { @@ -31,10 +30,29 @@ import { import { AddDeleteButtons } from '../../add_delete_buttons'; -export const MultiValueRow = ({ model, onChange, onDelete, onAdd, disableAdd, disableDelete }) => { +interface MultiValueRowProps { + model: { + id: number; + value: string; + }; + disableAdd: boolean; + disableDelete: boolean; + onChange: ({ value, id }: { id: number; value: string }) => void; + onDelete: (model: { id: number; value: string }) => void; + onAdd: () => void; +} + +export const MultiValueRow = ({ + model, + onChange, + onDelete, + onAdd, + disableAdd, + disableDelete, +}: MultiValueRowProps) => { const htmlId = htmlIdGenerator(); - const onFieldNumberChange = (event) => + const onFieldNumberChange = (event: ChangeEvent) => onChange({ ...model, value: get(event, 'target.value'), @@ -54,7 +72,7 @@ export const MultiValueRow = ({ model, onChange, onDelete, onAdd, disableAdd, di @@ -78,12 +96,3 @@ MultiValueRow.defaultProps = { value: '', }, }; - -MultiValueRow.propTypes = { - model: PropTypes.object, - onChange: PropTypes.func, - onDelete: PropTypes.func, - onAdd: PropTypes.func, - defaultAddValue: PropTypes.string, - disableDelete: PropTypes.bool, -}; diff --git a/src/plugins/vis_type_timeseries/public/application/components/aggs/percentile_rank/percentile_rank.js b/src/plugins/vis_type_timeseries/public/application/components/aggs/percentile_rank/percentile_rank.tsx similarity index 75% rename from src/plugins/vis_type_timeseries/public/application/components/aggs/percentile_rank/percentile_rank.js rename to src/plugins/vis_type_timeseries/public/application/components/aggs/percentile_rank/percentile_rank.tsx index c8af4089ed78..a16f5aeefc49 100644 --- a/src/plugins/vis_type_timeseries/public/application/components/aggs/percentile_rank/percentile_rank.js +++ b/src/plugins/vis_type_timeseries/public/application/components/aggs/percentile_rank/percentile_rank.tsx @@ -17,16 +17,7 @@ * under the License. 
*/ -import PropTypes from 'prop-types'; import React from 'react'; -import { assign } from 'lodash'; -import { AggSelect } from '../agg_select'; -import { FieldSelect } from '../field_select'; -import { AggRow } from '../agg_row'; -import { createChangeHandler } from '../../lib/create_change_handler'; -import { createSelectHandler } from '../../lib/create_select_handler'; -import { PercentileRankValues } from './percentile_rank_values'; - import { htmlIdGenerator, EuiFlexGroup, @@ -36,11 +27,36 @@ import { EuiSpacer, } from '@elastic/eui'; import { FormattedMessage } from '@kbn/i18n/react'; -import { KBN_FIELD_TYPES } from '../../../../../../../plugins/data/public'; +import { AggSelect } from '../agg_select'; +// @ts-ignore +import { FieldSelect } from '../field_select'; +// @ts-ignore +import { createChangeHandler } from '../../lib/create_change_handler'; +// @ts-ignore +import { createSelectHandler } from '../../lib/create_select_handler'; +import { AggRow } from '../agg_row'; +import { PercentileRankValues } from './percentile_rank_values'; + +import { IFieldType, KBN_FIELD_TYPES } from '../../../../../../../plugins/data/public'; +import { MetricsItemsSchema, PanelSchema, SeriesItemsSchema } from '../../../../../common/types'; +import { DragHandleProps } from '../../../../types'; const RESTRICT_FIELDS = [KBN_FIELD_TYPES.NUMBER]; -export const PercentileRankAgg = (props) => { +interface PercentileRankAggProps { + disableDelete: boolean; + fields: IFieldType[]; + model: MetricsItemsSchema; + panel: PanelSchema; + series: SeriesItemsSchema; + siblings: MetricsItemsSchema[]; + dragHandleProps: DragHandleProps; + onAdd(): void; + onChange(): void; + onDelete(): void; +} + +export const PercentileRankAgg = (props: PercentileRankAggProps) => { const { series, panel, fields } = props; const defaults = { values: [''] }; const model = { ...defaults, ...props.model }; @@ -52,12 +68,11 @@ export const PercentileRankAgg = (props) => { const handleChange = createChangeHandler(props.onChange, model); const handleSelectChange = createSelectHandler(handleChange); - const handlePercentileRankValuesChange = (values) => { - handleChange( - assign({}, model, { - values, - }) - ); + const handlePercentileRankValuesChange = (values: MetricsItemsSchema['values']) => { + handleChange({ + ...model, + values, + }); }; return ( @@ -108,25 +123,15 @@ export const PercentileRankAgg = (props) => { - + {model.values && ( + + )} ); }; - -PercentileRankAgg.propTypes = { - disableDelete: PropTypes.bool, - fields: PropTypes.object, - model: PropTypes.object, - onAdd: PropTypes.func, - onChange: PropTypes.func, - onDelete: PropTypes.func, - panel: PropTypes.object, - series: PropTypes.object, - siblings: PropTypes.array, -}; diff --git a/src/plugins/vis_type_timeseries/public/application/components/aggs/percentile_rank/percentile_rank_values.js b/src/plugins/vis_type_timeseries/public/application/components/aggs/percentile_rank/percentile_rank_values.tsx similarity index 67% rename from src/plugins/vis_type_timeseries/public/application/components/aggs/percentile_rank/percentile_rank_values.js rename to src/plugins/vis_type_timeseries/public/application/components/aggs/percentile_rank/percentile_rank_values.tsx index 6d52eb9e3515..b66d79d67f42 100644 --- a/src/plugins/vis_type_timeseries/public/application/components/aggs/percentile_rank/percentile_rank_values.js +++ b/src/plugins/vis_type_timeseries/public/application/components/aggs/percentile_rank/percentile_rank_values.tsx @@ -16,34 +16,49 @@ * specific language 
governing permissions and limitations * under the License. */ -import PropTypes from 'prop-types'; import React from 'react'; import { last } from 'lodash'; import { EuiFlexGroup } from '@elastic/eui'; import { MultiValueRow } from './multi_value_row'; -export const PercentileRankValues = (props) => { +interface PercentileRankValuesProps { + model: Array; + disableDelete: boolean; + disableAdd: boolean; + showOnlyLastRow: boolean; + onChange: (values: any[]) => void; +} + +export const PercentileRankValues = (props: PercentileRankValuesProps) => { const model = props.model || []; const { onChange, disableAdd, disableDelete, showOnlyLastRow } = props; - const onChangeValue = ({ value, id }) => { + const onChangeValue = ({ value, id }: { value: string; id: number }) => { model[id] = value; onChange(model); }; - const onDeleteValue = ({ id }) => + const onDeleteValue = ({ id }: { id: number }) => onChange(model.filter((item, currentIndex) => id !== currentIndex)); const onAddValue = () => onChange([...model, '']); - const renderRow = ({ rowModel, disableDelete, disableAdd }) => ( + const renderRow = ({ + rowModel, + disableDeleteRow, + disableAddRow, + }: { + rowModel: { id: number; value: string }; + disableDeleteRow: boolean; + disableAddRow: boolean; + }) => ( ); @@ -54,10 +69,10 @@ export const PercentileRankValues = (props) => { renderRow({ rowModel: { id: model.length - 1, - value: last(model), + value: last(model) || '', }, - disableAdd: true, - disableDelete: true, + disableAddRow: true, + disableDeleteRow: true, })} {!showOnlyLastRow && @@ -65,20 +80,12 @@ export const PercentileRankValues = (props) => { renderRow({ rowModel: { id, - value, + value: value || '', }, - disableAdd, - disableDelete: disableDelete || array.length < 2, + disableAddRow: disableAdd, + disableDeleteRow: disableDelete || array.length < 2, }) )} ); }; - -PercentileRankValues.propTypes = { - model: PropTypes.array, - onChange: PropTypes.func, - disableDelete: PropTypes.bool, - disableAdd: PropTypes.bool, - showOnlyLastRow: PropTypes.bool, -}; diff --git a/src/plugins/vis_type_timeseries/public/application/components/aggs/temporary_unsupported_agg.js b/src/plugins/vis_type_timeseries/public/application/components/aggs/temporary_unsupported_agg.tsx similarity index 79% rename from src/plugins/vis_type_timeseries/public/application/components/aggs/temporary_unsupported_agg.js rename to src/plugins/vis_type_timeseries/public/application/components/aggs/temporary_unsupported_agg.tsx index bae0491d978a..d10c7ea7a7e3 100644 --- a/src/plugins/vis_type_timeseries/public/application/components/aggs/temporary_unsupported_agg.js +++ b/src/plugins/vis_type_timeseries/public/application/components/aggs/temporary_unsupported_agg.tsx @@ -17,12 +17,23 @@ * under the License. 
*/ -import { AggRow } from './agg_row'; import React from 'react'; import { EuiCode, EuiTitle } from '@elastic/eui'; import { FormattedMessage } from '@kbn/i18n/react'; +import { AggRow } from './agg_row'; +import { MetricsItemsSchema } from '../../../../common/types'; +import { DragHandleProps } from '../../../types'; + +interface TemporaryUnsupportedAggProps { + disableDelete: boolean; + model: MetricsItemsSchema; + siblings: MetricsItemsSchema[]; + dragHandleProps: DragHandleProps; + onAdd: () => void; + onDelete: () => void; +} -export function TemporaryUnsupportedAgg(props) { +export function TemporaryUnsupportedAgg(props: TemporaryUnsupportedAggProps) { return ( void; + onDelete: () => void; +} -export function UnsupportedAgg(props) { +export function UnsupportedAgg(props: UnsupportedAggProps) { return ( { +export const newMetricAggFn = (): MetricsItemsSchema => { return { id: uuid.v1(), type: 'count', diff --git a/src/plugins/vis_type_timeseries/public/application/components/series_drag_handler.js b/src/plugins/vis_type_timeseries/public/application/components/series_drag_handler.tsx similarity index 85% rename from src/plugins/vis_type_timeseries/public/application/components/series_drag_handler.js rename to src/plugins/vis_type_timeseries/public/application/components/series_drag_handler.tsx index f978348a5e45..73293a0d330f 100644 --- a/src/plugins/vis_type_timeseries/public/application/components/series_drag_handler.js +++ b/src/plugins/vis_type_timeseries/public/application/components/series_drag_handler.tsx @@ -18,11 +18,20 @@ */ import React, { PureComponent } from 'react'; -import PropTypes from 'prop-types'; import { EuiFlexItem, EuiToolTip, EuiIcon } from '@elastic/eui'; import { i18n } from '@kbn/i18n'; +import { DragHandleProps } from '../../types'; + +interface SeriesDragHandlerProps { + hideDragHandler: boolean; + dragHandleProps: DragHandleProps; +} + +export class SeriesDragHandler extends PureComponent { + static defaultProps = { + hideDragHandler: true, + }; -export class SeriesDragHandler extends PureComponent { render() { const { dragHandleProps, hideDragHandler } = this.props; @@ -49,12 +58,3 @@ export class SeriesDragHandler extends PureComponent { ); } } - -SeriesDragHandler.defaultProps = { - hideDragHandler: true, -}; - -SeriesDragHandler.propTypes = { - hideDragHandler: PropTypes.bool, - dragHandleProps: PropTypes.object.isRequired, -}; diff --git a/src/plugins/vis_type_timeseries/public/types.ts b/src/plugins/vis_type_timeseries/public/types.ts new file mode 100644 index 000000000000..338118dcdc5a --- /dev/null +++ b/src/plugins/vis_type_timeseries/public/types.ts @@ -0,0 +1,29 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import React from 'react'; +import { EuiDraggable } from '@elastic/eui'; + +type PropsOf = T extends React.ComponentType ? ComponentProps : never; +type FirstArgumentOf = Func extends (arg1: infer FirstArgument, ...rest: any[]) => any + ? FirstArgument + : never; +export type DragHandleProps = FirstArgumentOf< + Exclude['children'], React.ReactElement> +>['dragHandleProps']; diff --git a/src/plugins/vis_type_timeseries/server/routes/vis.ts b/src/plugins/vis_type_timeseries/server/routes/vis.ts index 744020b58388..48efd4398e4d 100644 --- a/src/plugins/vis_type_timeseries/server/routes/vis.ts +++ b/src/plugins/vis_type_timeseries/server/routes/vis.ts @@ -20,7 +20,7 @@ import { IRouter, KibanaRequest } from 'kibana/server'; import { schema } from '@kbn/config-schema'; import { getVisData, GetVisDataOptions } from '../lib/get_vis_data'; -import { visPayloadSchema } from './post_vis_schema'; +import { visPayloadSchema } from '../../common/vis_schema'; import { Framework, ValidationTelemetryServiceSetup } from '../index'; const escapeHatch = schema.object({}, { unknowns: 'allow' }); diff --git a/test/functional/apps/dashboard/dashboard_state.js b/test/functional/apps/dashboard/dashboard_state.js index 5bba2447cde2..3656c824394f 100644 --- a/test/functional/apps/dashboard/dashboard_state.js +++ b/test/functional/apps/dashboard/dashboard_state.js @@ -251,8 +251,7 @@ export default function ({ getService, getPageObjects }) { }); }); - // Unskip once https://github.com/elastic/kibana/issues/15736 is fixed. - it.skip('and updates the pie slice legend color', async function () { + it('and updates the pie slice legend color', async function () { await retry.try(async () => { const colorExists = await PageObjects.visChart.doesSelectedLegendColorExist('#FFFFFF'); expect(colorExists).to.be(true); @@ -272,8 +271,7 @@ export default function ({ getService, getPageObjects }) { }); }); - // Unskip once https://github.com/elastic/kibana/issues/15736 is fixed. 
- it.skip('resets the legend color as well', async function () { + it('resets the legend color as well', async function () { await retry.try(async () => { const colorExists = await PageObjects.visChart.doesSelectedLegendColorExist('#57c17b'); expect(colorExists).to.be(true); diff --git a/test/functional/apps/dashboard/empty_dashboard.js b/test/functional/apps/dashboard/empty_dashboard.js index e7ebbcf09e82..7f13aca43884 100644 --- a/test/functional/apps/dashboard/empty_dashboard.js +++ b/test/functional/apps/dashboard/empty_dashboard.js @@ -49,10 +49,11 @@ export default function ({ getService, getPageObjects }) { expect(emptyWidgetExists).to.be(true); }); - it.skip('should open add panel when add button is clicked', async () => { + it('should open add panel when add button is clicked', async () => { await testSubjects.click('dashboardAddPanelButton'); const isAddPanelOpen = await dashboardAddPanel.isAddPanelOpen(); expect(isAddPanelOpen).to.be(true); + await testSubjects.click('euiFlyoutCloseButton'); }); it('should add new visualization from dashboard', async () => { diff --git a/test/functional/apps/discover/_errors.js b/test/functional/apps/discover/_errors.js index 5113fc8568d5..f3936d06bb6d 100644 --- a/test/functional/apps/discover/_errors.js +++ b/test/functional/apps/discover/_errors.js @@ -35,7 +35,7 @@ export default function ({ getService, getPageObjects }) { await esArchiver.unload('invalid_scripted_field'); }); - // https://github.com/elastic/kibana/issues/61366 + // ES issue https://github.com/elastic/elasticsearch/issues/54235 describe.skip('invalid scripted field error', () => { it('is rendered', async () => { const isFetchErrorVisible = await testSubjects.exists('discoverFetchError'); diff --git a/test/functional/apps/visualize/_data_table_nontimeindex.js b/test/functional/apps/visualize/_data_table_nontimeindex.js index 4ae66d14ec30..d64629a65c2c 100644 --- a/test/functional/apps/visualize/_data_table_nontimeindex.js +++ b/test/functional/apps/visualize/_data_table_nontimeindex.js @@ -27,7 +27,7 @@ export default function ({ getService, getPageObjects }) { const renderable = getService('renderable'); const PageObjects = getPageObjects(['visualize', 'visEditor', 'header', 'visChart']); - describe.skip('data table with index without time filter', function indexPatternCreation() { + describe('data table with index without time filter', function indexPatternCreation() { const vizName1 = 'Visualization DataTable without time filter'; before(async function () { @@ -112,65 +112,49 @@ export default function ({ getService, getPageObjects }) { expect(data.trim().split('\n')).to.be.eql(['14,004 1,412.6']); }); - it('should show correct data for a data table with date histogram', async () => { - await PageObjects.visualize.navigateToNewVisualization(); - await PageObjects.visualize.clickDataTable(); - await PageObjects.visualize.clickNewSearch( - PageObjects.visualize.index.LOGSTASH_NON_TIME_BASED - ); - await PageObjects.visEditor.clickBucket('Split rows'); - await PageObjects.visEditor.selectAggregation('Date Histogram'); - await PageObjects.visEditor.selectField('@timestamp'); - await PageObjects.visEditor.setInterval('Daily'); - await PageObjects.visEditor.clickGo(); - const data = await PageObjects.visChart.getTableVisData(); - log.debug(data.split('\n')); - expect(data.trim().split('\n')).to.be.eql([ - '2015-09-20', - '4,757', - '2015-09-21', - '4,614', - '2015-09-22', - '4,633', - ]); - }); + // bug https://github.com/elastic/kibana/issues/68977 + 
describe.skip('data table with date histogram', async () => { + before(async () => { + await PageObjects.visualize.navigateToNewVisualization(); + await PageObjects.visualize.clickDataTable(); + await PageObjects.visualize.clickNewSearch( + PageObjects.visualize.index.LOGSTASH_NON_TIME_BASED + ); + await PageObjects.visEditor.clickBucket('Split rows'); + await PageObjects.visEditor.selectAggregation('Date Histogram'); + await PageObjects.visEditor.selectField('@timestamp'); + await PageObjects.visEditor.setInterval('Daily'); + await PageObjects.visEditor.clickGo(); + }); - it('should show correct data for a data table with date histogram', async () => { - await PageObjects.visualize.navigateToNewVisualization(); - await PageObjects.visualize.clickDataTable(); - await PageObjects.visualize.clickNewSearch( - PageObjects.visualize.index.LOGSTASH_NON_TIME_BASED - ); - await PageObjects.visEditor.clickBucket('Split rows'); - await PageObjects.visEditor.selectAggregation('Date Histogram'); - await PageObjects.visEditor.selectField('@timestamp'); - await PageObjects.visEditor.setInterval('Daily'); - await PageObjects.visEditor.clickGo(); - const data = await PageObjects.visChart.getTableVisData(); - expect(data.trim().split('\n')).to.be.eql([ - '2015-09-20', - '4,757', - '2015-09-21', - '4,614', - '2015-09-22', - '4,633', - ]); - }); + it('should show correct data', async () => { + const data = await PageObjects.visChart.getTableVisData(); + log.debug(data.split('\n')); + expect(data.trim().split('\n')).to.be.eql([ + '2015-09-20', + '4,757', + '2015-09-21', + '4,614', + '2015-09-22', + '4,633', + ]); + }); - it('should correctly filter for applied time filter on the main timefield', async () => { - await filterBar.addFilter('@timestamp', 'is between', '2015-09-19', '2015-09-21'); - await PageObjects.header.waitUntilLoadingHasFinished(); - await renderable.waitForRender(); - const data = await PageObjects.visChart.getTableVisData(); - expect(data.trim().split('\n')).to.be.eql(['2015-09-20', '4,757']); - }); + it('should correctly filter for applied time filter on the main timefield', async () => { + await filterBar.addFilter('@timestamp', 'is between', '2015-09-19', '2015-09-21'); + await PageObjects.header.waitUntilLoadingHasFinished(); + await renderable.waitForRender(); + const data = await PageObjects.visChart.getTableVisData(); + expect(data.trim().split('\n')).to.be.eql(['2015-09-20', '4,757']); + }); - it('should correctly filter for pinned filters', async () => { - await filterBar.toggleFilterPinned('@timestamp'); - await PageObjects.header.waitUntilLoadingHasFinished(); - await renderable.waitForRender(); - const data = await PageObjects.visChart.getTableVisData(); - expect(data.trim().split('\n')).to.be.eql(['2015-09-20', '4,757']); + it('should correctly filter for pinned filters', async () => { + await filterBar.toggleFilterPinned('@timestamp'); + await PageObjects.header.waitUntilLoadingHasFinished(); + await renderable.waitForRender(); + const data = await PageObjects.visChart.getTableVisData(); + expect(data.trim().split('\n')).to.be.eql(['2015-09-20', '4,757']); + }); }); }); } diff --git a/test/functional/apps/visualize/_tsvb_chart.ts b/test/functional/apps/visualize/_tsvb_chart.ts index f1c5c916a89b..7e22f543bc7d 100644 --- a/test/functional/apps/visualize/_tsvb_chart.ts +++ b/test/functional/apps/visualize/_tsvb_chart.ts @@ -28,8 +28,7 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) { const security = getService('security'); const PageObjects = 
getPageObjects(['visualize', 'visualBuilder', 'timePicker', 'visChart']); - // FLAKY: https://github.com/elastic/kibana/issues/43150 - describe.skip('visual builder', function describeIndexTests() { + describe('visual builder', function describeIndexTests() { this.tags('includeFirefox'); beforeEach(async () => { await security.testUser.setRoles(['kibana_admin', 'test_logstash_reader']); @@ -74,7 +73,6 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) { }); }); - // FLAKY: https://github.com/elastic/kibana/issues/46677 describe('gauge', () => { beforeEach(async () => { await PageObjects.visualBuilder.resetPage(); @@ -107,7 +105,8 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) { }); }); - describe('switch index patterns', () => { + // FLAKY: https://github.com/elastic/kibana/issues/43150 + describe.skip('switch index patterns', () => { beforeEach(async () => { log.debug('Load kibana_sample_data_flights data'); await esArchiver.loadIfNeeded('kibana_sample_data_flights'); diff --git a/test/scripts/jenkins_security_solution_cypress.sh b/test/scripts/jenkins_security_solution_cypress.sh index 23b83cf946d4..8aa3425be0be 100644 --- a/test/scripts/jenkins_security_solution_cypress.sh +++ b/test/scripts/jenkins_security_solution_cypress.sh @@ -11,11 +11,16 @@ export KIBANA_INSTALL_DIR="$destDir" echo " -> Running security solution cypress tests" cd "$XPACK_DIR" -checks-reporter-with-killswitch "Security solution Cypress Tests" \ - node scripts/functional_tests \ - --debug --bail \ - --kibana-install-dir "$KIBANA_INSTALL_DIR" \ - --config test/security_solution_cypress/config.ts +# Failures across multiple suites, skipping all +# https://github.com/elastic/kibana/issues/69847 +# https://github.com/elastic/kibana/issues/69848 +# https://github.com/elastic/kibana/issues/69849 + +# checks-reporter-with-killswitch "Security solution Cypress Tests" \ +# node scripts/functional_tests \ +# --debug --bail \ +# --kibana-install-dir "$KIBANA_INSTALL_DIR" \ +# --config test/security_solution_cypress/config.ts echo "" echo "" diff --git a/x-pack/.i18nrc.json b/x-pack/.i18nrc.json index 36cfdf904d6d..596ba17d343c 100644 --- a/x-pack/.i18nrc.json +++ b/x-pack/.i18nrc.json @@ -8,6 +8,7 @@ "xpack.apm": ["legacy/plugins/apm", "plugins/apm"], "xpack.beatsManagement": ["legacy/plugins/beats_management", "plugins/beats_management"], "xpack.canvas": "plugins/canvas", + "xpack.cloud": "plugins/cloud", "xpack.dashboard": "plugins/dashboard_enhanced", "xpack.discover": "plugins/discover_enhanced", "xpack.crossClusterReplication": "plugins/cross_cluster_replication", diff --git a/x-pack/plugins/apm/common/ml_job_constants.test.ts b/x-pack/plugins/apm/common/ml_job_constants.test.ts index 45bb7133e852..96e3ba826d20 100644 --- a/x-pack/plugins/apm/common/ml_job_constants.test.ts +++ b/x-pack/plugins/apm/common/ml_job_constants.test.ts @@ -4,45 +4,9 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { - getMlJobId, - getMlPrefix, - getMlJobServiceName, - getSeverity, - severity, -} from './ml_job_constants'; +import { getSeverity, severity } from './ml_job_constants'; describe('ml_job_constants', () => { - it('getMlPrefix', () => { - expect(getMlPrefix('myServiceName')).toBe('myservicename-'); - expect(getMlPrefix('myServiceName', 'myTransactionType')).toBe( - 'myservicename-mytransactiontype-' - ); - }); - - it('getMlJobId', () => { - expect(getMlJobId('myServiceName')).toBe( - 'myservicename-high_mean_response_time' - ); - expect(getMlJobId('myServiceName', 'myTransactionType')).toBe( - 'myservicename-mytransactiontype-high_mean_response_time' - ); - expect(getMlJobId('my service name')).toBe( - 'my_service_name-high_mean_response_time' - ); - expect(getMlJobId('my service name', 'my transaction type')).toBe( - 'my_service_name-my_transaction_type-high_mean_response_time' - ); - }); - - describe('getMlJobServiceName', () => { - it('extracts the service name from a job id', () => { - expect( - getMlJobServiceName('opbeans-node-request-high_mean_response_time') - ).toEqual('opbeans-node'); - }); - }); - describe('getSeverity', () => { describe('when score is undefined', () => { it('returns undefined', () => { diff --git a/x-pack/plugins/apm/common/ml_job_constants.ts b/x-pack/plugins/apm/common/ml_job_constants.ts index f9b0119d8a10..b8c2546bd0c8 100644 --- a/x-pack/plugins/apm/common/ml_job_constants.ts +++ b/x-pack/plugins/apm/common/ml_job_constants.ts @@ -11,25 +11,6 @@ export enum severity { warning = 'warning', } -export const APM_ML_JOB_GROUP_NAME = 'apm'; - -export function getMlPrefix(serviceName: string, transactionType?: string) { - const maybeTransactionType = transactionType ? `${transactionType}-` : ''; - return encodeForMlApi(`${serviceName}-${maybeTransactionType}`); -} - -export function getMlJobId(serviceName: string, transactionType?: string) { - return `${getMlPrefix(serviceName, transactionType)}high_mean_response_time`; -} - -export function getMlJobServiceName(jobId: string) { - return jobId.split('-').slice(0, -2).join('-'); -} - -export function encodeForMlApi(value: string) { - return value.replace(/\s+/g, '_').toLowerCase(); -} - export function getSeverity(score?: number) { if (typeof score !== 'number') { return undefined; diff --git a/x-pack/plugins/apm/common/service_map.ts b/x-pack/plugins/apm/common/service_map.ts index 7d7a7811eeba..43f3585d0ebb 100644 --- a/x-pack/plugins/apm/common/service_map.ts +++ b/x-pack/plugins/apm/common/service_map.ts @@ -34,16 +34,6 @@ export interface Connection { destination: ConnectionNode; } -export interface ServiceAnomaly { - anomaly_score: number; - anomaly_severity: string; - actual_value: number; - typical_value: number; - ml_job_id: string; -} - -export type ServiceNode = ConnectionNode & Partial; - export interface ServiceNodeMetrics { avgMemoryUsage: number | null; avgCpuUsage: number | null; diff --git a/x-pack/plugins/apm/public/components/app/ServiceDetails/ServiceIntegrations/MachineLearningFlyout/TransactionSelect.tsx b/x-pack/plugins/apm/public/components/app/ServiceDetails/ServiceIntegrations/MachineLearningFlyout/TransactionSelect.tsx deleted file mode 100644 index 42f7246b6ea3..000000000000 --- a/x-pack/plugins/apm/public/components/app/ServiceDetails/ServiceIntegrations/MachineLearningFlyout/TransactionSelect.tsx +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ - -import { - EuiFlexGroup, - EuiFlexItem, - EuiFormRow, - EuiSuperSelect, - EuiText, -} from '@elastic/eui'; -import { i18n } from '@kbn/i18n'; -import React from 'react'; - -interface TransactionSelectProps { - transactionTypes: string[]; - onChange: (value: string) => void; - selectedTransactionType: string; -} - -export function TransactionSelect({ - transactionTypes, - onChange, - selectedTransactionType, -}: TransactionSelectProps) { - return ( - - { - return { - value: transactionType, - inputDisplay: transactionType, - dropdownDisplay: ( - - - {transactionType} - - - ), - }; - })} - /> - - ); -} diff --git a/x-pack/plugins/apm/public/components/app/ServiceDetails/ServiceIntegrations/MachineLearningFlyout/index.tsx b/x-pack/plugins/apm/public/components/app/ServiceDetails/ServiceIntegrations/MachineLearningFlyout/index.tsx deleted file mode 100644 index 91778b2940c6..000000000000 --- a/x-pack/plugins/apm/public/components/app/ServiceDetails/ServiceIntegrations/MachineLearningFlyout/index.tsx +++ /dev/null @@ -1,167 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ - -import { i18n } from '@kbn/i18n'; -import React, { Component } from 'react'; -import { toMountPoint } from '../../../../../../../../../src/plugins/kibana_react/public'; -import { startMLJob, MLError } from '../../../../../services/rest/ml'; -import { IUrlParams } from '../../../../../context/UrlParamsContext/types'; -import { MLJobLink } from '../../../../shared/Links/MachineLearningLinks/MLJobLink'; -import { MachineLearningFlyoutView } from './view'; -import { ApmPluginContext } from '../../../../../context/ApmPluginContext'; - -interface Props { - isOpen: boolean; - onClose: () => void; - urlParams: IUrlParams; -} - -interface State { - isCreatingJob: boolean; -} - -export class MachineLearningFlyout extends Component { - static contextType = ApmPluginContext; - - public state: State = { - isCreatingJob: false, - }; - - public onClickCreate = async ({ - transactionType, - }: { - transactionType: string; - }) => { - this.setState({ isCreatingJob: true }); - try { - const { http } = this.context.core; - const { serviceName } = this.props.urlParams; - if (!serviceName) { - throw new Error('Service name is required to create this ML job'); - } - const res = await startMLJob({ http, serviceName, transactionType }); - const didSucceed = res.datafeeds[0].success && res.jobs[0].success; - if (!didSucceed) { - throw new Error('Creating ML job failed'); - } - this.addSuccessToast({ transactionType }); - } catch (e) { - this.addErrorToast(e as MLError); - } - - this.setState({ isCreatingJob: false }); - this.props.onClose(); - }; - - public addErrorToast = (error: MLError) => { - const { core } = this.context; - - const { urlParams } = this.props; - const { serviceName } = urlParams; - - if (!serviceName) { - return; - } - - const errorDescription = error?.body?.message; - const errorText = errorDescription - ? `${error.message}: ${errorDescription}` - : error.message; - - core.notifications.toasts.addWarning({ - title: i18n.translate( - 'xpack.apm.serviceDetails.enableAnomalyDetectionPanel.jobCreationFailedNotificationTitle', - { - defaultMessage: 'Job creation failed', - } - ), - text: toMountPoint( - <> -

{errorText}

-

- {i18n.translate( - 'xpack.apm.serviceDetails.enableAnomalyDetectionPanel.jobCreationFailedNotificationText', - { - defaultMessage: - 'Your current license may not allow for creating machine learning jobs, or this job may already exist.', - } - )} -

- - ), - }); - }; - - public addSuccessToast = ({ - transactionType, - }: { - transactionType: string; - }) => { - const { core } = this.context; - const { urlParams } = this.props; - const { serviceName } = urlParams; - - if (!serviceName) { - return; - } - - core.notifications.toasts.addSuccess({ - title: i18n.translate( - 'xpack.apm.serviceDetails.enableAnomalyDetectionPanel.jobCreatedNotificationTitle', - { - defaultMessage: 'Job successfully created', - } - ), - text: toMountPoint( -

- {i18n.translate( - 'xpack.apm.serviceDetails.enableAnomalyDetectionPanel.jobCreatedNotificationText', - { - defaultMessage: - 'The analysis is now running for {serviceName} ({transactionType}). It might take a while before results are added to the response times graph.', - values: { - serviceName, - transactionType, - }, - } - )}{' '} - - - {i18n.translate( - 'xpack.apm.serviceDetails.enableAnomalyDetectionPanel.jobCreatedNotificationText.viewJobLinkText', - { - defaultMessage: 'View job', - } - )} - - -

- ), - }); - }; - - public render() { - const { isOpen, onClose, urlParams } = this.props; - const { serviceName } = urlParams; - const { isCreatingJob } = this.state; - - if (!isOpen || !serviceName) { - return null; - } - - return ( - - ); - } -} diff --git a/x-pack/plugins/apm/public/components/app/ServiceDetails/ServiceIntegrations/MachineLearningFlyout/view.tsx b/x-pack/plugins/apm/public/components/app/ServiceDetails/ServiceIntegrations/MachineLearningFlyout/view.tsx deleted file mode 100644 index 72e8193ba2de..000000000000 --- a/x-pack/plugins/apm/public/components/app/ServiceDetails/ServiceIntegrations/MachineLearningFlyout/view.tsx +++ /dev/null @@ -1,264 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ - -import { - EuiButton, - EuiCallOut, - EuiFlexGroup, - EuiFlexItem, - EuiFlyout, - EuiFlyoutBody, - EuiFlyoutFooter, - EuiFlyoutHeader, - EuiFormRow, - EuiSpacer, - EuiText, - EuiTitle, -} from '@elastic/eui'; -import { i18n } from '@kbn/i18n'; -import { FormattedMessage } from '@kbn/i18n/react'; -import React, { useState, useEffect } from 'react'; -import { isEmpty } from 'lodash'; -import { FETCH_STATUS, useFetcher } from '../../../../../hooks/useFetcher'; -import { getHasMLJob } from '../../../../../services/rest/ml'; -import { MLJobLink } from '../../../../shared/Links/MachineLearningLinks/MLJobLink'; -import { MLLink } from '../../../../shared/Links/MachineLearningLinks/MLLink'; -import { TransactionSelect } from './TransactionSelect'; -import { IUrlParams } from '../../../../../context/UrlParamsContext/types'; -import { useServiceTransactionTypes } from '../../../../../hooks/useServiceTransactionTypes'; -import { useApmPluginContext } from '../../../../../hooks/useApmPluginContext'; - -interface Props { - isCreatingJob: boolean; - onClickCreate: ({ transactionType }: { transactionType: string }) => void; - onClose: () => void; - urlParams: IUrlParams; -} - -export function MachineLearningFlyoutView({ - isCreatingJob, - onClickCreate, - onClose, - urlParams, -}: Props) { - const { serviceName } = urlParams; - const transactionTypes = useServiceTransactionTypes(urlParams); - - const [selectedTransactionType, setSelectedTransactionType] = useState< - string | undefined - >(undefined); - - const { http } = useApmPluginContext().core; - - const { data: hasMLJob, status } = useFetcher( - () => { - if (serviceName && selectedTransactionType) { - return getHasMLJob({ - serviceName, - transactionType: selectedTransactionType, - http, - }); - } - }, - [serviceName, selectedTransactionType, http], - { showToastOnError: false } - ); - - // update selectedTransactionType when list of transaction types has loaded - useEffect(() => { - setSelectedTransactionType(transactionTypes[0]); - }, [transactionTypes]); - - if (!serviceName || !selectedTransactionType || isEmpty(transactionTypes)) { - return null; - } - - const isLoadingMLJob = status === FETCH_STATUS.LOADING; - const isMlAvailable = status !== FETCH_STATUS.FAILURE; - - return ( - - - -

- {i18n.translate( - 'xpack.apm.serviceDetails.enableAnomalyDetectionPanel.enableAnomalyDetectionTitle', - { - defaultMessage: 'Enable anomaly detection', - } - )} -

-
- -
- - {!isMlAvailable && ( -
- -

- {i18n.translate( - 'xpack.apm.serviceDetails.enableAnomalyDetectionPanel.callout.mlNotAvailableDescription', - { - defaultMessage: - 'Unable to connect to Machine learning. Make sure it is enabled in Kibana to use anomaly detection.', - } - )} -

-
- -
- )} - {hasMLJob && ( -
- -

- {i18n.translate( - 'xpack.apm.serviceDetails.enableAnomalyDetectionPanel.callout.jobExistsDescription', - { - defaultMessage: - 'There is currently a job running for {serviceName} ({transactionType}).', - values: { - serviceName, - transactionType: selectedTransactionType, - }, - } - )}{' '} - - {i18n.translate( - 'xpack.apm.serviceDetails.enableAnomalyDetectionPanel.callout.jobExistsDescription.viewJobLinkText', - { - defaultMessage: 'View existing job', - } - )} - -

-
- -
- )} - -

- - {i18n.translate( - 'xpack.apm.serviceDetails.enableAnomalyDetectionPanel.createMLJobDescription.transactionDurationGraphText', - { - defaultMessage: 'transaction duration', - } - )} - - ), - serviceMapAnnotationText: ( - - {i18n.translate( - 'xpack.apm.serviceDetails.enableAnomalyDetectionPanel.createMLJobDescription.serviceMapAnnotationText', - { - defaultMessage: 'service maps', - } - )} - - ), - }} - /> -

-

- - {i18n.translate( - 'xpack.apm.serviceDetails.enableAnomalyDetectionPanel.manageMLJobDescription.mlJobsPageLinkText', - { - defaultMessage: 'Machine Learning Job Management page', - } - )} - - ), - }} - />{' '} - - {i18n.translate( - 'xpack.apm.serviceDetails.enableAnomalyDetectionPanel.manageMLJobDescription.noteText', - { - defaultMessage: - 'Note: It might take a few minutes for the job to begin calculating results.', - } - )} - -

-
- - -
- - - - {transactionTypes.length > 1 ? ( - { - setSelectedTransactionType(value); - }} - /> - ) : null} - - - - - onClickCreate({ transactionType: selectedTransactionType }) - } - fill - disabled={isCreatingJob || hasMLJob || isLoadingMLJob} - > - {i18n.translate( - 'xpack.apm.serviceDetails.enableAnomalyDetectionPanel.createNewJobButtonLabel', - { - defaultMessage: 'Create job', - } - )} - - - - - -
- ); -} diff --git a/x-pack/plugins/apm/public/components/app/ServiceDetails/ServiceIntegrations/index.tsx b/x-pack/plugins/apm/public/components/app/ServiceDetails/ServiceIntegrations/index.tsx index 321617ed8496..0a7dcbd0be3d 100644 --- a/x-pack/plugins/apm/public/components/app/ServiceDetails/ServiceIntegrations/index.tsx +++ b/x-pack/plugins/apm/public/components/app/ServiceDetails/ServiceIntegrations/index.tsx @@ -4,18 +4,10 @@ * you may not use this file except in compliance with the Elastic License. */ -import { - EuiButtonEmpty, - EuiContextMenu, - EuiContextMenuPanelItemDescriptor, - EuiPopover, -} from '@elastic/eui'; +import { EuiButtonEmpty, EuiContextMenu, EuiPopover } from '@elastic/eui'; import { i18n } from '@kbn/i18n'; -import { memoize } from 'lodash'; -import React, { Fragment } from 'react'; +import React from 'react'; import { IUrlParams } from '../../../../context/UrlParamsContext/types'; -import { LicenseContext } from '../../../../context/LicenseContext'; -import { MachineLearningFlyout } from './MachineLearningFlyout'; import { WatcherFlyout } from './WatcherFlyout'; import { ApmPluginContext } from '../../../../context/ApmPluginContext'; @@ -26,7 +18,7 @@ interface State { isPopoverOpen: boolean; activeFlyout: FlyoutName; } -type FlyoutName = null | 'ML' | 'Watcher'; +type FlyoutName = null | 'Watcher'; export class ServiceIntegrations extends React.Component { static contextType = ApmPluginContext; @@ -34,38 +26,6 @@ export class ServiceIntegrations extends React.Component { public state: State = { isPopoverOpen: false, activeFlyout: null }; - public getPanelItems = memoize((mlAvailable: boolean | undefined) => { - let panelItems: EuiContextMenuPanelItemDescriptor[] = []; - if (mlAvailable) { - panelItems = panelItems.concat(this.getMLPanelItems()); - } - return panelItems.concat(this.getWatcherPanelItems()); - }); - - public getMLPanelItems = () => { - return [ - { - name: i18n.translate( - 'xpack.apm.serviceDetails.integrationsMenu.enableMLAnomalyDetectionButtonLabel', - { - defaultMessage: 'Enable ML anomaly detection', - } - ), - icon: 'machineLearningApp', - toolTipContent: i18n.translate( - 'xpack.apm.serviceDetails.integrationsMenu.enableMLAnomalyDetectionButtonTooltip', - { - defaultMessage: 'Set up a machine learning job for this service', - } - ), - onClick: () => { - this.closePopover(); - this.openFlyout('ML'); - }, - }, - ]; - }; - public getWatcherPanelItems = () => { const { core } = this.context; @@ -132,42 +92,31 @@ export class ServiceIntegrations extends React.Component { ); return ( - - {(license) => ( - - - - - - - - )} - + <> + + + + + ); } } diff --git a/x-pack/plugins/apm/public/components/app/ServiceMap/Popover/Contents.tsx b/x-pack/plugins/apm/public/components/app/ServiceMap/Popover/Contents.tsx index ff68288916af..78779bdcc205 100644 --- a/x-pack/plugins/apm/public/components/app/ServiceMap/Popover/Contents.tsx +++ b/x-pack/plugins/apm/public/components/app/ServiceMap/Popover/Contents.tsx @@ -15,8 +15,6 @@ import React, { MouseEvent } from 'react'; import { Buttons } from './Buttons'; import { Info } from './Info'; import { ServiceMetricFetcher } from './ServiceMetricFetcher'; -import { AnomalyDetection } from './anomaly_detection'; -import { ServiceNode } from '../../../../../common/service_map'; import { popoverMinWidth } from '../cytoscapeOptions'; interface ContentsProps { @@ -70,12 +68,13 @@ export function Contents({ - {isService && ( + {/* //TODO [APM ML] add service health stats here: + isService && ( - + - )} + )*/} {isService 
? ( diff --git a/x-pack/plugins/apm/public/components/app/ServiceMap/Popover/anomaly_detection.tsx b/x-pack/plugins/apm/public/components/app/ServiceMap/Popover/anomaly_detection.tsx deleted file mode 100644 index 531bbb139d58..000000000000 --- a/x-pack/plugins/apm/public/components/app/ServiceMap/Popover/anomaly_detection.tsx +++ /dev/null @@ -1,157 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ - -import { i18n } from '@kbn/i18n'; -import React from 'react'; -import styled from 'styled-components'; -import { - EuiFlexGroup, - EuiFlexItem, - EuiTitle, - EuiIconTip, - EuiHealth, -} from '@elastic/eui'; -import { useTheme } from '../../../../hooks/useTheme'; -import { fontSize, px } from '../../../../style/variables'; -import { asInteger } from '../../../../utils/formatters'; -import { MLJobLink } from '../../../shared/Links/MachineLearningLinks/MLJobLink'; -import { getSeverityColor, popoverMinWidth } from '../cytoscapeOptions'; -import { getMetricChangeDescription } from '../../../../../../ml/public'; -import { ServiceNode } from '../../../../../common/service_map'; - -const HealthStatusTitle = styled(EuiTitle)` - display: inline; - text-transform: uppercase; -`; - -const VerticallyCentered = styled.div` - display: flex; - align-items: center; -`; - -const SubduedText = styled.span` - color: ${({ theme }) => theme.eui.euiTextSubduedColor}; -`; - -const EnableText = styled.section` - color: ${({ theme }) => theme.eui.euiTextSubduedColor}; - line-height: 1.4; - font-size: ${fontSize}; - width: ${px(popoverMinWidth)}; -`; - -export const ContentLine = styled.section` - line-height: 2; -`; - -interface AnomalyDetectionProps { - serviceNodeData: cytoscape.NodeDataDefinition & ServiceNode; -} - -export function AnomalyDetection({ serviceNodeData }: AnomalyDetectionProps) { - const theme = useTheme(); - const anomalySeverity = serviceNodeData.anomaly_severity; - const anomalyScore = serviceNodeData.anomaly_score; - const actualValue = serviceNodeData.actual_value; - const typicalValue = serviceNodeData.typical_value; - const mlJobId = serviceNodeData.ml_job_id; - const hasAnomalyDetectionScore = - anomalySeverity !== undefined && anomalyScore !== undefined; - const anomalyDescription = - hasAnomalyDetectionScore && - actualValue !== undefined && - typicalValue !== undefined - ? getMetricChangeDescription(actualValue, typicalValue).message - : null; - - return ( - <> -
- -

{ANOMALY_DETECTION_TITLE}

-
-   - - {!mlJobId && {ANOMALY_DETECTION_DISABLED_TEXT}} -
- {hasAnomalyDetectionScore && ( - - - - - - {ANOMALY_DETECTION_SCORE_METRIC} - - - -
- {getDisplayedAnomalyScore(anomalyScore as number)} - {anomalyDescription && ( -  ({anomalyDescription}) - )} -
-
-
-
- )} - {mlJobId && !hasAnomalyDetectionScore && ( - {ANOMALY_DETECTION_NO_DATA_TEXT} - )} - {mlJobId && ( - - - {ANOMALY_DETECTION_LINK} - - - )} - - ); -} - -function getDisplayedAnomalyScore(score: number) { - if (score > 0 && score < 1) { - return '< 1'; - } - return asInteger(score); -} - -const ANOMALY_DETECTION_TITLE = i18n.translate( - 'xpack.apm.serviceMap.anomalyDetectionPopoverTitle', - { defaultMessage: 'Anomaly Detection' } -); - -const ANOMALY_DETECTION_TOOLTIP = i18n.translate( - 'xpack.apm.serviceMap.anomalyDetectionPopoverTooltip', - { - defaultMessage: - 'Service health indicators are powered by the anomaly detection feature in machine learning', - } -); - -const ANOMALY_DETECTION_SCORE_METRIC = i18n.translate( - 'xpack.apm.serviceMap.anomalyDetectionPopoverScoreMetric', - { defaultMessage: 'Score (max.)' } -); - -const ANOMALY_DETECTION_LINK = i18n.translate( - 'xpack.apm.serviceMap.anomalyDetectionPopoverLink', - { defaultMessage: 'View anomalies' } -); - -const ANOMALY_DETECTION_DISABLED_TEXT = i18n.translate( - 'xpack.apm.serviceMap.anomalyDetectionPopoverDisabled', - { - defaultMessage: - 'Display service health indicators by enabling anomaly detection from the Integrations menu in the Service details view.', - } -); - -const ANOMALY_DETECTION_NO_DATA_TEXT = i18n.translate( - 'xpack.apm.serviceMap.anomalyDetectionPopoverNoData', - { - defaultMessage: `We couldn't find an anomaly score within the selected time range. See details in the anomaly explorer.`, - } -); diff --git a/x-pack/plugins/apm/public/components/app/TransactionOverview/index.tsx b/x-pack/plugins/apm/public/components/app/TransactionOverview/index.tsx index 9018fbb2bc41..fc5347d08131 100644 --- a/x-pack/plugins/apm/public/components/app/TransactionOverview/index.tsx +++ b/x-pack/plugins/apm/public/components/app/TransactionOverview/index.tsx @@ -22,8 +22,6 @@ import { TransactionCharts } from '../../shared/charts/TransactionCharts'; import { TransactionBreakdown } from '../../shared/TransactionBreakdown'; import { TransactionList } from './List'; import { useRedirect } from './useRedirect'; -import { useFetcher } from '../../../hooks/useFetcher'; -import { getHasMLJob } from '../../../services/rest/ml'; import { history } from '../../../utils/history'; import { useLocation } from '../../../hooks/useLocation'; import { ChartsSyncContextProvider } from '../../../context/ChartsSyncContext'; @@ -34,7 +32,6 @@ import { PROJECTION } from '../../../../common/projections/typings'; import { useUrlParams } from '../../../hooks/useUrlParams'; import { useServiceTransactionTypes } from '../../../hooks/useServiceTransactionTypes'; import { TransactionTypeFilter } from '../../shared/LocalUIFilters/TransactionTypeFilter'; -import { useApmPluginContext } from '../../../hooks/useApmPluginContext'; function getRedirectLocation({ urlParams, @@ -86,18 +83,6 @@ export function TransactionOverview() { status: transactionListStatus, } = useTransactionList(urlParams); - const { http } = useApmPluginContext().core; - - const { data: hasMLJob = false } = useFetcher( - () => { - if (serviceName && transactionType) { - return getHasMLJob({ serviceName, transactionType, http }); - } - }, - [http, serviceName, transactionType], - { showToastOnError: false } - ); - const localFiltersConfig: React.ComponentProps = useMemo( () => ({ filterNames: [ @@ -140,7 +125,8 @@ export function TransactionOverview() { { - it('should produce the correct URL with serviceName', async () => { - const href = await getRenderedHref( - () => ( - - ), - { 
search: '?rangeFrom=now/w&rangeTo=now-4h' } as Location - ); - - expect(href).toEqual( - `/basepath/app/ml#/timeseriesexplorer?_g=(ml:(jobIds:!(myservicename-mytransactiontype-high_mean_response_time)),refreshInterval:(pause:true,value:'0'),time:(from:now%2Fw,to:now-4h))` - ); - }); it('should produce the correct URL with jobId', async () => { const href = await getRenderedHref( () => ( diff --git a/x-pack/plugins/apm/public/components/shared/Links/MachineLearningLinks/MLJobLink.tsx b/x-pack/plugins/apm/public/components/shared/Links/MachineLearningLinks/MLJobLink.tsx index 346748964d52..1e1f9ea5f23b 100644 --- a/x-pack/plugins/apm/public/components/shared/Links/MachineLearningLinks/MLJobLink.tsx +++ b/x-pack/plugins/apm/public/components/shared/Links/MachineLearningLinks/MLJobLink.tsx @@ -5,28 +5,16 @@ */ import React from 'react'; -import { getMlJobId } from '../../../../../common/ml_job_constants'; import { MLLink } from './MLLink'; -interface PropsServiceName { - serviceName: string; - transactionType?: string; -} -interface PropsJobId { +interface Props { jobId: string; -} - -type Props = (PropsServiceName | PropsJobId) & { external?: boolean; -}; +} export const MLJobLink: React.FC = (props) => { - const jobId = - 'jobId' in props - ? props.jobId - : getMlJobId(props.serviceName, props.transactionType); const query = { - ml: { jobIds: [jobId] }, + ml: { jobIds: [props.jobId] }, }; return ( diff --git a/x-pack/plugins/apm/public/components/shared/charts/TransactionCharts/index.tsx b/x-pack/plugins/apm/public/components/shared/charts/TransactionCharts/index.tsx index 4821e06419e3..00ff6f996972 100644 --- a/x-pack/plugins/apm/public/components/shared/charts/TransactionCharts/index.tsx +++ b/x-pack/plugins/apm/public/components/shared/charts/TransactionCharts/index.tsx @@ -101,11 +101,13 @@ export class TransactionCharts extends Component { return null; } - const { serviceName, transactionType, kuery } = this.props.urlParams; + const { serviceName, kuery } = this.props.urlParams; if (!serviceName) { return null; } + const linkedJobId = ''; // TODO [APM ML] link to ML job id for the selected environment + const hasKuery = !isEmpty(kuery); const icon = hasKuery ? ( { } )}{' '} - - View Job - + View Job ); diff --git a/x-pack/plugins/apm/public/services/rest/ml.ts b/x-pack/plugins/apm/public/services/rest/ml.ts deleted file mode 100644 index 47032501d9fb..000000000000 --- a/x-pack/plugins/apm/public/services/rest/ml.ts +++ /dev/null @@ -1,123 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ - -import { HttpSetup } from 'kibana/public'; -import { - PROCESSOR_EVENT, - SERVICE_NAME, - TRANSACTION_TYPE, -} from '../../../common/elasticsearch_fieldnames'; -import { - APM_ML_JOB_GROUP_NAME, - getMlJobId, - getMlPrefix, - encodeForMlApi, -} from '../../../common/ml_job_constants'; -import { callApi } from './callApi'; -import { ESFilter } from '../../../typings/elasticsearch'; -import { callApmApi } from './createCallApmApi'; - -interface MlResponseItem { - id: string; - success: boolean; - error?: { - msg: string; - body: string; - path: string; - response: string; - statusCode: number; - }; -} - -interface StartedMLJobApiResponse { - datafeeds: MlResponseItem[]; - jobs: MlResponseItem[]; -} - -async function getTransactionIndices() { - const indices = await callApmApi({ - method: 'GET', - pathname: `/api/apm/settings/apm-indices`, - }); - return indices['apm_oss.transactionIndices']; -} - -export async function startMLJob({ - serviceName, - transactionType, - http, -}: { - serviceName: string; - transactionType: string; - http: HttpSetup; -}) { - const transactionIndices = await getTransactionIndices(); - const groups = [ - APM_ML_JOB_GROUP_NAME, - encodeForMlApi(serviceName), - encodeForMlApi(transactionType), - ]; - const filter: ESFilter[] = [ - { term: { [SERVICE_NAME]: serviceName } }, - { term: { [PROCESSOR_EVENT]: 'transaction' } }, - { term: { [TRANSACTION_TYPE]: transactionType } }, - ]; - return callApi(http, { - method: 'POST', - pathname: `/api/ml/modules/setup/apm_transaction`, - body: { - prefix: getMlPrefix(serviceName, transactionType), - groups, - indexPatternName: transactionIndices, - startDatafeed: true, - query: { - bool: { - filter, - }, - }, - }, - }); -} - -// https://www.elastic.co/guide/en/elasticsearch/reference/6.5/ml-get-job.html -export interface MLJobApiResponse { - count: number; - jobs: Array<{ - job_id: string; - }>; -} - -export type MLError = Error & { body?: { message?: string } }; - -export async function getHasMLJob({ - serviceName, - transactionType, - http, -}: { - serviceName: string; - transactionType: string; - http: HttpSetup; -}) { - try { - await callApi(http, { - method: 'GET', - pathname: `/api/ml/anomaly_detectors/${getMlJobId( - serviceName, - transactionType - )}`, - }); - return true; - } catch (error) { - if ( - error?.body?.statusCode === 404 && - error?.body?.attributes?.body?.error?.type === - 'resource_not_found_exception' - ) { - return false; // false only if ML api responds with resource_not_found_exception - } - throw error; - } -} diff --git a/x-pack/plugins/apm/scripts/aggregate-latency-metrics.js b/x-pack/plugins/apm/scripts/aggregate-latency-metrics.js new file mode 100644 index 000000000000..287f267343b1 --- /dev/null +++ b/x-pack/plugins/apm/scripts/aggregate-latency-metrics.js @@ -0,0 +1,31 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +// eslint-disable-next-line import/no-extraneous-dependencies +require('@babel/register')({ + extensions: ['.ts'], + plugins: [ + '@babel/plugin-proposal-optional-chaining', + '@babel/plugin-proposal-nullish-coalescing-operator', + ], + presets: [ + '@babel/typescript', + ['@babel/preset-env', { targets: { node: 'current' } }], + ], +}); + +const { + aggregateLatencyMetrics, +} = require('./aggregate-latency-metrics/index.ts'); + +aggregateLatencyMetrics().catch((err) => { + if (err.meta && err.meta.body) { + // error from elasticsearch client + console.error(err.meta.body); + } else { + console.error(err); + } + process.exit(1); +}); diff --git a/x-pack/plugins/apm/scripts/aggregate-latency-metrics/index.ts b/x-pack/plugins/apm/scripts/aggregate-latency-metrics/index.ts new file mode 100644 index 000000000000..6bc370be903d --- /dev/null +++ b/x-pack/plugins/apm/scripts/aggregate-latency-metrics/index.ts @@ -0,0 +1,444 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { Client } from '@elastic/elasticsearch'; +import { argv } from 'yargs'; +import pLimit from 'p-limit'; +import pRetry from 'p-retry'; +import { parse, format } from 'url'; +import { unique, without, set, merge, flatten } from 'lodash'; +import * as histogram from 'hdr-histogram-js'; +import { ESSearchResponse } from '../../typings/elasticsearch'; +import { + HOST_NAME, + SERVICE_NAME, + TRANSACTION_NAME, + TRANSACTION_TYPE, + AGENT_NAME, + SERVICE_ENVIRONMENT, + POD_NAME, + CONTAINER_ID, + SERVICE_VERSION, + TRANSACTION_RESULT, + PROCESSOR_EVENT, +} from '../../common/elasticsearch_fieldnames'; +import { stampLogger } from '../shared/stamp-logger'; +import { createOrUpdateIndex } from '../shared/create-or-update-index'; + +// This script will try to estimate how many latency metric documents +// will be created based on the available transaction documents. +// It can also generate metric documents based on a painless script +// and hdr histograms. +// +// Options: +// - interval: the interval (in minutes) for which latency metrics will be aggregated. +// Defaults to 1. +// - concurrency: number of maximum concurrent requests to ES. Defaults to 3. +// - from: start of the date range that should be processed. Should be a valid ISO timestamp. +// - to: end of the date range that should be processed. Should be a valid ISO timestamp. +// - source: from which transaction documents should be read. Should be location of ES (basic auth +// is supported) plus the index name (or an index pattern). Example: +// https://foo:bar@apm.elstc.co:9999/apm-8.0.0-transaction +// - dest: to which metric documents should be written. If this is not set, no metric documents +// will be created.Should be location of ES (basic auth is supported) plus the index name. +// Example: https://foo:bar@apm.elstc.co:9999/apm-8.0.0-metric +// - include: comma-separated list of fields that should be aggregated on, in addition to the +// default ones. +// - exclude: comma-separated list of fields that should be not be aggregated on. 
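+//
+// A minimal example invocation (a sketch, not part of the documented options above:
+// it assumes the script's own dependencies are installed and reuses the placeholder
+// credentials/host from the `source` example; adapt the URLs, index names, and ISO
+// timestamps to the clusters being measured):
+//
+//   node x-pack/plugins/apm/scripts/aggregate-latency-metrics.js \
+//     --from=2020-06-01T00:00:00.000Z \
+//     --to=2020-06-02T00:00:00.000Z \
+//     --interval=5 \
+//     --source=https://foo:bar@apm.elstc.co:9999/apm-8.0.0-transaction \
+//     --dest=https://foo:bar@apm.elstc.co:9999/apm-8.0.0-metric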
+ +stampLogger(); + +export async function aggregateLatencyMetrics() { + const interval = parseInt(String(argv.interval), 10) || 1; + const concurrency = parseInt(String(argv.concurrency), 10) || 3; + const numSigFigures = (parseInt(String(argv.sigfig), 10) || 2) as + | 1 + | 2 + | 3 + | 4 + | 5; + + const from = new Date(String(argv.from)).getTime(); + const to = new Date(String(argv.to)).getTime(); + + if (isNaN(from) || isNaN(to)) { + throw new Error( + `from and to are not valid dates - please supply valid ISO timestamps` + ); + } + + if (to <= from) { + throw new Error('to cannot be earlier than from'); + } + + const limit = pLimit(concurrency); + // retry function to handle ES timeouts + const retry = (fn: (...args: any[]) => any) => { + return () => + pRetry(fn, { + factor: 1, + retries: 3, + minTimeout: 2500, + }); + }; + + const tasks: Array> = []; + + const defaultFields = [ + SERVICE_NAME, + SERVICE_VERSION, + SERVICE_ENVIRONMENT, + AGENT_NAME, + HOST_NAME, + POD_NAME, + CONTAINER_ID, + TRANSACTION_NAME, + TRANSACTION_RESULT, + TRANSACTION_TYPE, + ]; + + const include = String(argv.include ?? '') + .split(',') + .filter(Boolean) as string[]; + + const exclude = String(argv.exclude ?? '') + .split(',') + .filter(Boolean) as string[]; + + const only = String(argv.only ?? '') + .split(',') + .filter(Boolean) as string[]; + + const fields = only.length + ? unique(only) + : without(unique([...include, ...defaultFields]), ...exclude); + + const globalFilter = argv.filter ? JSON.parse(String(argv.filter)) : {}; + + // eslint-disable-next-line no-console + console.log('Aggregating on', fields.join(',')); + + const source = String(argv.source ?? ''); + const dest = String(argv.dest ?? ''); + + function getClientOptionsFromIndexUrl( + url: string + ): { node: string; index: string } { + const parsed = parse(url); + const { pathname, ...rest } = parsed; + + return { + node: format(rest), + index: pathname!.replace('/', ''), + }; + } + + const sourceOptions = getClientOptionsFromIndexUrl(source); + + const sourceClient = new Client({ + node: sourceOptions.node, + ssl: { + rejectUnauthorized: false, + }, + requestTimeout: 120000, + }); + + let destClient: Client | undefined; + let destOptions: { node: string; index: string } | undefined; + + const uploadMetrics = !!dest; + + if (uploadMetrics) { + destOptions = getClientOptionsFromIndexUrl(dest); + destClient = new Client({ + node: destOptions.node, + ssl: { + rejectUnauthorized: false, + }, + }); + + const mappings = ( + await sourceClient.indices.getMapping({ + index: sourceOptions.index, + }) + ).body; + + const lastMapping = mappings[Object.keys(mappings)[0]]; + + const newMapping = merge({}, lastMapping, { + mappings: { + properties: { + transaction: { + properties: { + duration: { + properties: { + histogram: { + type: 'histogram', + }, + }, + }, + }, + }, + }, + }, + }); + + await createOrUpdateIndex({ + client: destClient, + indexName: destOptions.index, + clear: false, + template: newMapping, + }); + } else { + // eslint-disable-next-line no-console + console.log( + 'No destination was defined, not uploading aggregated documents' + ); + } + + let at = to; + while (at > from) { + const end = at; + const start = Math.max(from, at - interval * 60 * 1000); + + tasks.push( + limit( + retry(async () => { + const filter = [ + { + term: { + [PROCESSOR_EVENT]: 'transaction', + }, + }, + { + range: { + '@timestamp': { + gte: start, + lt: end, + }, + }, + }, + ]; + + const query: { + query: Record; + } = { + ...globalFilter, + query: { + 
...(globalFilter?.query ?? {}), + bool: { + ...(globalFilter?.query?.bool ?? {}), + filter: [ + ...Object.values(globalFilter?.query?.bool?.filter ?? {}), + ...filter, + ], + }, + }, + }; + + async function paginateThroughBuckets( + buckets: Array<{ + doc_count: number; + key: any; + recorded_values?: { value: unknown }; + }>, + after?: any + ): Promise< + Array<{ + doc_count: number; + key: any; + recorded_values?: { value: unknown }; + }> + > { + const params = { + index: sourceOptions.index, + body: { + ...query, + aggs: { + transactionGroups: { + composite: { + ...(after ? { after } : {}), + size: 10000, + sources: fields.map((field) => ({ + [field]: { + terms: { + field, + missing_bucket: true, + }, + }, + })), + }, + ...(dest + ? { + // scripted metric agg to get all the values (rather than downloading all the documents) + aggs: { + recorded_values: { + scripted_metric: { + init_script: 'state.values = new ArrayList()', + map_script: ` + if (!doc['transaction.duration.us'].empty) { + state.values.add(doc['transaction.duration.us'].value); + } + `, + combine_script: 'return state.values', + reduce_script: ` + return states.stream().flatMap(l -> l.stream()).collect(Collectors.toList()) + `, + }, + }, + }, + } + : {}), + }, + }, + }, + }; + + const response = (await sourceClient.search(params)) + .body as ESSearchResponse; + + const { aggregations } = response; + + if (!aggregations) { + return buckets; + } + + const { transactionGroups } = aggregations; + + const nextBuckets = buckets.concat(transactionGroups.buckets); + + if (!transactionGroups.after_key) { + return nextBuckets; + } + + return nextBuckets.concat( + await paginateThroughBuckets(buckets, transactionGroups.after_key) + ); + } + + async function getNumberOfTransactionDocuments() { + const params = { + index: sourceOptions.index, + body: { + query: { + bool: { + filter, + }, + }, + track_total_hits: true, + }, + }; + + const response = (await sourceClient.search(params)) + .body as ESSearchResponse; + + return response.hits.total.value; + } + + const [buckets, numberOfTransactionDocuments] = await Promise.all([ + paginateThroughBuckets([]), + getNumberOfTransactionDocuments(), + ]); + + const rangeLabel = `${new Date(start).toISOString()}-${new Date( + end + ).toISOString()}`; + + // eslint-disable-next-line no-console + console.log( + `${rangeLabel}: Compression: ${ + buckets.length + }/${numberOfTransactionDocuments} (${( + (buckets.length / numberOfTransactionDocuments) * + 100 + ).toPrecision(2)}%)` + ); + + const docs: Array> = []; + + if (uploadMetrics) { + buckets.forEach((bucket) => { + const values = (bucket.recorded_values?.value ?? 
[]) as number[]; + const h = histogram.build({ + numberOfSignificantValueDigits: numSigFigures, + }); + values.forEach((value) => { + h.recordValue(value); + }); + + const iterator = h.recordedValuesIterator; + + const distribution = { + values: [] as number[], + counts: [] as number[], + }; + + iterator.reset(); + + while (iterator.hasNext()) { + const value = iterator.next(); + distribution.values.push(value.valueIteratedTo); + distribution.counts.push(value.countAtValueIteratedTo); + } + + const structured = Object.keys(bucket.key).reduce((prev, key) => { + set(prev, key, bucket.key[key]); + return prev; + }, {}); + + const doc = merge({}, structured, { + '@timestamp': new Date(start).toISOString(), + timestamp: { + us: start * 1000, + }, + processor: { + name: 'metric', + event: 'metric', + }, + transaction: { + duration: { + histogram: distribution, + }, + }, + }); + + docs.push(doc); + }); + + if (!docs.length) { + // eslint-disable-next-line no-console + console.log(`${rangeLabel}: No docs to upload`); + return; + } + + const response = await destClient?.bulk({ + refresh: 'wait_for', + body: flatten( + docs.map((doc) => [ + { index: { _index: destOptions?.index } }, + doc, + ]) + ), + }); + + if (response?.body.errors) { + throw new Error( + `${rangeLabel}: Could not upload all metric documents` + ); + } + // eslint-disable-next-line no-console + console.log( + `${rangeLabel}: Uploaded ${docs.length} metric documents` + ); + } + }) + ) + ); + at = start; + } + + await Promise.all(tasks); +} diff --git a/x-pack/plugins/apm/scripts/package.json b/x-pack/plugins/apm/scripts/package.json index 9121449c5361..c5a9df792f85 100644 --- a/x-pack/plugins/apm/scripts/package.json +++ b/x-pack/plugins/apm/scripts/package.json @@ -4,7 +4,10 @@ "main": "index.js", "license": "MIT", "dependencies": { + "@elastic/elasticsearch": "^7.6.1", "@octokit/rest": "^16.35.0", - "console-stamp": "^0.2.9" + "@types/console-stamp": "^0.2.32", + "console-stamp": "^0.2.9", + "hdr-histogram-js": "^1.2.0" } } diff --git a/x-pack/plugins/apm/scripts/shared/create-or-update-index.ts b/x-pack/plugins/apm/scripts/shared/create-or-update-index.ts new file mode 100644 index 000000000000..3f88b73f5598 --- /dev/null +++ b/x-pack/plugins/apm/scripts/shared/create-or-update-index.ts @@ -0,0 +1,60 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { Client } from '@elastic/elasticsearch'; + +export async function createOrUpdateIndex({ + client, + clear, + indexName, + template, +}: { + client: Client; + clear: boolean; + indexName: string; + template: any; +}) { + if (clear) { + try { + await client.indices.delete({ + index: indexName, + }); + } catch (err) { + // 404 = index not found, totally okay + if (err.body.status !== 404) { + throw err; + } + } + } + + const indexExists = ( + await client.indices.exists({ + index: indexName, + }) + ).body as boolean; + + if (!indexExists) { + await client.indices.create({ + index: indexName, + body: template, + }); + } else { + await Promise.all([ + template.mappings + ? client.indices.putMapping({ + index: indexName, + body: template.mappings, + }) + : Promise.resolve(undefined as any), + template.settings + ? 
client.indices.putSettings({ + index: indexName, + body: template.settings, + }) + : Promise.resolve(undefined as any), + ]); + } +} diff --git a/x-pack/plugins/apm/scripts/upload-telemetry-data/download-telemetry-template.ts b/x-pack/plugins/apm/scripts/shared/download-telemetry-template.ts similarity index 68% rename from x-pack/plugins/apm/scripts/upload-telemetry-data/download-telemetry-template.ts rename to x-pack/plugins/apm/scripts/shared/download-telemetry-template.ts index 31559f1ab3c7..f20c6328281f 100644 --- a/x-pack/plugins/apm/scripts/upload-telemetry-data/download-telemetry-template.ts +++ b/x-pack/plugins/apm/scripts/shared/download-telemetry-template.ts @@ -4,15 +4,20 @@ * you may not use this file except in compliance with the Elastic License. */ -// @ts-ignore import { Octokit } from '@octokit/rest'; -export async function downloadTelemetryTemplate(octokit: Octokit) { +export async function downloadTelemetryTemplate({ + githubToken, +}: { + githubToken: string; +}) { + const octokit = new Octokit({ + auth: githubToken, + }); const file = await octokit.repos.getContents({ owner: 'elastic', repo: 'telemetry', path: 'config/templates/xpack-phone-home.json', - // @ts-ignore mediaType: { format: 'application/vnd.github.VERSION.raw', }, @@ -22,5 +27,11 @@ export async function downloadTelemetryTemplate(octokit: Octokit) { throw new Error('Expected single response, got array'); } - return JSON.parse(Buffer.from(file.data.content!, 'base64').toString()); + return JSON.parse(Buffer.from(file.data.content!, 'base64').toString()) as { + index_patterns: string[]; + mappings: { + properties: Record; + }; + settings: Record; + }; } diff --git a/x-pack/plugins/apm/scripts/shared/get-http-auth.ts b/x-pack/plugins/apm/scripts/shared/get-http-auth.ts new file mode 100644 index 000000000000..b662deb863a3 --- /dev/null +++ b/x-pack/plugins/apm/scripts/shared/get-http-auth.ts @@ -0,0 +1,19 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { KibanaConfig } from './read-kibana-config'; + +export const getHttpAuth = (config: KibanaConfig) => { + const httpAuth = + config['elasticsearch.username'] && config['elasticsearch.password'] + ? { + username: config['elasticsearch.username'], + password: config['elasticsearch.password'], + } + : null; + + return httpAuth; +}; diff --git a/x-pack/plugins/apm/scripts/shared/read-kibana-config.ts b/x-pack/plugins/apm/scripts/shared/read-kibana-config.ts new file mode 100644 index 000000000000..bc5f1afc63ca --- /dev/null +++ b/x-pack/plugins/apm/scripts/shared/read-kibana-config.ts @@ -0,0 +1,49 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +import path from 'path'; +import fs from 'fs'; +import yaml from 'js-yaml'; +import { identity, pick } from 'lodash'; + +export type KibanaConfig = ReturnType; + +export const readKibanaConfig = () => { + const kibanaConfigDir = path.join(__filename, '../../../../../../config'); + const kibanaDevConfig = path.join(kibanaConfigDir, 'kibana.dev.yml'); + const kibanaConfig = path.join(kibanaConfigDir, 'kibana.yml'); + + const loadedKibanaConfig = (yaml.safeLoad( + fs.readFileSync( + fs.existsSync(kibanaDevConfig) ? 
kibanaDevConfig : kibanaConfig, + 'utf8' + ) + ) || {}) as {}; + + const cliEsCredentials = pick( + { + 'elasticsearch.username': process.env.ELASTICSEARCH_USERNAME, + 'elasticsearch.password': process.env.ELASTICSEARCH_PASSWORD, + 'elasticsearch.hosts': process.env.ELASTICSEARCH_HOST, + }, + identity + ) as { + 'elasticsearch.username'?: string; + 'elasticsearch.password'?: string; + 'elasticsearch.hosts'?: string; + }; + + return { + 'apm_oss.transactionIndices': 'apm-*', + 'apm_oss.metricsIndices': 'apm-*', + 'apm_oss.errorIndices': 'apm-*', + 'apm_oss.spanIndices': 'apm-*', + 'apm_oss.onboardingIndices': 'apm-*', + 'apm_oss.sourcemapIndices': 'apm-*', + 'elasticsearch.hosts': 'http://localhost:9200', + ...loadedKibanaConfig, + ...cliEsCredentials, + }; +}; diff --git a/x-pack/plugins/canvas/public/components/positionable/index.js b/x-pack/plugins/apm/scripts/shared/stamp-logger.ts similarity index 63% rename from x-pack/plugins/canvas/public/components/positionable/index.js rename to x-pack/plugins/apm/scripts/shared/stamp-logger.ts index e5c3c32acb02..65d24bbae700 100644 --- a/x-pack/plugins/canvas/public/components/positionable/index.js +++ b/x-pack/plugins/apm/scripts/shared/stamp-logger.ts @@ -4,7 +4,8 @@ * you may not use this file except in compliance with the Elastic License. */ -import { pure } from 'recompose'; -import { Positionable as Component } from './positionable'; +import consoleStamp from 'console-stamp'; -export const Positionable = pure(Component); +export function stampLogger() { + consoleStamp(console, { pattern: '[HH:MM:ss.l]' }); +} diff --git a/x-pack/plugins/apm/scripts/tsconfig.json b/x-pack/plugins/apm/scripts/tsconfig.json new file mode 100644 index 000000000000..350db55e7244 --- /dev/null +++ b/x-pack/plugins/apm/scripts/tsconfig.json @@ -0,0 +1,12 @@ +{ + "extends": "../../../tsconfig.json", + "include": [ + "./**/*" + ], + "exclude": [], + "compilerOptions": { + "types": [ + "node" + ] + } +} diff --git a/x-pack/plugins/apm/scripts/upload-telemetry-data/index.ts b/x-pack/plugins/apm/scripts/upload-telemetry-data/index.ts index a3c97cd8828d..5f9c72810fc9 100644 --- a/x-pack/plugins/apm/scripts/upload-telemetry-data/index.ts +++ b/x-pack/plugins/apm/scripts/upload-telemetry-data/index.ts @@ -11,115 +11,50 @@ // - Easier testing of the telemetry tasks // - Validate whether we can run the queries we want to on the telemetry data -import fs from 'fs'; -import path from 'path'; -// @ts-ignore -import { Octokit } from '@octokit/rest'; -import { merge, chunk, flatten, pick, identity } from 'lodash'; -import axios from 'axios'; -import yaml from 'js-yaml'; -import { Client } from 'elasticsearch'; +import { merge, chunk, flatten } from 'lodash'; +import { Client } from '@elastic/elasticsearch'; import { argv } from 'yargs'; -import { promisify } from 'util'; import { Logger } from 'kibana/server'; -// @ts-ignore -import consoleStamp from 'console-stamp'; +import { stampLogger } from '../shared/stamp-logger'; // eslint-disable-next-line @kbn/eslint/no-restricted-paths import { CollectTelemetryParams } from '../../server/lib/apm_telemetry/collect_data_telemetry'; -import { downloadTelemetryTemplate } from './download-telemetry-template'; -import mapping from '../../mappings.json'; +import { downloadTelemetryTemplate } from '../shared/download-telemetry-template'; +// eslint-disable-next-line @kbn/eslint/no-restricted-paths +import { apmTelemetry } from '../../server/saved_objects/apm_telemetry'; import { generateSampleDocuments } from './generate-sample-documents'; 
+import { readKibanaConfig } from '../shared/read-kibana-config'; +import { getHttpAuth } from '../shared/get-http-auth'; +import { createOrUpdateIndex } from '../shared/create-or-update-index'; -consoleStamp(console, '[HH:MM:ss.l]'); - -const githubToken = process.env.GITHUB_TOKEN; +stampLogger(); -if (!githubToken) { - throw new Error('GITHUB_TOKEN was not provided.'); -} +async function uploadData() { + const githubToken = process.env.GITHUB_TOKEN; -const kibanaConfigDir = path.join(__filename, '../../../../../../config'); -const kibanaDevConfig = path.join(kibanaConfigDir, 'kibana.dev.yml'); -const kibanaConfig = path.join(kibanaConfigDir, 'kibana.yml'); - -const xpackTelemetryIndexName = 'xpack-phone-home'; - -const loadedKibanaConfig = (yaml.safeLoad( - fs.readFileSync( - fs.existsSync(kibanaDevConfig) ? kibanaDevConfig : kibanaConfig, - 'utf8' - ) -) || {}) as {}; - -const cliEsCredentials = pick( - { - 'elasticsearch.username': process.env.ELASTICSEARCH_USERNAME, - 'elasticsearch.password': process.env.ELASTICSEARCH_PASSWORD, - 'elasticsearch.hosts': process.env.ELASTICSEARCH_HOST, - }, - identity -) as { - 'elasticsearch.username'?: string; - 'elasticsearch.password'?: string; - 'elasticsearch.hosts'?: string; -}; - -const config = { - 'apm_oss.transactionIndices': 'apm-*', - 'apm_oss.metricsIndices': 'apm-*', - 'apm_oss.errorIndices': 'apm-*', - 'apm_oss.spanIndices': 'apm-*', - 'apm_oss.onboardingIndices': 'apm-*', - 'apm_oss.sourcemapIndices': 'apm-*', - 'elasticsearch.hosts': 'http://localhost:9200', - ...loadedKibanaConfig, - ...cliEsCredentials, -}; + if (!githubToken) { + throw new Error('GITHUB_TOKEN was not provided.'); + } -async function uploadData() { - const octokit = new Octokit({ - auth: githubToken, + const xpackTelemetryIndexName = 'xpack-phone-home'; + const telemetryTemplate = await downloadTelemetryTemplate({ + githubToken, }); - const telemetryTemplate = await downloadTelemetryTemplate(octokit); + const kibanaMapping = apmTelemetry.mappings; - const kibanaMapping = mapping['apm-telemetry']; + const config = readKibanaConfig(); - const httpAuth = - config['elasticsearch.username'] && config['elasticsearch.password'] - ? { - username: config['elasticsearch.username'], - password: config['elasticsearch.password'], - } - : null; + const httpAuth = getHttpAuth(config); const client = new Client({ - host: config['elasticsearch.hosts'], + nodes: [config['elasticsearch.hosts']], ...(httpAuth ? { - httpAuth: `${httpAuth.username}:${httpAuth.password}`, + auth: httpAuth, } : {}), }); - if (argv.clear) { - try { - await promisify(client.indices.delete.bind(client))({ - index: xpackTelemetryIndexName, - }); - } catch (err) { - // 404 = index not found, totally okay - if (err.status !== 404) { - throw err; - } - } - } - - const axiosInstance = axios.create({ - baseURL: config['elasticsearch.hosts'], - ...(httpAuth ? 
{ auth: httpAuth } : {}), - }); - const newTemplate = merge(telemetryTemplate, { settings: { index: { mapping: { total_fields: { limit: 10000 } } }, @@ -129,7 +64,12 @@ async function uploadData() { // override apm mapping instead of merging newTemplate.mappings.properties.stack_stats.properties.kibana.properties.plugins.properties.apm = kibanaMapping; - await axiosInstance.put(`/_template/xpack-phone-home`, newTemplate); + await createOrUpdateIndex({ + indexName: xpackTelemetryIndexName, + client, + template: newTemplate, + clear: !!argv.clear, + }); const sampleDocuments = await generateSampleDocuments({ collectTelemetryParams: { @@ -140,19 +80,16 @@ async function uploadData() { apmAgentConfigurationIndex: '.apm-agent-configuration', }, search: (body) => { - return promisify(client.search.bind(client))({ - ...body, - requestTimeout: 120000, - }) as any; + return client.search(body as any).then((res) => res.body); }, indicesStats: (body) => { - return promisify(client.indices.stats.bind(client))({ - ...body, - requestTimeout: 120000, - }) as any; + return client.indices.stats(body as any); }, transportRequest: ((params) => { - return axiosInstance[params.method](params.path); + return client.transport.request({ + method: params.method, + path: params.path, + }); }) as CollectTelemetryParams['transportRequest'], }, }); @@ -162,20 +99,27 @@ async function uploadData() { await chunks.reduce>((prev, documents) => { return prev.then(async () => { const body = flatten( - documents.map((doc) => [{ index: { _index: 'xpack-phone-home' } }, doc]) + documents.map((doc) => [ + { index: { _index: xpackTelemetryIndexName } }, + doc, + ]) ); - return promisify(client.bulk.bind(client))({ - body, - refresh: true, - }).then((response: any) => { - if (response.errors) { - const firstError = response.items.filter( - (item: any) => item.index.status >= 400 - )[0].index.error; - throw new Error(`Failed to upload documents: ${firstError.reason} `); - } - }); + return client + .bulk({ + body, + refresh: 'wait_for', + }) + .then((response: any) => { + if (response.errors) { + const firstError = response.items.filter( + (item: any) => item.index.status >= 400 + )[0].index.error; + throw new Error( + `Failed to upload documents: ${firstError.reason} ` + ); + } + }); }); }, Promise.resolve()); } diff --git a/x-pack/plugins/apm/server/lib/service_map/get_service_anomalies.test.ts b/x-pack/plugins/apm/server/lib/service_map/get_service_anomalies.test.ts deleted file mode 100644 index aefd074c373f..000000000000 --- a/x-pack/plugins/apm/server/lib/service_map/get_service_anomalies.test.ts +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ - -import { getApmMlJobCategory } from './get_service_anomalies'; -import { Job as AnomalyDetectionJob } from '../../../../ml/server'; - -describe('getApmMlJobCategory', () => { - it('should match service names with different casings', () => { - const mlJob = { - job_id: 'testservice-request-high_mean_response_time', - groups: ['apm', 'testservice', 'request'], - } as AnomalyDetectionJob; - const serviceNames = ['testService']; - const apmMlJobCategory = getApmMlJobCategory(mlJob, serviceNames); - - expect(apmMlJobCategory).toEqual({ - jobId: 'testservice-request-high_mean_response_time', - serviceName: 'testService', - transactionType: 'request', - }); - }); - - it('should match service names with spaces', () => { - const mlJob = { - job_id: 'test_service-request-high_mean_response_time', - groups: ['apm', 'test_service', 'request'], - } as AnomalyDetectionJob; - const serviceNames = ['Test Service']; - const apmMlJobCategory = getApmMlJobCategory(mlJob, serviceNames); - - expect(apmMlJobCategory).toEqual({ - jobId: 'test_service-request-high_mean_response_time', - serviceName: 'Test Service', - transactionType: 'request', - }); - }); -}); diff --git a/x-pack/plugins/apm/server/lib/service_map/get_service_anomalies.ts b/x-pack/plugins/apm/server/lib/service_map/get_service_anomalies.ts deleted file mode 100644 index 900141e9040a..000000000000 --- a/x-pack/plugins/apm/server/lib/service_map/get_service_anomalies.ts +++ /dev/null @@ -1,166 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -import { intersection } from 'lodash'; -import { leftJoin } from '../../../common/utils/left_join'; -import { Job as AnomalyDetectionJob } from '../../../../ml/server'; -import { PromiseReturnType } from '../../../typings/common'; -import { IEnvOptions } from './get_service_map'; -import { Setup } from '../helpers/setup_request'; -import { - APM_ML_JOB_GROUP_NAME, - encodeForMlApi, -} from '../../../common/ml_job_constants'; - -async function getApmAnomalyDetectionJobs( - setup: Setup -): Promise { - const { ml } = setup; - - if (!ml) { - return []; - } - try { - const { jobs } = await ml.anomalyDetectors.jobs(APM_ML_JOB_GROUP_NAME); - return jobs; - } catch (error) { - if (error.statusCode === 404) { - return []; - } - throw error; - } -} - -type ApmMlJobCategory = NonNullable>; - -export const getApmMlJobCategory = ( - mlJob: AnomalyDetectionJob, - serviceNames: string[] -) => { - const serviceByGroupNameMap = new Map( - serviceNames.map((serviceName) => [ - encodeForMlApi(serviceName), - serviceName, - ]) - ); - if (!mlJob.groups.includes(APM_ML_JOB_GROUP_NAME)) { - // ML job missing "apm" group name - return; - } - const apmJobGroups = mlJob.groups.filter( - (groupName) => groupName !== APM_ML_JOB_GROUP_NAME - ); - const apmJobServiceNames = apmJobGroups.map( - (groupName) => serviceByGroupNameMap.get(groupName) || groupName - ); - const [serviceName] = intersection(apmJobServiceNames, serviceNames); - if (!serviceName) { - // APM ML job service was not found - return; - } - const serviceGroupName = encodeForMlApi(serviceName); - const [transactionType] = apmJobGroups.filter( - (groupName) => groupName !== serviceGroupName - ); - if (!transactionType) { - // APM ML job transaction type was not found. 
- return; - } - return { jobId: mlJob.job_id, serviceName, transactionType }; -}; - -export type ServiceAnomalies = PromiseReturnType; - -export async function getServiceAnomalies( - options: IEnvOptions, - serviceNames: string[] -) { - const { start, end, ml } = options.setup; - - if (!ml || serviceNames.length === 0) { - return []; - } - - const apmMlJobs = await getApmAnomalyDetectionJobs(options.setup); - if (apmMlJobs.length === 0) { - return []; - } - const apmMlJobCategories = apmMlJobs - .map((job) => getApmMlJobCategory(job, serviceNames)) - .filter( - (apmJobCategory) => apmJobCategory !== undefined - ) as ApmMlJobCategory[]; - const apmJobIds = apmMlJobs.map((job) => job.job_id); - const params = { - body: { - size: 0, - query: { - bool: { - filter: [ - { term: { result_type: 'record' } }, - { - terms: { - job_id: apmJobIds, - }, - }, - { - range: { - timestamp: { gte: start, lte: end, format: 'epoch_millis' }, - }, - }, - ], - }, - }, - aggs: { - jobs: { - terms: { field: 'job_id', size: apmJobIds.length }, - aggs: { - top_score_hits: { - top_hits: { - sort: [{ record_score: { order: 'desc' as const } }], - _source: ['record_score', 'timestamp', 'typical', 'actual'], - size: 1, - }, - }, - }, - }, - }, - }, - }; - - const response = (await ml.mlSystem.mlAnomalySearch(params)) as { - aggregations: { - jobs: { - buckets: Array<{ - key: string; - top_score_hits: { - hits: { - hits: Array<{ - _source: { - record_score: number; - timestamp: number; - typical: number[]; - actual: number[]; - }; - }>; - }; - }; - }>; - }; - }; - }; - const anomalyScores = response.aggregations.jobs.buckets.map((jobBucket) => { - const jobId = jobBucket.key; - const bucketSource = jobBucket.top_score_hits.hits.hits?.[0]?._source; - return { - jobId, - anomalyScore: bucketSource.record_score, - timestamp: bucketSource.timestamp, - typical: bucketSource.typical[0], - actual: bucketSource.actual[0], - }; - }); - return leftJoin(apmMlJobCategories, 'jobId', anomalyScores); -} diff --git a/x-pack/plugins/apm/server/lib/service_map/get_service_map.ts b/x-pack/plugins/apm/server/lib/service_map/get_service_map.ts index 9f3ded82d7cb..4d488cd1a550 100644 --- a/x-pack/plugins/apm/server/lib/service_map/get_service_map.ts +++ b/x-pack/plugins/apm/server/lib/service_map/get_service_map.ts @@ -13,14 +13,9 @@ import { getServicesProjection } from '../../../common/projections/services'; import { mergeProjection } from '../../../common/projections/util/merge_projection'; import { PromiseReturnType } from '../../../typings/common'; import { Setup, SetupTimeRange } from '../helpers/setup_request'; -import { - transformServiceMapResponses, - getAllNodes, - getServiceNodes, -} from './transform_service_map_responses'; +import { transformServiceMapResponses } from './transform_service_map_responses'; import { getServiceMapFromTraceIds } from './get_service_map_from_trace_ids'; import { getTraceSampleIds } from './get_trace_sample_ids'; -import { getServiceAnomalies, ServiceAnomalies } from './get_service_anomalies'; export interface IEnvOptions { setup: Setup & SetupTimeRange; @@ -132,7 +127,6 @@ async function getServicesData(options: IEnvOptions) { ); } -export { ServiceAnomalies }; export type ConnectionsResponse = PromiseReturnType; export type ServicesResponse = PromiseReturnType; export type ServiceMapAPIResponse = PromiseReturnType; @@ -143,19 +137,8 @@ export async function getServiceMap(options: IEnvOptions) { getServicesData(options), ]); - // Derive all related service names from connection and service data - 
const allNodes = getAllNodes(servicesData, connectionData.connections); - const serviceNodes = getServiceNodes(allNodes); - const serviceNames = serviceNodes.map( - (serviceData) => serviceData[SERVICE_NAME] - ); - - // Get related service anomalies - const serviceAnomalies = await getServiceAnomalies(options, serviceNames); - return transformServiceMapResponses({ ...connectionData, - anomalies: serviceAnomalies, services: servicesData, }); } diff --git a/x-pack/plugins/apm/server/lib/service_map/ml_helpers.test.ts b/x-pack/plugins/apm/server/lib/service_map/ml_helpers.test.ts deleted file mode 100644 index f07b575cc0a3..000000000000 --- a/x-pack/plugins/apm/server/lib/service_map/ml_helpers.test.ts +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ - -import { ServiceAnomalies } from './get_service_map'; -import { addAnomaliesDataToNodes } from './ml_helpers'; - -describe('addAnomaliesDataToNodes', () => { - it('adds anomalies to nodes', () => { - const nodes = [ - { - 'service.name': 'opbeans-ruby', - 'agent.name': 'ruby', - 'service.environment': null, - }, - { - 'service.name': 'opbeans-java', - 'agent.name': 'java', - 'service.environment': null, - }, - ]; - - const serviceAnomalies: ServiceAnomalies = [ - { - jobId: 'opbeans-ruby-request-high_mean_response_time', - serviceName: 'opbeans-ruby', - transactionType: 'request', - anomalyScore: 50, - timestamp: 1591351200000, - actual: 2000, - typical: 1000, - }, - { - jobId: 'opbeans-java-request-high_mean_response_time', - serviceName: 'opbeans-java', - transactionType: 'request', - anomalyScore: 100, - timestamp: 1591351200000, - actual: 9000, - typical: 3000, - }, - ]; - - const result = [ - { - 'service.name': 'opbeans-ruby', - 'agent.name': 'ruby', - 'service.environment': null, - anomaly_score: 50, - anomaly_severity: 'major', - actual_value: 2000, - typical_value: 1000, - ml_job_id: 'opbeans-ruby-request-high_mean_response_time', - }, - { - 'service.name': 'opbeans-java', - 'agent.name': 'java', - 'service.environment': null, - anomaly_score: 100, - anomaly_severity: 'critical', - actual_value: 9000, - typical_value: 3000, - ml_job_id: 'opbeans-java-request-high_mean_response_time', - }, - ]; - - expect( - addAnomaliesDataToNodes( - nodes, - (serviceAnomalies as unknown) as ServiceAnomalies - ) - ).toEqual(result); - }); -}); diff --git a/x-pack/plugins/apm/server/lib/service_map/ml_helpers.ts b/x-pack/plugins/apm/server/lib/service_map/ml_helpers.ts deleted file mode 100644 index 8162417616b6..000000000000 --- a/x-pack/plugins/apm/server/lib/service_map/ml_helpers.ts +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ - -import { SERVICE_NAME } from '../../../common/elasticsearch_fieldnames'; -import { getSeverity } from '../../../common/ml_job_constants'; -import { ConnectionNode, ServiceNode } from '../../../common/service_map'; -import { ServiceAnomalies } from './get_service_map'; - -export function addAnomaliesDataToNodes( - nodes: ConnectionNode[], - serviceAnomalies: ServiceAnomalies -) { - const anomaliesMap = serviceAnomalies.reduce( - (acc, anomalyJob) => { - const serviceAnomaly: typeof acc[string] | undefined = - acc[anomalyJob.serviceName]; - const hasAnomalyJob = serviceAnomaly !== undefined; - const hasAnomalyScore = serviceAnomaly?.anomaly_score !== undefined; - const hasNewAnomalyScore = anomalyJob.anomalyScore !== undefined; - const hasNewMaxAnomalyScore = - hasNewAnomalyScore && - (!hasAnomalyScore || - (anomalyJob?.anomalyScore ?? 0) > - (serviceAnomaly?.anomaly_score ?? 0)); - - if (!hasAnomalyJob || hasNewMaxAnomalyScore) { - acc[anomalyJob.serviceName] = { - anomaly_score: anomalyJob.anomalyScore, - actual_value: anomalyJob.actual, - typical_value: anomalyJob.typical, - ml_job_id: anomalyJob.jobId, - }; - } - - return acc; - }, - {} as { - [serviceName: string]: { - anomaly_score?: number; - actual_value?: number; - typical_value?: number; - ml_job_id: string; - }; - } - ); - - const servicesDataWithAnomalies: ServiceNode[] = nodes.map((service) => { - const serviceAnomaly = anomaliesMap[service[SERVICE_NAME]]; - if (serviceAnomaly) { - const anomalyScore = serviceAnomaly.anomaly_score; - return { - ...service, - anomaly_score: anomalyScore, - anomaly_severity: getSeverity(anomalyScore), - actual_value: serviceAnomaly.actual_value, - typical_value: serviceAnomaly.typical_value, - ml_job_id: serviceAnomaly.ml_job_id, - }; - } - return service; - }); - - return servicesDataWithAnomalies; -} diff --git a/x-pack/plugins/apm/server/lib/service_map/transform_service_map_responses.test.ts b/x-pack/plugins/apm/server/lib/service_map/transform_service_map_responses.test.ts index 6c9880c2dc4d..1e26634bdf0f 100644 --- a/x-pack/plugins/apm/server/lib/service_map/transform_service_map_responses.test.ts +++ b/x-pack/plugins/apm/server/lib/service_map/transform_service_map_responses.test.ts @@ -12,7 +12,6 @@ import { SPAN_SUBTYPE, SPAN_TYPE, } from '../../../common/elasticsearch_fieldnames'; -import { ServiceAnomalies } from './get_service_map'; import { transformServiceMapResponses, ServiceMapResponse, @@ -36,12 +35,9 @@ const javaService = { [AGENT_NAME]: 'java', }; -const serviceAnomalies: ServiceAnomalies = []; - describe('transformServiceMapResponses', () => { it('maps external destinations to internal services', () => { const response: ServiceMapResponse = { - anomalies: serviceAnomalies, services: [nodejsService, javaService], discoveredServices: [ { @@ -73,7 +69,6 @@ describe('transformServiceMapResponses', () => { it('collapses external destinations based on span.destination.resource.name', () => { const response: ServiceMapResponse = { - anomalies: serviceAnomalies, services: [nodejsService, javaService], discoveredServices: [ { @@ -109,7 +104,6 @@ describe('transformServiceMapResponses', () => { it('picks the first span.type/subtype in an alphabetically sorted list', () => { const response: ServiceMapResponse = { - anomalies: serviceAnomalies, services: [javaService], discoveredServices: [], connections: [ @@ -148,7 +142,6 @@ describe('transformServiceMapResponses', () => { it('processes connections without a matching "service" aggregation', () => { const response: 
ServiceMapResponse = { - anomalies: serviceAnomalies, services: [javaService], discoveredServices: [], connections: [ diff --git a/x-pack/plugins/apm/server/lib/service_map/transform_service_map_responses.ts b/x-pack/plugins/apm/server/lib/service_map/transform_service_map_responses.ts index 53abf54cbcf3..835c00b8df23 100644 --- a/x-pack/plugins/apm/server/lib/service_map/transform_service_map_responses.ts +++ b/x-pack/plugins/apm/server/lib/service_map/transform_service_map_responses.ts @@ -17,12 +17,7 @@ import { ServiceConnectionNode, ExternalConnectionNode, } from '../../../common/service_map'; -import { - ConnectionsResponse, - ServicesResponse, - ServiceAnomalies, -} from './get_service_map'; -import { addAnomaliesDataToNodes } from './ml_helpers'; +import { ConnectionsResponse, ServicesResponse } from './get_service_map'; function getConnectionNodeId(node: ConnectionNode): string { if ('span.destination.service.resource' in node) { @@ -67,12 +62,11 @@ export function getServiceNodes(allNodes: ConnectionNode[]) { } export type ServiceMapResponse = ConnectionsResponse & { - anomalies: ServiceAnomalies; services: ServicesResponse; }; export function transformServiceMapResponses(response: ServiceMapResponse) { - const { anomalies, discoveredServices, services, connections } = response; + const { discoveredServices, services, connections } = response; const allNodes = getAllNodes(services, connections); const serviceNodes = getServiceNodes(allNodes); @@ -214,18 +208,10 @@ export function transformServiceMapResponses(response: ServiceMapResponse) { return prev.concat(connection); }, []); - // Add anomlies data - const dedupedNodesWithAnomliesData = addAnomaliesDataToNodes( - dedupedNodes, - anomalies - ); - // Put everything together in elements, with everything in the "data" property - const elements = [...dedupedConnections, ...dedupedNodesWithAnomliesData].map( - (element) => ({ - data: element, - }) - ); + const elements = [...dedupedConnections, ...dedupedNodes].map((element) => ({ + data: element, + })); return { elements }; } diff --git a/x-pack/plugins/apm/server/lib/transactions/charts/get_anomaly_data/__snapshots__/fetcher.test.ts.snap b/x-pack/plugins/apm/server/lib/transactions/charts/get_anomaly_data/__snapshots__/fetcher.test.ts.snap deleted file mode 100644 index cf3fdac221b5..000000000000 --- a/x-pack/plugins/apm/server/lib/transactions/charts/get_anomaly_data/__snapshots__/fetcher.test.ts.snap +++ /dev/null @@ -1,68 +0,0 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP - -exports[`anomalyAggsFetcher when ES returns valid response should call client with correct query 1`] = ` -Array [ - Array [ - Object { - "body": Object { - "aggs": Object { - "ml_avg_response_times": Object { - "aggs": Object { - "anomaly_score": Object { - "max": Object { - "field": "anomaly_score", - }, - }, - "lower": Object { - "min": Object { - "field": "model_lower", - }, - }, - "upper": Object { - "max": Object { - "field": "model_upper", - }, - }, - }, - "date_histogram": Object { - "extended_bounds": Object { - "max": 200000, - "min": 90000, - }, - "field": "timestamp", - "fixed_interval": "myInterval", - "min_doc_count": 0, - }, - }, - }, - "query": Object { - "bool": Object { - "filter": Array [ - Object { - "term": Object { - "job_id": "myservicename-mytransactiontype-high_mean_response_time", - }, - }, - Object { - "exists": Object { - "field": "bucket_span", - }, - }, - Object { - "range": Object { - "timestamp": Object { - "format": "epoch_millis", - "gte": 90000, - "lte": 200000, - }, - }, 
- }, - ], - }, - }, - "size": 0, - }, - }, - ], -] -`; diff --git a/x-pack/plugins/apm/server/lib/transactions/charts/get_anomaly_data/__snapshots__/index.test.ts.snap b/x-pack/plugins/apm/server/lib/transactions/charts/get_anomaly_data/__snapshots__/index.test.ts.snap deleted file mode 100644 index 971fa3b92cc8..000000000000 --- a/x-pack/plugins/apm/server/lib/transactions/charts/get_anomaly_data/__snapshots__/index.test.ts.snap +++ /dev/null @@ -1,38 +0,0 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP - -exports[`getAnomalySeries should match snapshot 1`] = ` -Object { - "anomalyBoundaries": Array [ - Object { - "x": 5000, - "y": 200, - "y0": 20, - }, - Object { - "x": 15000, - "y": 100, - "y0": 20, - }, - Object { - "x": 25000, - "y": 50, - "y0": 10, - }, - Object { - "x": 30000, - "y": 50, - "y0": 10, - }, - ], - "anomalyScore": Array [ - Object { - "x": 25000, - "x0": 15000, - }, - Object { - "x": 35000, - "x0": 25000, - }, - ], -} -`; diff --git a/x-pack/plugins/apm/server/lib/transactions/charts/get_anomaly_data/__snapshots__/transform.test.ts.snap b/x-pack/plugins/apm/server/lib/transactions/charts/get_anomaly_data/__snapshots__/transform.test.ts.snap deleted file mode 100644 index 8cf471cb34ed..000000000000 --- a/x-pack/plugins/apm/server/lib/transactions/charts/get_anomaly_data/__snapshots__/transform.test.ts.snap +++ /dev/null @@ -1,33 +0,0 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP - -exports[`anomalySeriesTransform should match snapshot 1`] = ` -Object { - "anomalyBoundaries": Array [ - Object { - "x": 10000, - "y": 200, - "y0": 20, - }, - Object { - "x": 15000, - "y": 100, - "y0": 20, - }, - Object { - "x": 25000, - "y": 50, - "y0": 10, - }, - ], - "anomalyScore": Array [ - Object { - "x": 25000, - "x0": 15000, - }, - Object { - "x": 25000, - "x0": 25000, - }, - ], -} -`; diff --git a/x-pack/plugins/apm/server/lib/transactions/charts/get_anomaly_data/fetcher.test.ts b/x-pack/plugins/apm/server/lib/transactions/charts/get_anomaly_data/fetcher.test.ts deleted file mode 100644 index 313cf818a322..000000000000 --- a/x-pack/plugins/apm/server/lib/transactions/charts/get_anomaly_data/fetcher.test.ts +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ - -import { anomalySeriesFetcher, ESResponse } from './fetcher'; - -describe('anomalyAggsFetcher', () => { - describe('when ES returns valid response', () => { - let response: ESResponse | undefined; - let clientSpy: jest.Mock; - - beforeEach(async () => { - clientSpy = jest.fn().mockReturnValue('ES Response'); - response = await anomalySeriesFetcher({ - serviceName: 'myServiceName', - transactionType: 'myTransactionType', - intervalString: 'myInterval', - mlBucketSize: 10, - setup: { - ml: { - mlSystem: { - mlAnomalySearch: clientSpy, - }, - } as any, - start: 100000, - end: 200000, - } as any, - }); - }); - - it('should call client with correct query', () => { - expect(clientSpy.mock.calls).toMatchSnapshot(); - }); - - it('should return correct response', () => { - expect(response).toBe('ES Response'); - }); - }); - - it('should swallow HTTP errors', () => { - const httpError = new Error('anomaly lookup failed') as any; - httpError.statusCode = 418; - const failedRequestSpy = jest.fn(() => Promise.reject(httpError)); - - return expect( - anomalySeriesFetcher({ - setup: { - ml: { - mlSystem: { - mlAnomalySearch: failedRequestSpy, - }, - } as any, - }, - } as any) - ).resolves.toEqual(undefined); - }); - - it('should throw other errors', () => { - const otherError = new Error('anomaly lookup ASPLODED') as any; - const failedRequestSpy = jest.fn(() => Promise.reject(otherError)); - - return expect( - anomalySeriesFetcher({ - setup: { - ml: { - mlSystem: { - mlAnomalySearch: failedRequestSpy, - }, - } as any, - }, - } as any) - ).rejects.toThrow(otherError); - }); -}); diff --git a/x-pack/plugins/apm/server/lib/transactions/charts/get_anomaly_data/fetcher.ts b/x-pack/plugins/apm/server/lib/transactions/charts/get_anomaly_data/fetcher.ts deleted file mode 100644 index 8ee078de7f3c..000000000000 --- a/x-pack/plugins/apm/server/lib/transactions/charts/get_anomaly_data/fetcher.ts +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ - -import { getMlJobId } from '../../../../../common/ml_job_constants'; -import { PromiseReturnType } from '../../../../../../observability/typings/common'; -import { Setup, SetupTimeRange } from '../../../helpers/setup_request'; - -export type ESResponse = Exclude< - PromiseReturnType, - undefined ->; - -export async function anomalySeriesFetcher({ - serviceName, - transactionType, - intervalString, - mlBucketSize, - setup, -}: { - serviceName: string; - transactionType: string; - intervalString: string; - mlBucketSize: number; - setup: Setup & SetupTimeRange; -}) { - const { ml, start, end } = setup; - if (!ml) { - return; - } - - // move the start back with one bucket size, to ensure to get anomaly data in the beginning - // this is required because ML has a minimum bucket size (default is 900s) so if our buckets are smaller, we might have several null buckets in the beginning - const newStart = start - mlBucketSize * 1000; - const jobId = getMlJobId(serviceName, transactionType); - - const params = { - body: { - size: 0, - query: { - bool: { - filter: [ - { term: { job_id: jobId } }, - { exists: { field: 'bucket_span' } }, - { - range: { - timestamp: { - gte: newStart, - lte: end, - format: 'epoch_millis', - }, - }, - }, - ], - }, - }, - aggs: { - ml_avg_response_times: { - date_histogram: { - field: 'timestamp', - fixed_interval: intervalString, - min_doc_count: 0, - extended_bounds: { - min: newStart, - max: end, - }, - }, - aggs: { - anomaly_score: { max: { field: 'anomaly_score' } }, - lower: { min: { field: 'model_lower' } }, - upper: { max: { field: 'model_upper' } }, - }, - }, - }, - }, - }; - - try { - const response = await ml.mlSystem.mlAnomalySearch(params); - return response; - } catch (err) { - const isHttpError = 'statusCode' in err; - if (isHttpError) { - return; - } - throw err; - } -} diff --git a/x-pack/plugins/apm/server/lib/transactions/charts/get_anomaly_data/get_ml_bucket_size.ts b/x-pack/plugins/apm/server/lib/transactions/charts/get_anomaly_data/get_ml_bucket_size.ts deleted file mode 100644 index d649bfb19273..000000000000 --- a/x-pack/plugins/apm/server/lib/transactions/charts/get_anomaly_data/get_ml_bucket_size.ts +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ - -import { getMlJobId } from '../../../../../common/ml_job_constants'; -import { Setup, SetupTimeRange } from '../../../helpers/setup_request'; - -interface IOptions { - serviceName: string; - transactionType: string; - setup: Setup & SetupTimeRange; -} - -interface ESResponse { - bucket_span: number; -} - -export async function getMlBucketSize({ - serviceName, - transactionType, - setup, -}: IOptions): Promise { - const { ml, start, end } = setup; - if (!ml) { - return 0; - } - const jobId = getMlJobId(serviceName, transactionType); - - const params = { - body: { - _source: 'bucket_span', - size: 1, - query: { - bool: { - filter: [ - { term: { job_id: jobId } }, - { exists: { field: 'bucket_span' } }, - { - range: { - timestamp: { - gte: start, - lte: end, - format: 'epoch_millis', - }, - }, - }, - ], - }, - }, - }, - }; - - try { - const resp = await ml.mlSystem.mlAnomalySearch(params); - return resp.hits.hits[0]?._source.bucket_span || 0; - } catch (err) { - const isHttpError = 'statusCode' in err; - if (isHttpError) { - return 0; - } - throw err; - } -} diff --git a/x-pack/plugins/apm/server/lib/transactions/charts/get_anomaly_data/index.test.ts b/x-pack/plugins/apm/server/lib/transactions/charts/get_anomaly_data/index.test.ts deleted file mode 100644 index fb87f1b5707d..000000000000 --- a/x-pack/plugins/apm/server/lib/transactions/charts/get_anomaly_data/index.test.ts +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ - -import { getAnomalySeries } from '.'; -import { mlAnomalyResponse } from './mock_responses/ml_anomaly_response'; -import { mlBucketSpanResponse } from './mock_responses/ml_bucket_span_response'; -import { PromiseReturnType } from '../../../../../../observability/typings/common'; -import { APMConfig } from '../../../..'; - -describe('getAnomalySeries', () => { - let avgAnomalies: PromiseReturnType; - beforeEach(async () => { - const clientSpy = jest - .fn() - .mockResolvedValueOnce(mlBucketSpanResponse) - .mockResolvedValueOnce(mlAnomalyResponse); - - avgAnomalies = await getAnomalySeries({ - serviceName: 'myServiceName', - transactionType: 'myTransactionType', - transactionName: undefined, - timeSeriesDates: [100, 100000], - setup: { - start: 0, - end: 500000, - client: { search: () => {} } as any, - internalClient: { search: () => {} } as any, - config: new Proxy( - {}, - { - get: () => 'myIndex', - } - ) as APMConfig, - uiFiltersES: [], - indices: { - 'apm_oss.sourcemapIndices': 'myIndex', - 'apm_oss.errorIndices': 'myIndex', - 'apm_oss.onboardingIndices': 'myIndex', - 'apm_oss.spanIndices': 'myIndex', - 'apm_oss.transactionIndices': 'myIndex', - 'apm_oss.metricsIndices': 'myIndex', - apmAgentConfigurationIndex: 'myIndex', - apmCustomLinkIndex: 'myIndex', - }, - dynamicIndexPattern: null as any, - ml: { - mlSystem: { - mlAnomalySearch: clientSpy, - mlCapabilities: async () => ({ isPlatinumOrTrialLicense: true }), - }, - } as any, - }, - }); - }); - - it('should remove buckets lower than threshold and outside date range from anomalyScore', () => { - expect(avgAnomalies!.anomalyScore).toEqual([ - { x0: 15000, x: 25000 }, - { x0: 25000, x: 35000 }, - ]); - }); - - it('should remove buckets outside date range from anomalyBoundaries', () => { - expect( - avgAnomalies!.anomalyBoundaries!.filter( - (bucket) => bucket.x < 100 || bucket.x > 
100000 - ).length - ).toBe(0); - }); - - it('should remove buckets with null from anomalyBoundaries', () => { - expect( - avgAnomalies!.anomalyBoundaries!.filter((p) => p.y === null).length - ).toBe(0); - }); - - it('should match snapshot', async () => { - expect(avgAnomalies).toMatchSnapshot(); - }); -}); diff --git a/x-pack/plugins/apm/server/lib/transactions/charts/get_anomaly_data/index.ts b/x-pack/plugins/apm/server/lib/transactions/charts/get_anomaly_data/index.ts index 6f44cfa1df9f..b2d11f2ffe19 100644 --- a/x-pack/plugins/apm/server/lib/transactions/charts/get_anomaly_data/index.ts +++ b/x-pack/plugins/apm/server/lib/transactions/charts/get_anomaly_data/index.ts @@ -4,15 +4,17 @@ * you may not use this file except in compliance with the Elastic License. */ -import { getBucketSize } from '../../../helpers/get_bucket_size'; import { Setup, SetupTimeRange, SetupUIFilters, } from '../../../helpers/setup_request'; -import { anomalySeriesFetcher } from './fetcher'; -import { getMlBucketSize } from './get_ml_bucket_size'; -import { anomalySeriesTransform } from './transform'; +import { Coordinate, RectCoordinate } from '../../../../../typings/timeseries'; + +interface AnomalyTimeseries { + anomalyBoundaries: Coordinate[]; + anomalyScore: RectCoordinate[]; +} export async function getAnomalySeries({ serviceName, @@ -26,7 +28,7 @@ export async function getAnomalySeries({ transactionName: string | undefined; timeSeriesDates: number[]; setup: Setup & SetupTimeRange & SetupUIFilters; -}) { +}): Promise { // don't fetch anomalies for transaction details page if (transactionName) { return; @@ -53,29 +55,6 @@ export async function getAnomalySeries({ return; } - const mlBucketSize = await getMlBucketSize({ - serviceName, - transactionType, - setup, - }); - - const { start, end } = setup; - const { intervalString, bucketSize } = getBucketSize(start, end, 'auto'); - - const esResponse = await anomalySeriesFetcher({ - serviceName, - transactionType, - intervalString, - mlBucketSize, - setup, - }); - - return esResponse - ? anomalySeriesTransform( - esResponse, - mlBucketSize, - bucketSize, - timeSeriesDates - ) - : undefined; + // TODO [APM ML] return a series of anomaly scores, upper & lower bounds for the given timeSeriesDates + return; } diff --git a/x-pack/plugins/apm/server/lib/transactions/charts/get_anomaly_data/mock_responses/ml_anomaly_response.ts b/x-pack/plugins/apm/server/lib/transactions/charts/get_anomaly_data/mock_responses/ml_anomaly_response.ts deleted file mode 100644 index 523161ec1027..000000000000 --- a/x-pack/plugins/apm/server/lib/transactions/charts/get_anomaly_data/mock_responses/ml_anomaly_response.ts +++ /dev/null @@ -1,127 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ - -import { ESResponse } from '../fetcher'; - -export const mlAnomalyResponse: ESResponse = ({ - took: 3, - timed_out: false, - _shards: { - total: 5, - successful: 5, - skipped: 0, - failed: 0, - }, - hits: { - total: 10, - max_score: 0, - hits: [], - }, - aggregations: { - ml_avg_response_times: { - buckets: [ - { - key_as_string: '2018-07-02T09:16:40.000Z', - key: 0, - doc_count: 0, - anomaly_score: { - value: null, - }, - upper: { - value: 200, - }, - lower: { - value: 20, - }, - }, - { - key_as_string: '2018-07-02T09:25:00.000Z', - key: 5000, - doc_count: 4, - anomaly_score: { - value: null, - }, - upper: { - value: null, - }, - lower: { - value: null, - }, - }, - { - key_as_string: '2018-07-02T09:33:20.000Z', - key: 10000, - doc_count: 0, - anomaly_score: { - value: null, - }, - upper: { - value: null, - }, - lower: { - value: null, - }, - }, - { - key_as_string: '2018-07-02T09:41:40.000Z', - key: 15000, - doc_count: 2, - anomaly_score: { - value: 90, - }, - upper: { - value: 100, - }, - lower: { - value: 20, - }, - }, - { - key_as_string: '2018-07-02T09:50:00.000Z', - key: 20000, - doc_count: 0, - anomaly_score: { - value: null, - }, - upper: { - value: null, - }, - lower: { - value: null, - }, - }, - { - key_as_string: '2018-07-02T09:58:20.000Z', - key: 25000, - doc_count: 2, - anomaly_score: { - value: 100, - }, - upper: { - value: 50, - }, - lower: { - value: 10, - }, - }, - { - key_as_string: '2018-07-02T10:15:00.000Z', - key: 30000, - doc_count: 2, - anomaly_score: { - value: 0, - }, - upper: { - value: null, - }, - lower: { - value: null, - }, - }, - ], - }, - }, -} as unknown) as ESResponse; diff --git a/x-pack/plugins/apm/server/lib/transactions/charts/get_anomaly_data/mock_responses/ml_bucket_span_response.ts b/x-pack/plugins/apm/server/lib/transactions/charts/get_anomaly_data/mock_responses/ml_bucket_span_response.ts deleted file mode 100644 index 3689529a07c4..000000000000 --- a/x-pack/plugins/apm/server/lib/transactions/charts/get_anomaly_data/mock_responses/ml_bucket_span_response.ts +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ - -export const mlBucketSpanResponse = { - took: 1, - timed_out: false, - _shards: { - total: 1, - successful: 1, - skipped: 0, - failed: 0, - }, - hits: { - total: 192, - max_score: 1.0, - hits: [ - { - _index: '.ml-anomalies-shared', - _id: - 'opbeans-go-request-high_mean_response_time_model_plot_1542636000000_900_0_29791_0', - _score: 1.0, - _source: { - bucket_span: 10, - }, - }, - ], - }, -}; diff --git a/x-pack/plugins/apm/server/lib/transactions/charts/get_anomaly_data/transform.test.ts b/x-pack/plugins/apm/server/lib/transactions/charts/get_anomaly_data/transform.test.ts deleted file mode 100644 index eb94c83e9257..000000000000 --- a/x-pack/plugins/apm/server/lib/transactions/charts/get_anomaly_data/transform.test.ts +++ /dev/null @@ -1,303 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ - -import { ESResponse } from './fetcher'; -import { mlAnomalyResponse } from './mock_responses/ml_anomaly_response'; -import { anomalySeriesTransform, replaceFirstAndLastBucket } from './transform'; - -describe('anomalySeriesTransform', () => { - it('should match snapshot', () => { - const getMlBucketSize = 10; - const bucketSize = 5; - const timeSeriesDates = [10000, 25000]; - const anomalySeries = anomalySeriesTransform( - mlAnomalyResponse, - getMlBucketSize, - bucketSize, - timeSeriesDates - ); - expect(anomalySeries).toMatchSnapshot(); - }); - - describe('anomalyScoreSeries', () => { - it('should only returns bucket within range and above threshold', () => { - const esResponse = getESResponse([ - { - key: 0, - anomaly_score: { value: 90 }, - }, - { - key: 5000, - anomaly_score: { value: 0 }, - }, - { - key: 10000, - anomaly_score: { value: 90 }, - }, - { - key: 15000, - anomaly_score: { value: 0 }, - }, - { - key: 20000, - anomaly_score: { value: 90 }, - }, - ]); - - const getMlBucketSize = 5; - const bucketSize = 5; - const timeSeriesDates = [5000, 15000]; - const anomalySeries = anomalySeriesTransform( - esResponse, - getMlBucketSize, - bucketSize, - timeSeriesDates - ); - - const buckets = anomalySeries!.anomalyScore; - expect(buckets).toEqual([{ x0: 10000, x: 15000 }]); - }); - - it('should decrease the x-value to avoid going beyond last date', () => { - const esResponse = getESResponse([ - { - key: 0, - anomaly_score: { value: 0 }, - }, - { - key: 5000, - anomaly_score: { value: 90 }, - }, - ]); - - const getMlBucketSize = 10; - const bucketSize = 5; - const timeSeriesDates = [0, 10000]; - const anomalySeries = anomalySeriesTransform( - esResponse, - getMlBucketSize, - bucketSize, - timeSeriesDates - ); - - const buckets = anomalySeries!.anomalyScore; - expect(buckets).toEqual([{ x0: 5000, x: 10000 }]); - }); - }); - - describe('anomalyBoundariesSeries', () => { - it('should trim buckets to time range', () => { - const esResponse = getESResponse([ - { - key: 0, - upper: { value: 15 }, - lower: { value: 10 }, - }, - { - key: 5000, - upper: { value: 25 }, - lower: { value: 20 }, - }, - { - key: 10000, - upper: { value: 35 }, - lower: { value: 30 }, - }, - { - key: 15000, - upper: { value: 45 }, - lower: { value: 40 }, - }, - ]); - - const mlBucketSize = 10; - const bucketSize = 5; - const timeSeriesDates = [5000, 10000]; - const anomalySeries = anomalySeriesTransform( - esResponse, - mlBucketSize, - bucketSize, - timeSeriesDates - ); - - const buckets = anomalySeries!.anomalyBoundaries; - expect(buckets).toEqual([ - { x: 5000, y: 25, y0: 20 }, - { x: 10000, y: 35, y0: 30 }, - ]); - }); - - it('should replace first bucket in range', () => { - const esResponse = getESResponse([ - { - key: 0, - anomaly_score: { value: 0 }, - upper: { value: 15 }, - lower: { value: 10 }, - }, - { - key: 5000, - anomaly_score: { value: 0 }, - upper: { value: null }, - lower: { value: null }, - }, - { - key: 10000, - anomaly_score: { value: 0 }, - upper: { value: 25 }, - lower: { value: 20 }, - }, - ]); - - const getMlBucketSize = 10; - const bucketSize = 5; - const timeSeriesDates = [5000, 10000]; - const anomalySeries = anomalySeriesTransform( - esResponse, - getMlBucketSize, - bucketSize, - timeSeriesDates - ); - - const buckets = anomalySeries!.anomalyBoundaries; - expect(buckets).toEqual([ - { x: 5000, y: 15, y0: 10 }, - { x: 10000, y: 25, y0: 20 }, - ]); - }); - - it('should replace last bucket in range', () => { - const esResponse = getESResponse([ - { - key: 0, - anomaly_score: { value: 0 
}, - upper: { value: 15 }, - lower: { value: 10 }, - }, - { - key: 5000, - anomaly_score: { value: 0 }, - upper: { value: null }, - lower: { value: null }, - }, - { - key: 10000, - anomaly_score: { value: 0 }, - upper: { value: null }, - lower: { value: null }, - }, - ]); - - const getMlBucketSize = 10; - const bucketSize = 5; - const timeSeriesDates = [5000, 10000]; - const anomalySeries = anomalySeriesTransform( - esResponse, - getMlBucketSize, - bucketSize, - timeSeriesDates - ); - - const buckets = anomalySeries!.anomalyBoundaries; - expect(buckets).toEqual([ - { x: 5000, y: 15, y0: 10 }, - { x: 10000, y: 15, y0: 10 }, - ]); - }); - }); -}); - -describe('replaceFirstAndLastBucket', () => { - it('should extend the first bucket', () => { - const buckets = [ - { - x: 0, - lower: 10, - upper: 20, - }, - { - x: 5, - lower: null, - upper: null, - }, - { - x: 10, - lower: null, - upper: null, - }, - { - x: 15, - lower: 30, - upper: 40, - }, - ]; - - const timeSeriesDates = [10, 15]; - expect(replaceFirstAndLastBucket(buckets as any, timeSeriesDates)).toEqual([ - { x: 10, lower: 10, upper: 20 }, - { x: 15, lower: 30, upper: 40 }, - ]); - }); - - it('should extend the last bucket', () => { - const buckets = [ - { - x: 10, - lower: 30, - upper: 40, - }, - { - x: 15, - lower: null, - upper: null, - }, - { - x: 20, - lower: null, - upper: null, - }, - ] as any; - - const timeSeriesDates = [10, 15, 20]; - expect(replaceFirstAndLastBucket(buckets, timeSeriesDates)).toEqual([ - { x: 10, lower: 30, upper: 40 }, - { x: 15, lower: null, upper: null }, - { x: 20, lower: 30, upper: 40 }, - ]); - }); -}); - -function getESResponse(buckets: any): ESResponse { - return ({ - took: 3, - timed_out: false, - _shards: { - total: 5, - successful: 5, - skipped: 0, - failed: 0, - }, - hits: { - total: 10, - max_score: 0, - hits: [], - }, - aggregations: { - ml_avg_response_times: { - buckets: buckets.map((bucket: any) => { - return { - ...bucket, - lower: { value: bucket?.lower?.value || null }, - upper: { value: bucket?.upper?.value || null }, - anomaly_score: { - value: bucket?.anomaly_score?.value || null, - }, - }; - }), - }, - }, - } as unknown) as ESResponse; -} diff --git a/x-pack/plugins/apm/server/lib/transactions/charts/get_anomaly_data/transform.ts b/x-pack/plugins/apm/server/lib/transactions/charts/get_anomaly_data/transform.ts deleted file mode 100644 index 454a6add3e25..000000000000 --- a/x-pack/plugins/apm/server/lib/transactions/charts/get_anomaly_data/transform.ts +++ /dev/null @@ -1,126 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ - -import { first, last } from 'lodash'; -import { Coordinate, RectCoordinate } from '../../../../../typings/timeseries'; -import { ESResponse } from './fetcher'; - -type IBucket = ReturnType; -function getBucket( - bucket: Required< - ESResponse - >['aggregations']['ml_avg_response_times']['buckets'][0] -) { - return { - x: bucket.key, - anomalyScore: bucket.anomaly_score.value, - lower: bucket.lower.value, - upper: bucket.upper.value, - }; -} - -export type AnomalyTimeSeriesResponse = ReturnType< - typeof anomalySeriesTransform ->; -export function anomalySeriesTransform( - response: ESResponse, - mlBucketSize: number, - bucketSize: number, - timeSeriesDates: number[] -) { - const buckets = - response.aggregations?.ml_avg_response_times.buckets.map(getBucket) || []; - - const bucketSizeInMillis = Math.max(bucketSize, mlBucketSize) * 1000; - - return { - anomalyScore: getAnomalyScoreDataPoints( - buckets, - timeSeriesDates, - bucketSizeInMillis - ), - anomalyBoundaries: getAnomalyBoundaryDataPoints(buckets, timeSeriesDates), - }; -} - -export function getAnomalyScoreDataPoints( - buckets: IBucket[], - timeSeriesDates: number[], - bucketSizeInMillis: number -): RectCoordinate[] { - const ANOMALY_THRESHOLD = 75; - const firstDate = first(timeSeriesDates); - const lastDate = last(timeSeriesDates); - - return buckets - .filter( - (bucket) => - bucket.anomalyScore !== null && bucket.anomalyScore > ANOMALY_THRESHOLD - ) - .filter(isInDateRange(firstDate, lastDate)) - .map((bucket) => { - return { - x0: bucket.x, - x: Math.min(bucket.x + bucketSizeInMillis, lastDate), // don't go beyond last date - }; - }); -} - -export function getAnomalyBoundaryDataPoints( - buckets: IBucket[], - timeSeriesDates: number[] -): Coordinate[] { - return replaceFirstAndLastBucket(buckets, timeSeriesDates) - .filter((bucket) => bucket.lower !== null) - .map((bucket) => { - return { - x: bucket.x, - y0: bucket.lower, - y: bucket.upper, - }; - }); -} - -export function replaceFirstAndLastBucket( - buckets: IBucket[], - timeSeriesDates: number[] -) { - const firstDate = first(timeSeriesDates); - const lastDate = last(timeSeriesDates); - - const preBucketWithValue = buckets - .filter((p) => p.x <= firstDate) - .reverse() - .find((p) => p.lower !== null); - - const bucketsInRange = buckets.filter(isInDateRange(firstDate, lastDate)); - - // replace first bucket if it is null - const firstBucket = first(bucketsInRange); - if (preBucketWithValue && firstBucket && firstBucket.lower === null) { - firstBucket.lower = preBucketWithValue.lower; - firstBucket.upper = preBucketWithValue.upper; - } - - const lastBucketWithValue = [...buckets] - .reverse() - .find((p) => p.lower !== null); - - // replace last bucket if it is null - const lastBucket = last(bucketsInRange); - if (lastBucketWithValue && lastBucket && lastBucket.lower === null) { - lastBucket.lower = lastBucketWithValue.lower; - lastBucket.upper = lastBucketWithValue.upper; - } - - return bucketsInRange; -} - -// anomaly time series contain one or more buckets extra in the beginning -// these extra buckets should be removed -function isInDateRange(firstDate: number, lastDate: number) { - return (p: IBucket) => p.x >= firstDate && p.x <= lastDate; -} diff --git a/x-pack/plugins/canvas/public/components/element_content/element_content.js b/x-pack/plugins/canvas/public/components/element_content/element_content.js index 114a457d167e..e2c1a61c348d 100644 --- a/x-pack/plugins/canvas/public/components/element_content/element_content.js +++ 
b/x-pack/plugins/canvas/public/components/element_content/element_content.js
@@ -12,6 +12,7 @@ import { getType } from '@kbn/interpreter/common';
 import { Loading } from '../loading';
 import { RenderWithFn } from '../render_with_fn';
 import { ElementShareContainer } from '../element_share_container';
+import { assignHandlers } from '../../lib/create_handlers';
 import { InvalidExpression } from './invalid_expression';
 import { InvalidElementType } from './invalid_element_type';
@@ -46,7 +47,7 @@ const branches = [
 export const ElementContent = compose(
   pure,
   ...branches
-)(({ renderable, renderFunction, size, handlers }) => {
+)(({ renderable, renderFunction, width, height, handlers }) => {
   const {
     getFilter,
     setFilter,
@@ -62,7 +63,7 @@ export const ElementContent = compose(
diff --git a/x-pack/plugins/canvas/public/components/element_wrapper/element_wrapper.js b/x-pack/plugins/canvas/public/components/element_wrapper/element_wrapper.js index 845fc5927d83..de7748413b71 100644 --- a/x-pack/plugins/canvas/public/components/element_wrapper/element_wrapper.js +++ b/x-pack/plugins/canvas/public/components/element_wrapper/element_wrapper.js @@ -14,7 +14,13 @@ export const ElementWrapper = (props) => { return ( - + ); }; diff --git a/x-pack/plugins/canvas/public/components/element_wrapper/index.js b/x-pack/plugins/canvas/public/components/element_wrapper/index.js index 390c349ab2ee..6fc582bfee44 100644 --- a/x-pack/plugins/canvas/public/components/element_wrapper/index.js +++ b/x-pack/plugins/canvas/public/components/element_wrapper/index.js @@ -10,12 +10,12 @@ import { compose, withPropsOnChange, mapProps } from 'recompose'; import isEqual from 'react-fast-compare'; import { getResolvedArgs, getSelectedPage } from '../../state/selectors/workpad'; import { getState, getValue } from '../../lib/resolved_arg'; +import { createDispatchedHandlerFactory } from '../../lib/create_handlers'; import { ElementWrapper as Component } from './element_wrapper'; -import { createHandlers as createHandlersWithDispatch } from './lib/handlers'; function selectorFactory(dispatch) { let result = {}; - const createHandlers = createHandlersWithDispatch(dispatch); + const createHandlers = createDispatchedHandlerFactory(dispatch); return (nextState, nextOwnProps) => { const { element, ...restOwnProps } = nextOwnProps; diff --git a/x-pack/plugins/canvas/public/components/element_wrapper/lib/handlers.js b/x-pack/plugins/canvas/public/components/element_wrapper/lib/handlers.js deleted file mode 100644 index 33e8eacd902d..000000000000 --- a/x-pack/plugins/canvas/public/components/element_wrapper/lib/handlers.js +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ - -import { isEqual } from 'lodash'; -import { setFilter } from '../../../state/actions/elements'; -import { - updateEmbeddableExpression, - fetchEmbeddableRenderable, -} from '../../../state/actions/embeddable'; - -export const createHandlers = (dispatch) => { - let isComplete = false; - let oldElement; - let completeFn = () => {}; - - return (element) => { - // reset isComplete when element changes - if (!isEqual(oldElement, element)) { - isComplete = false; - oldElement = element; - } - - return { - setFilter(text) { - dispatch(setFilter(text, element.id, true)); - }, - - getFilter() { - return element.filter; - }, - - onComplete(fn) { - completeFn = fn; - }, - - getElementId: () => element.id, - - onEmbeddableInputChange(embeddableExpression) { - dispatch(updateEmbeddableExpression({ elementId: element.id, embeddableExpression })); - }, - - onEmbeddableDestroyed() { - dispatch(fetchEmbeddableRenderable(element.id)); - }, - - done() { - // don't emit if the element is already done - if (isComplete) { - return; - } - - isComplete = true; - completeFn(); - }, - }; - }; -}; diff --git a/x-pack/plugins/canvas/public/components/render_with_fn/lib/handlers.js b/x-pack/plugins/canvas/public/components/positionable/index.ts similarity index 61% rename from x-pack/plugins/canvas/public/components/render_with_fn/lib/handlers.js rename to x-pack/plugins/canvas/public/components/positionable/index.ts index 9e5032efa97e..964e2ee41df7 100644 --- a/x-pack/plugins/canvas/public/components/render_with_fn/lib/handlers.js +++ b/x-pack/plugins/canvas/public/components/positionable/index.ts @@ -4,16 +4,4 @@ * you may not use this file except in compliance with the Elastic License. */ -export class ElementHandlers { - resize() {} - - destroy() {} - - onResize(fn) { - this.resize = fn; - } - - onDestroy(fn) { - this.destroy = fn; - } -} +export { Positionable } from './positionable'; diff --git a/x-pack/plugins/canvas/public/components/positionable/positionable.js b/x-pack/plugins/canvas/public/components/positionable/positionable.js deleted file mode 100644 index 9898f50cbb0f..000000000000 --- a/x-pack/plugins/canvas/public/components/positionable/positionable.js +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ - -import React from 'react'; -import PropTypes from 'prop-types'; -import { matrixToCSS } from '../../lib/dom'; - -export const Positionable = ({ children, transformMatrix, width, height }) => { - // Throw if there is more than one child - React.Children.only(children); - // This could probably be made nicer by having just one child - const wrappedChildren = React.Children.map(children, (child) => { - const newStyle = { - width, - height, - marginLeft: -width / 2, - marginTop: -height / 2, - position: 'absolute', - transform: matrixToCSS(transformMatrix.map((n, i) => (i < 12 ? n : Math.round(n)))), - }; - - const stepChild = React.cloneElement(child, { size: { width, height } }); - return ( -
- {stepChild} -
- ); - }); - - return wrappedChildren; -}; - -Positionable.propTypes = { - onChange: PropTypes.func, - children: PropTypes.element.isRequired, - transformMatrix: PropTypes.arrayOf(PropTypes.number).isRequired, - width: PropTypes.number.isRequired, - height: PropTypes.number.isRequired, -}; diff --git a/x-pack/plugins/canvas/public/components/positionable/positionable.tsx b/x-pack/plugins/canvas/public/components/positionable/positionable.tsx new file mode 100644 index 000000000000..3344398b0019 --- /dev/null +++ b/x-pack/plugins/canvas/public/components/positionable/positionable.tsx @@ -0,0 +1,48 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import React, { FC, ReactElement, CSSProperties } from 'react'; +import PropTypes from 'prop-types'; +import { matrixToCSS } from '../../lib/dom'; +import { TransformMatrix3d } from '../../lib/aeroelastic'; + +interface Props { + children: ReactElement; + transformMatrix: TransformMatrix3d; + height: number; + width: number; +} + +export const Positionable: FC = ({ children, transformMatrix, width, height }) => { + // Throw if there is more than one child + const childNode = React.Children.only(children); + + const matrix = (transformMatrix.map((n, i) => + i < 12 ? n : Math.round(n) + ) as any) as TransformMatrix3d; + + const newStyle: CSSProperties = { + width, + height, + marginLeft: -width / 2, + marginTop: -height / 2, + position: 'absolute', + transform: matrixToCSS(matrix), + }; + + return ( +
+ {childNode} +
+ ); +}; + +Positionable.propTypes = { + children: PropTypes.element.isRequired, + transformMatrix: PropTypes.arrayOf(PropTypes.number).isRequired, + width: PropTypes.number.isRequired, + height: PropTypes.number.isRequired, +}; diff --git a/x-pack/plugins/canvas/public/components/render_to_dom/index.js b/x-pack/plugins/canvas/public/components/render_to_dom/index.js deleted file mode 100644 index e8a3f8cd8c93..000000000000 --- a/x-pack/plugins/canvas/public/components/render_to_dom/index.js +++ /dev/null @@ -1,12 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ - -import { compose, withState } from 'recompose'; -import { RenderToDom as Component } from './render_to_dom'; - -export const RenderToDom = compose( - withState('domNode', 'setDomNode') // Still don't like this, seems to be the only way todo it. -)(Component); diff --git a/x-pack/plugins/canvas/public/components/render_to_dom/index.ts b/x-pack/plugins/canvas/public/components/render_to_dom/index.ts new file mode 100644 index 000000000000..43a5dad059c9 --- /dev/null +++ b/x-pack/plugins/canvas/public/components/render_to_dom/index.ts @@ -0,0 +1,7 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +export { RenderToDom } from './render_to_dom'; diff --git a/x-pack/plugins/canvas/public/components/render_to_dom/render_to_dom.js b/x-pack/plugins/canvas/public/components/render_to_dom/render_to_dom.js deleted file mode 100644 index db393a8dde4f..000000000000 --- a/x-pack/plugins/canvas/public/components/render_to_dom/render_to_dom.js +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ - -import React from 'react'; -import PropTypes from 'prop-types'; - -export class RenderToDom extends React.Component { - static propTypes = { - domNode: PropTypes.object, - setDomNode: PropTypes.func.isRequired, - render: PropTypes.func.isRequired, - style: PropTypes.object, - }; - - shouldComponentUpdate(nextProps) { - return this.props.domNode !== nextProps.domNode; - } - - componentDidUpdate() { - // Calls render function once we have the reference to the DOM element to render into - if (this.props.domNode) { - this.props.render(this.props.domNode); - } - } - - render() { - const { domNode, setDomNode, style } = this.props; - const linkRef = (refNode) => { - if (!domNode && refNode) { - // Initialize the domNode property. This should only happen once, even if config changes. - setDomNode(refNode); - } - }; - - return
; - } -} diff --git a/x-pack/plugins/canvas/public/components/render_to_dom/render_to_dom.tsx b/x-pack/plugins/canvas/public/components/render_to_dom/render_to_dom.tsx new file mode 100644 index 000000000000..a37c0fc096e5 --- /dev/null +++ b/x-pack/plugins/canvas/public/components/render_to_dom/render_to_dom.tsx @@ -0,0 +1,27 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import React, { useCallback, FC } from 'react'; +import CSS from 'csstype'; + +interface Props { + render: (element: HTMLElement) => void; + style?: CSS.Properties; +} + +export const RenderToDom: FC = ({ render, style }) => { + // https://reactjs.org/docs/hooks-faq.html#how-can-i-measure-a-dom-node + const ref = useCallback( + (node: HTMLDivElement) => { + if (node !== null) { + render(node); + } + }, + [render] + ); + + return
; +}; diff --git a/x-pack/plugins/canvas/public/components/render_with_fn/index.js b/x-pack/plugins/canvas/public/components/render_with_fn/index.js deleted file mode 100644 index 37c49624a394..000000000000 --- a/x-pack/plugins/canvas/public/components/render_with_fn/index.js +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ - -import { compose, withProps, withPropsOnChange } from 'recompose'; -import PropTypes from 'prop-types'; -import isEqual from 'react-fast-compare'; -import { withKibana } from '../../../../../../src/plugins/kibana_react/public'; -import { RenderWithFn as Component } from './render_with_fn'; -import { ElementHandlers } from './lib/handlers'; - -export const RenderWithFn = compose( - withPropsOnChange( - // rebuild elementHandlers when handlers object changes - (props, nextProps) => !isEqual(props.handlers, nextProps.handlers), - ({ handlers }) => ({ - handlers: Object.assign(new ElementHandlers(), handlers), - }) - ), - withKibana, - withProps((props) => ({ - onError: props.kibana.services.canvas.notify.error, - })) -)(Component); - -RenderWithFn.propTypes = { - handlers: PropTypes.object, -}; diff --git a/x-pack/plugins/canvas/public/components/render_with_fn/index.ts b/x-pack/plugins/canvas/public/components/render_with_fn/index.ts new file mode 100644 index 000000000000..4bfef734d34f --- /dev/null +++ b/x-pack/plugins/canvas/public/components/render_with_fn/index.ts @@ -0,0 +1,7 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +export { RenderWithFn } from './render_with_fn'; diff --git a/x-pack/plugins/canvas/public/components/render_with_fn/render_with_fn.js b/x-pack/plugins/canvas/public/components/render_with_fn/render_with_fn.js deleted file mode 100644 index 763cbd5e53eb..000000000000 --- a/x-pack/plugins/canvas/public/components/render_with_fn/render_with_fn.js +++ /dev/null @@ -1,157 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ - -import React from 'react'; -import PropTypes from 'prop-types'; -import { isEqual, cloneDeep } from 'lodash'; -import { RenderToDom } from '../render_to_dom'; -import { ErrorStrings } from '../../../i18n'; - -const { RenderWithFn: strings } = ErrorStrings; - -export class RenderWithFn extends React.Component { - static propTypes = { - name: PropTypes.string.isRequired, - renderFn: PropTypes.func.isRequired, - reuseNode: PropTypes.bool, - handlers: PropTypes.shape({ - // element handlers, see components/element_wrapper/lib/handlers.js - setFilter: PropTypes.func.isRequired, - getFilter: PropTypes.func.isRequired, - done: PropTypes.func.isRequired, - // render handlers, see lib/handlers.js - resize: PropTypes.func.isRequired, - onResize: PropTypes.func.isRequired, - destroy: PropTypes.func.isRequired, - onDestroy: PropTypes.func.isRequired, - }), - config: PropTypes.object, - size: PropTypes.object.isRequired, - onError: PropTypes.func.isRequired, - }; - - static defaultProps = { - reuseNode: false, - }; - - componentDidMount() { - this.firstRender = true; - this.renderTarget = null; - } - - UNSAFE_componentWillReceiveProps({ renderFn }) { - const newRenderFunction = renderFn !== this.props.renderFn; - - if (newRenderFunction) { - this._resetRenderTarget(this._domNode); - } - } - - shouldComponentUpdate(prevProps) { - return !isEqual(this.props.size, prevProps.size) || this._shouldFullRerender(prevProps); - } - - componentDidUpdate(prevProps) { - const { handlers, size } = this.props; - // Config changes - if (this._shouldFullRerender(prevProps)) { - // This should be the only place you call renderFn besides the first time - this._callRenderFn(); - } - - // Size changes - if (!isEqual(size, prevProps.size)) { - return handlers.resize(size); - } - } - - componentWillUnmount() { - this.props.handlers.destroy(); - } - - _domNode = null; - - _callRenderFn = () => { - const { handlers, config, renderFn, reuseNode, name: functionName } = this.props; - // TODO: We should wait until handlers.done() is called before replacing the element content? - if (!reuseNode || !this.renderTarget) { - this._resetRenderTarget(this._domNode); - } - // else if (!firstRender) handlers.destroy(); - - const renderConfig = cloneDeep(config); - - // TODO: this is hacky, but it works. 
it stops Kibana from blowing up when a render throws - try { - renderFn(this.renderTarget, renderConfig, handlers); - this.firstRender = false; - } catch (err) { - console.error('renderFn threw', err); - this.props.onError(err, { title: strings.getRenderErrorMessage(functionName) }); - } - }; - - _resetRenderTarget = (domNode) => { - const { handlers } = this.props; - - if (!domNode) { - throw new Error('RenderWithFn can not reset undefined target node'); - } - - // call destroy on existing element - if (!this.firstRender) { - handlers.destroy(); - } - - while (domNode.firstChild) { - domNode.removeChild(domNode.firstChild); - } - - this.firstRender = true; - this.renderTarget = this._createRenderTarget(); - domNode.appendChild(this.renderTarget); - }; - - _createRenderTarget = () => { - const div = document.createElement('div'); - div.style.width = '100%'; - div.style.height = '100%'; - return div; - }; - - _shouldFullRerender = (prevProps) => { - // required to stop re-renders on element move, anything that should - // cause a re-render needs to be checked here - // TODO: fix props passed in to remove this check - return ( - this.props.handlers !== prevProps.handlers || - !isEqual(this.props.config, prevProps.config) || - !isEqual(this.props.renderFn.toString(), prevProps.renderFn.toString()) - ); - }; - - destroy = () => { - this.props.handlers.destroy(); - }; - - render() { - // NOTE: the data-shared-* attributes here are used for reporting - return ( -
- { - this._domNode = domNode; - this._callRenderFn(); - }} - /> -
- ); - } -} diff --git a/x-pack/plugins/canvas/public/components/render_with_fn/render_with_fn.tsx b/x-pack/plugins/canvas/public/components/render_with_fn/render_with_fn.tsx new file mode 100644 index 000000000000..bc51128cf0c8 --- /dev/null +++ b/x-pack/plugins/canvas/public/components/render_with_fn/render_with_fn.tsx @@ -0,0 +1,117 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import React, { useState, useEffect, useRef, FC, useCallback } from 'react'; +import { useDebounce } from 'react-use'; + +import { useKibana } from '../../../../../../src/plugins/kibana_react/public'; +import { RenderToDom } from '../render_to_dom'; +import { ErrorStrings } from '../../../i18n'; +import { RendererHandlers } from '../../../types'; + +const { RenderWithFn: strings } = ErrorStrings; + +interface Props { + name: string; + renderFn: ( + domNode: HTMLElement, + config: Record, + handlers: RendererHandlers + ) => void | Promise; + reuseNode: boolean; + handlers: RendererHandlers; + config: Record; + height: number; + width: number; +} + +const style = { height: '100%', width: '100%' }; + +export const RenderWithFn: FC = ({ + name: functionName, + renderFn, + reuseNode = false, + handlers: incomingHandlers, + config, + width, + height, +}) => { + const { services } = useKibana(); + const onError = services.canvas.notify.error; + + const [domNode, setDomNode] = useState(null); + + // Tells us if the component is attempting to re-render into a previously-populated render target. + const firstRender = useRef(true); + // A reference to the node appended to the provided DOM node which is created and optionally replaced. + const renderTarget = useRef(null); + // A reference to the handlers, as the renderFn may mutate them, (via onXYZ functions) + const handlers = useRef(incomingHandlers); + + // Reset the render target, the node appended to the DOM node provided by RenderToDOM. + const resetRenderTarget = useCallback(() => { + if (!domNode) { + return; + } + + if (!firstRender) { + handlers.current.destroy(); + } + + while (domNode.firstChild) { + domNode.removeChild(domNode.firstChild); + } + + const div = document.createElement('div'); + div.style.width = '100%'; + div.style.height = '100%'; + domNode.appendChild(div); + + renderTarget.current = div; + firstRender.current = true; + }, [domNode]); + + useDebounce(() => handlers.current.resize({ height, width }), 150, [height, width]); + + useEffect( + () => () => { + handlers.current.destroy(); + }, + [] + ); + + const render = useCallback(() => { + renderFn(renderTarget.current!, config, handlers.current); + }, [renderTarget, config, renderFn]); + + useEffect(() => { + if (!domNode) { + return; + } + + if (!reuseNode || !renderTarget.current) { + resetRenderTarget(); + } + + try { + render(); + firstRender.current = false; + } catch (err) { + onError(err, { title: strings.getRenderErrorMessage(functionName) }); + } + }, [domNode, functionName, onError, render, resetRenderTarget, reuseNode]); + + return ( +
+ { + setDomNode(node); + }} + /> +
+ ); +}; diff --git a/x-pack/plugins/canvas/public/lib/create_handlers.ts b/x-pack/plugins/canvas/public/lib/create_handlers.ts new file mode 100644 index 000000000000..4e0c7b217d5b --- /dev/null +++ b/x-pack/plugins/canvas/public/lib/create_handlers.ts @@ -0,0 +1,96 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { isEqual } from 'lodash'; +// @ts-ignore untyped local +import { setFilter } from '../state/actions/elements'; +import { updateEmbeddableExpression, fetchEmbeddableRenderable } from '../state/actions/embeddable'; +import { RendererHandlers, CanvasElement } from '../../types'; + +// This class creates stub handlers to ensure every element and renderer fulfills the contract. +// TODO: consider warning if these methods are invoked but not implemented by the renderer...? + +export const createHandlers = (): RendererHandlers => ({ + destroy() {}, + done() {}, + event() {}, + getElementId() { + return ''; + }, + getFilter() { + return ''; + }, + onComplete(fn: () => void) { + this.done = fn; + }, + onDestroy(fn: () => void) { + this.destroy = fn; + }, + // TODO: these functions do not match the `onXYZ` and `xyz` pattern elsewhere. + onEmbeddableDestroyed() {}, + onEmbeddableInputChange() {}, + onResize(fn: (size: { height: number; width: number }) => void) { + this.resize = fn; + }, + reload() {}, + resize(_size: { height: number; width: number }) {}, + setFilter() {}, + update() {}, +}); + +export const assignHandlers = (handlers: Partial = {}): RendererHandlers => + Object.assign(createHandlers(), handlers); + +// TODO: this is a legacy approach we should unravel in the near future. 
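// (Illustration) A minimal sketch — assuming a hypothetical `renderExampleChart` renderer,
// which is not part of this patch — of how a render function might consume the handlers
// produced by createHandlers()/assignHandlers() above. The contract it relies on
// (onComplete()/onResize() storing callbacks on `done`/`resize`) is what the stubs above
// implement, and the dispatched factory below builds on the same contract.
// `RendererHandlers` is the type imported at the top of this file.

const exampleHandlers = assignHandlers({
  getElementId: () => 'example-element',
});

const renderExampleChart = (domNode: HTMLElement, handlers: RendererHandlers) => {
  // The renderer registers its lifecycle callbacks against the handlers object...
  handlers.onResize(() => {
    // re-measure domNode and redraw here
  });
  handlers.onComplete(() => {
    domNode.dataset.renderComplete = 'true';
  });
};

renderExampleChart(document.createElement('div'), exampleHandlers);

// ...and the element machinery later drives those callbacks through the same object:
exampleHandlers.resize({ width: 400, height: 300 }); // runs the callback stored by onResize
exampleHandlers.done(); // runs the callback stored by onComplete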
+export const createDispatchedHandlerFactory = ( + dispatch: (action: any) => void +): ((element: CanvasElement) => RendererHandlers) => { + let isComplete = false; + let oldElement: CanvasElement | undefined; + let completeFn = () => {}; + + return (element: CanvasElement) => { + // reset isComplete when element changes + if (!isEqual(oldElement, element)) { + isComplete = false; + oldElement = element; + } + + return assignHandlers({ + setFilter(text: string) { + dispatch(setFilter(text, element.id, true)); + }, + + getFilter() { + return element.filter; + }, + + onComplete(fn: () => void) { + completeFn = fn; + }, + + getElementId: () => element.id, + + onEmbeddableInputChange(embeddableExpression: string) { + dispatch(updateEmbeddableExpression({ elementId: element.id, embeddableExpression })); + }, + + onEmbeddableDestroyed() { + dispatch(fetchEmbeddableRenderable(element.id)); + }, + + done() { + // don't emit if the element is already done + if (isComplete) { + return; + } + + isComplete = true; + completeFn(); + }, + }); + }; +}; diff --git a/x-pack/plugins/canvas/shareable_runtime/components/rendered_element.tsx b/x-pack/plugins/canvas/shareable_runtime/components/rendered_element.tsx index 5741f5f2d698..6bcc0db92f1c 100644 --- a/x-pack/plugins/canvas/shareable_runtime/components/rendered_element.tsx +++ b/x-pack/plugins/canvas/shareable_runtime/components/rendered_element.tsx @@ -7,13 +7,13 @@ import React, { FC, PureComponent } from 'react'; // @ts-expect-error untyped library import Style from 'style-it'; -// @ts-expect-error untyped local import { Positionable } from '../../public/components/positionable/positionable'; // @ts-expect-error untyped local import { elementToShape } from '../../public/components/workpad_page/utils'; import { CanvasRenderedElement } from '../types'; import { CanvasShareableContext, useCanvasShareableState } from '../context'; import { RendererSpec } from '../../types'; +import { createHandlers } from '../../public/lib/create_handlers'; import css from './rendered_element.module.scss'; @@ -62,17 +62,7 @@ export class RenderedElementComponent extends PureComponent { } try { - // TODO: These are stubbed, but may need implementation. - fn.render(this.ref.current, value.value, { - done: () => {}, - onDestroy: () => {}, - onResize: () => {}, - getElementId: () => '', - setFilter: () => {}, - getFilter: () => '', - onEmbeddableInputChange: () => {}, - onEmbeddableDestroyed: () => {}, - }); + fn.render(this.ref.current, value.value, createHandlers()); } catch (e) { // eslint-disable-next-line no-console console.log(as, e.message); diff --git a/x-pack/plugins/canvas/types/renderers.ts b/x-pack/plugins/canvas/types/renderers.ts index 2564b045d1cf..772a16aa94c6 100644 --- a/x-pack/plugins/canvas/types/renderers.ts +++ b/x-pack/plugins/canvas/types/renderers.ts @@ -4,25 +4,29 @@ * you may not use this file except in compliance with the Elastic License. */ -type GenericCallback = (callback: () => void) => void; +import { IInterpreterRenderHandlers } from 'src/plugins/expressions'; -export interface RendererHandlers { - /** Handler to invoke when an element has finished rendering */ - done: () => void; +type GenericRendererCallback = (callback: () => void) => void; + +export interface RendererHandlers extends IInterpreterRenderHandlers { + /** Handler to invoke when an element should be destroyed. */ + destroy: () => void; /** Get the id of the element being rendered. 
Can be used as a unique ID in a render function */ getElementId: () => string; - /** Handler to invoke when an element is deleted or changes to a different render type */ - onDestroy: GenericCallback; - /** Handler to invoke when an element's dimensions have changed*/ - onResize: GenericCallback; /** Retrieves the value of the filter property on the element object persisted on the workpad */ getFilter: () => string; - /** Sets the value of the filter property on the element object persisted on the workpad */ - setFilter: (filter: string) => void; - /** Handler to invoke when the input to a function has changed internally */ - onEmbeddableInputChange: (expression: string) => void; + /** Handler to invoke when a renderer is considered complete */ + onComplete: (fn: () => void) => void; /** Handler to invoke when a rendered embeddable is destroyed */ onEmbeddableDestroyed: () => void; + /** Handler to invoke when the input to a function has changed internally */ + onEmbeddableInputChange: (expression: string) => void; + /** Handler to invoke when an element's dimensions have changed*/ + onResize: GenericRendererCallback; + /** Handler to invoke when an element should be resized. */ + resize: (size: { height: number; width: number }) => void; + /** Sets the value of the filter property on the element object persisted on the workpad */ + setFilter: (filter: string) => void; } export interface RendererSpec { diff --git a/x-pack/plugins/cloud/public/plugin.ts b/x-pack/plugins/cloud/public/plugin.ts index 62e21392f711..1c3a770da79f 100644 --- a/x-pack/plugins/cloud/public/plugin.ts +++ b/x-pack/plugins/cloud/public/plugin.ts @@ -5,6 +5,7 @@ */ import { CoreSetup, CoreStart, Plugin, PluginInitializerContext } from 'src/core/public'; +import { i18n } from '@kbn/i18n'; import { getIsCloudEnabled } from '../common/is_cloud_enabled'; import { ELASTIC_SUPPORT_LINK } from '../common/constants'; import { HomePublicPluginSetup } from '../../../../src/plugins/home/public'; @@ -12,6 +13,7 @@ import { HomePublicPluginSetup } from '../../../../src/plugins/home/public'; interface CloudConfigType { id?: string; resetPasswordUrl?: string; + deploymentUrl?: string; } interface CloudSetupDependencies { @@ -24,10 +26,14 @@ export interface CloudSetup { } export class CloudPlugin implements Plugin { - constructor(private readonly initializerContext: PluginInitializerContext) {} + private config!: CloudConfigType; + + constructor(private readonly initializerContext: PluginInitializerContext) { + this.config = this.initializerContext.config.get(); + } public async setup(core: CoreSetup, { home }: CloudSetupDependencies) { - const { id, resetPasswordUrl } = this.initializerContext.config.get(); + const { id, resetPasswordUrl } = this.config; const isCloudEnabled = getIsCloudEnabled(id); if (home) { @@ -44,6 +50,16 @@ export class CloudPlugin implements Plugin { } public start(coreStart: CoreStart) { + const { deploymentUrl } = this.config; coreStart.chrome.setHelpSupportUrl(ELASTIC_SUPPORT_LINK); + if (deploymentUrl) { + coreStart.chrome.setCustomNavLink({ + title: i18n.translate('xpack.cloud.deploymentLinkLabel', { + defaultMessage: 'Manage this deployment', + }), + euiIconType: 'arrowLeft', + href: deploymentUrl, + }); + } } } diff --git a/x-pack/plugins/cloud/server/config.ts b/x-pack/plugins/cloud/server/config.ts index d899b45aebdf..ff8a2c5acdf9 100644 --- a/x-pack/plugins/cloud/server/config.ts +++ b/x-pack/plugins/cloud/server/config.ts @@ -22,6 +22,7 @@ const configSchema = schema.object({ id: 
schema.maybe(schema.string()), apm: schema.maybe(apmConfigSchema), resetPasswordUrl: schema.maybe(schema.string()), + deploymentUrl: schema.maybe(schema.string()), }); export type CloudConfigType = TypeOf; @@ -30,6 +31,7 @@ export const config: PluginConfigDescriptor = { exposeToBrowser: { id: true, resetPasswordUrl: true, + deploymentUrl: true, }, schema: configSchema, }; diff --git a/x-pack/plugins/index_management/__jest__/client_integration/helpers/http_requests.ts b/x-pack/plugins/index_management/__jest__/client_integration/helpers/http_requests.ts index 56d76da522ac..907c749f8ec0 100644 --- a/x-pack/plugins/index_management/__jest__/client_integration/helpers/http_requests.ts +++ b/x-pack/plugins/index_management/__jest__/client_integration/helpers/http_requests.ts @@ -35,6 +35,22 @@ const registerHttpRequestMockHelpers = (server: SinonFakeServer) => { ]); }; + const setLoadDataStreamResponse = (response: HttpResponse = []) => { + server.respondWith('GET', `${API_BASE_PATH}/data_streams/:id`, [ + 200, + { 'Content-Type': 'application/json' }, + JSON.stringify(response), + ]); + }; + + const setDeleteDataStreamResponse = (response: HttpResponse = []) => { + server.respondWith('POST', `${API_BASE_PATH}/delete_data_streams`, [ + 200, + { 'Content-Type': 'application/json' }, + JSON.stringify(response), + ]); + }; + const setDeleteTemplateResponse = (response: HttpResponse = []) => { server.respondWith('POST', `${API_BASE_PATH}/delete_index_templates`, [ 200, @@ -80,6 +96,8 @@ const registerHttpRequestMockHelpers = (server: SinonFakeServer) => { setLoadTemplatesResponse, setLoadIndicesResponse, setLoadDataStreamsResponse, + setLoadDataStreamResponse, + setDeleteDataStreamResponse, setDeleteTemplateResponse, setLoadTemplateResponse, setCreateTemplateResponse, diff --git a/x-pack/plugins/index_management/__jest__/client_integration/helpers/setup_environment.tsx b/x-pack/plugins/index_management/__jest__/client_integration/helpers/setup_environment.tsx index 0a49191fdb14..d85db94d4a97 100644 --- a/x-pack/plugins/index_management/__jest__/client_integration/helpers/setup_environment.tsx +++ b/x-pack/plugins/index_management/__jest__/client_integration/helpers/setup_environment.tsx @@ -8,6 +8,7 @@ import React from 'react'; import axios from 'axios'; import axiosXhrAdapter from 'axios/lib/adapters/xhr'; +import { merge } from 'lodash'; import { notificationServiceMock, @@ -33,7 +34,7 @@ export const services = { services.uiMetricService.setup({ reportUiStats() {} } as any); setExtensionsService(services.extensionsService); setUiMetricService(services.uiMetricService); -const appDependencies = { services, core: {}, plugins: {} } as any; +const appDependencies = { services, core: { getUrlForApp: () => {} }, plugins: {} } as any; export const setupEnvironment = () => { // Mock initialization of services @@ -51,8 +52,13 @@ export const setupEnvironment = () => { }; }; -export const WithAppDependencies = (Comp: any) => (props: any) => ( - - - -); +export const WithAppDependencies = (Comp: any, overridingDependencies: any = {}) => ( + props: any +) => { + const mergedDependencies = merge({}, appDependencies, overridingDependencies); + return ( + + + + ); +}; diff --git a/x-pack/plugins/index_management/__jest__/client_integration/home/data_streams_tab.helpers.ts b/x-pack/plugins/index_management/__jest__/client_integration/home/data_streams_tab.helpers.ts index 572889954db6..ecea230ecab8 100644 --- a/x-pack/plugins/index_management/__jest__/client_integration/home/data_streams_tab.helpers.ts +++ 
b/x-pack/plugins/index_management/__jest__/client_integration/home/data_streams_tab.helpers.ts @@ -5,6 +5,7 @@ */ import { act } from 'react-dom/test-utils'; +import { ReactWrapper } from 'enzyme'; import { registerTestBed, @@ -17,27 +18,38 @@ import { IndexManagementHome } from '../../../public/application/sections/home'; import { indexManagementStore } from '../../../public/application/store'; // eslint-disable-line @kbn/eslint/no-restricted-paths import { WithAppDependencies, services, TestSubjects } from '../helpers'; -const testBedConfig: TestBedConfig = { - store: () => indexManagementStore(services as any), - memoryRouter: { - initialEntries: [`/indices`], - componentRoutePath: `/:section(indices|data_streams|templates)`, - }, - doMountAsync: true, -}; - -const initTestBed = registerTestBed(WithAppDependencies(IndexManagementHome), testBedConfig); - export interface DataStreamsTabTestBed extends TestBed { actions: { goToDataStreamsList: () => void; clickEmptyPromptIndexTemplateLink: () => void; clickReloadButton: () => void; + clickNameAt: (index: number) => void; clickIndicesAt: (index: number) => void; + clickDeletActionAt: (index: number) => void; + clickConfirmDelete: () => void; + clickDeletDataStreamButton: () => void; }; + findDeleteActionAt: (index: number) => ReactWrapper; + findDeleteConfirmationModal: () => ReactWrapper; + findDetailPanel: () => ReactWrapper; + findDetailPanelTitle: () => string; + findEmptyPromptIndexTemplateLink: () => ReactWrapper; } -export const setup = async (): Promise => { +export const setup = async (overridingDependencies: any = {}): Promise => { + const testBedConfig: TestBedConfig = { + store: () => indexManagementStore(services as any), + memoryRouter: { + initialEntries: [`/indices`], + componentRoutePath: `/:section(indices|data_streams|templates)`, + }, + doMountAsync: true, + }; + + const initTestBed = registerTestBed( + WithAppDependencies(IndexManagementHome, overridingDependencies), + testBedConfig + ); const testBed = await initTestBed(); /** @@ -48,15 +60,17 @@ export const setup = async (): Promise => { testBed.find('data_streamsTab').simulate('click'); }; - const clickEmptyPromptIndexTemplateLink = async () => { - const { find, component, router } = testBed; - + const findEmptyPromptIndexTemplateLink = () => { + const { find } = testBed; const templateLink = find('dataStreamsEmptyPromptTemplateLink'); + return templateLink; + }; + const clickEmptyPromptIndexTemplateLink = async () => { + const { component, router } = testBed; await act(async () => { - router.navigateTo(templateLink.props().href!); + router.navigateTo(findEmptyPromptIndexTemplateLink().props().href!); }); - component.update(); }; @@ -65,10 +79,15 @@ export const setup = async (): Promise => { find('reloadButton').simulate('click'); }; - const clickIndicesAt = async (index: number) => { - const { component, table, router } = testBed; + const findTestSubjectAt = (testSubject: string, index: number) => { + const { table } = testBed; const { rows } = table.getMetaData('dataStreamTable'); - const indicesLink = findTestSubject(rows[index].reactWrapper, 'indicesLink'); + return findTestSubject(rows[index].reactWrapper, testSubject); + }; + + const clickIndicesAt = async (index: number) => { + const { component, router } = testBed; + const indicesLink = findTestSubjectAt('indicesLink', index); await act(async () => { router.navigateTo(indicesLink.props().href!); @@ -77,14 +96,71 @@ export const setup = async (): Promise => { component.update(); }; + const clickNameAt = 
async (index: number) => { + const { component, router } = testBed; + const nameLink = findTestSubjectAt('nameLink', index); + + await act(async () => { + router.navigateTo(nameLink.props().href!); + }); + + component.update(); + }; + + const findDeleteActionAt = findTestSubjectAt.bind(null, 'deleteDataStream'); + + const clickDeletActionAt = (index: number) => { + findDeleteActionAt(index).simulate('click'); + }; + + const findDeleteConfirmationModal = () => { + const { find } = testBed; + return find('deleteDataStreamsConfirmation'); + }; + + const clickConfirmDelete = async () => { + const modal = document.body.querySelector('[data-test-subj="deleteDataStreamsConfirmation"]'); + const confirmButton: HTMLButtonElement | null = modal!.querySelector( + '[data-test-subj="confirmModalConfirmButton"]' + ); + + await act(async () => { + confirmButton!.click(); + }); + }; + + const clickDeletDataStreamButton = () => { + const { find } = testBed; + find('deleteDataStreamButton').simulate('click'); + }; + + const findDetailPanel = () => { + const { find } = testBed; + return find('dataStreamDetailPanel'); + }; + + const findDetailPanelTitle = () => { + const { find } = testBed; + return find('dataStreamDetailPanelTitle').text(); + }; + return { ...testBed, actions: { goToDataStreamsList, clickEmptyPromptIndexTemplateLink, clickReloadButton, + clickNameAt, clickIndicesAt, + clickDeletActionAt, + clickConfirmDelete, + clickDeletDataStreamButton, }, + findDeleteActionAt, + findDeleteConfirmationModal, + findDetailPanel, + findDetailPanelTitle, + findEmptyPromptIndexTemplateLink, }; }; diff --git a/x-pack/plugins/index_management/__jest__/client_integration/home/data_streams_tab.test.ts b/x-pack/plugins/index_management/__jest__/client_integration/home/data_streams_tab.test.ts index efe2e2d0c74a..dfcbb5186946 100644 --- a/x-pack/plugins/index_management/__jest__/client_integration/home/data_streams_tab.test.ts +++ b/x-pack/plugins/index_management/__jest__/client_integration/home/data_streams_tab.test.ts @@ -19,61 +19,38 @@ describe('Data Streams tab', () => { server.restore(); }); - beforeEach(async () => { - httpRequestsMockHelpers.setLoadIndicesResponse([ - { - health: '', - status: '', - primary: '', - replica: '', - documents: '', - documents_deleted: '', - size: '', - primary_size: '', - name: 'data-stream-index', - data_stream: 'dataStream1', - }, - { - health: 'green', - status: 'open', - primary: 1, - replica: 1, - documents: 10000, - documents_deleted: 100, - size: '156kb', - primary_size: '156kb', - name: 'non-data-stream-index', - }, - ]); - - await act(async () => { - testBed = await setup(); - }); - }); - describe('when there are no data streams', () => { beforeEach(async () => { - const { actions, component } = testBed; - + httpRequestsMockHelpers.setLoadIndicesResponse([]); httpRequestsMockHelpers.setLoadDataStreamsResponse([]); httpRequestsMockHelpers.setLoadTemplatesResponse({ templates: [], legacyTemplates: [] }); + }); + + test('displays an empty prompt', async () => { + testBed = await setup(); await act(async () => { - actions.goToDataStreamsList(); + testBed.actions.goToDataStreamsList(); }); + const { exists, component } = testBed; component.update(); - }); - - test('displays an empty prompt', async () => { - const { exists } = testBed; expect(exists('sectionLoading')).toBe(false); expect(exists('emptyPrompt')).toBe(true); }); - test('goes to index templates tab when "Get started" link is clicked', async () => { - const { actions, exists } = testBed; + test('when Ingest 
Manager is disabled, goes to index templates tab when "Get started" link is clicked', async () => { + testBed = await setup({ + plugins: {}, + }); + + await act(async () => { + testBed.actions.goToDataStreamsList(); + }); + + const { actions, exists, component } = testBed; + component.update(); await act(async () => { actions.clickEmptyPromptIndexTemplateLink(); @@ -81,32 +58,77 @@ describe('Data Streams tab', () => { expect(exists('templateList')).toBe(true); }); + + test('when Ingest Manager is enabled, links to Ingest Manager', async () => { + testBed = await setup({ + plugins: { ingestManager: { hi: 'ok' } }, + }); + + await act(async () => { + testBed.actions.goToDataStreamsList(); + }); + + const { findEmptyPromptIndexTemplateLink, component } = testBed; + component.update(); + + // Assert against the text because the href won't be available, due to dependency upon our core mock. + expect(findEmptyPromptIndexTemplateLink().text()).toBe('Ingest Manager'); + }); }); describe('when there are data streams', () => { beforeEach(async () => { - const { actions, component } = testBed; + httpRequestsMockHelpers.setLoadIndicesResponse([ + { + health: '', + status: '', + primary: '', + replica: '', + documents: '', + documents_deleted: '', + size: '', + primary_size: '', + name: 'data-stream-index', + data_stream: 'dataStream1', + }, + { + health: 'green', + status: 'open', + primary: 1, + replica: 1, + documents: 10000, + documents_deleted: 100, + size: '156kb', + primary_size: '156kb', + name: 'non-data-stream-index', + }, + ]); + + const dataStreamForDetailPanel = createDataStreamPayload('dataStream1'); httpRequestsMockHelpers.setLoadDataStreamsResponse([ - createDataStreamPayload('dataStream1'), + dataStreamForDetailPanel, createDataStreamPayload('dataStream2'), ]); + httpRequestsMockHelpers.setLoadDataStreamResponse(dataStreamForDetailPanel); + + testBed = await setup(); + await act(async () => { - actions.goToDataStreamsList(); + testBed.actions.goToDataStreamsList(); }); - component.update(); + testBed.component.update(); }); test('lists them in the table', async () => { const { table } = testBed; - const { tableCellsValues } = table.getMetaData('dataStreamTable'); expect(tableCellsValues).toEqual([ - ['dataStream1', '1', '@timestamp', '1'], - ['dataStream2', '1', '@timestamp', '1'], + ['', 'dataStream1', '1', ''], + ['', 'dataStream2', '1', ''], ]); }); @@ -126,12 +148,90 @@ describe('Data Streams tab', () => { test('clicking the indices count navigates to the backing indices', async () => { const { table, actions } = testBed; - await actions.clickIndicesAt(0); - expect(table.getMetaData('indexTable').tableCellsValues).toEqual([ ['', '', '', '', '', '', '', 'dataStream1'], ]); }); + + describe('row actions', () => { + test('can delete', () => { + const { findDeleteActionAt } = testBed; + const deleteAction = findDeleteActionAt(0); + expect(deleteAction.length).toBe(1); + }); + }); + + describe('deleting a data stream', () => { + test('shows a confirmation modal', async () => { + const { + actions: { clickDeletActionAt }, + findDeleteConfirmationModal, + } = testBed; + clickDeletActionAt(0); + const confirmationModal = findDeleteConfirmationModal(); + expect(confirmationModal).toBeDefined(); + }); + + test('sends a request to the Delete API', async () => { + const { + actions: { clickDeletActionAt, clickConfirmDelete }, + } = testBed; + clickDeletActionAt(0); + + httpRequestsMockHelpers.setDeleteDataStreamResponse({ + results: { + dataStreamsDeleted: ['dataStream1'], + errors: [], + }, + 
}); + + await clickConfirmDelete(); + + const { method, url, requestBody } = server.requests[server.requests.length - 1]; + + expect(method).toBe('POST'); + expect(url).toBe(`${API_BASE_PATH}/delete_data_streams`); + expect(JSON.parse(JSON.parse(requestBody).body)).toEqual({ + dataStreams: ['dataStream1'], + }); + }); + }); + + describe('detail panel', () => { + test('opens when the data stream name in the table is clicked', async () => { + const { actions, findDetailPanel, findDetailPanelTitle } = testBed; + await actions.clickNameAt(0); + expect(findDetailPanel().length).toBe(1); + expect(findDetailPanelTitle()).toBe('dataStream1'); + }); + + test('deletes the data stream when delete button is clicked', async () => { + const { + actions: { clickNameAt, clickDeletDataStreamButton, clickConfirmDelete }, + } = testBed; + + await clickNameAt(0); + + clickDeletDataStreamButton(); + + httpRequestsMockHelpers.setDeleteDataStreamResponse({ + results: { + dataStreamsDeleted: ['dataStream1'], + errors: [], + }, + }); + + await clickConfirmDelete(); + + const { method, url, requestBody } = server.requests[server.requests.length - 1]; + + expect(method).toBe('POST'); + expect(url).toBe(`${API_BASE_PATH}/delete_data_streams`); + expect(JSON.parse(JSON.parse(requestBody).body)).toEqual({ + dataStreams: ['dataStream1'], + }); + }); + }); }); }); diff --git a/x-pack/plugins/index_management/__jest__/client_integration/home/indices_tab.helpers.ts b/x-pack/plugins/index_management/__jest__/client_integration/home/indices_tab.helpers.ts index f00348aacbf0..11ea29fd9b78 100644 --- a/x-pack/plugins/index_management/__jest__/client_integration/home/indices_tab.helpers.ts +++ b/x-pack/plugins/index_management/__jest__/client_integration/home/indices_tab.helpers.ts @@ -5,6 +5,7 @@ */ import { act } from 'react-dom/test-utils'; +import { ReactWrapper } from 'enzyme'; import { registerTestBed, @@ -34,6 +35,8 @@ export interface IndicesTestBed extends TestBed { clickIncludeHiddenIndicesToggle: () => void; clickDataStreamAt: (index: number) => void; }; + findDataStreamDetailPanel: () => ReactWrapper; + findDataStreamDetailPanelTitle: () => string; } export const setup = async (): Promise => { @@ -77,6 +80,16 @@ export const setup = async (): Promise => { component.update(); }; + const findDataStreamDetailPanel = () => { + const { find } = testBed; + return find('dataStreamDetailPanel'); + }; + + const findDataStreamDetailPanelTitle = () => { + const { find } = testBed; + return find('dataStreamDetailPanelTitle').text(); + }; + return { ...testBed, actions: { @@ -85,5 +98,7 @@ export const setup = async (): Promise => { clickIncludeHiddenIndicesToggle, clickDataStreamAt, }, + findDataStreamDetailPanel, + findDataStreamDetailPanelTitle, }; }; diff --git a/x-pack/plugins/index_management/__jest__/client_integration/home/indices_tab.test.ts b/x-pack/plugins/index_management/__jest__/client_integration/home/indices_tab.test.ts index c2d955bb4dfc..3d6d94d16585 100644 --- a/x-pack/plugins/index_management/__jest__/client_integration/home/indices_tab.test.ts +++ b/x-pack/plugins/index_management/__jest__/client_integration/home/indices_tab.test.ts @@ -70,10 +70,10 @@ describe('', () => { }, ]); - httpRequestsMockHelpers.setLoadDataStreamsResponse([ - createDataStreamPayload('dataStream1'), - createDataStreamPayload('dataStream2'), - ]); + // The detail panel should still appear even if there are no data streams. 
+ httpRequestsMockHelpers.setLoadDataStreamsResponse([]); + + httpRequestsMockHelpers.setLoadDataStreamResponse(createDataStreamPayload('dataStream1')); testBed = await setup(); @@ -86,13 +86,16 @@ describe('', () => { }); test('navigates to the data stream in the Data Streams tab', async () => { - const { table, actions } = testBed; + const { + findDataStreamDetailPanel, + findDataStreamDetailPanelTitle, + actions: { clickDataStreamAt }, + } = testBed; - await actions.clickDataStreamAt(0); + await clickDataStreamAt(0); - expect(table.getMetaData('dataStreamTable').tableCellsValues).toEqual([ - ['dataStream1', '1', '@timestamp', '1'], - ]); + expect(findDataStreamDetailPanel().length).toBe(1); + expect(findDataStreamDetailPanelTitle()).toBe('dataStream1'); }); }); diff --git a/x-pack/plugins/index_management/common/lib/data_stream_serialization.ts b/x-pack/plugins/index_management/common/lib/data_stream_serialization.ts index 9d267210a6b3..51528ed9856c 100644 --- a/x-pack/plugins/index_management/common/lib/data_stream_serialization.ts +++ b/x-pack/plugins/index_management/common/lib/data_stream_serialization.ts @@ -6,8 +6,10 @@ import { DataStream, DataStreamFromEs } from '../types'; -export function deserializeDataStreamList(dataStreamsFromEs: DataStreamFromEs[]): DataStream[] { - return dataStreamsFromEs.map(({ name, timestamp_field, indices, generation }) => ({ +export function deserializeDataStream(dataStreamFromEs: DataStreamFromEs): DataStream { + const { name, timestamp_field, indices, generation } = dataStreamFromEs; + + return { name, timeStampField: timestamp_field, indices: indices.map( @@ -17,5 +19,9 @@ export function deserializeDataStreamList(dataStreamsFromEs: DataStreamFromEs[]) }) ), generation, - })); + }; +} + +export function deserializeDataStreamList(dataStreamsFromEs: DataStreamFromEs[]): DataStream[] { + return dataStreamsFromEs.map((dataStream) => deserializeDataStream(dataStream)); } diff --git a/x-pack/plugins/index_management/common/lib/index.ts b/x-pack/plugins/index_management/common/lib/index.ts index fce4d8ccc250..4e76a40ced52 100644 --- a/x-pack/plugins/index_management/common/lib/index.ts +++ b/x-pack/plugins/index_management/common/lib/index.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -export { deserializeDataStreamList } from './data_stream_serialization'; +export { deserializeDataStream, deserializeDataStreamList } from './data_stream_serialization'; export { deserializeLegacyTemplateList, diff --git a/x-pack/plugins/index_management/kibana.json b/x-pack/plugins/index_management/kibana.json index 2e0fa04337b4..40ecb26e8f0c 100644 --- a/x-pack/plugins/index_management/kibana.json +++ b/x-pack/plugins/index_management/kibana.json @@ -10,7 +10,8 @@ ], "optionalPlugins": [ "security", - "usageCollection" + "usageCollection", + "ingestManager" ], "configPath": ["xpack", "index_management"] } diff --git a/x-pack/plugins/index_management/public/application/app_context.tsx b/x-pack/plugins/index_management/public/application/app_context.tsx index 84938de41694..c82190712037 100644 --- a/x-pack/plugins/index_management/public/application/app_context.tsx +++ b/x-pack/plugins/index_management/public/application/app_context.tsx @@ -6,9 +6,10 @@ import React, { createContext, useContext } from 'react'; import { ScopedHistory } from 'kibana/public'; -import { CoreStart } from '../../../../../src/core/public'; +import { UsageCollectionSetup } from 'src/plugins/usage_collection/public'; -import { UsageCollectionSetup } from '../../../../../src/plugins/usage_collection/public'; +import { CoreStart } from '../../../../../src/core/public'; +import { IngestManagerSetup } from '../../../ingest_manager/public'; import { IndexMgmtMetricsType } from '../types'; import { UiMetricService, NotificationService, HttpService } from './services'; import { ExtensionsService } from '../services'; @@ -22,6 +23,7 @@ export interface AppDependencies { }; plugins: { usageCollection: UsageCollectionSetup; + ingestManager?: IngestManagerSetup; }; services: { uiMetricService: UiMetricService; diff --git a/x-pack/plugins/index_management/public/application/mount_management_section.ts b/x-pack/plugins/index_management/public/application/mount_management_section.ts index e8b6f200fb34..258f32865720 100644 --- a/x-pack/plugins/index_management/public/application/mount_management_section.ts +++ b/x-pack/plugins/index_management/public/application/mount_management_section.ts @@ -8,6 +8,7 @@ import { CoreSetup } from 'src/core/public'; import { ManagementAppMountParams } from 'src/plugins/management/public/'; import { UsageCollectionSetup } from 'src/plugins/usage_collection/public'; +import { IngestManagerSetup } from '../../../ingest_manager/public'; import { ExtensionsService } from '../services'; import { IndexMgmtMetricsType } from '../types'; import { AppDependencies } from './app_context'; @@ -28,7 +29,8 @@ export async function mountManagementSection( coreSetup: CoreSetup, usageCollection: UsageCollectionSetup, services: InternalServices, - params: ManagementAppMountParams + params: ManagementAppMountParams, + ingestManager?: IngestManagerSetup ) { const { element, setBreadcrumbs, history } = params; const [core] = await coreSetup.getStartServices(); @@ -44,6 +46,7 @@ export async function mountManagementSection( }, plugins: { usageCollection, + ingestManager, }, services, history, diff --git a/x-pack/plugins/index_management/public/application/sections/home/data_stream_list/data_stream_detail_panel/data_stream_detail_panel.tsx b/x-pack/plugins/index_management/public/application/sections/home/data_stream_list/data_stream_detail_panel/data_stream_detail_panel.tsx index a6c8b83a05f9..577f04a4a7ef 100644 --- 
a/x-pack/plugins/index_management/public/application/sections/home/data_stream_list/data_stream_detail_panel/data_stream_detail_panel.tsx +++ b/x-pack/plugins/index_management/public/application/sections/home/data_stream_list/data_stream_detail_panel/data_stream_detail_panel.tsx @@ -4,9 +4,10 @@ * you may not use this file except in compliance with the Elastic License. */ -import React, { Fragment } from 'react'; +import React, { useState } from 'react'; import { FormattedMessage } from '@kbn/i18n/react'; import { + EuiButton, EuiFlyout, EuiFlyoutHeader, EuiTitle, @@ -15,14 +16,18 @@ import { EuiFlexGroup, EuiFlexItem, EuiButtonEmpty, + EuiDescriptionList, + EuiDescriptionListTitle, + EuiDescriptionListDescription, } from '@elastic/eui'; import { SectionLoading, SectionError, Error } from '../../../../components'; import { useLoadDataStream } from '../../../../services/api'; +import { DeleteDataStreamConfirmationModal } from '../delete_data_stream_confirmation_modal'; interface Props { dataStreamName: string; - onClose: () => void; + onClose: (shouldReload?: boolean) => void; } /** @@ -36,6 +41,8 @@ export const DataStreamDetailPanel: React.FunctionComponent = ({ }) => { const { error, data: dataStream, isLoading } = useLoadDataStream(dataStreamName); + const [isDeleting, setIsDeleting] = useState(false); + let content; if (isLoading) { @@ -61,44 +68,97 @@ export const DataStreamDetailPanel: React.FunctionComponent = ({ /> ); } else if (dataStream) { - content = {JSON.stringify(dataStream)}; + const { timeStampField, generation } = dataStream; + + content = ( + + + + + + {timeStampField.name} + + + + + + {generation} + + ); } return ( - - - -

- {dataStreamName} -

-
-
- - {content} - - - - - - - - - - -
+ <> + {isDeleting ? ( + { + if (data && data.hasDeletedDataStreams) { + onClose(true); + } else { + setIsDeleting(false); + } + }} + dataStreams={[dataStreamName]} + /> + ) : null} + + + + +

+ {dataStreamName} +

+
+
+ + {content} + + + + + onClose()} + data-test-subj="closeDetailsButton" + > + + + + + {!isLoading && !error ? ( + + setIsDeleting(true)} + data-test-subj="deleteDataStreamButton" + > + + + + ) : null} + + +
+ ); }; diff --git a/x-pack/plugins/index_management/public/application/sections/home/data_stream_list/data_stream_list.tsx b/x-pack/plugins/index_management/public/application/sections/home/data_stream_list/data_stream_list.tsx index 951c4a0d7f3c..bad008b665cf 100644 --- a/x-pack/plugins/index_management/public/application/sections/home/data_stream_list/data_stream_list.tsx +++ b/x-pack/plugins/index_management/public/application/sections/home/data_stream_list/data_stream_list.tsx @@ -12,9 +12,13 @@ import { EuiTitle, EuiText, EuiSpacer, EuiEmptyPrompt, EuiLink } from '@elastic/ import { ScopedHistory } from 'kibana/public'; import { reactRouterNavigate } from '../../../../shared_imports'; +import { useAppContext } from '../../../app_context'; import { SectionError, SectionLoading, Error } from '../../../components'; import { useLoadDataStreams } from '../../../services/api'; +import { decodePathFromReactRouter } from '../../../services/routing'; +import { Section } from '../../home'; import { DataStreamTable } from './data_stream_table'; +import { DataStreamDetailPanel } from './data_stream_detail_panel'; interface MatchParams { dataStreamName?: string; @@ -26,6 +30,11 @@ export const DataStreamList: React.FunctionComponent { + const { + core: { getUrlForApp }, + plugins: { ingestManager }, + } = useAppContext(); + const { error, isLoading, data: dataStreams, sendRequest: reload } = useLoadDataStreams(); let content; @@ -67,22 +76,52 @@ export const DataStreamList: React.FunctionComponent - {i18n.translate('xpack.idxMgmt.dataStreamList.emptyPrompt.getStartedLink', { - defaultMessage: 'composable index template', - })} - - ), - }} + defaultMessage="Data streams represent collections of time series indices." /> + {' ' /* We need this space to separate these two sentences. */} + {ingestManager ? ( + + {i18n.translate( + 'xpack.idxMgmt.dataStreamList.emptyPrompt.noDataStreamsCtaIngestManagerLink', + { + defaultMessage: 'Ingest Manager', + } + )} + + ), + }} + /> + ) : ( + + {i18n.translate( + 'xpack.idxMgmt.dataStreamList.emptyPrompt.noDataStreamsCtaIndexTemplateLink', + { + defaultMessage: 'composable index template', + } + )} + + ), + }} + /> + )}
} data-test-subj="emptyPrompt" @@ -104,24 +143,38 @@ export const DataStreamList: React.FunctionComponent - - {/* TODO: Implement this once we have something to put in here, e.g. storage size, docs count */} - {/* dataStreamName && ( - { - history.push('/data_streams'); - }} - /> - )*/} ); } - return
{content}
; + return ( +
+ {content} + + {/* + If the user has been deep-linked, they'll expect to see the detail panel because it reflects + the URL state, even if there are no data streams or if there was an error loading them. + */} + {dataStreamName && ( + { + history.push(`/${Section.DataStreams}`); + + // If the data stream was deleted, we need to refresh the list. + if (shouldReload) { + reload(); + } + }} + /> + )} +
+ ); }; diff --git a/x-pack/plugins/index_management/public/application/sections/home/data_stream_list/data_stream_table/data_stream_table.tsx b/x-pack/plugins/index_management/public/application/sections/home/data_stream_list/data_stream_table/data_stream_table.tsx index 54035e219362..d01d8fa03a3f 100644 --- a/x-pack/plugins/index_management/public/application/sections/home/data_stream_list/data_stream_table/data_stream_table.tsx +++ b/x-pack/plugins/index_management/public/application/sections/home/data_stream_list/data_stream_table/data_stream_table.tsx @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -import React from 'react'; +import React, { useState } from 'react'; import { i18n } from '@kbn/i18n'; import { FormattedMessage } from '@kbn/i18n/react'; import { EuiInMemoryTable, EuiBasicTableColumn, EuiButton, EuiLink } from '@elastic/eui'; @@ -13,6 +13,8 @@ import { ScopedHistory } from 'kibana/public'; import { DataStream } from '../../../../../../common/types'; import { reactRouterNavigate } from '../../../../../shared_imports'; import { encodePathForReactRouter } from '../../../../services/routing'; +import { Section } from '../../../home'; +import { DeleteDataStreamConfirmationModal } from '../delete_data_stream_confirmation_modal'; interface Props { dataStreams?: DataStream[]; @@ -27,6 +29,9 @@ export const DataStreamTable: React.FunctionComponent = ({ history, filters, }) => { + const [selection, setSelection] = useState([]); + const [dataStreamsToDelete, setDataStreamsToDelete] = useState([]); + const columns: Array> = [ { field: 'name', @@ -35,7 +40,19 @@ export const DataStreamTable: React.FunctionComponent = ({ }), truncateText: true, sortable: true, - // TODO: Render as a link to open the detail panel + render: (name: DataStream['name'], item: DataStream) => { + return ( + /* eslint-disable-next-line @elastic/eui/href-or-on-click */ + + {name} + + ); + }, }, { field: 'indices', @@ -59,20 +76,27 @@ export const DataStreamTable: React.FunctionComponent = ({ ), }, { - field: 'timeStampField.name', - name: i18n.translate('xpack.idxMgmt.dataStreamList.table.timeStampFieldColumnTitle', { - defaultMessage: 'Timestamp field', + name: i18n.translate('xpack.idxMgmt.dataStreamList.table.actionColumnTitle', { + defaultMessage: 'Actions', }), - truncateText: true, - sortable: true, - }, - { - field: 'generation', - name: i18n.translate('xpack.idxMgmt.dataStreamList.table.generationFieldColumnTitle', { - defaultMessage: 'Generation', - }), - truncateText: true, - sortable: true, + actions: [ + { + name: i18n.translate('xpack.idxMgmt.dataStreamList.table.actionDeleteText', { + defaultMessage: 'Delete', + }), + description: i18n.translate('xpack.idxMgmt.dataStreamList.table.actionDeleteDecription', { + defaultMessage: 'Delete this data stream', + }), + icon: 'trash', + color: 'danger', + type: 'icon', + onClick: ({ name }: DataStream) => { + setDataStreamsToDelete([name]); + }, + isPrimary: true, + 'data-test-subj': 'deleteDataStream', + }, + ], }, ]; @@ -88,12 +112,29 @@ export const DataStreamTable: React.FunctionComponent = ({ }, } as const; + const selectionConfig = { + onSelectionChange: setSelection, + }; + const searchConfig = { query: filters, box: { incremental: true, }, - toolsLeft: undefined /* TODO: Actions menu */, + toolsLeft: + selection.length > 0 ? 
( + setDataStreamsToDelete(selection.map(({ name }: DataStream) => name))} + color="danger" + > + + + ) : undefined, toolsRight: [ = ({ return ( <> + {dataStreamsToDelete && dataStreamsToDelete.length > 0 ? ( + { + if (data && data.hasDeletedDataStreams) { + reload(); + } else { + setDataStreamsToDelete([]); + } + }} + dataStreams={dataStreamsToDelete} + /> + ) : null} = ({ search={searchConfig} sorting={sorting} isSelectable={true} + selection={selectionConfig} pagination={pagination} rowProps={() => ({ 'data-test-subj': 'row', diff --git a/x-pack/plugins/index_management/public/application/sections/home/data_stream_list/delete_data_stream_confirmation_modal/delete_data_stream_confirmation_modal.tsx b/x-pack/plugins/index_management/public/application/sections/home/data_stream_list/delete_data_stream_confirmation_modal/delete_data_stream_confirmation_modal.tsx new file mode 100644 index 000000000000..fc8e41aa634b --- /dev/null +++ b/x-pack/plugins/index_management/public/application/sections/home/data_stream_list/delete_data_stream_confirmation_modal/delete_data_stream_confirmation_modal.tsx @@ -0,0 +1,149 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import React, { Fragment } from 'react'; +import { EuiCallOut, EuiConfirmModal, EuiOverlayMask, EuiSpacer } from '@elastic/eui'; +import { i18n } from '@kbn/i18n'; +import { FormattedMessage } from '@kbn/i18n/react'; + +import { deleteDataStreams } from '../../../../services/api'; +import { notificationService } from '../../../../services/notification'; + +interface Props { + dataStreams: string[]; + onClose: (data?: { hasDeletedDataStreams: boolean }) => void; +} + +export const DeleteDataStreamConfirmationModal: React.FunctionComponent = ({ + dataStreams, + onClose, +}: { + dataStreams: string[]; + onClose: (data?: { hasDeletedDataStreams: boolean }) => void; +}) => { + const dataStreamsCount = dataStreams.length; + + const handleDeleteDataStreams = () => { + deleteDataStreams(dataStreams).then(({ data: { dataStreamsDeleted, errors }, error }) => { + const hasDeletedDataStreams = dataStreamsDeleted && dataStreamsDeleted.length; + + if (hasDeletedDataStreams) { + const successMessage = + dataStreamsDeleted.length === 1 + ? i18n.translate( + 'xpack.idxMgmt.deleteDataStreamsConfirmationModal.successDeleteSingleNotificationMessageText', + { + defaultMessage: "Deleted data stream '{dataStreamName}'", + values: { dataStreamName: dataStreams[0] }, + } + ) + : i18n.translate( + 'xpack.idxMgmt.deleteDataStreamsConfirmationModal.successDeleteMultipleNotificationMessageText', + { + defaultMessage: + 'Deleted {numSuccesses, plural, one {# data stream} other {# data streams}}', + values: { numSuccesses: dataStreamsDeleted.length }, + } + ); + + onClose({ hasDeletedDataStreams }); + notificationService.showSuccessToast(successMessage); + } + + if (error || (errors && errors.length)) { + const hasMultipleErrors = + (errors && errors.length > 1) || (error && dataStreams.length > 1); + + const errorMessage = hasMultipleErrors + ? 
i18n.translate( + 'xpack.idxMgmt.deleteDataStreamsConfirmationModal.multipleErrorsNotificationMessageText', + { + defaultMessage: 'Error deleting {count} data streams', + values: { + count: (errors && errors.length) || dataStreams.length, + }, + } + ) + : i18n.translate( + 'xpack.idxMgmt.deleteDataStreamsConfirmationModal.errorNotificationMessageText', + { + defaultMessage: "Error deleting data stream '{name}'", + values: { name: (errors && errors[0].name) || dataStreams[0] }, + } + ); + + notificationService.showDangerToast(errorMessage); + } + }); + }; + + return ( + + + } + onCancel={() => onClose()} + onConfirm={handleDeleteDataStreams} + cancelButtonText={ + + } + confirmButtonText={ + + } + > + + + } + color="danger" + iconType="alert" + > +
+ +
+
+ + + +
+ +
+ +
+            {dataStreams.map((name) => (
+              <li key={name}>{name}</li>
+            ))}
+
+
+
+ ); +}; diff --git a/x-pack/plugins/index_management/public/application/sections/home/data_stream_list/delete_data_stream_confirmation_modal/index.ts b/x-pack/plugins/index_management/public/application/sections/home/data_stream_list/delete_data_stream_confirmation_modal/index.ts new file mode 100644 index 000000000000..eaa4a8fc2de0 --- /dev/null +++ b/x-pack/plugins/index_management/public/application/sections/home/data_stream_list/delete_data_stream_confirmation_modal/index.ts @@ -0,0 +1,7 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +export { DeleteDataStreamConfirmationModal } from './delete_data_stream_confirmation_modal'; diff --git a/x-pack/plugins/index_management/public/application/services/api.ts b/x-pack/plugins/index_management/public/application/services/api.ts index 5ad84395d24c..d7874ec2dcf3 100644 --- a/x-pack/plugins/index_management/public/application/services/api.ts +++ b/x-pack/plugins/index_management/public/application/services/api.ts @@ -53,14 +53,21 @@ export function useLoadDataStreams() { }); } -// TODO: Implement this API endpoint once we have content to surface in the detail panel. export function useLoadDataStream(name: string) { - return useRequest({ - path: `${API_BASE_PATH}/data_stream/${encodeURIComponent(name)}`, + return useRequest({ + path: `${API_BASE_PATH}/data_streams/${encodeURIComponent(name)}`, method: 'get', }); } +export async function deleteDataStreams(dataStreams: string[]) { + return sendRequest({ + path: `${API_BASE_PATH}/delete_data_streams`, + method: 'post', + body: { dataStreams }, + }); +} + export async function loadIndices() { const response = await httpService.httpClient.get(`${API_BASE_PATH}/indices`); return response.data ? 
response.data : response; diff --git a/x-pack/plugins/index_management/public/plugin.ts b/x-pack/plugins/index_management/public/plugin.ts index 94d9bccdc63c..aec25ee3247d 100644 --- a/x-pack/plugins/index_management/public/plugin.ts +++ b/x-pack/plugins/index_management/public/plugin.ts @@ -8,6 +8,8 @@ import { i18n } from '@kbn/i18n'; import { CoreSetup } from '../../../../src/core/public'; import { UsageCollectionSetup } from '../../../../src/plugins/usage_collection/public'; import { ManagementSetup, ManagementSectionId } from '../../../../src/plugins/management/public'; + +import { IngestManagerSetup } from '../../ingest_manager/public'; import { UIM_APP_NAME, PLUGIN } from '../common/constants'; import { httpService } from './application/services/http'; @@ -25,6 +27,7 @@ export interface IndexManagementPluginSetup { } interface PluginsDependencies { + ingestManager?: IngestManagerSetup; usageCollection: UsageCollectionSetup; management: ManagementSetup; } @@ -42,7 +45,7 @@ export class IndexMgmtUIPlugin { public setup(coreSetup: CoreSetup, plugins: PluginsDependencies): IndexManagementPluginSetup { const { http, notifications } = coreSetup; - const { usageCollection, management } = plugins; + const { ingestManager, usageCollection, management } = plugins; httpService.setup(http); notificationService.setup(notifications); @@ -60,7 +63,7 @@ export class IndexMgmtUIPlugin { uiMetricService: this.uiMetricService, extensionsService: this.extensionsService, }; - return mountManagementSection(coreSetup, usageCollection, services, params); + return mountManagementSection(coreSetup, usageCollection, services, params, ingestManager); }, }); diff --git a/x-pack/plugins/index_management/server/client/elasticsearch.ts b/x-pack/plugins/index_management/server/client/elasticsearch.ts index 6b1bf47512b2..6c0fbe3dd6a6 100644 --- a/x-pack/plugins/index_management/server/client/elasticsearch.ts +++ b/x-pack/plugins/index_management/server/client/elasticsearch.ts @@ -20,6 +20,20 @@ export const elasticsearchJsPlugin = (Client: any, config: any, components: any) method: 'GET', }); + dataManagement.getDataStream = ca({ + urls: [ + { + fmt: '/_data_stream/<%=name%>', + req: { + name: { + type: 'string', + }, + }, + }, + ], + method: 'GET', + }); + // We don't allow the user to create a data stream in the UI or API. We're just adding this here // to enable the API integration tests. 
dataManagement.createDataStream = ca({ diff --git a/x-pack/plugins/index_management/server/routes/api/data_streams/index.ts b/x-pack/plugins/index_management/server/routes/api/data_streams/index.ts index 56c514e30f24..4aaf2b1bc5ed 100644 --- a/x-pack/plugins/index_management/server/routes/api/data_streams/index.ts +++ b/x-pack/plugins/index_management/server/routes/api/data_streams/index.ts @@ -6,8 +6,11 @@ import { RouteDependencies } from '../../../types'; -import { registerGetAllRoute } from './register_get_route'; +import { registerGetOneRoute, registerGetAllRoute } from './register_get_route'; +import { registerDeleteRoute } from './register_delete_route'; export function registerDataStreamRoutes(dependencies: RouteDependencies) { + registerGetOneRoute(dependencies); registerGetAllRoute(dependencies); + registerDeleteRoute(dependencies); } diff --git a/x-pack/plugins/index_management/server/routes/api/data_streams/register_delete_route.ts b/x-pack/plugins/index_management/server/routes/api/data_streams/register_delete_route.ts new file mode 100644 index 000000000000..45b185bcd053 --- /dev/null +++ b/x-pack/plugins/index_management/server/routes/api/data_streams/register_delete_route.ts @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { schema, TypeOf } from '@kbn/config-schema'; + +import { RouteDependencies } from '../../../types'; +import { addBasePath } from '../index'; +import { wrapEsError } from '../../helpers'; + +const bodySchema = schema.object({ + dataStreams: schema.arrayOf(schema.string()), +}); + +export function registerDeleteRoute({ router, license }: RouteDependencies) { + router.post( + { + path: addBasePath('/delete_data_streams'), + validate: { body: bodySchema }, + }, + license.guardApiRoute(async (ctx, req, res) => { + const { callAsCurrentUser } = ctx.dataManagement!.client; + const { dataStreams } = req.body as TypeOf; + + const response: { dataStreamsDeleted: string[]; errors: any[] } = { + dataStreamsDeleted: [], + errors: [], + }; + + await Promise.all( + dataStreams.map(async (name: string) => { + try { + await callAsCurrentUser('dataManagement.deleteDataStream', { + name, + }); + + return response.dataStreamsDeleted.push(name); + } catch (e) { + return response.errors.push({ + name, + error: wrapEsError(e), + }); + } + }) + ); + + return res.ok({ body: response }); + }) + ); +} diff --git a/x-pack/plugins/index_management/server/routes/api/data_streams/register_get_route.ts b/x-pack/plugins/index_management/server/routes/api/data_streams/register_get_route.ts index 9128556130bf..5f4e62534833 100644 --- a/x-pack/plugins/index_management/server/routes/api/data_streams/register_get_route.ts +++ b/x-pack/plugins/index_management/server/routes/api/data_streams/register_get_route.ts @@ -4,7 +4,9 @@ * you may not use this file except in compliance with the Elastic License. 
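For reference, a minimal sketch of how a caller might drive the bulk-delete flow added above: `deleteDataStreams` in `api.ts` posts `{ dataStreams }` to the `delete_data_streams` route, which deletes each stream independently and reports per-stream failures in `errors` rather than failing the whole request. The wrapper function and its import path below are illustrative, not part of the change.

```ts
import { deleteDataStreams } from './services/api'; // illustrative path

export async function deleteAndReport(names: string[]): Promise<string[]> {
  // sendRequest resolves to { data, error }, as the confirmation modal above relies on.
  const { data, error } = await deleteDataStreams(names);

  if (error || !data) {
    // Transport-level failure: nothing is known to have been deleted.
    return [];
  }

  if (data.errors.length > 0) {
    // Per-stream failures are collected server-side instead of aborting the request.
    console.warn(`Failed to delete ${data.errors.length} data stream(s)`);
  }

  return data.dataStreamsDeleted;
}
```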
*/ -import { deserializeDataStreamList } from '../../../../common/lib'; +import { schema, TypeOf } from '@kbn/config-schema'; + +import { deserializeDataStream, deserializeDataStreamList } from '../../../../common/lib'; import { RouteDependencies } from '../../../types'; import { addBasePath } from '../index'; @@ -32,3 +34,40 @@ export function registerGetAllRoute({ router, license, lib: { isEsError } }: Rou }) ); } + +export function registerGetOneRoute({ router, license, lib: { isEsError } }: RouteDependencies) { + const paramsSchema = schema.object({ + name: schema.string(), + }); + + router.get( + { + path: addBasePath('/data_streams/{name}'), + validate: { params: paramsSchema }, + }, + license.guardApiRoute(async (ctx, req, res) => { + const { name } = req.params as TypeOf; + const { callAsCurrentUser } = ctx.dataManagement!.client; + + try { + const dataStream = await callAsCurrentUser('dataManagement.getDataStream', { name }); + + if (dataStream[0]) { + const body = deserializeDataStream(dataStream[0]); + return res.ok({ body }); + } + + return res.notFound(); + } catch (e) { + if (isEsError(e)) { + return res.customError({ + statusCode: e.statusCode, + body: e, + }); + } + // Case: default + return res.internalError({ body: e }); + } + }) + ); +} diff --git a/x-pack/plugins/infra/kibana.json b/x-pack/plugins/infra/kibana.json index 4701182c9681..4e23f1985d45 100644 --- a/x-pack/plugins/infra/kibana.json +++ b/x-pack/plugins/infra/kibana.json @@ -13,6 +13,9 @@ "alerts", "triggers_actions_ui" ], + "optionalPlugins": [ + "ml" + ], "server": true, "ui": true, "configPath": ["xpack", "infra"] diff --git a/x-pack/plugins/infra/public/assets/anomaly_chart_minified.svg b/x-pack/plugins/infra/public/assets/anomaly_chart_minified.svg new file mode 100644 index 000000000000..dd1b39248bba --- /dev/null +++ b/x-pack/plugins/infra/public/assets/anomaly_chart_minified.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/x-pack/plugins/infra/public/components/logging/log_analysis_setup/index.ts b/x-pack/plugins/infra/public/components/logging/log_analysis_setup/index.ts index 7f2982f221a3..72099e9b1b4b 100644 --- a/x-pack/plugins/infra/public/components/logging/log_analysis_setup/index.ts +++ b/x-pack/plugins/infra/public/components/logging/log_analysis_setup/index.ts @@ -13,3 +13,4 @@ export * from './missing_results_privileges_prompt'; export * from './missing_setup_privileges_prompt'; export * from './ml_unavailable_prompt'; export * from './setup_status_unknown_prompt'; +export * from './subscription_splash_content'; diff --git a/x-pack/plugins/infra/public/components/logging/log_analysis_setup/subscription_splash_content.tsx b/x-pack/plugins/infra/public/components/logging/log_analysis_setup/subscription_splash_content.tsx new file mode 100644 index 000000000000..e0e293b1cc3e --- /dev/null +++ b/x-pack/plugins/infra/public/components/logging/log_analysis_setup/subscription_splash_content.tsx @@ -0,0 +1,174 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
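Along the same lines, a sketch of fetching a single data stream through the new GET route from the browser side. The `/api/index_management` prefix stands in for the plugin's `API_BASE_PATH` constant (not shown here), and the 404 handling assumes Kibana's fetch error exposes the response status.

```ts
import type { HttpSetup } from 'src/core/public';

export async function fetchDataStream(http: HttpSetup, name: string) {
  try {
    // Served by registerGetOneRoute; the body is the deserialized data stream
    // (name, timeStampField, generation, ...).
    return await http.get(`/api/index_management/data_streams/${encodeURIComponent(name)}`);
  } catch (e) {
    // registerGetOneRoute responds 404 when Elasticsearch returns no matching stream.
    if (e?.response?.status === 404) {
      return undefined;
    }
    throw e;
  }
}
```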
+ */ + +import React, { useEffect } from 'react'; +import { i18n } from '@kbn/i18n'; +import { + EuiPage, + EuiPageBody, + EuiPageContent, + EuiFlexGroup, + EuiFlexItem, + EuiSpacer, + EuiTitle, + EuiText, + EuiButton, + EuiButtonEmpty, + EuiImage, +} from '@elastic/eui'; +import { FormattedMessage } from '@kbn/i18n/react'; +import { LoadingPage } from '../../loading_page'; + +import { useKibana } from '../../../../../../../src/plugins/kibana_react/public'; +import { euiStyled } from '../../../../../observability/public'; +import { useTrialStatus } from '../../../hooks/use_trial_status'; + +export const SubscriptionSplashContent: React.FC = () => { + const { services } = useKibana(); + const { loadState, isTrialAvailable, checkTrialAvailability } = useTrialStatus(); + + useEffect(() => { + checkTrialAvailability(); + }, [checkTrialAvailability]); + + if (loadState === 'pending') { + return ( + + ); + } + + const canStartTrial = isTrialAvailable && loadState === 'resolved'; + + let title; + let description; + let cta; + + if (canStartTrial) { + title = ( + + ); + + description = ( + + ); + + cta = ( + + + + ); + } else { + title = ( + + ); + + description = ( + + ); + + cta = ( + + + + ); + } + + return ( + + + + + + +
{title}
+
+ + +
{description}
+
+ +
{cta}
+
+ + + +
+ + +
+ +
+
+ + + +
+
+
+
+ ); +}; + +const SubscriptionPage = euiStyled(EuiPage)` + height: 100% +`; + +const SubscriptionPageContent = euiStyled(EuiPageContent)` + max-width: 768px !important; +`; + +const SubscriptionPageFooter = euiStyled.div` + background: ${(props) => props.theme.eui.euiColorLightestShade}; + margin: 0 -${(props) => props.theme.eui.paddingSizes.l} -${(props) => + props.theme.eui.paddingSizes.l}; + padding: ${(props) => props.theme.eui.paddingSizes.l}; +`; diff --git a/x-pack/plugins/infra/public/hooks/use_trial_status.tsx b/x-pack/plugins/infra/public/hooks/use_trial_status.tsx new file mode 100644 index 000000000000..9cc118d09c7e --- /dev/null +++ b/x-pack/plugins/infra/public/hooks/use_trial_status.tsx @@ -0,0 +1,51 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { boolean } from 'io-ts'; +import { i18n } from '@kbn/i18n'; + +import { useState } from 'react'; +import { useKibana } from '../../../../../src/plugins/kibana_react/public'; +import { API_BASE_PATH as LICENSE_MANAGEMENT_API_BASE_PATH } from '../../../license_management/common/constants'; +import { useTrackedPromise } from '../utils/use_tracked_promise'; +import { decodeOrThrow } from '../../common/runtime_types'; + +interface UseTrialStatusState { + loadState: 'uninitialized' | 'pending' | 'resolved' | 'rejected'; + isTrialAvailable: boolean; + checkTrialAvailability: () => void; +} + +export function useTrialStatus(): UseTrialStatusState { + const { services } = useKibana(); + const [isTrialAvailable, setIsTrialAvailable] = useState(false); + + const [loadState, checkTrialAvailability] = useTrackedPromise( + { + createPromise: async () => { + const response = await services.http.get(`${LICENSE_MANAGEMENT_API_BASE_PATH}/start_trial`); + return decodeOrThrow(boolean)(response); + }, + onResolve: (response) => { + setIsTrialAvailable(response); + }, + onReject: (error) => { + services.notifications.toasts.addDanger( + i18n.translate('xpack.infra.trialStatus.trialStatusNetworkErrorMessage', { + defaultMessage: 'We could not determine if the trial license is available', + }) + ); + }, + }, + [services] + ); + + return { + loadState: loadState.state, + isTrialAvailable, + checkTrialAvailability, + }; +} diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_categories/page_content.tsx b/x-pack/plugins/infra/public/pages/logs/log_entry_categories/page_content.tsx index 04b472ceb59c..5d9adb8a4f6e 100644 --- a/x-pack/plugins/infra/public/pages/logs/log_entry_categories/page_content.tsx +++ b/x-pack/plugins/infra/public/pages/logs/log_entry_categories/page_content.tsx @@ -12,7 +12,7 @@ import { LogAnalysisSetupStatusUnknownPrompt, MissingResultsPrivilegesPrompt, MissingSetupPrivilegesPrompt, - MlUnavailablePrompt, + SubscriptionSplashContent, } from '../../../components/logging/log_analysis_setup'; import { SourceErrorPage } from '../../../components/source_error_page'; import { SourceLoadingPage } from '../../../components/source_loading_page'; @@ -50,7 +50,7 @@ export const LogEntryCategoriesPageContent = () => { } else if (hasFailedLoadingSource) { return ; } else if (!hasLogAnalysisCapabilites) { - return ; + return ; } else if (!hasLogAnalysisReadCapabilities) { return ; } else if (setupStatus.type === 'initializing') { diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/page_content.tsx 
b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/page_content.tsx index fc07289f02fe..4ec05a977851 100644 --- a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/page_content.tsx +++ b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/page_content.tsx @@ -12,7 +12,7 @@ import { LogAnalysisSetupStatusUnknownPrompt, MissingResultsPrivilegesPrompt, MissingSetupPrivilegesPrompt, - MlUnavailablePrompt, + SubscriptionSplashContent, } from '../../../components/logging/log_analysis_setup'; import { SourceErrorPage } from '../../../components/source_error_page'; import { SourceLoadingPage } from '../../../components/source_loading_page'; @@ -50,7 +50,7 @@ export const LogEntryRatePageContent = () => { } else if (hasFailedLoadingSource) { return ; } else if (!hasLogAnalysisCapabilites) { - return ; + return ; } else if (!hasLogAnalysisReadCapabilities) { return ; } else if (setupStatus.type === 'initializing') { diff --git a/x-pack/plugins/infra/public/pages/logs/page_content.tsx b/x-pack/plugins/infra/public/pages/logs/page_content.tsx index 78b7f86993cb..c5047dbdf3bb 100644 --- a/x-pack/plugins/infra/public/pages/logs/page_content.tsx +++ b/x-pack/plugins/infra/public/pages/logs/page_content.tsx @@ -17,7 +17,6 @@ import { HelpCenterContent } from '../../components/help_center_content'; import { AppNavigation } from '../../components/navigation/app_navigation'; import { RoutedTabs } from '../../components/navigation/routed_tabs'; import { ColumnarPage } from '../../components/page'; -import { useLogAnalysisCapabilitiesContext } from '../../containers/logs/log_analysis'; import { useLogSourceContext } from '../../containers/logs/log_source'; import { RedirectWithQueryParams } from '../../utils/redirect_with_query_params'; import { LogEntryCategoriesPage } from './log_entry_categories'; @@ -28,7 +27,6 @@ import { AlertDropdown } from '../../components/alerting/logs/alert_dropdown'; export const LogsPageContent: React.FunctionComponent = () => { const uiCapabilities = useKibana().services.application?.capabilities; - const logAnalysisCapabilities = useLogAnalysisCapabilitiesContext(); const { initialize } = useLogSourceContext(); @@ -79,13 +77,7 @@ export const LogsPageContent: React.FunctionComponent = () => { - + diff --git a/x-pack/plugins/infra/server/lib/adapters/framework/adapter_types.ts b/x-pack/plugins/infra/server/lib/adapters/framework/adapter_types.ts index d00afbc7b497..905b7dfa314b 100644 --- a/x-pack/plugins/infra/server/lib/adapters/framework/adapter_types.ts +++ b/x-pack/plugins/infra/server/lib/adapters/framework/adapter_types.ts @@ -4,18 +4,18 @@ * you may not use this file except in compliance with the Elastic License. 
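For context on the splash screen wiring, a minimal sketch of a component consuming the new `useTrialStatus` hook, mirroring how `SubscriptionSplashContent` gates its call-to-action. The component name, import path, and CTA labels are illustrative.

```tsx
import React, { useEffect } from 'react';
import { useTrialStatus } from '../../../hooks/use_trial_status'; // illustrative path

export const TrialCtaLabel: React.FC = () => {
  const { loadState, isTrialAvailable, checkTrialAvailability } = useTrialStatus();

  // Kick off the availability check once, as SubscriptionSplashContent does.
  useEffect(() => {
    checkTrialAvailability();
  }, [checkTrialAvailability]);

  if (loadState === 'uninitialized' || loadState === 'pending') {
    return null;
  }

  // A trial CTA is only offered when the check resolved and a trial is still available.
  const canStartTrial = isTrialAvailable && loadState === 'resolved';

  return <span>{canStartTrial ? 'Start trial' : 'See subscription options'}</span>;
};
```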
*/ -import { SearchResponse, GenericParams } from 'elasticsearch'; +import { GenericParams, SearchResponse } from 'elasticsearch'; import { Lifecycle } from 'hapi'; import { UsageCollectionSetup } from 'src/plugins/usage_collection/server'; -import { RouteMethod, RouteConfig } from '../../../../../../../src/core/server'; -import { PluginSetupContract as FeaturesPluginSetup } from '../../../../../../plugins/features/server'; -import { SpacesPluginSetup } from '../../../../../../plugins/spaces/server'; +import { RouteConfig, RouteMethod } from '../../../../../../../src/core/server'; +import { HomeServerPluginSetup } from '../../../../../../../src/plugins/home/server'; import { VisTypeTimeseriesSetup } from '../../../../../../../src/plugins/vis_type_timeseries/server'; import { APMPluginSetup } from '../../../../../../plugins/apm/server'; -import { HomeServerPluginSetup } from '../../../../../../../src/plugins/home/server'; +import { PluginSetupContract as FeaturesPluginSetup } from '../../../../../../plugins/features/server'; +import { SpacesPluginSetup } from '../../../../../../plugins/spaces/server'; import { PluginSetupContract as AlertingPluginContract } from '../../../../../alerts/server'; +import { MlPluginSetup } from '../../../../../ml/server'; -// NP_TODO: Compose real types from plugins we depend on, no "any" export interface InfraServerPluginDeps { home: HomeServerPluginSetup; spaces: SpacesPluginSetup; @@ -24,6 +24,7 @@ export interface InfraServerPluginDeps { features: FeaturesPluginSetup; apm: APMPluginSetup; alerts: AlertingPluginContract; + ml?: MlPluginSetup; } export interface CallWithRequestParams extends GenericParams { diff --git a/x-pack/plugins/infra/server/lib/compose/kibana.ts b/x-pack/plugins/infra/server/lib/compose/kibana.ts deleted file mode 100644 index 626b9d46bbde..000000000000 --- a/x-pack/plugins/infra/server/lib/compose/kibana.ts +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
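Since `ml` is declared under `optionalPlugins` and typed as `ml?: MlPluginSetup`, consumers of `InfraServerPluginDeps` have to tolerate its absence; a minimal sketch of that guard (the helper itself is illustrative):

```ts
import type { MlPluginSetup } from '../../../../../ml/server'; // path as in adapter_types.ts

export function hasMlSetup(ml?: MlPluginSetup): ml is MlPluginSetup {
  // When the ML plugin is disabled or not installed, `ml` is undefined and the
  // ML-backed log analysis features should simply not be wired up.
  return ml != null;
}
```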
- */ -import { FrameworkFieldsAdapter } from '../adapters/fields/framework_fields_adapter'; -import { KibanaFramework } from '../adapters/framework/kibana_framework_adapter'; -import { InfraKibanaLogEntriesAdapter } from '../adapters/log_entries/kibana_log_entries_adapter'; -import { KibanaMetricsAdapter } from '../adapters/metrics/kibana_metrics_adapter'; -import { InfraElasticsearchSourceStatusAdapter } from '../adapters/source_status'; -import { InfraFieldsDomain } from '../domains/fields_domain'; -import { InfraLogEntriesDomain } from '../domains/log_entries_domain'; -import { InfraMetricsDomain } from '../domains/metrics_domain'; -import { InfraBackendLibs, InfraDomainLibs } from '../infra_types'; -import { LogEntryCategoriesAnalysis, LogEntryRateAnalysis } from '../log_analysis'; -import { InfraSnapshot } from '../snapshot'; -import { InfraSourceStatus } from '../source_status'; -import { InfraSources } from '../sources'; -import { InfraConfig } from '../../../server'; -import { CoreSetup } from '../../../../../../src/core/server'; -import { InfraServerPluginDeps } from '../adapters/framework/adapter_types'; - -export function compose(core: CoreSetup, config: InfraConfig, plugins: InfraServerPluginDeps) { - const framework = new KibanaFramework(core, config, plugins); - const sources = new InfraSources({ - config, - }); - const sourceStatus = new InfraSourceStatus(new InfraElasticsearchSourceStatusAdapter(framework), { - sources, - }); - const snapshot = new InfraSnapshot(); - const logEntryCategoriesAnalysis = new LogEntryCategoriesAnalysis({ framework }); - const logEntryRateAnalysis = new LogEntryRateAnalysis({ framework }); - - // TODO: separate these out individually and do away with "domains" as a temporary group - const domainLibs: InfraDomainLibs = { - fields: new InfraFieldsDomain(new FrameworkFieldsAdapter(framework), { - sources, - }), - logEntries: new InfraLogEntriesDomain(new InfraKibanaLogEntriesAdapter(framework), { - framework, - sources, - }), - metrics: new InfraMetricsDomain(new KibanaMetricsAdapter(framework)), - }; - - const libs: InfraBackendLibs = { - configuration: config, // NP_TODO: Do we ever use this anywhere? 
- framework, - logEntryCategoriesAnalysis, - logEntryRateAnalysis, - snapshot, - sources, - sourceStatus, - ...domainLibs, - }; - - return libs; -} diff --git a/x-pack/plugins/infra/server/lib/infra_types.ts b/x-pack/plugins/infra/server/lib/infra_types.ts index 51c433557f4f..9896ad6ac1cd 100644 --- a/x-pack/plugins/infra/server/lib/infra_types.ts +++ b/x-pack/plugins/infra/server/lib/infra_types.ts @@ -8,7 +8,6 @@ import { InfraSourceConfiguration } from '../../common/graphql/types'; import { InfraFieldsDomain } from './domains/fields_domain'; import { InfraLogEntriesDomain } from './domains/log_entries_domain'; import { InfraMetricsDomain } from './domains/metrics_domain'; -import { LogEntryCategoriesAnalysis, LogEntryRateAnalysis } from './log_analysis'; import { InfraSnapshot } from './snapshot'; import { InfraSources } from './sources'; import { InfraSourceStatus } from './source_status'; @@ -31,8 +30,6 @@ export interface InfraDomainLibs { export interface InfraBackendLibs extends InfraDomainLibs { configuration: InfraConfig; framework: KibanaFramework; - logEntryCategoriesAnalysis: LogEntryCategoriesAnalysis; - logEntryRateAnalysis: LogEntryRateAnalysis; snapshot: InfraSnapshot; sources: InfraSources; sourceStatus: InfraSourceStatus; diff --git a/x-pack/plugins/infra/server/lib/log_analysis/log_entry_categories_analysis.ts b/x-pack/plugins/infra/server/lib/log_analysis/log_entry_categories_analysis.ts index d0a6ae0fc935..4298ccb61bbe 100644 --- a/x-pack/plugins/infra/server/lib/log_analysis/log_entry_categories_analysis.ts +++ b/x-pack/plugins/infra/server/lib/log_analysis/log_entry_categories_analysis.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -import { KibanaRequest, RequestHandlerContext } from 'src/core/server'; +import type { IScopedClusterClient } from 'src/core/server'; import { compareDatasetsByMaximumAnomalyScore, getJobId, @@ -13,7 +13,7 @@ import { } from '../../../common/log_analysis'; import { startTracingSpan, TracingSpan } from '../../../common/performance_tracing'; import { decodeOrThrow } from '../../../common/runtime_types'; -import { KibanaFramework } from '../adapters/framework/kibana_framework_adapter'; +import type { MlAnomalyDetectors, MlSystem } from '../../types'; import { InsufficientLogAnalysisMlJobConfigurationError, NoLogAnalysisMlJobError, @@ -39,7 +39,6 @@ import { LogEntryDatasetBucket, logEntryDatasetsResponseRT, } from './queries/log_entry_data_sets'; -import { createMlJobsQuery, mlJobsResponseRT } from './queries/ml_jobs'; import { createTopLogEntryCategoriesQuery, topLogEntryCategoriesResponseRT, @@ -47,489 +46,470 @@ import { const COMPOSITE_AGGREGATION_BATCH_SIZE = 1000; -export class LogEntryCategoriesAnalysis { - constructor( - private readonly libs: { - framework: KibanaFramework; - } - ) {} - - public async getTopLogEntryCategories( - requestContext: RequestHandlerContext, - request: KibanaRequest, - sourceId: string, - startTime: number, - endTime: number, - categoryCount: number, - datasets: string[], - histograms: HistogramParameters[] - ) { - const finalizeTopLogEntryCategoriesSpan = startTracingSpan('get top categories'); - - const logEntryCategoriesCountJobId = getJobId( - this.libs.framework.getSpaceId(request), - sourceId, - logEntryCategoriesJobTypes[0] - ); - - const { - topLogEntryCategories, - timing: { spans: fetchTopLogEntryCategoriesAggSpans }, - } = await this.fetchTopLogEntryCategories( - requestContext, - logEntryCategoriesCountJobId, - startTime, - endTime, - 
categoryCount, - datasets - ); - - const categoryIds = topLogEntryCategories.map(({ categoryId }) => categoryId); - - const { - logEntryCategoriesById, - timing: { spans: fetchTopLogEntryCategoryPatternsSpans }, - } = await this.fetchLogEntryCategories( - requestContext, - logEntryCategoriesCountJobId, - categoryIds - ); - - const { - categoryHistogramsById, - timing: { spans: fetchTopLogEntryCategoryHistogramsSpans }, - } = await this.fetchTopLogEntryCategoryHistograms( - requestContext, - logEntryCategoriesCountJobId, - categoryIds, - histograms - ); - - const topLogEntryCategoriesSpan = finalizeTopLogEntryCategoriesSpan(); - - return { - data: topLogEntryCategories.map((topCategory) => ({ - ...topCategory, - regularExpression: logEntryCategoriesById[topCategory.categoryId]?._source.regex ?? '', - histograms: categoryHistogramsById[topCategory.categoryId] ?? [], - })), - timing: { - spans: [ - topLogEntryCategoriesSpan, - ...fetchTopLogEntryCategoriesAggSpans, - ...fetchTopLogEntryCategoryPatternsSpans, - ...fetchTopLogEntryCategoryHistogramsSpans, - ], - }, - }; - } - - public async getLogEntryCategoryDatasets( - requestContext: RequestHandlerContext, - request: KibanaRequest, - sourceId: string, - startTime: number, - endTime: number - ) { - const finalizeLogEntryDatasetsSpan = startTracingSpan('get data sets'); - - const logEntryCategoriesCountJobId = getJobId( - this.libs.framework.getSpaceId(request), - sourceId, - logEntryCategoriesJobTypes[0] - ); - - let logEntryDatasetBuckets: LogEntryDatasetBucket[] = []; - let afterLatestBatchKey: CompositeDatasetKey | undefined; - let esSearchSpans: TracingSpan[] = []; - - while (true) { - const finalizeEsSearchSpan = startTracingSpan('fetch category dataset batch from ES'); - - const logEntryDatasetsResponse = decodeOrThrow(logEntryDatasetsResponseRT)( - await this.libs.framework.callWithRequest( - requestContext, - 'search', - createLogEntryDatasetsQuery( - logEntryCategoriesCountJobId, - startTime, - endTime, - COMPOSITE_AGGREGATION_BATCH_SIZE, - afterLatestBatchKey - ) - ) - ); - - if (logEntryDatasetsResponse._shards.total === 0) { - throw new NoLogAnalysisResultsIndexError( - `Failed to find ml result index for job ${logEntryCategoriesCountJobId}.` - ); - } - - const { - after_key: afterKey, - buckets: latestBatchBuckets, - } = logEntryDatasetsResponse.aggregations.dataset_buckets; - - logEntryDatasetBuckets = [...logEntryDatasetBuckets, ...latestBatchBuckets]; - afterLatestBatchKey = afterKey; - esSearchSpans = [...esSearchSpans, finalizeEsSearchSpan()]; - - if (latestBatchBuckets.length < COMPOSITE_AGGREGATION_BATCH_SIZE) { - break; - } - } - - const logEntryDatasetsSpan = finalizeLogEntryDatasetsSpan(); - - return { - data: logEntryDatasetBuckets.map( - (logEntryDatasetBucket) => logEntryDatasetBucket.key.dataset - ), - timing: { - spans: [logEntryDatasetsSpan, ...esSearchSpans], - }, +export async function getTopLogEntryCategories( + context: { + infra: { + mlSystem: MlSystem; + spaceId: string; }; - } - - public async getLogEntryCategoryExamples( - requestContext: RequestHandlerContext, - request: KibanaRequest, - sourceId: string, - startTime: number, - endTime: number, - categoryId: number, - exampleCount: number - ) { - const finalizeLogEntryCategoryExamplesSpan = startTracingSpan( - 'get category example log entries' - ); - - const logEntryCategoriesCountJobId = getJobId( - this.libs.framework.getSpaceId(request), - sourceId, - logEntryCategoriesJobTypes[0] - ); - - const { - mlJob, - timing: { spans: fetchMlJobSpans }, - } = 
await this.fetchMlJob(requestContext, logEntryCategoriesCountJobId); - - const customSettings = decodeOrThrow(jobCustomSettingsRT)(mlJob.custom_settings); - const indices = customSettings?.logs_source_config?.indexPattern; - const timestampField = customSettings?.logs_source_config?.timestampField; - - if (indices == null || timestampField == null) { - throw new InsufficientLogAnalysisMlJobConfigurationError( - `Failed to find index configuration for ml job ${logEntryCategoriesCountJobId}` - ); - } - - const { - logEntryCategoriesById, - timing: { spans: fetchLogEntryCategoriesSpans }, - } = await this.fetchLogEntryCategories(requestContext, logEntryCategoriesCountJobId, [ - categoryId, - ]); - const category = logEntryCategoriesById[categoryId]; - - if (category == null) { - throw new UnknownCategoryError(categoryId); - } - - const { - examples, - timing: { spans: fetchLogEntryCategoryExamplesSpans }, - } = await this.fetchLogEntryCategoryExamples( - requestContext, - indices, - timestampField, - startTime, - endTime, - category._source.terms, - exampleCount - ); - - const logEntryCategoryExamplesSpan = finalizeLogEntryCategoryExamplesSpan(); + }, + sourceId: string, + startTime: number, + endTime: number, + categoryCount: number, + datasets: string[], + histograms: HistogramParameters[] +) { + const finalizeTopLogEntryCategoriesSpan = startTracingSpan('get top categories'); + + const logEntryCategoriesCountJobId = getJobId( + context.infra.spaceId, + sourceId, + logEntryCategoriesJobTypes[0] + ); + + const { + topLogEntryCategories, + timing: { spans: fetchTopLogEntryCategoriesAggSpans }, + } = await fetchTopLogEntryCategories( + context, + logEntryCategoriesCountJobId, + startTime, + endTime, + categoryCount, + datasets + ); + + const categoryIds = topLogEntryCategories.map(({ categoryId }) => categoryId); + + const { + logEntryCategoriesById, + timing: { spans: fetchTopLogEntryCategoryPatternsSpans }, + } = await fetchLogEntryCategories(context, logEntryCategoriesCountJobId, categoryIds); + + const { + categoryHistogramsById, + timing: { spans: fetchTopLogEntryCategoryHistogramsSpans }, + } = await fetchTopLogEntryCategoryHistograms( + context, + logEntryCategoriesCountJobId, + categoryIds, + histograms + ); + + const topLogEntryCategoriesSpan = finalizeTopLogEntryCategoriesSpan(); + + return { + data: topLogEntryCategories.map((topCategory) => ({ + ...topCategory, + regularExpression: logEntryCategoriesById[topCategory.categoryId]?._source.regex ?? '', + histograms: categoryHistogramsById[topCategory.categoryId] ?? 
[], + })), + timing: { + spans: [ + topLogEntryCategoriesSpan, + ...fetchTopLogEntryCategoriesAggSpans, + ...fetchTopLogEntryCategoryPatternsSpans, + ...fetchTopLogEntryCategoryHistogramsSpans, + ], + }, + }; +} - return { - data: examples, - timing: { - spans: [ - logEntryCategoryExamplesSpan, - ...fetchMlJobSpans, - ...fetchLogEntryCategoriesSpans, - ...fetchLogEntryCategoryExamplesSpans, - ], - }, +export async function getLogEntryCategoryDatasets( + context: { + infra: { + mlSystem: MlSystem; + spaceId: string; }; - } - - private async fetchTopLogEntryCategories( - requestContext: RequestHandlerContext, - logEntryCategoriesCountJobId: string, - startTime: number, - endTime: number, - categoryCount: number, - datasets: string[] - ) { - const finalizeEsSearchSpan = startTracingSpan('Fetch top categories from ES'); - - const topLogEntryCategoriesResponse = decodeOrThrow(topLogEntryCategoriesResponseRT)( - await this.libs.framework.callWithRequest( - requestContext, - 'search', - createTopLogEntryCategoriesQuery( + }, + sourceId: string, + startTime: number, + endTime: number +) { + const finalizeLogEntryDatasetsSpan = startTracingSpan('get data sets'); + + const logEntryCategoriesCountJobId = getJobId( + context.infra.spaceId, + sourceId, + logEntryCategoriesJobTypes[0] + ); + + let logEntryDatasetBuckets: LogEntryDatasetBucket[] = []; + let afterLatestBatchKey: CompositeDatasetKey | undefined; + let esSearchSpans: TracingSpan[] = []; + + while (true) { + const finalizeEsSearchSpan = startTracingSpan('fetch category dataset batch from ES'); + + const logEntryDatasetsResponse = decodeOrThrow(logEntryDatasetsResponseRT)( + await context.infra.mlSystem.mlAnomalySearch( + createLogEntryDatasetsQuery( logEntryCategoriesCountJobId, startTime, endTime, - categoryCount, - datasets + COMPOSITE_AGGREGATION_BATCH_SIZE, + afterLatestBatchKey ) ) ); - const esSearchSpan = finalizeEsSearchSpan(); - - if (topLogEntryCategoriesResponse._shards.total === 0) { + if (logEntryDatasetsResponse._shards.total === 0) { throw new NoLogAnalysisResultsIndexError( `Failed to find ml result index for job ${logEntryCategoriesCountJobId}.` ); } - const topLogEntryCategories = topLogEntryCategoriesResponse.aggregations.terms_category_id.buckets.map( - (topCategoryBucket) => { - const maximumAnomalyScoresByDataset = topCategoryBucket.filter_record.terms_dataset.buckets.reduce< - Record - >( - (accumulatedMaximumAnomalyScores, datasetFromRecord) => ({ - ...accumulatedMaximumAnomalyScores, - [datasetFromRecord.key]: datasetFromRecord.maximum_record_score.value ?? 0, - }), - {} - ); - - return { - categoryId: parseCategoryId(topCategoryBucket.key), - logEntryCount: topCategoryBucket.filter_model_plot.sum_actual.value ?? 0, - datasets: topCategoryBucket.filter_model_plot.terms_dataset.buckets - .map((datasetBucket) => ({ - name: datasetBucket.key, - maximumAnomalyScore: maximumAnomalyScoresByDataset[datasetBucket.key] ?? 0, - })) - .sort(compareDatasetsByMaximumAnomalyScore) - .reverse(), - maximumAnomalyScore: topCategoryBucket.filter_record.maximum_record_score.value ?? 
0, - }; - } - ); + const { + after_key: afterKey, + buckets: latestBatchBuckets, + } = logEntryDatasetsResponse.aggregations.dataset_buckets; - return { - topLogEntryCategories, - timing: { - spans: [esSearchSpan], - }, + logEntryDatasetBuckets = [...logEntryDatasetBuckets, ...latestBatchBuckets]; + afterLatestBatchKey = afterKey; + esSearchSpans = [...esSearchSpans, finalizeEsSearchSpan()]; + + if (latestBatchBuckets.length < COMPOSITE_AGGREGATION_BATCH_SIZE) { + break; + } + } + + const logEntryDatasetsSpan = finalizeLogEntryDatasetsSpan(); + + return { + data: logEntryDatasetBuckets.map((logEntryDatasetBucket) => logEntryDatasetBucket.key.dataset), + timing: { + spans: [logEntryDatasetsSpan, ...esSearchSpans], + }, + }; +} + +export async function getLogEntryCategoryExamples( + context: { + core: { elasticsearch: { legacy: { client: IScopedClusterClient } } }; + infra: { + mlAnomalyDetectors: MlAnomalyDetectors; + mlSystem: MlSystem; + spaceId: string; }; + }, + sourceId: string, + startTime: number, + endTime: number, + categoryId: number, + exampleCount: number +) { + const finalizeLogEntryCategoryExamplesSpan = startTracingSpan('get category example log entries'); + + const logEntryCategoriesCountJobId = getJobId( + context.infra.spaceId, + sourceId, + logEntryCategoriesJobTypes[0] + ); + + const { + mlJob, + timing: { spans: fetchMlJobSpans }, + } = await fetchMlJob(context, logEntryCategoriesCountJobId); + + const customSettings = decodeOrThrow(jobCustomSettingsRT)(mlJob.custom_settings); + const indices = customSettings?.logs_source_config?.indexPattern; + const timestampField = customSettings?.logs_source_config?.timestampField; + + if (indices == null || timestampField == null) { + throw new InsufficientLogAnalysisMlJobConfigurationError( + `Failed to find index configuration for ml job ${logEntryCategoriesCountJobId}` + ); } - private async fetchLogEntryCategories( - requestContext: RequestHandlerContext, - logEntryCategoriesCountJobId: string, - categoryIds: number[] - ) { - if (categoryIds.length === 0) { - return { - logEntryCategoriesById: {}, - timing: { spans: [] }, - }; - } + const { + logEntryCategoriesById, + timing: { spans: fetchLogEntryCategoriesSpans }, + } = await fetchLogEntryCategories(context, logEntryCategoriesCountJobId, [categoryId]); + const category = logEntryCategoriesById[categoryId]; + + if (category == null) { + throw new UnknownCategoryError(categoryId); + } - const finalizeEsSearchSpan = startTracingSpan('Fetch category patterns from ES'); + const { + examples, + timing: { spans: fetchLogEntryCategoryExamplesSpans }, + } = await fetchLogEntryCategoryExamples( + context, + indices, + timestampField, + startTime, + endTime, + category._source.terms, + exampleCount + ); + + const logEntryCategoryExamplesSpan = finalizeLogEntryCategoryExamplesSpan(); + + return { + data: examples, + timing: { + spans: [ + logEntryCategoryExamplesSpan, + ...fetchMlJobSpans, + ...fetchLogEntryCategoriesSpans, + ...fetchLogEntryCategoryExamplesSpans, + ], + }, + }; +} - const logEntryCategoriesResponse = decodeOrThrow(logEntryCategoriesResponseRT)( - await this.libs.framework.callWithRequest( - requestContext, - 'search', - createLogEntryCategoriesQuery(logEntryCategoriesCountJobId, categoryIds) +async function fetchTopLogEntryCategories( + context: { infra: { mlSystem: MlSystem } }, + logEntryCategoriesCountJobId: string, + startTime: number, + endTime: number, + categoryCount: number, + datasets: string[] +) { + const finalizeEsSearchSpan = startTracingSpan('Fetch top 
categories from ES'); + + const topLogEntryCategoriesResponse = decodeOrThrow(topLogEntryCategoriesResponseRT)( + await context.infra.mlSystem.mlAnomalySearch( + createTopLogEntryCategoriesQuery( + logEntryCategoriesCountJobId, + startTime, + endTime, + categoryCount, + datasets ) - ); + ) + ); - const esSearchSpan = finalizeEsSearchSpan(); + const esSearchSpan = finalizeEsSearchSpan(); - const logEntryCategoriesById = logEntryCategoriesResponse.hits.hits.reduce< - Record - >( - (accumulatedCategoriesById, categoryHit) => ({ - ...accumulatedCategoriesById, - [categoryHit._source.category_id]: categoryHit, - }), - {} + if (topLogEntryCategoriesResponse._shards.total === 0) { + throw new NoLogAnalysisResultsIndexError( + `Failed to find ml result index for job ${logEntryCategoriesCountJobId}.` ); - - return { - logEntryCategoriesById, - timing: { - spans: [esSearchSpan], - }, - }; } - private async fetchTopLogEntryCategoryHistograms( - requestContext: RequestHandlerContext, - logEntryCategoriesCountJobId: string, - categoryIds: number[], - histograms: HistogramParameters[] - ) { - if (categoryIds.length === 0 || histograms.length === 0) { + const topLogEntryCategories = topLogEntryCategoriesResponse.aggregations.terms_category_id.buckets.map( + (topCategoryBucket) => { + const maximumAnomalyScoresByDataset = topCategoryBucket.filter_record.terms_dataset.buckets.reduce< + Record + >( + (accumulatedMaximumAnomalyScores, datasetFromRecord) => ({ + ...accumulatedMaximumAnomalyScores, + [datasetFromRecord.key]: datasetFromRecord.maximum_record_score.value ?? 0, + }), + {} + ); + return { - categoryHistogramsById: {}, - timing: { spans: [] }, + categoryId: parseCategoryId(topCategoryBucket.key), + logEntryCount: topCategoryBucket.filter_model_plot.sum_actual.value ?? 0, + datasets: topCategoryBucket.filter_model_plot.terms_dataset.buckets + .map((datasetBucket) => ({ + name: datasetBucket.key, + maximumAnomalyScore: maximumAnomalyScoresByDataset[datasetBucket.key] ?? 0, + })) + .sort(compareDatasetsByMaximumAnomalyScore) + .reverse(), + maximumAnomalyScore: topCategoryBucket.filter_record.maximum_record_score.value ?? 0, }; } + ); + + return { + topLogEntryCategories, + timing: { + spans: [esSearchSpan], + }, + }; +} - const finalizeEsSearchSpan = startTracingSpan('Fetch category histograms from ES'); - - const categoryHistogramsReponses = await Promise.all( - histograms.map(({ bucketCount, endTime, id: histogramId, startTime }) => - this.libs.framework - .callWithRequest( - requestContext, - 'search', - createLogEntryCategoryHistogramsQuery( - logEntryCategoriesCountJobId, - categoryIds, - startTime, - endTime, - bucketCount - ) - ) - .then(decodeOrThrow(logEntryCategoryHistogramsResponseRT)) - .then((response) => ({ - histogramId, - histogramBuckets: response.aggregations.filters_categories.buckets, - })) - ) - ); - - const esSearchSpan = finalizeEsSearchSpan(); - - const categoryHistogramsById = Object.values(categoryHistogramsReponses).reduce< - Record< - number, - Array<{ - histogramId: string; - buckets: Array<{ - bucketDuration: number; - logEntryCount: number; - startTime: number; - }>; - }> - > - >( - (outerAccumulatedHistograms, { histogramId, histogramBuckets }) => - Object.entries(histogramBuckets).reduce( - (innerAccumulatedHistograms, [categoryBucketKey, categoryBucket]) => { - const categoryId = parseCategoryId(categoryBucketKey); - return { - ...innerAccumulatedHistograms, - [categoryId]: [ - ...(innerAccumulatedHistograms[categoryId] ?? 
[]), - { - histogramId, - buckets: categoryBucket.histogram_timestamp.buckets.map((bucket) => ({ - bucketDuration: categoryBucket.histogram_timestamp.meta.bucketDuration, - logEntryCount: bucket.sum_actual.value, - startTime: bucket.key, - })), - }, - ], - }; - }, - outerAccumulatedHistograms - ), - {} - ); - +async function fetchLogEntryCategories( + context: { infra: { mlSystem: MlSystem } }, + logEntryCategoriesCountJobId: string, + categoryIds: number[] +) { + if (categoryIds.length === 0) { return { - categoryHistogramsById, - timing: { - spans: [esSearchSpan], - }, + logEntryCategoriesById: {}, + timing: { spans: [] }, }; } - private async fetchMlJob( - requestContext: RequestHandlerContext, - logEntryCategoriesCountJobId: string - ) { - const finalizeMlGetJobSpan = startTracingSpan('Fetch ml job from ES'); - - const { - jobs: [mlJob], - } = decodeOrThrow(mlJobsResponseRT)( - await this.libs.framework.callWithRequest( - requestContext, - 'transport.request', - createMlJobsQuery([logEntryCategoriesCountJobId]) - ) - ); - - const mlGetJobSpan = finalizeMlGetJobSpan(); - - if (mlJob == null) { - throw new NoLogAnalysisMlJobError(`Failed to find ml job ${logEntryCategoriesCountJobId}.`); - } + const finalizeEsSearchSpan = startTracingSpan('Fetch category patterns from ES'); + + const logEntryCategoriesResponse = decodeOrThrow(logEntryCategoriesResponseRT)( + await context.infra.mlSystem.mlAnomalySearch( + createLogEntryCategoriesQuery(logEntryCategoriesCountJobId, categoryIds) + ) + ); + + const esSearchSpan = finalizeEsSearchSpan(); + + const logEntryCategoriesById = logEntryCategoriesResponse.hits.hits.reduce< + Record + >( + (accumulatedCategoriesById, categoryHit) => ({ + ...accumulatedCategoriesById, + [categoryHit._source.category_id]: categoryHit, + }), + {} + ); + + return { + logEntryCategoriesById, + timing: { + spans: [esSearchSpan], + }, + }; +} +async function fetchTopLogEntryCategoryHistograms( + context: { infra: { mlSystem: MlSystem } }, + logEntryCategoriesCountJobId: string, + categoryIds: number[], + histograms: HistogramParameters[] +) { + if (categoryIds.length === 0 || histograms.length === 0) { return { - mlJob, - timing: { - spans: [mlGetJobSpan], - }, + categoryHistogramsById: {}, + timing: { spans: [] }, }; } - private async fetchLogEntryCategoryExamples( - requestContext: RequestHandlerContext, - indices: string, - timestampField: string, - startTime: number, - endTime: number, - categoryQuery: string, - exampleCount: number - ) { - const finalizeEsSearchSpan = startTracingSpan('Fetch examples from ES'); + const finalizeEsSearchSpan = startTracingSpan('Fetch category histograms from ES'); - const { - hits: { hits }, - } = decodeOrThrow(logEntryCategoryExamplesResponseRT)( - await this.libs.framework.callWithRequest( - requestContext, - 'search', - createLogEntryCategoryExamplesQuery( - indices, - timestampField, - startTime, - endTime, - categoryQuery, - exampleCount + const categoryHistogramsReponses = await Promise.all( + histograms.map(({ bucketCount, endTime, id: histogramId, startTime }) => + context.infra.mlSystem + .mlAnomalySearch( + createLogEntryCategoryHistogramsQuery( + logEntryCategoriesCountJobId, + categoryIds, + startTime, + endTime, + bucketCount + ) ) - ) - ); + .then(decodeOrThrow(logEntryCategoryHistogramsResponseRT)) + .then((response) => ({ + histogramId, + histogramBuckets: response.aggregations.filters_categories.buckets, + })) + ) + ); + + const esSearchSpan = finalizeEsSearchSpan(); + + const categoryHistogramsById = 
Object.values(categoryHistogramsReponses).reduce< + Record< + number, + Array<{ + histogramId: string; + buckets: Array<{ + bucketDuration: number; + logEntryCount: number; + startTime: number; + }>; + }> + > + >( + (outerAccumulatedHistograms, { histogramId, histogramBuckets }) => + Object.entries(histogramBuckets).reduce( + (innerAccumulatedHistograms, [categoryBucketKey, categoryBucket]) => { + const categoryId = parseCategoryId(categoryBucketKey); + return { + ...innerAccumulatedHistograms, + [categoryId]: [ + ...(innerAccumulatedHistograms[categoryId] ?? []), + { + histogramId, + buckets: categoryBucket.histogram_timestamp.buckets.map((bucket) => ({ + bucketDuration: categoryBucket.histogram_timestamp.meta.bucketDuration, + logEntryCount: bucket.sum_actual.value, + startTime: bucket.key, + })), + }, + ], + }; + }, + outerAccumulatedHistograms + ), + {} + ); + + return { + categoryHistogramsById, + timing: { + spans: [esSearchSpan], + }, + }; +} - const esSearchSpan = finalizeEsSearchSpan(); +async function fetchMlJob( + context: { infra: { mlAnomalyDetectors: MlAnomalyDetectors } }, + logEntryCategoriesCountJobId: string +) { + const finalizeMlGetJobSpan = startTracingSpan('Fetch ml job from ES'); - return { - examples: hits.map((hit) => ({ - dataset: hit._source.event?.dataset ?? '', - message: hit._source.message ?? '', - timestamp: hit.sort[0], - })), - timing: { - spans: [esSearchSpan], - }, - }; + const { + jobs: [mlJob], + } = await context.infra.mlAnomalyDetectors.jobs(logEntryCategoriesCountJobId); + + const mlGetJobSpan = finalizeMlGetJobSpan(); + + if (mlJob == null) { + throw new NoLogAnalysisMlJobError(`Failed to find ml job ${logEntryCategoriesCountJobId}.`); } + + return { + mlJob, + timing: { + spans: [mlGetJobSpan], + }, + }; +} + +async function fetchLogEntryCategoryExamples( + requestContext: { core: { elasticsearch: { legacy: { client: IScopedClusterClient } } } }, + indices: string, + timestampField: string, + startTime: number, + endTime: number, + categoryQuery: string, + exampleCount: number +) { + const finalizeEsSearchSpan = startTracingSpan('Fetch examples from ES'); + + const { + hits: { hits }, + } = decodeOrThrow(logEntryCategoryExamplesResponseRT)( + await requestContext.core.elasticsearch.legacy.client.callAsCurrentUser( + 'search', + createLogEntryCategoryExamplesQuery( + indices, + timestampField, + startTime, + endTime, + categoryQuery, + exampleCount + ) + ) + ); + + const esSearchSpan = finalizeEsSearchSpan(); + + return { + examples: hits.map((hit) => ({ + dataset: hit._source.event?.dataset ?? '', + message: hit._source.message ?? 
'', + timestamp: hit.sort[0], + })), + timing: { + spans: [esSearchSpan], + }, + }; } const parseCategoryId = (rawCategoryId: string) => parseInt(rawCategoryId, 10); diff --git a/x-pack/plugins/infra/server/lib/log_analysis/log_entry_rate_analysis.ts b/x-pack/plugins/infra/server/lib/log_analysis/log_entry_rate_analysis.ts index 28c167484197..125cc2b196e0 100644 --- a/x-pack/plugins/infra/server/lib/log_analysis/log_entry_rate_analysis.ts +++ b/x-pack/plugins/infra/server/lib/log_analysis/log_entry_rate_analysis.ts @@ -7,10 +7,8 @@ import { pipe } from 'fp-ts/lib/pipeable'; import { map, fold } from 'fp-ts/lib/Either'; import { identity } from 'fp-ts/lib/function'; -import { RequestHandlerContext, KibanaRequest } from 'src/core/server'; import { getJobId } from '../../../common/log_analysis'; import { throwErrors, createPlainError } from '../../../common/runtime_types'; -import { KibanaFramework } from '../adapters/framework/kibana_framework_adapter'; import { NoLogAnalysisResultsIndexError } from './errors'; import { logRateModelPlotResponseRT, @@ -18,126 +16,114 @@ import { LogRateModelPlotBucket, CompositeTimestampPartitionKey, } from './queries'; +import { MlSystem } from '../../types'; const COMPOSITE_AGGREGATION_BATCH_SIZE = 1000; -export class LogEntryRateAnalysis { - constructor( - private readonly libs: { - framework: KibanaFramework; - } - ) {} - - public getJobIds(request: KibanaRequest, sourceId: string) { - return { - logEntryRate: getJobId(this.libs.framework.getSpaceId(request), sourceId, 'log-entry-rate'), +export async function getLogEntryRateBuckets( + context: { + infra: { + mlSystem: MlSystem; + spaceId: string; }; - } + }, + sourceId: string, + startTime: number, + endTime: number, + bucketDuration: number +) { + const logRateJobId = getJobId(context.infra.spaceId, sourceId, 'log-entry-rate'); + let mlModelPlotBuckets: LogRateModelPlotBucket[] = []; + let afterLatestBatchKey: CompositeTimestampPartitionKey | undefined; - public async getLogEntryRateBuckets( - requestContext: RequestHandlerContext, - request: KibanaRequest, - sourceId: string, - startTime: number, - endTime: number, - bucketDuration: number - ) { - const logRateJobId = this.getJobIds(request, sourceId).logEntryRate; - let mlModelPlotBuckets: LogRateModelPlotBucket[] = []; - let afterLatestBatchKey: CompositeTimestampPartitionKey | undefined; + while (true) { + const mlModelPlotResponse = await context.infra.mlSystem.mlAnomalySearch( + createLogEntryRateQuery( + logRateJobId, + startTime, + endTime, + bucketDuration, + COMPOSITE_AGGREGATION_BATCH_SIZE, + afterLatestBatchKey + ) + ); - while (true) { - const mlModelPlotResponse = await this.libs.framework.callWithRequest( - requestContext, - 'search', - createLogEntryRateQuery( - logRateJobId, - startTime, - endTime, - bucketDuration, - COMPOSITE_AGGREGATION_BATCH_SIZE, - afterLatestBatchKey - ) + if (mlModelPlotResponse._shards.total === 0) { + throw new NoLogAnalysisResultsIndexError( + `Failed to query ml result index for job ${logRateJobId}.` ); + } - if (mlModelPlotResponse._shards.total === 0) { - throw new NoLogAnalysisResultsIndexError( - `Failed to find ml result index for job ${logRateJobId}.` - ); - } - - const { after_key: afterKey, buckets: latestBatchBuckets } = pipe( - logRateModelPlotResponseRT.decode(mlModelPlotResponse), - map((response) => response.aggregations.timestamp_partition_buckets), - fold(throwErrors(createPlainError), identity) - ); + const { after_key: afterKey, buckets: latestBatchBuckets } = pipe( + 
logRateModelPlotResponseRT.decode(mlModelPlotResponse), + map((response) => response.aggregations.timestamp_partition_buckets), + fold(throwErrors(createPlainError), identity) + ); - mlModelPlotBuckets = [...mlModelPlotBuckets, ...latestBatchBuckets]; - afterLatestBatchKey = afterKey; + mlModelPlotBuckets = [...mlModelPlotBuckets, ...latestBatchBuckets]; + afterLatestBatchKey = afterKey; - if (latestBatchBuckets.length < COMPOSITE_AGGREGATION_BATCH_SIZE) { - break; - } + if (latestBatchBuckets.length < COMPOSITE_AGGREGATION_BATCH_SIZE) { + break; } + } - return mlModelPlotBuckets.reduce< - Array<{ - partitions: Array<{ - analysisBucketCount: number; - anomalies: Array<{ - actualLogEntryRate: number; - anomalyScore: number; - duration: number; - startTime: number; - typicalLogEntryRate: number; - }>; - averageActualLogEntryRate: number; - maximumAnomalyScore: number; - numberOfLogEntries: number; - partitionId: string; + return mlModelPlotBuckets.reduce< + Array<{ + partitions: Array<{ + analysisBucketCount: number; + anomalies: Array<{ + actualLogEntryRate: number; + anomalyScore: number; + duration: number; + startTime: number; + typicalLogEntryRate: number; }>; - startTime: number; - }> - >((histogramBuckets, timestampPartitionBucket) => { - const previousHistogramBucket = histogramBuckets[histogramBuckets.length - 1]; - const partition = { - analysisBucketCount: timestampPartitionBucket.filter_model_plot.doc_count, - anomalies: timestampPartitionBucket.filter_records.top_hits_record.hits.hits.map( - ({ _source: record }) => ({ - actualLogEntryRate: record.actual[0], - anomalyScore: record.record_score, - duration: record.bucket_span * 1000, - startTime: record.timestamp, - typicalLogEntryRate: record.typical[0], - }) - ), - averageActualLogEntryRate: - timestampPartitionBucket.filter_model_plot.average_actual.value || 0, - maximumAnomalyScore: - timestampPartitionBucket.filter_records.maximum_record_score.value || 0, - numberOfLogEntries: timestampPartitionBucket.filter_model_plot.sum_actual.value || 0, - partitionId: timestampPartitionBucket.key.partition, - }; - if ( - previousHistogramBucket && - previousHistogramBucket.startTime === timestampPartitionBucket.key.timestamp - ) { - return [ - ...histogramBuckets.slice(0, -1), - { - ...previousHistogramBucket, - partitions: [...previousHistogramBucket.partitions, partition], - }, - ]; - } else { - return [ - ...histogramBuckets, - { - partitions: [partition], - startTime: timestampPartitionBucket.key.timestamp, - }, - ]; - } - }, []); - } + averageActualLogEntryRate: number; + maximumAnomalyScore: number; + numberOfLogEntries: number; + partitionId: string; + }>; + startTime: number; + }> + >((histogramBuckets, timestampPartitionBucket) => { + const previousHistogramBucket = histogramBuckets[histogramBuckets.length - 1]; + const partition = { + analysisBucketCount: timestampPartitionBucket.filter_model_plot.doc_count, + anomalies: timestampPartitionBucket.filter_records.top_hits_record.hits.hits.map( + ({ _source: record }) => ({ + actualLogEntryRate: record.actual[0], + anomalyScore: record.record_score, + duration: record.bucket_span * 1000, + startTime: record.timestamp, + typicalLogEntryRate: record.typical[0], + }) + ), + averageActualLogEntryRate: + timestampPartitionBucket.filter_model_plot.average_actual.value || 0, + maximumAnomalyScore: timestampPartitionBucket.filter_records.maximum_record_score.value || 0, + numberOfLogEntries: timestampPartitionBucket.filter_model_plot.sum_actual.value || 0, + partitionId: 
timestampPartitionBucket.key.partition, + }; + if ( + previousHistogramBucket && + previousHistogramBucket.startTime === timestampPartitionBucket.key.timestamp + ) { + return [ + ...histogramBuckets.slice(0, -1), + { + ...previousHistogramBucket, + partitions: [...previousHistogramBucket.partitions, partition], + }, + ]; + } else { + return [ + ...histogramBuckets, + { + partitions: [partition], + startTime: timestampPartitionBucket.key.timestamp, + }, + ]; + } + }, []); } diff --git a/x-pack/plugins/infra/server/lib/log_analysis/queries/common.ts b/x-pack/plugins/infra/server/lib/log_analysis/queries/common.ts index f1e68d34fdae..eacf29b303db 100644 --- a/x-pack/plugins/infra/server/lib/log_analysis/queries/common.ts +++ b/x-pack/plugins/infra/server/lib/log_analysis/queries/common.ts @@ -4,10 +4,6 @@ * you may not use this file except in compliance with the Elastic License. */ -const ML_ANOMALY_INDEX_PREFIX = '.ml-anomalies-'; - -export const getMlResultIndex = (jobId: string) => `${ML_ANOMALY_INDEX_PREFIX}${jobId}`; - export const defaultRequestParameters = { allowNoIndices: true, ignoreUnavailable: true, @@ -15,6 +11,16 @@ export const defaultRequestParameters = { trackTotalHits: false, }; +export const createJobIdFilters = (jobId: string) => [ + { + term: { + job_id: { + value: jobId, + }, + }, + }, +]; + export const createTimeRangeFilters = (startTime: number, endTime: number) => [ { range: { @@ -26,12 +32,10 @@ export const createTimeRangeFilters = (startTime: number, endTime: number) => [ }, ]; -export const createResultTypeFilters = (resultType: 'model_plot' | 'record') => [ +export const createResultTypeFilters = (resultTypes: Array<'model_plot' | 'record'>) => [ { - term: { - result_type: { - value: resultType, - }, + terms: { + result_type: resultTypes, }, }, ]; diff --git a/x-pack/plugins/infra/server/lib/log_analysis/queries/log_entry_categories.ts b/x-pack/plugins/infra/server/lib/log_analysis/queries/log_entry_categories.ts index 2681a4c037f5..c7ad60eeaabc 100644 --- a/x-pack/plugins/infra/server/lib/log_analysis/queries/log_entry_categories.ts +++ b/x-pack/plugins/infra/server/lib/log_analysis/queries/log_entry_categories.ts @@ -5,9 +5,8 @@ */ import * as rt from 'io-ts'; - import { commonSearchSuccessResponseFieldsRT } from '../../../utils/elasticsearch_runtime_types'; -import { defaultRequestParameters, getMlResultIndex, createCategoryIdFilters } from './common'; +import { createCategoryIdFilters, createJobIdFilters, defaultRequestParameters } from './common'; export const createLogEntryCategoriesQuery = ( logEntryCategoriesJobId: string, @@ -17,12 +16,14 @@ export const createLogEntryCategoriesQuery = ( body: { query: { bool: { - filter: [...createCategoryIdFilters(categoryIds)], + filter: [ + ...createJobIdFilters(logEntryCategoriesJobId), + ...createCategoryIdFilters(categoryIds), + ], }, }, _source: ['category_id', 'regex', 'terms'], }, - index: getMlResultIndex(logEntryCategoriesJobId), size: categoryIds.length, }); diff --git a/x-pack/plugins/infra/server/lib/log_analysis/queries/log_entry_category_histograms.ts b/x-pack/plugins/infra/server/lib/log_analysis/queries/log_entry_category_histograms.ts index 67087f3b4775..5fdafb512325 100644 --- a/x-pack/plugins/infra/server/lib/log_analysis/queries/log_entry_category_histograms.ts +++ b/x-pack/plugins/infra/server/lib/log_analysis/queries/log_entry_category_histograms.ts @@ -5,13 +5,12 @@ */ import * as rt from 'io-ts'; - import { commonSearchSuccessResponseFieldsRT } from 
'../../../utils/elasticsearch_runtime_types'; import { + createJobIdFilters, createResultTypeFilters, createTimeRangeFilters, defaultRequestParameters, - getMlResultIndex, } from './common'; export const createLogEntryCategoryHistogramsQuery = ( @@ -26,8 +25,9 @@ export const createLogEntryCategoryHistogramsQuery = ( query: { bool: { filter: [ + ...createJobIdFilters(logEntryCategoriesJobId), ...createTimeRangeFilters(startTime, endTime), - ...createResultTypeFilters('model_plot'), + ...createResultTypeFilters(['model_plot']), ...createCategoryFilters(categoryIds), ], }, @@ -41,7 +41,6 @@ export const createLogEntryCategoryHistogramsQuery = ( }, }, }, - index: getMlResultIndex(logEntryCategoriesJobId), size: 0, }); diff --git a/x-pack/plugins/infra/server/lib/log_analysis/queries/log_entry_data_sets.ts b/x-pack/plugins/infra/server/lib/log_analysis/queries/log_entry_data_sets.ts index b41a21a21b6a..dd22bedae8b2 100644 --- a/x-pack/plugins/infra/server/lib/log_analysis/queries/log_entry_data_sets.ts +++ b/x-pack/plugins/infra/server/lib/log_analysis/queries/log_entry_data_sets.ts @@ -5,9 +5,13 @@ */ import * as rt from 'io-ts'; - import { commonSearchSuccessResponseFieldsRT } from '../../../utils/elasticsearch_runtime_types'; -import { defaultRequestParameters, getMlResultIndex } from './common'; +import { + createJobIdFilters, + createResultTypeFilters, + createTimeRangeFilters, + defaultRequestParameters, +} from './common'; export const createLogEntryDatasetsQuery = ( logEntryAnalysisJobId: string, @@ -21,21 +25,9 @@ export const createLogEntryDatasetsQuery = ( query: { bool: { filter: [ - { - range: { - timestamp: { - gte: startTime, - lt: endTime, - }, - }, - }, - { - term: { - result_type: { - value: 'model_plot', - }, - }, - }, + ...createJobIdFilters(logEntryAnalysisJobId), + ...createTimeRangeFilters(startTime, endTime), + ...createResultTypeFilters(['model_plot']), ], }, }, @@ -58,7 +50,6 @@ export const createLogEntryDatasetsQuery = ( }, }, }, - index: getMlResultIndex(logEntryAnalysisJobId), size: 0, }); diff --git a/x-pack/plugins/infra/server/lib/log_analysis/queries/log_entry_rate.ts b/x-pack/plugins/infra/server/lib/log_analysis/queries/log_entry_rate.ts index def7caf578b9..269850e29263 100644 --- a/x-pack/plugins/infra/server/lib/log_analysis/queries/log_entry_rate.ts +++ b/x-pack/plugins/infra/server/lib/log_analysis/queries/log_entry_rate.ts @@ -5,8 +5,12 @@ */ import * as rt from 'io-ts'; - -import { defaultRequestParameters, getMlResultIndex } from './common'; +import { + createJobIdFilters, + createResultTypeFilters, + createTimeRangeFilters, + defaultRequestParameters, +} from './common'; export const createLogEntryRateQuery = ( logRateJobId: string, @@ -21,19 +25,9 @@ export const createLogEntryRateQuery = ( query: { bool: { filter: [ - { - range: { - timestamp: { - gte: startTime, - lt: endTime, - }, - }, - }, - { - terms: { - result_type: ['model_plot', 'record'], - }, - }, + ...createJobIdFilters(logRateJobId), + ...createTimeRangeFilters(startTime, endTime), + ...createResultTypeFilters(['model_plot', 'record']), { term: { detector_index: { @@ -118,7 +112,6 @@ export const createLogEntryRateQuery = ( }, }, }, - index: getMlResultIndex(logRateJobId), size: 0, }); diff --git a/x-pack/plugins/infra/server/lib/log_analysis/queries/top_log_entry_categories.ts b/x-pack/plugins/infra/server/lib/log_analysis/queries/top_log_entry_categories.ts index 517d31865e35..6fa715624050 100644 --- a/x-pack/plugins/infra/server/lib/log_analysis/queries/top_log_entry_categories.ts 
+++ b/x-pack/plugins/infra/server/lib/log_analysis/queries/top_log_entry_categories.ts @@ -5,13 +5,12 @@ */ import * as rt from 'io-ts'; - import { commonSearchSuccessResponseFieldsRT } from '../../../utils/elasticsearch_runtime_types'; import { + createJobIdFilters, createResultTypeFilters, createTimeRangeFilters, defaultRequestParameters, - getMlResultIndex, } from './common'; export const createTopLogEntryCategoriesQuery = ( @@ -27,6 +26,7 @@ export const createTopLogEntryCategoriesQuery = ( query: { bool: { filter: [ + ...createJobIdFilters(logEntryCategoriesJobId), ...createTimeRangeFilters(startTime, endTime), ...createDatasetsFilters(datasets), { @@ -35,7 +35,7 @@ export const createTopLogEntryCategoriesQuery = ( { bool: { filter: [ - ...createResultTypeFilters('model_plot'), + ...createResultTypeFilters(['model_plot']), { range: { actual: { @@ -48,7 +48,7 @@ export const createTopLogEntryCategoriesQuery = ( }, { bool: { - filter: createResultTypeFilters('record'), + filter: createResultTypeFilters(['record']), }, }, ], @@ -119,7 +119,6 @@ export const createTopLogEntryCategoriesQuery = ( }, }, }, - index: getMlResultIndex(logEntryCategoriesJobId), size: 0, }); diff --git a/x-pack/plugins/infra/server/plugin.ts b/x-pack/plugins/infra/server/plugin.ts index 2fd614830c05..8062c48d9861 100644 --- a/x-pack/plugins/infra/server/plugin.ts +++ b/x-pack/plugins/infra/server/plugin.ts @@ -19,7 +19,6 @@ import { InfraElasticsearchSourceStatusAdapter } from './lib/adapters/source_sta import { InfraFieldsDomain } from './lib/domains/fields_domain'; import { InfraLogEntriesDomain } from './lib/domains/log_entries_domain'; import { InfraMetricsDomain } from './lib/domains/metrics_domain'; -import { LogEntryCategoriesAnalysis, LogEntryRateAnalysis } from './lib/log_analysis'; import { InfraSnapshot } from './lib/snapshot'; import { InfraSourceStatus } from './lib/source_status'; import { InfraSources } from './lib/sources'; @@ -31,6 +30,7 @@ import { registerAlertTypes } from './lib/alerting'; import { infraSourceConfigurationSavedObjectType } from './lib/sources'; import { metricsExplorerViewSavedObjectType } from '../common/saved_objects/metrics_explorer_view'; import { inventoryViewSavedObjectType } from '../common/saved_objects/inventory_view'; +import { InfraRequestHandlerContext } from './types'; export const config = { schema: schema.object({ @@ -106,8 +106,6 @@ export class InfraServerPlugin { } ); const snapshot = new InfraSnapshot(); - const logEntryCategoriesAnalysis = new LogEntryCategoriesAnalysis({ framework }); - const logEntryRateAnalysis = new LogEntryRateAnalysis({ framework }); // register saved object types core.savedObjects.registerType(infraSourceConfigurationSavedObjectType); @@ -115,6 +113,8 @@ export class InfraServerPlugin { core.savedObjects.registerType(inventoryViewSavedObjectType); // TODO: separate these out individually and do away with "domains" as a temporary group + // and make them available via the request context so we can do away with + // the wrapper classes const domainLibs: InfraDomainLibs = { fields: new InfraFieldsDomain(new FrameworkFieldsAdapter(framework), { sources, @@ -129,8 +129,6 @@ export class InfraServerPlugin { this.libs = { configuration: this.config, framework, - logEntryCategoriesAnalysis, - logEntryRateAnalysis, snapshot, sources, sourceStatus, @@ -151,6 +149,25 @@ export class InfraServerPlugin { initInfraServer(this.libs); registerAlertTypes(plugins.alerts, this.libs); + core.http.registerRouteHandlerContext( + 'infra', + (context, 
request): InfraRequestHandlerContext => { + const mlSystem = + context.ml && + plugins.ml?.mlSystemProvider(context.ml?.mlClient.callAsCurrentUser, request); + const mlAnomalyDetectors = + context.ml && + plugins.ml?.anomalyDetectorsProvider(context.ml?.mlClient.callAsCurrentUser); + const spaceId = plugins.spaces?.spacesService.getSpaceId(request) || 'default'; + + return { + mlAnomalyDetectors, + mlSystem, + spaceId, + }; + } + ); + // Telemetry UsageCollector.registerUsageCollector(plugins.usageCollection); diff --git a/x-pack/plugins/infra/server/routes/log_analysis/results/log_entry_categories.ts b/x-pack/plugins/infra/server/routes/log_analysis/results/log_entry_categories.ts index d335774c85f3..f9f31f28dffe 100644 --- a/x-pack/plugins/infra/server/routes/log_analysis/results/log_entry_categories.ts +++ b/x-pack/plugins/infra/server/routes/log_analysis/results/log_entry_categories.ts @@ -5,36 +5,29 @@ */ import Boom from 'boom'; - -import { pipe } from 'fp-ts/lib/pipeable'; -import { fold } from 'fp-ts/lib/Either'; -import { identity } from 'fp-ts/lib/function'; -import { schema } from '@kbn/config-schema'; -import { InfraBackendLibs } from '../../../lib/infra_types'; import { - LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORIES_PATH, getLogEntryCategoriesRequestPayloadRT, getLogEntryCategoriesSuccessReponsePayloadRT, + LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORIES_PATH, } from '../../../../common/http_api/log_analysis'; -import { throwErrors } from '../../../../common/runtime_types'; -import { NoLogAnalysisResultsIndexError } from '../../../lib/log_analysis'; - -const anyObject = schema.object({}, { unknowns: 'allow' }); +import { createValidationFunction } from '../../../../common/runtime_types'; +import type { InfraBackendLibs } from '../../../lib/infra_types'; +import { + getTopLogEntryCategories, + NoLogAnalysisResultsIndexError, +} from '../../../lib/log_analysis'; +import { assertHasInfraMlPlugins } from '../../../utils/request_context'; -export const initGetLogEntryCategoriesRoute = ({ - framework, - logEntryCategoriesAnalysis, -}: InfraBackendLibs) => { +export const initGetLogEntryCategoriesRoute = ({ framework }: InfraBackendLibs) => { framework.registerRoute( { method: 'post', path: LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORIES_PATH, validate: { - // short-circuit forced @kbn/config-schema validation so we can do io-ts validation - body: anyObject, + body: createValidationFunction(getLogEntryCategoriesRequestPayloadRT), }, }, - async (requestContext, request, response) => { + framework.router.handleLegacyErrors(async (requestContext, request, response) => { const { data: { categoryCount, @@ -43,18 +36,13 @@ export const initGetLogEntryCategoriesRoute = ({ timeRange: { startTime, endTime }, datasets, }, - } = pipe( - getLogEntryCategoriesRequestPayloadRT.decode(request.body), - fold(throwErrors(Boom.badRequest), identity) - ); + } = request.body; try { - const { - data: topLogEntryCategories, - timing, - } = await logEntryCategoriesAnalysis.getTopLogEntryCategories( + assertHasInfraMlPlugins(requestContext); + + const { data: topLogEntryCategories, timing } = await getTopLogEntryCategories( requestContext, - request, sourceId, startTime, endTime, @@ -76,18 +64,22 @@ export const initGetLogEntryCategoriesRoute = ({ timing, }), }); - } catch (e) { - const { statusCode = 500, message = 'Unknown error occurred' } = e; + } catch (error) { + if (Boom.isBoom(error)) { + throw error; + } - if (e instanceof NoLogAnalysisResultsIndexError) { - return response.notFound({ body: { message } }); + if (error 
instanceof NoLogAnalysisResultsIndexError) { + return response.notFound({ body: { message: error.message } }); } return response.customError({ - statusCode, - body: { message }, + statusCode: error.statusCode ?? 500, + body: { + message: error.message ?? 'An unexpected error occurred', + }, }); } - } + }) ); }; diff --git a/x-pack/plugins/infra/server/routes/log_analysis/results/log_entry_category_datasets.ts b/x-pack/plugins/infra/server/routes/log_analysis/results/log_entry_category_datasets.ts index 730e32dee2fb..69b1e942464f 100644 --- a/x-pack/plugins/infra/server/routes/log_analysis/results/log_entry_category_datasets.ts +++ b/x-pack/plugins/infra/server/routes/log_analysis/results/log_entry_category_datasets.ts @@ -4,54 +4,42 @@ * you may not use this file except in compliance with the Elastic License. */ -import { schema } from '@kbn/config-schema'; import Boom from 'boom'; -import { fold } from 'fp-ts/lib/Either'; -import { identity } from 'fp-ts/lib/function'; -import { pipe } from 'fp-ts/lib/pipeable'; - import { getLogEntryCategoryDatasetsRequestPayloadRT, getLogEntryCategoryDatasetsSuccessReponsePayloadRT, LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORY_DATASETS_PATH, } from '../../../../common/http_api/log_analysis'; -import { throwErrors } from '../../../../common/runtime_types'; -import { InfraBackendLibs } from '../../../lib/infra_types'; -import { NoLogAnalysisResultsIndexError } from '../../../lib/log_analysis'; - -const anyObject = schema.object({}, { unknowns: 'allow' }); +import { createValidationFunction } from '../../../../common/runtime_types'; +import type { InfraBackendLibs } from '../../../lib/infra_types'; +import { + getLogEntryCategoryDatasets, + NoLogAnalysisResultsIndexError, +} from '../../../lib/log_analysis'; +import { assertHasInfraMlPlugins } from '../../../utils/request_context'; -export const initGetLogEntryCategoryDatasetsRoute = ({ - framework, - logEntryCategoriesAnalysis, -}: InfraBackendLibs) => { +export const initGetLogEntryCategoryDatasetsRoute = ({ framework }: InfraBackendLibs) => { framework.registerRoute( { method: 'post', path: LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORY_DATASETS_PATH, validate: { - // short-circuit forced @kbn/config-schema validation so we can do io-ts validation - body: anyObject, + body: createValidationFunction(getLogEntryCategoryDatasetsRequestPayloadRT), }, }, - async (requestContext, request, response) => { + framework.router.handleLegacyErrors(async (requestContext, request, response) => { const { data: { sourceId, timeRange: { startTime, endTime }, }, - } = pipe( - getLogEntryCategoryDatasetsRequestPayloadRT.decode(request.body), - fold(throwErrors(Boom.badRequest), identity) - ); + } = request.body; try { - const { - data: logEntryCategoryDatasets, - timing, - } = await logEntryCategoriesAnalysis.getLogEntryCategoryDatasets( + assertHasInfraMlPlugins(requestContext); + + const { data: logEntryCategoryDatasets, timing } = await getLogEntryCategoryDatasets( requestContext, - request, sourceId, startTime, endTime @@ -65,18 +53,22 @@ export const initGetLogEntryCategoryDatasetsRoute = ({ timing, }), }); - } catch (e) { - const { statusCode = 500, message = 'Unknown error occurred' } = e; + } catch (error) { + if (Boom.isBoom(error)) { + throw error; + } - if (e instanceof NoLogAnalysisResultsIndexError) { - return response.notFound({ body: { message } }); + if (error instanceof NoLogAnalysisResultsIndexError) { + return response.notFound({ body: { message: error.message } }); } return response.customError({ - statusCode, - body: { 
message }, + statusCode: error.statusCode ?? 500, + body: { + message: error.message ?? 'An unexpected error occurred', + }, }); } - } + }) ); }; diff --git a/x-pack/plugins/infra/server/routes/log_analysis/results/log_entry_category_examples.ts b/x-pack/plugins/infra/server/routes/log_analysis/results/log_entry_category_examples.ts index 44f466cc77c8..217180c0290f 100644 --- a/x-pack/plugins/infra/server/routes/log_analysis/results/log_entry_category_examples.ts +++ b/x-pack/plugins/infra/server/routes/log_analysis/results/log_entry_category_examples.ts @@ -4,37 +4,30 @@ * you may not use this file except in compliance with the Elastic License. */ -import { schema } from '@kbn/config-schema'; import Boom from 'boom'; -import { fold } from 'fp-ts/lib/Either'; -import { identity } from 'fp-ts/lib/function'; -import { pipe } from 'fp-ts/lib/pipeable'; - import { getLogEntryCategoryExamplesRequestPayloadRT, getLogEntryCategoryExamplesSuccessReponsePayloadRT, LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORY_EXAMPLES_PATH, } from '../../../../common/http_api/log_analysis'; -import { throwErrors } from '../../../../common/runtime_types'; -import { InfraBackendLibs } from '../../../lib/infra_types'; -import { NoLogAnalysisResultsIndexError } from '../../../lib/log_analysis'; - -const anyObject = schema.object({}, { unknowns: 'allow' }); +import { createValidationFunction } from '../../../../common/runtime_types'; +import type { InfraBackendLibs } from '../../../lib/infra_types'; +import { + getLogEntryCategoryExamples, + NoLogAnalysisResultsIndexError, +} from '../../../lib/log_analysis'; +import { assertHasInfraMlPlugins } from '../../../utils/request_context'; -export const initGetLogEntryCategoryExamplesRoute = ({ - framework, - logEntryCategoriesAnalysis, -}: InfraBackendLibs) => { +export const initGetLogEntryCategoryExamplesRoute = ({ framework }: InfraBackendLibs) => { framework.registerRoute( { method: 'post', path: LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORY_EXAMPLES_PATH, validate: { - // short-circuit forced @kbn/config-schema validation so we can do io-ts validation - body: anyObject, + body: createValidationFunction(getLogEntryCategoryExamplesRequestPayloadRT), }, }, - async (requestContext, request, response) => { + framework.router.handleLegacyErrors(async (requestContext, request, response) => { const { data: { categoryId, @@ -42,18 +35,13 @@ export const initGetLogEntryCategoryExamplesRoute = ({ sourceId, timeRange: { startTime, endTime }, }, - } = pipe( - getLogEntryCategoryExamplesRequestPayloadRT.decode(request.body), - fold(throwErrors(Boom.badRequest), identity) - ); + } = request.body; try { - const { - data: logEntryCategoryExamples, - timing, - } = await logEntryCategoriesAnalysis.getLogEntryCategoryExamples( + assertHasInfraMlPlugins(requestContext); + + const { data: logEntryCategoryExamples, timing } = await getLogEntryCategoryExamples( requestContext, - request, sourceId, startTime, endTime, @@ -69,18 +57,22 @@ export const initGetLogEntryCategoryExamplesRoute = ({ timing, }), }); - } catch (e) { - const { statusCode = 500, message = 'Unknown error occurred' } = e; + } catch (error) { + if (Boom.isBoom(error)) { + throw error; + } - if (e instanceof NoLogAnalysisResultsIndexError) { - return response.notFound({ body: { message } }); + if (error instanceof NoLogAnalysisResultsIndexError) { + return response.notFound({ body: { message: error.message } }); } return response.customError({ - statusCode, - body: { message }, + statusCode: error.statusCode ?? 
500, + body: { + message: error.message ?? 'An unexpected error occurred', + }, }); } - } + }) ); }; diff --git a/x-pack/plugins/infra/server/routes/log_analysis/results/log_entry_rate.ts b/x-pack/plugins/infra/server/routes/log_analysis/results/log_entry_rate.ts index 38dc0a790a7a..ae86102980c1 100644 --- a/x-pack/plugins/infra/server/routes/log_analysis/results/log_entry_rate.ts +++ b/x-pack/plugins/infra/server/routes/log_analysis/results/log_entry_rate.ts @@ -5,11 +5,6 @@ */ import Boom from 'boom'; - -import { pipe } from 'fp-ts/lib/pipeable'; -import { fold } from 'fp-ts/lib/Either'; -import { identity } from 'fp-ts/lib/function'; -import { schema } from '@kbn/config-schema'; import { InfraBackendLibs } from '../../../lib/infra_types'; import { LOG_ANALYSIS_GET_LOG_ENTRY_RATE_PATH, @@ -17,57 +12,61 @@ import { getLogEntryRateSuccessReponsePayloadRT, GetLogEntryRateSuccessResponsePayload, } from '../../../../common/http_api/log_analysis'; -import { throwErrors } from '../../../../common/runtime_types'; -import { NoLogAnalysisResultsIndexError } from '../../../lib/log_analysis'; - -const anyObject = schema.object({}, { unknowns: 'allow' }); +import { createValidationFunction } from '../../../../common/runtime_types'; +import { NoLogAnalysisResultsIndexError, getLogEntryRateBuckets } from '../../../lib/log_analysis'; +import { assertHasInfraMlPlugins } from '../../../utils/request_context'; -export const initGetLogEntryRateRoute = ({ framework, logEntryRateAnalysis }: InfraBackendLibs) => { +export const initGetLogEntryRateRoute = ({ framework }: InfraBackendLibs) => { framework.registerRoute( { method: 'post', path: LOG_ANALYSIS_GET_LOG_ENTRY_RATE_PATH, validate: { - // short-circuit forced @kbn/config-schema validation so we can do io-ts validation - body: anyObject, + body: createValidationFunction(getLogEntryRateRequestPayloadRT), }, }, - async (requestContext, request, response) => { + framework.router.handleLegacyErrors(async (requestContext, request, response) => { + const { + data: { sourceId, timeRange, bucketDuration }, + } = request.body; + try { - const payload = pipe( - getLogEntryRateRequestPayloadRT.decode(request.body), - fold(throwErrors(Boom.badRequest), identity) - ); + assertHasInfraMlPlugins(requestContext); - const logEntryRateBuckets = await logEntryRateAnalysis.getLogEntryRateBuckets( + const logEntryRateBuckets = await getLogEntryRateBuckets( requestContext, - request, - payload.data.sourceId, - payload.data.timeRange.startTime, - payload.data.timeRange.endTime, - payload.data.bucketDuration + sourceId, + timeRange.startTime, + timeRange.endTime, + bucketDuration ); return response.ok({ body: getLogEntryRateSuccessReponsePayloadRT.encode({ data: { - bucketDuration: payload.data.bucketDuration, + bucketDuration, histogramBuckets: logEntryRateBuckets, totalNumberOfLogEntries: getTotalNumberOfLogEntries(logEntryRateBuckets), }, }), }); - } catch (e) { - const { statusCode = 500, message = 'Unknown error occurred' } = e; - if (e instanceof NoLogAnalysisResultsIndexError) { - return response.notFound({ body: { message } }); + } catch (error) { + if (Boom.isBoom(error)) { + throw error; } + + if (error instanceof NoLogAnalysisResultsIndexError) { + return response.notFound({ body: { message: error.message } }); + } + return response.customError({ - statusCode, - body: { message }, + statusCode: error.statusCode ?? 500, + body: { + message: error.message ?? 
'An unexpected error occurred', + }, }); } - } + }) ); }; diff --git a/x-pack/plugins/infra/server/types.ts b/x-pack/plugins/infra/server/types.ts new file mode 100644 index 000000000000..735569a790f6 --- /dev/null +++ b/x-pack/plugins/infra/server/types.ts @@ -0,0 +1,28 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { MlPluginSetup } from '../../ml/server'; + +export type MlSystem = ReturnType; +export type MlAnomalyDetectors = ReturnType; + +export interface InfraMlRequestHandlerContext { + mlAnomalyDetectors?: MlAnomalyDetectors; + mlSystem?: MlSystem; +} + +export interface InfraSpacesRequestHandlerContext { + spaceId: string; +} + +export type InfraRequestHandlerContext = InfraMlRequestHandlerContext & + InfraSpacesRequestHandlerContext; + +declare module 'src/core/server' { + interface RequestHandlerContext { + infra?: InfraRequestHandlerContext; + } +} diff --git a/x-pack/plugins/infra/server/utils/request_context.ts b/x-pack/plugins/infra/server/utils/request_context.ts new file mode 100644 index 000000000000..30855d74d9e3 --- /dev/null +++ b/x-pack/plugins/infra/server/utils/request_context.ts @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +/* eslint-disable max-classes-per-file */ + +import { InfraMlRequestHandlerContext, InfraRequestHandlerContext } from '../types'; + +export class MissingContextValuesError extends Error { + constructor(message?: string) { + super(message); + Object.setPrototypeOf(this, new.target.prototype); + } +} + +export class NoMlPluginError extends Error { + constructor(message?: string) { + super(message); + Object.setPrototypeOf(this, new.target.prototype); + } +} + +export function assertHasInfraPlugins( + context: Context +): asserts context is Context & { infra: Context['infra'] } { + if (context.infra == null) { + throw new MissingContextValuesError('Failed to access "infra" context values.'); + } +} + +export function assertHasInfraMlPlugins( + context: Context +): asserts context is Context & { + infra: Context['infra'] & Required; +} { + assertHasInfraPlugins(context); + + if (context.infra?.mlAnomalyDetectors == null || context.infra?.mlSystem == null) { + throw new NoMlPluginError('Failed to access ML plugin.'); + } +} diff --git a/x-pack/plugins/ingest_manager/common/openapi/spec_oas3.json b/x-pack/plugins/ingest_manager/common/openapi/spec_oas3.json index ea61d9714579..d17b4115e64a 100644 --- a/x-pack/plugins/ingest_manager/common/openapi/spec_oas3.json +++ b/x-pack/plugins/ingest_manager/common/openapi/spec_oas3.json @@ -1712,6 +1712,198 @@ }, "success": true } + }, + "required-package": { + "value": { + "response": { + "format_version": "1.0.0", + "name": "endpoint", + "title": "Elastic Endpoint", + "version": "0.3.0", + "readme": "/package/endpoint/0.3.0/docs/README.md", + "license": "basic", + "description": "This is the Elastic Endpoint package.", + "type": "solution", + "categories": [ + "security" + ], + "release": "beta", + "requirement": { + "kibana": { + "versions": ">7.4.0" + } + }, + "icons": [ + { + "src": "/package/endpoint/0.3.0/img/logo-endpoint-64-color.svg", + "size": "16x16", + "type": 
"image/svg+xml" + } + ], + "assets": { + "kibana": { + "dashboard": [ + { + "pkgkey": "endpoint-0.3.0", + "service": "kibana", + "type": "dashboard", + "file": "826759f0-7074-11ea-9bc8-6b38f4d29a16.json", + "path": "endpoint-0.3.0/kibana/dashboard/826759f0-7074-11ea-9bc8-6b38f4d29a16.json" + } + ], + "map": [ + { + "pkgkey": "endpoint-0.3.0", + "service": "kibana", + "type": "map", + "file": "a3a3bd10-706b-11ea-9bc8-6b38f4d29a16.json", + "path": "endpoint-0.3.0/kibana/map/a3a3bd10-706b-11ea-9bc8-6b38f4d29a16.json" + } + ], + "visualization": [ + { + "pkgkey": "endpoint-0.3.0", + "service": "kibana", + "type": "visualization", + "file": "1cfceda0-728b-11ea-9bc8-6b38f4d29a16.json", + "path": "endpoint-0.3.0/kibana/visualization/1cfceda0-728b-11ea-9bc8-6b38f4d29a16.json" + }, + { + "pkgkey": "endpoint-0.3.0", + "service": "kibana", + "type": "visualization", + "file": "1e525190-7074-11ea-9bc8-6b38f4d29a16.json", + "path": "endpoint-0.3.0/kibana/visualization/1e525190-7074-11ea-9bc8-6b38f4d29a16.json" + }, + { + "pkgkey": "endpoint-0.3.0", + "service": "kibana", + "type": "visualization", + "file": "55387750-729c-11ea-9bc8-6b38f4d29a16.json", + "path": "endpoint-0.3.0/kibana/visualization/55387750-729c-11ea-9bc8-6b38f4d29a16.json" + }, + { + "pkgkey": "endpoint-0.3.0", + "service": "kibana", + "type": "visualization", + "file": "92b1edc0-706a-11ea-9bc8-6b38f4d29a16.json", + "path": "endpoint-0.3.0/kibana/visualization/92b1edc0-706a-11ea-9bc8-6b38f4d29a16.json" + } + ] + } + }, + "datasets": [ + { + "id": "endpoint", + "title": "Endpoint Events", + "release": "experimental", + "type": "events", + "package": "endpoint", + "path": "events" + }, + { + "id": "endpoint.metadata", + "title": "Endpoint Metadata", + "release": "experimental", + "type": "metrics", + "package": "endpoint", + "path": "metadata" + }, + { + "id": "endpoint.policy", + "title": "Endpoint Policy Response", + "release": "experimental", + "type": "metrics", + "package": "endpoint", + "path": "policy" + }, + { + "id": "endpoint.telemetry", + "title": "Endpoint Telemetry", + "release": "experimental", + "type": "metrics", + "package": "endpoint", + "path": "telemetry" + } + ], + "datasources": [ + { + "name": "endpoint", + "title": "Endpoint data source", + "description": "Interact with the endpoint.", + "inputs": null, + "multiple": false + } + ], + "download": "/epr/endpoint/endpoint-0.3.0.tar.gz", + "path": "/package/endpoint/0.3.0", + "latestVersion": "0.3.0", + "removable": false, + "status": "installed", + "savedObject": { + "id": "endpoint", + "type": "epm-packages", + "updated_at": "2020-06-23T21:44:59.319Z", + "version": "Wzk4LDFd", + "attributes": { + "installed": [ + { + "id": "826759f0-7074-11ea-9bc8-6b38f4d29a16", + "type": "dashboard" + }, + { + "id": "1cfceda0-728b-11ea-9bc8-6b38f4d29a16", + "type": "visualization" + }, + { + "id": "1e525190-7074-11ea-9bc8-6b38f4d29a16", + "type": "visualization" + }, + { + "id": "55387750-729c-11ea-9bc8-6b38f4d29a16", + "type": "visualization" + }, + { + "id": "92b1edc0-706a-11ea-9bc8-6b38f4d29a16", + "type": "visualization" + }, + { + "id": "a3a3bd10-706b-11ea-9bc8-6b38f4d29a16", + "type": "map" + }, + { + "id": "events-endpoint", + "type": "index-template" + }, + { + "id": "metrics-endpoint.metadata", + "type": "index-template" + }, + { + "id": "metrics-endpoint.policy", + "type": "index-template" + }, + { + "id": "metrics-endpoint.telemetry", + "type": "index-template" + } + ], + "es_index_patterns": { + "events": "events-endpoint-*", + "metadata": "metrics-endpoint.metadata-*", 
+ "policy": "metrics-endpoint.policy-*", + "telemetry": "metrics-endpoint.telemetry-*" + }, + "name": "endpoint", + "version": "0.3.0", + "internal": false, + "removable": false + }, + "references": [] + } + }, + "success": true + } } } } @@ -3822,6 +4014,9 @@ }, "path": { "type": "string" + }, + "removable": { + "type": "boolean" } }, "required": [ diff --git a/x-pack/plugins/ingest_manager/common/types/models/epm.ts b/x-pack/plugins/ingest_manager/common/types/models/epm.ts index cc9e23dc9388..599165d2bfd9 100644 --- a/x-pack/plugins/ingest_manager/common/types/models/epm.ts +++ b/x-pack/plugins/ingest_manager/common/types/models/epm.ts @@ -58,7 +58,6 @@ export interface RegistryPackage { icons?: RegistryImage[]; assets?: string[]; internal?: boolean; - removable?: boolean; format_version: string; datasets?: Dataset[]; datasources?: RegistryDatasource[]; @@ -206,6 +205,7 @@ interface PackageAdditions { title: string; latestVersion: string; assets: AssetsGroupedByServiceByType; + removable?: boolean; } // Managers public HTTP response types diff --git a/x-pack/plugins/ingest_manager/public/index.ts b/x-pack/plugins/ingest_manager/public/index.ts index 9f4893ac6e49..ac56349b30c1 100644 --- a/x-pack/plugins/ingest_manager/public/index.ts +++ b/x-pack/plugins/ingest_manager/public/index.ts @@ -6,7 +6,7 @@ import { PluginInitializerContext } from 'src/core/public'; import { IngestManagerPlugin } from './plugin'; -export { IngestManagerStart } from './plugin'; +export { IngestManagerSetup, IngestManagerStart } from './plugin'; export const plugin = (initializerContext: PluginInitializerContext) => { return new IngestManagerPlugin(initializerContext); diff --git a/x-pack/plugins/ingest_manager/public/plugin.ts b/x-pack/plugins/ingest_manager/public/plugin.ts index 1cd70f70faa3..4a10a26151e7 100644 --- a/x-pack/plugins/ingest_manager/public/plugin.ts +++ b/x-pack/plugins/ingest_manager/public/plugin.ts @@ -22,7 +22,11 @@ import { registerDatasource } from './applications/ingest_manager/sections/agent export { IngestManagerConfigType } from '../common/types'; -export type IngestManagerSetup = void; +// We need to provide an object instead of void so that dependent plugins know when Ingest Manager +// is disabled. +// eslint-disable-next-line @typescript-eslint/no-empty-interface +export interface IngestManagerSetup {} + /** * Describes public IngestManager plugin contract returned at the `start` stage. 
*/ @@ -72,6 +76,8 @@ export class IngestManagerPlugin }; }, }); + + return {}; } public async start(core: CoreStart): Promise { diff --git a/x-pack/plugins/ingest_manager/server/index.ts b/x-pack/plugins/ingest_manager/server/index.ts index f6b2d7ccc6d4..1e9011c9dfe4 100644 --- a/x-pack/plugins/ingest_manager/server/index.ts +++ b/x-pack/plugins/ingest_manager/server/index.ts @@ -11,6 +11,7 @@ export { IngestManagerSetupContract, IngestManagerSetupDeps, IngestManagerStartContract, + ExternalCallback, } from './plugin'; export const config = { @@ -42,6 +43,8 @@ export const config = { export type IngestManagerConfigType = TypeOf; +export { DatasourceServiceInterface } from './services/datasource'; + export const plugin = (initializerContext: PluginInitializerContext) => { return new IngestManagerPlugin(initializerContext); }; diff --git a/x-pack/plugins/ingest_manager/server/mocks.ts b/x-pack/plugins/ingest_manager/server/mocks.ts new file mode 100644 index 000000000000..3bdef14dc85a --- /dev/null +++ b/x-pack/plugins/ingest_manager/server/mocks.ts @@ -0,0 +1,36 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { loggingSystemMock, savedObjectsServiceMock } from 'src/core/server/mocks'; +import { IngestManagerAppContext } from './plugin'; +import { encryptedSavedObjectsMock } from '../../encrypted_saved_objects/server/mocks'; +import { securityMock } from '../../security/server/mocks'; +import { DatasourceServiceInterface } from './services/datasource'; + +export const createAppContextStartContractMock = (): IngestManagerAppContext => { + return { + encryptedSavedObjectsStart: encryptedSavedObjectsMock.createStart(), + savedObjects: savedObjectsServiceMock.createStartContract(), + security: securityMock.createSetup(), + logger: loggingSystemMock.create().get(), + isProductionMode: true, + kibanaVersion: '8.0.0', + }; +}; + +export const createDatasourceServiceMock = () => { + return { + assignPackageStream: jest.fn(), + buildDatasourceFromPackage: jest.fn(), + bulkCreate: jest.fn(), + create: jest.fn(), + delete: jest.fn(), + get: jest.fn(), + getByIDs: jest.fn(), + list: jest.fn(), + update: jest.fn(), + } as jest.Mocked; +}; diff --git a/x-pack/plugins/ingest_manager/server/plugin.ts b/x-pack/plugins/ingest_manager/server/plugin.ts index fb1c218e1545..fcdb6387fed3 100644 --- a/x-pack/plugins/ingest_manager/server/plugin.ts +++ b/x-pack/plugins/ingest_manager/server/plugin.ts @@ -45,15 +45,16 @@ import { registerSettingsRoutes, registerAppRoutes, } from './routes'; -import { IngestManagerConfigType } from '../common'; +import { IngestManagerConfigType, NewDatasource } from '../common'; import { appContextService, licenseService, ESIndexPatternSavedObjectService, ESIndexPatternService, AgentService, + datasourceService, } from './services'; -import { getAgentStatusById } from './services/agents'; +import { getAgentStatusById, authenticateAgentWithAccessToken } from './services/agents'; import { CloudSetup } from '../../cloud/server'; import { agentCheckinState } from './services/agents/checkin/state'; @@ -92,12 +93,31 @@ const allSavedObjectTypes = [ ENROLLMENT_API_KEYS_SAVED_OBJECT_TYPE, ]; +/** + * Callbacks supported by the Ingest plugin + */ +export type ExternalCallback = [ + 'datasourceCreate', + (newDatasource: NewDatasource) => Promise +]; + +export type 
ExternalCallbacksStorage = Map>; + /** * Describes public IngestManager plugin contract returned at the `startup` stage. */ export interface IngestManagerStartContract { esIndexPatternService: ESIndexPatternService; agentService: AgentService; + /** + * Services for Ingest's Datasources + */ + datasourceService: typeof datasourceService; + /** + * Register callbacks for inclusion in ingest API processing + * @param args + */ + registerExternalCallback: (...args: ExternalCallback) => void; } export class IngestManagerPlugin @@ -236,6 +256,11 @@ export class IngestManagerPlugin esIndexPatternService: new ESIndexPatternSavedObjectService(), agentService: { getAgentStatusById, + authenticateAgentWithAccessToken, + }, + datasourceService, + registerExternalCallback: (...args: ExternalCallback) => { + return appContextService.addExternalCallback(...args); }, }; } diff --git a/x-pack/plugins/ingest_manager/server/routes/agent/acks_handlers.test.ts b/x-pack/plugins/ingest_manager/server/routes/agent/acks_handlers.test.ts index 84923d5c3366..aaed189ae3dd 100644 --- a/x-pack/plugins/ingest_manager/server/routes/agent/acks_handlers.test.ts +++ b/x-pack/plugins/ingest_manager/server/routes/agent/acks_handlers.test.ts @@ -77,7 +77,7 @@ describe('test acks handlers', () => { id: 'action1', }, ]), - getAgentByAccessAPIKeyId: jest.fn().mockReturnValueOnce({ + authenticateAgentWithAccessToken: jest.fn().mockReturnValueOnce({ id: 'agent', }), getSavedObjectsClientContract: jest.fn().mockReturnValueOnce(mockSavedObjectsClient), diff --git a/x-pack/plugins/ingest_manager/server/routes/agent/acks_handlers.ts b/x-pack/plugins/ingest_manager/server/routes/agent/acks_handlers.ts index 83d894295c31..0b719d8a67df 100644 --- a/x-pack/plugins/ingest_manager/server/routes/agent/acks_handlers.ts +++ b/x-pack/plugins/ingest_manager/server/routes/agent/acks_handlers.ts @@ -9,7 +9,6 @@ import { RequestHandler } from 'kibana/server'; import { TypeOf } from '@kbn/config-schema'; import { PostAgentAcksRequestSchema } from '../../types/rest_spec'; -import * as APIKeyService from '../../services/api_keys'; import { AcksService } from '../../services/agents'; import { AgentEvent } from '../../../common/types/models'; import { PostAgentAcksResponse } from '../../../common/types/rest_spec'; @@ -24,8 +23,7 @@ export const postAgentAcksHandlerBuilder = function ( return async (context, request, response) => { try { const soClient = ackService.getSavedObjectsClientContract(request); - const res = APIKeyService.parseApiKeyFromHeaders(request.headers); - const agent = await ackService.getAgentByAccessAPIKeyId(soClient, res.apiKeyId as string); + const agent = await ackService.authenticateAgentWithAccessToken(soClient, request); const agentEvents = request.body.events as AgentEvent[]; // validate that all events are for the authorized agent obtained from the api key diff --git a/x-pack/plugins/ingest_manager/server/routes/agent/handlers.ts b/x-pack/plugins/ingest_manager/server/routes/agent/handlers.ts index 0d1c77b8d697..d31498599a2b 100644 --- a/x-pack/plugins/ingest_manager/server/routes/agent/handlers.ts +++ b/x-pack/plugins/ingest_manager/server/routes/agent/handlers.ts @@ -171,8 +171,7 @@ export const postAgentCheckinHandler: RequestHandler< > = async (context, request, response) => { try { const soClient = appContextService.getInternalUserSOClient(request); - const res = APIKeyService.parseApiKeyFromHeaders(request.headers); - const agent = await AgentService.getAgentByAccessAPIKeyId(soClient, res.apiKeyId); + const agent = 
await AgentService.authenticateAgentWithAccessToken(soClient, request); const abortController = new AbortController(); request.events.aborted$.subscribe(() => { abortController.abort(); diff --git a/x-pack/plugins/ingest_manager/server/routes/agent/index.ts b/x-pack/plugins/ingest_manager/server/routes/agent/index.ts index 87eee4622c80..eaab46c7b455 100644 --- a/x-pack/plugins/ingest_manager/server/routes/agent/index.ts +++ b/x-pack/plugins/ingest_manager/server/routes/agent/index.ts @@ -109,7 +109,7 @@ export const registerRoutes = (router: IRouter) => { }, postAgentAcksHandlerBuilder({ acknowledgeAgentActions: AgentService.acknowledgeAgentActions, - getAgentByAccessAPIKeyId: AgentService.getAgentByAccessAPIKeyId, + authenticateAgentWithAccessToken: AgentService.authenticateAgentWithAccessToken, getSavedObjectsClientContract: appContextService.getInternalUserSOClient.bind( appContextService ), diff --git a/x-pack/plugins/ingest_manager/server/routes/datasource/datasource_handlers.test.ts b/x-pack/plugins/ingest_manager/server/routes/datasource/datasource_handlers.test.ts new file mode 100644 index 000000000000..07cbeb8b2cec --- /dev/null +++ b/x-pack/plugins/ingest_manager/server/routes/datasource/datasource_handlers.test.ts @@ -0,0 +1,332 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { httpServerMock, httpServiceMock } from 'src/core/server/mocks'; +import { IRouter, KibanaRequest, Logger, RequestHandler, RouteConfig } from 'kibana/server'; +import { registerRoutes } from './index'; +import { DATASOURCE_API_ROUTES } from '../../../common/constants'; +import { xpackMocks } from '../../../../../mocks'; +import { appContextService } from '../../services'; +import { createAppContextStartContractMock } from '../../mocks'; +import { DatasourceServiceInterface, ExternalCallback } from '../..'; +import { CreateDatasourceRequestSchema } from '../../types/rest_spec'; +import { datasourceService } from '../../services'; + +const datasourceServiceMock = datasourceService as jest.Mocked; + +jest.mock('../../services/datasource', (): { + datasourceService: jest.Mocked; +} => { + return { + datasourceService: { + assignPackageStream: jest.fn((packageInfo, dataInputs) => Promise.resolve(dataInputs)), + buildDatasourceFromPackage: jest.fn(), + bulkCreate: jest.fn(), + create: jest.fn((soClient, newData) => + Promise.resolve({ + ...newData, + id: '1', + revision: 1, + updated_at: new Date().toISOString(), + updated_by: 'elastic', + created_at: new Date().toISOString(), + created_by: 'elastic', + }) + ), + delete: jest.fn(), + get: jest.fn(), + getByIDs: jest.fn(), + list: jest.fn(), + update: jest.fn(), + }, + }; +}); + +jest.mock('../../services/epm/packages', () => { + return { + ensureInstalledPackage: jest.fn(() => Promise.resolve()), + getPackageInfo: jest.fn(() => Promise.resolve()), + }; +}); + +describe('When calling datasource', () => { + let routerMock: jest.Mocked; + let routeHandler: RequestHandler; + let routeConfig: RouteConfig; + let context: ReturnType; + let response: ReturnType; + + beforeAll(() => { + routerMock = httpServiceMock.createRouter(); + registerRoutes(routerMock); + }); + + beforeEach(() => { + appContextService.start(createAppContextStartContractMock()); + context = xpackMocks.createRequestHandlerContext(); + response = httpServerMock.createResponseFactory(); + }); 
+ + afterEach(() => { + jest.clearAllMocks(); + appContextService.stop(); + }); + + describe('create api handler', () => { + const getCreateKibanaRequest = ( + newData?: typeof CreateDatasourceRequestSchema.body + ): KibanaRequest => { + return httpServerMock.createKibanaRequest< + undefined, + undefined, + typeof CreateDatasourceRequestSchema.body + >({ + path: routeConfig.path, + method: 'post', + body: newData || { + name: 'endpoint-1', + description: '', + config_id: 'a5ca00c0-b30c-11ea-9732-1bb05811278c', + enabled: true, + output_id: '', + inputs: [], + namespace: 'default', + package: { name: 'endpoint', title: 'Elastic Endpoint', version: '0.5.0' }, + }, + }); + }; + + // Set the routeConfig and routeHandler to the Create API + beforeAll(() => { + [routeConfig, routeHandler] = routerMock.post.mock.calls.find(([{ path }]) => + path.startsWith(DATASOURCE_API_ROUTES.CREATE_PATTERN) + )!; + }); + + describe('and external callbacks are registered', () => { + const callbackCallingOrder: string[] = []; + + // Callback one adds an input that includes a `config` property + const callbackOne: ExternalCallback[1] = jest.fn(async (ds) => { + callbackCallingOrder.push('one'); + const newDs = { + ...ds, + inputs: [ + { + type: 'endpoint', + enabled: true, + streams: [], + config: { + one: { + value: 'inserted by callbackOne', + }, + }, + }, + ], + }; + return newDs; + }); + + // Callback two adds an additional `input[0].config` property + const callbackTwo: ExternalCallback[1] = jest.fn(async (ds) => { + callbackCallingOrder.push('two'); + const newDs = { + ...ds, + inputs: [ + { + ...ds.inputs[0], + config: { + ...ds.inputs[0].config, + two: { + value: 'inserted by callbackTwo', + }, + }, + }, + ], + }; + return newDs; + }); + + beforeEach(() => { + appContextService.addExternalCallback('datasourceCreate', callbackOne); + appContextService.addExternalCallback('datasourceCreate', callbackTwo); + }); + + afterEach(() => (callbackCallingOrder.length = 0)); + + it('should call external callbacks in expected order', async () => { + const request = getCreateKibanaRequest(); + await routeHandler(context, request, response); + expect(response.ok).toHaveBeenCalled(); + expect(callbackCallingOrder).toEqual(['one', 'two']); + }); + + it('should feed datasource returned by last callback', async () => { + const request = getCreateKibanaRequest(); + await routeHandler(context, request, response); + expect(response.ok).toHaveBeenCalled(); + expect(callbackOne).toHaveBeenCalledWith({ + config_id: 'a5ca00c0-b30c-11ea-9732-1bb05811278c', + description: '', + enabled: true, + inputs: [], + name: 'endpoint-1', + namespace: 'default', + output_id: '', + package: { + name: 'endpoint', + title: 'Elastic Endpoint', + version: '0.5.0', + }, + }); + expect(callbackTwo).toHaveBeenCalledWith({ + config_id: 'a5ca00c0-b30c-11ea-9732-1bb05811278c', + description: '', + enabled: true, + inputs: [ + { + type: 'endpoint', + enabled: true, + streams: [], + config: { + one: { + value: 'inserted by callbackOne', + }, + }, + }, + ], + name: 'endpoint-1', + namespace: 'default', + output_id: '', + package: { + name: 'endpoint', + title: 'Elastic Endpoint', + version: '0.5.0', + }, + }); + }); + + it('should create with data from callback', async () => { + const request = getCreateKibanaRequest(); + await routeHandler(context, request, response); + expect(response.ok).toHaveBeenCalled(); + expect(datasourceServiceMock.create.mock.calls[0][1]).toEqual({ + config_id: 'a5ca00c0-b30c-11ea-9732-1bb05811278c', + description: '', + enabled: 
true, + inputs: [ + { + config: { + one: { + value: 'inserted by callbackOne', + }, + two: { + value: 'inserted by callbackTwo', + }, + }, + enabled: true, + streams: [], + type: 'endpoint', + }, + ], + name: 'endpoint-1', + namespace: 'default', + output_id: '', + package: { + name: 'endpoint', + title: 'Elastic Endpoint', + version: '0.5.0', + }, + }); + }); + + describe('and a callback throws an exception', () => { + const callbackThree: ExternalCallback[1] = jest.fn(async (ds) => { + callbackCallingOrder.push('three'); + throw new Error('callbackThree threw error on purpose'); + }); + + const callbackFour: ExternalCallback[1] = jest.fn(async (ds) => { + callbackCallingOrder.push('four'); + return { + ...ds, + inputs: [ + { + ...ds.inputs[0], + config: { + ...ds.inputs[0].config, + four: { + value: 'inserted by callbackFour', + }, + }, + }, + ], + }; + }); + + beforeEach(() => { + appContextService.addExternalCallback('datasourceCreate', callbackThree); + appContextService.addExternalCallback('datasourceCreate', callbackFour); + }); + + it('should skip over callback exceptions and still execute other callbacks', async () => { + const request = getCreateKibanaRequest(); + await routeHandler(context, request, response); + expect(response.ok).toHaveBeenCalled(); + expect(callbackCallingOrder).toEqual(['one', 'two', 'three', 'four']); + }); + + it('should log errors', async () => { + const errorLogger = (appContextService.getLogger() as jest.Mocked).error; + const request = getCreateKibanaRequest(); + await routeHandler(context, request, response); + expect(response.ok).toHaveBeenCalled(); + expect(errorLogger.mock.calls).toEqual([ + ['An external registered [datasourceCreate] callback failed when executed'], + [new Error('callbackThree threw error on purpose')], + ]); + }); + + it('should create datasource with last successful returned datasource', async () => { + const request = getCreateKibanaRequest(); + await routeHandler(context, request, response); + expect(response.ok).toHaveBeenCalled(); + expect(datasourceServiceMock.create.mock.calls[0][1]).toEqual({ + config_id: 'a5ca00c0-b30c-11ea-9732-1bb05811278c', + description: '', + enabled: true, + inputs: [ + { + config: { + one: { + value: 'inserted by callbackOne', + }, + two: { + value: 'inserted by callbackTwo', + }, + four: { + value: 'inserted by callbackFour', + }, + }, + enabled: true, + streams: [], + type: 'endpoint', + }, + ], + name: 'endpoint-1', + namespace: 'default', + output_id: '', + package: { + name: 'endpoint', + title: 'Elastic Endpoint', + version: '0.5.0', + }, + }); + }); + }); + }); + }); +}); diff --git a/x-pack/plugins/ingest_manager/server/routes/datasource/handlers.ts b/x-pack/plugins/ingest_manager/server/routes/datasource/handlers.ts index 09daec337040..4f83d24a846e 100644 --- a/x-pack/plugins/ingest_manager/server/routes/datasource/handlers.ts +++ b/x-pack/plugins/ingest_manager/server/routes/datasource/handlers.ts @@ -14,6 +14,7 @@ import { CreateDatasourceRequestSchema, UpdateDatasourceRequestSchema, DeleteDatasourcesRequestSchema, + NewDatasource, } from '../../types'; import { CreateDatasourceResponse, DeleteDatasourcesResponse } from '../../../common'; @@ -76,23 +77,50 @@ export const createDatasourceHandler: RequestHandler< const soClient = context.core.savedObjects.client; const callCluster = context.core.elasticsearch.legacy.client.callAsCurrentUser; const user = (await appContextService.getSecurity()?.authc.getCurrentUser(request)) || undefined; - const newData = { ...request.body }; + const 
logger = appContextService.getLogger(); + let newData = { ...request.body }; try { + // If we have external callbacks, then process those now before creating the actual datasource + const externalCallbacks = appContextService.getExternalCallbacks('datasourceCreate'); + if (externalCallbacks && externalCallbacks.size > 0) { + let updatedNewData: NewDatasource = newData; + + for (const callback of externalCallbacks) { + try { + // ensure that the returned value by the callback passes schema validation + updatedNewData = CreateDatasourceRequestSchema.body.validate( + await callback(updatedNewData) + ); + } catch (error) { + // Log the error, but keep going and process the other callbacks + logger.error('An external registered [datasourceCreate] callback failed when executed'); + logger.error(error); + } + } + + // The type `NewDatasource` and the `DatasourceBaseSchema` are incompatible. + // `NewDatasrouce` defines `namespace` as optional string, which means that `undefined` is a + // valid value, however, the schema defines it as string with a minimum length of 1. + // Here, we need to cast the value back to the schema type and ignore the TS error. + // @ts-ignore + newData = updatedNewData as typeof CreateDatasourceRequestSchema.body; + } + // Make sure the datasource package is installed - if (request.body.package?.name) { + if (newData.package?.name) { await ensureInstalledPackage({ savedObjectsClient: soClient, - pkgName: request.body.package.name, + pkgName: newData.package.name, callCluster, }); const pkgInfo = await getPackageInfo({ savedObjectsClient: soClient, - pkgName: request.body.package.name, - pkgVersion: request.body.package.version, + pkgName: newData.package.name, + pkgVersion: newData.package.version, }); newData.inputs = (await datasourceService.assignPackageStream( pkgInfo, - request.body.inputs + newData.inputs )) as TypeOf['inputs']; } @@ -103,6 +131,7 @@ export const createDatasourceHandler: RequestHandler< body, }); } catch (e) { + logger.error(e); return response.customError({ statusCode: 500, body: { message: e.message }, diff --git a/x-pack/plugins/ingest_manager/server/services/agents/acks.ts b/x-pack/plugins/ingest_manager/server/services/agents/acks.ts index 81ba9754e8aa..a1b48a879bb8 100644 --- a/x-pack/plugins/ingest_manager/server/services/agents/acks.ts +++ b/x-pack/plugins/ingest_manager/server/services/agents/acks.ts @@ -140,9 +140,9 @@ export interface AcksService { actionIds: AgentEvent[] ) => Promise; - getAgentByAccessAPIKeyId: ( + authenticateAgentWithAccessToken: ( soClient: SavedObjectsClientContract, - accessAPIKeyId: string + request: KibanaRequest ) => Promise; getSavedObjectsClientContract: (kibanaRequest: KibanaRequest) => SavedObjectsClientContract; diff --git a/x-pack/plugins/ingest_manager/server/services/agents/authenticate.test.ts b/x-pack/plugins/ingest_manager/server/services/agents/authenticate.test.ts new file mode 100644 index 000000000000..b56ca4ca8cc1 --- /dev/null +++ b/x-pack/plugins/ingest_manager/server/services/agents/authenticate.test.ts @@ -0,0 +1,154 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import { KibanaRequest } from 'kibana/server'; +import { savedObjectsClientMock } from 'src/core/server/mocks'; + +import { authenticateAgentWithAccessToken } from './authenticate'; + +describe('test agent autenticate services', () => { + it('should succeed with a valid API key and an active agent', async () => { + const mockSavedObjectsClient = savedObjectsClientMock.create(); + mockSavedObjectsClient.find.mockReturnValue( + Promise.resolve({ + page: 1, + per_page: 100, + total: 1, + saved_objects: [ + { + id: 'agent1', + type: 'agent', + references: [], + score: 0, + attributes: { + active: true, + access_api_key_id: 'pedTuHIBTEDt93wW0Fhr', + }, + }, + ], + }) + ); + await authenticateAgentWithAccessToken(mockSavedObjectsClient, { + auth: { isAuthenticated: true }, + headers: { + authorization: 'ApiKey cGVkVHVISUJURUR0OTN3VzBGaHI6TnU1U0JtbHJSeC12Rm9qQWpoSHlUZw==', + }, + } as KibanaRequest); + }); + + it('should throw if the request is not authenticated', async () => { + const mockSavedObjectsClient = savedObjectsClientMock.create(); + mockSavedObjectsClient.find.mockReturnValue( + Promise.resolve({ + page: 1, + per_page: 100, + total: 1, + saved_objects: [ + { + id: 'agent1', + type: 'agent', + references: [], + score: 0, + attributes: { + active: true, + access_api_key_id: 'pedTuHIBTEDt93wW0Fhr', + }, + }, + ], + }) + ); + expect( + authenticateAgentWithAccessToken(mockSavedObjectsClient, { + auth: { isAuthenticated: false }, + headers: { + authorization: 'ApiKey cGVkVHVISUJURUR0OTN3VzBGaHI6TnU1U0JtbHJSeC12Rm9qQWpoSHlUZw==', + }, + } as KibanaRequest) + ).rejects.toThrow(/Request not authenticated/); + }); + + it('should throw if the ApiKey headers is malformed', async () => { + const mockSavedObjectsClient = savedObjectsClientMock.create(); + mockSavedObjectsClient.find.mockReturnValue( + Promise.resolve({ + page: 1, + per_page: 100, + total: 1, + saved_objects: [ + { + id: 'agent1', + type: 'agent', + references: [], + score: 0, + attributes: { + active: false, + access_api_key_id: 'pedTuHIBTEDt93wW0Fhr', + }, + }, + ], + }) + ); + expect( + authenticateAgentWithAccessToken(mockSavedObjectsClient, { + auth: { isAuthenticated: true }, + headers: { + authorization: 'aaaa', + }, + } as KibanaRequest) + ).rejects.toThrow(/Authorization header is malformed/); + }); + + it('should throw if the agent is not active', async () => { + const mockSavedObjectsClient = savedObjectsClientMock.create(); + mockSavedObjectsClient.find.mockReturnValue( + Promise.resolve({ + page: 1, + per_page: 100, + total: 1, + saved_objects: [ + { + id: 'agent1', + type: 'agent', + references: [], + score: 0, + attributes: { + active: false, + access_api_key_id: 'pedTuHIBTEDt93wW0Fhr', + }, + }, + ], + }) + ); + expect( + authenticateAgentWithAccessToken(mockSavedObjectsClient, { + auth: { isAuthenticated: true }, + headers: { + authorization: 'ApiKey cGVkVHVISUJURUR0OTN3VzBGaHI6TnU1U0JtbHJSeC12Rm9qQWpoSHlUZw==', + }, + } as KibanaRequest) + ).rejects.toThrow(/Agent inactive/); + }); + + it('should throw if there is no agent matching the API key', async () => { + const mockSavedObjectsClient = savedObjectsClientMock.create(); + mockSavedObjectsClient.find.mockReturnValue( + Promise.resolve({ + page: 1, + per_page: 100, + total: 1, + saved_objects: [], + }) + ); + expect( + authenticateAgentWithAccessToken(mockSavedObjectsClient, { + auth: { isAuthenticated: true }, + headers: { + authorization: 'ApiKey cGVkVHVISUJURUR0OTN3VzBGaHI6TnU1U0JtbHJSeC12Rm9qQWpoSHlUZw==', + }, + } as KibanaRequest) + 
).rejects.toThrow(/Agent not found/); + }); +}); diff --git a/x-pack/plugins/ingest_manager/server/services/agents/authenticate.ts b/x-pack/plugins/ingest_manager/server/services/agents/authenticate.ts new file mode 100644 index 000000000000..2515a02da4e7 --- /dev/null +++ b/x-pack/plugins/ingest_manager/server/services/agents/authenticate.ts @@ -0,0 +1,30 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import Boom from 'boom'; +import { KibanaRequest, SavedObjectsClientContract } from 'src/core/server'; +import { Agent } from '../../types'; +import * as APIKeyService from '../api_keys'; +import { getAgentByAccessAPIKeyId } from './crud'; + +export async function authenticateAgentWithAccessToken( + soClient: SavedObjectsClientContract, + request: KibanaRequest +): Promise { + if (!request.auth.isAuthenticated) { + throw Boom.unauthorized('Request not authenticated'); + } + let res: { apiKey: string; apiKeyId: string }; + try { + res = APIKeyService.parseApiKeyFromHeaders(request.headers); + } catch (err) { + throw Boom.unauthorized(err.message); + } + + const agent = await getAgentByAccessAPIKeyId(soClient, res.apiKeyId); + + return agent; +} diff --git a/x-pack/plugins/ingest_manager/server/services/agents/index.ts b/x-pack/plugins/ingest_manager/server/services/agents/index.ts index 257091af0ebd..400c099af4e9 100644 --- a/x-pack/plugins/ingest_manager/server/services/agents/index.ts +++ b/x-pack/plugins/ingest_manager/server/services/agents/index.ts @@ -14,3 +14,4 @@ export * from './crud'; export * from './update'; export * from './actions'; export * from './reassign'; +export * from './authenticate'; diff --git a/x-pack/plugins/ingest_manager/server/services/app_context.ts b/x-pack/plugins/ingest_manager/server/services/app_context.ts index 5ed6f7c5e54d..4d109b73d12d 100644 --- a/x-pack/plugins/ingest_manager/server/services/app_context.ts +++ b/x-pack/plugins/ingest_manager/server/services/app_context.ts @@ -12,7 +12,7 @@ import { } from '../../../encrypted_saved_objects/server'; import { SecurityPluginSetup } from '../../../security/server'; import { IngestManagerConfigType } from '../../common'; -import { IngestManagerAppContext } from '../plugin'; +import { ExternalCallback, ExternalCallbacksStorage, IngestManagerAppContext } from '../plugin'; import { CloudSetup } from '../../../cloud/server'; class AppContextService { @@ -27,6 +27,7 @@ class AppContextService { private cloud?: CloudSetup; private logger: Logger | undefined; private httpSetup?: HttpServiceSetup; + private externalCallbacks: ExternalCallbacksStorage = new Map(); public async start(appContext: IngestManagerAppContext) { this.encryptedSavedObjects = appContext.encryptedSavedObjectsStart?.getClient(); @@ -47,7 +48,9 @@ class AppContextService { } } - public stop() {} + public stop() { + this.externalCallbacks.clear(); + } public getEncryptedSavedObjects() { if (!this.encryptedSavedObjects) { @@ -121,6 +124,19 @@ class AppContextService { } return this.kibanaVersion; } + + public addExternalCallback(type: ExternalCallback[0], callback: ExternalCallback[1]) { + if (!this.externalCallbacks.has(type)) { + this.externalCallbacks.set(type, new Set()); + } + this.externalCallbacks.get(type)!.add(callback); + } + + public getExternalCallbacks(type: ExternalCallback[0]) { + if (this.externalCallbacks) { + return 
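The new `authenticateAgentWithAccessToken` helper above bundles the request-authentication check, ApiKey header parsing, and the active-agent lookup into one call that throws a Boom error for each failure mode covered by the tests. A hypothetical consumer is sketched below; the route name, import paths, and response body are illustrative, not the plugin's actual route code:

```ts
// Hypothetical route handler showing how authenticateAgentWithAccessToken
// centralises the agent checks; wiring and response shape are illustrative only.
import { RequestHandler } from 'kibana/server';
import { authenticateAgentWithAccessToken } from '../../services/agents';

export const postAgentAcksHandler: RequestHandler = async (context, request, response) => {
  const soClient = context.core.savedObjects.client;
  try {
    // Throws Boom.unauthorized for unauthenticated requests, malformed ApiKey headers,
    // inactive agents, or access API key ids with no matching agent.
    const agent = await authenticateAgentWithAccessToken(soClient, request);
    return response.ok({ body: { action: 'acks', success: true, agentId: agent.id } });
  } catch (error) {
    const isBoom = Boolean(error.isBoom);
    return response.customError({
      statusCode: isBoom ? error.output.statusCode : 500,
      body: { message: error.message },
    });
  }
};
```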
this.externalCallbacks.get(type); + } + } } export const appContextService = new AppContextService(); diff --git a/x-pack/plugins/ingest_manager/server/services/datasource.ts b/x-pack/plugins/ingest_manager/server/services/datasource.ts index 3ad94ea8191d..f3f460d2a742 100644 --- a/x-pack/plugins/ingest_manager/server/services/datasource.ts +++ b/x-pack/plugins/ingest_manager/server/services/datasource.ts @@ -307,4 +307,5 @@ async function _assignPackageStreamToStream( return { ...stream }; } +export type DatasourceServiceInterface = DatasourceService; export const datasourceService = new DatasourceService(); diff --git a/x-pack/plugins/ingest_manager/server/services/epm/packages/get.ts b/x-pack/plugins/ingest_manager/server/services/epm/packages/get.ts index 7d5e6d6e8838..a261eec899d7 100644 --- a/x-pack/plugins/ingest_manager/server/services/epm/packages/get.ts +++ b/x-pack/plugins/ingest_manager/server/services/epm/packages/get.ts @@ -8,7 +8,7 @@ import { SavedObjectsClientContract } from 'src/core/server'; import { PACKAGES_SAVED_OBJECT_TYPE } from '../../../constants'; import { Installation, InstallationStatus, PackageInfo, KibanaAssetType } from '../../../types'; import * as Registry from '../registry'; -import { createInstallableFrom } from './index'; +import { createInstallableFrom, isRequiredPackage } from './index'; export { fetchFile as getFile, SearchParams } from '../registry'; @@ -79,10 +79,7 @@ export async function getPackageInfo(options: { getInstallationObject({ savedObjectsClient, pkgName }), Registry.fetchFindLatestPackage(pkgName), Registry.getArchiveInfo(pkgName, pkgVersion), - ] as const); - // adding `as const` due to regression in TS 3.7.2 - // see https://github.com/microsoft/TypeScript/issues/34925#issuecomment-550021453 - // and https://github.com/microsoft/TypeScript/pull/33707#issuecomment-550718523 + ]); // add properties that aren't (or aren't yet) on Registry response const updated = { @@ -90,6 +87,7 @@ export async function getPackageInfo(options: { latestVersion: latestPackage.version, title: item.title || nameAsTitle(item.name), assets: Registry.groupPathsByService(assets || []), + removable: !isRequiredPackage(pkgName), }; return createInstallableFrom(updated, savedObject); } diff --git a/x-pack/plugins/ingest_manager/server/services/epm/packages/index.ts b/x-pack/plugins/ingest_manager/server/services/epm/packages/index.ts index d49e0e661440..b79f9178ad6a 100644 --- a/x-pack/plugins/ingest_manager/server/services/epm/packages/index.ts +++ b/x-pack/plugins/ingest_manager/server/services/epm/packages/index.ts @@ -26,6 +26,16 @@ export { export { installKibanaAssets, installPackage, ensureInstalledPackage } from './install'; export { removeInstallation } from './remove'; +type RequiredPackage = 'system' | 'endpoint'; +const requiredPackages: Record = { + system: true, + endpoint: true, +}; + +export function isRequiredPackage(value: string): value is RequiredPackage { + return value in requiredPackages; +} + export class PackageNotInstalledError extends Error { constructor(pkgkey: string) { super(`${pkgkey} is not installed`); diff --git a/x-pack/plugins/ingest_manager/server/services/epm/packages/install.ts b/x-pack/plugins/ingest_manager/server/services/epm/packages/install.ts index 736711f9152e..910283549abd 100644 --- a/x-pack/plugins/ingest_manager/server/services/epm/packages/install.ts +++ b/x-pack/plugins/ingest_manager/server/services/epm/packages/install.ts @@ -19,7 +19,7 @@ import { import { installIndexPatterns } from 
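With the `isRequiredPackage` guard above, `removable` is now computed on the server for both `getPackageInfo` and `installPackage` instead of being taken from the registry response. A self-contained sketch of how the guard drives that flag:

```ts
// Stand-alone version of the guard introduced in epm/packages/index.ts.
type RequiredPackage = 'system' | 'endpoint';

const requiredPackages: Record<RequiredPackage, boolean> = {
  system: true,
  endpoint: true,
};

function isRequiredPackage(value: string): value is RequiredPackage {
  return value in requiredPackages;
}

// Anything the guard recognises is never reported as removable.
for (const pkgName of ['system', 'endpoint', 'nginx']) {
  const removable = !isRequiredPackage(pkgName);
  console.log(`${pkgName}: removable=${removable}`); // system/endpoint -> false, nginx -> true
}
```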
'../kibana/index_pattern/install'; import * as Registry from '../registry'; import { getObject } from './get_objects'; -import { getInstallation, getInstallationObject } from './index'; +import { getInstallation, getInstallationObject, isRequiredPackage } from './index'; import { installTemplates } from '../elasticsearch/template/install'; import { generateESIndexPatterns } from '../elasticsearch/template/template'; import { installPipelines } from '../elasticsearch/ingest_pipeline/install'; @@ -104,7 +104,8 @@ export async function installPackage(options: { throw Boom.badRequest('Cannot install or update to an out-of-date package'); const reinstall = pkgVersion === installedPkg?.attributes.version; - const { internal = false, removable = true } = registryPackageInfo; + const removable = !isRequiredPackage(pkgName); + const { internal = false } = registryPackageInfo; // delete the previous version's installation's SO kibana assets before installing new ones // in case some assets were removed in the new version diff --git a/x-pack/plugins/lists/README.md b/x-pack/plugins/lists/README.md index cdd7813792fc..5c97107cf228 100644 --- a/x-pack/plugins/lists/README.md +++ b/x-pack/plugins/lists/README.md @@ -157,12 +157,14 @@ And you can attach exception list items like so: { "field": "actingProcess.file.signer", "operator": "included", - "match": "Elastic, N.V." + "type": "match", + "value": "Elastic, N.V." }, { "field": "event.category", "operator": "included", - "match_any": [ + "type": "match_any", + "value": [ "process", "malware" ] diff --git a/x-pack/plugins/lists/common/constants.mock.ts b/x-pack/plugins/lists/common/constants.mock.ts index 24cfe440bd7d..185de02d555b 100644 --- a/x-pack/plugins/lists/common/constants.mock.ts +++ b/x-pack/plugins/lists/common/constants.mock.ts @@ -46,10 +46,8 @@ export const EXISTS = 'exists'; export const NESTED = 'nested'; export const ENTRIES: EntriesArray = [ { - entries: [ - { field: 'some.not.nested.field', operator: 'included', type: 'match', value: 'some value' }, - ], - field: 'some.field', + entries: [{ field: 'nested.field', operator: 'included', type: 'match', value: 'some value' }], + field: 'some.parentField', type: 'nested', }, { field: 'some.not.nested.field', operator: 'included', type: 'match', value: 'some value' }, diff --git a/x-pack/plugins/lists/common/schemas/types/default_entries_array.test.ts b/x-pack/plugins/lists/common/schemas/types/default_entries_array.test.ts index 9e615528ba77..e7910be6bf4b 100644 --- a/x-pack/plugins/lists/common/schemas/types/default_entries_array.test.ts +++ b/x-pack/plugins/lists/common/schemas/types/default_entries_array.test.ts @@ -17,7 +17,8 @@ import { getEntriesArrayMock, getEntryMatchMock, getEntryNestedMock } from './en // it checks against every item in that union. Since entries consist of 5 // different entry types, it returns 5 of these. To make more readable, // extracted here. 
-const returnedSchemaError = `"Array<({| field: string, operator: "excluded" | "included", type: "match", value: string |} | {| field: string, operator: "excluded" | "included", type: "match_any", value: DefaultStringArray |} | {| field: string, operator: "excluded" | "included", type: "list", value: DefaultStringArray |} | {| field: string, operator: "excluded" | "included", type: "exists" |} | {| entries: Array<({| field: string, operator: "excluded" | "included", type: "match", value: string |} | {| field: string, operator: "excluded" | "included", type: "match_any", value: DefaultStringArray |} | {| field: string, operator: "excluded" | "included", type: "list", value: DefaultStringArray |} | {| field: string, operator: "excluded" | "included", type: "exists" |})>, field: string, type: "nested" |})>"`; +const returnedSchemaError = + '"Array<({| field: string, operator: "excluded" | "included", type: "match", value: string |} | {| field: string, operator: "excluded" | "included", type: "match_any", value: DefaultStringArray |} | {| field: string, list: {| id: string, type: "ip" | "keyword" |}, operator: "excluded" | "included", type: "list" |} | {| field: string, operator: "excluded" | "included", type: "exists" |} | {| entries: Array<{| field: string, operator: "excluded" | "included", type: "match", value: string |}>, field: string, type: "nested" |})>"'; describe('default_entries_array', () => { test('it should validate an empty array', () => { diff --git a/x-pack/plugins/lists/common/schemas/types/default_namespace.test.ts b/x-pack/plugins/lists/common/schemas/types/default_namespace.test.ts new file mode 100644 index 000000000000..152f85233aa1 --- /dev/null +++ b/x-pack/plugins/lists/common/schemas/types/default_namespace.test.ts @@ -0,0 +1,61 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import { pipe } from 'fp-ts/lib/pipeable'; +import { left } from 'fp-ts/lib/Either'; + +import { foldLeftRight, getPaths } from '../../siem_common_deps'; + +import { DefaultNamespace } from './default_namespace'; + +describe('default_namespace', () => { + test('it should validate "single"', () => { + const payload = 'single'; + const decoded = DefaultNamespace.decode(payload); + const message = pipe(decoded, foldLeftRight); + + expect(getPaths(left(message.errors))).toEqual([]); + expect(message.schema).toEqual(payload); + }); + + test('it should validate "agnostic"', () => { + const payload = 'agnostic'; + const decoded = DefaultNamespace.decode(payload); + const message = pipe(decoded, foldLeftRight); + + expect(getPaths(left(message.errors))).toEqual([]); + expect(message.schema).toEqual(payload); + }); + + test('it defaults to "single" if "undefined"', () => { + const payload = undefined; + const decoded = DefaultNamespace.decode(payload); + const message = pipe(decoded, foldLeftRight); + + expect(getPaths(left(message.errors))).toEqual([]); + expect(message.schema).toEqual('single'); + }); + + test('it defaults to "single" if "null"', () => { + const payload = null; + const decoded = DefaultNamespace.decode(payload); + const message = pipe(decoded, foldLeftRight); + + expect(getPaths(left(message.errors))).toEqual([]); + expect(message.schema).toEqual('single'); + }); + + test('it should NOT validate if not "single" or "agnostic"', () => { + const payload = 'something else'; + const decoded = DefaultNamespace.decode(payload); + const message = pipe(decoded, foldLeftRight); + + expect(getPaths(left(message.errors))).toEqual([ + `Invalid value "something else" supplied to "DefaultNamespace"`, + ]); + expect(message.schema).toEqual({}); + }); +}); diff --git a/x-pack/plugins/lists/common/schemas/types/default_namespace.ts b/x-pack/plugins/lists/common/schemas/types/default_namespace.ts index c98cb8d2bba7..8f8f8d105b62 100644 --- a/x-pack/plugins/lists/common/schemas/types/default_namespace.ts +++ b/x-pack/plugins/lists/common/schemas/types/default_namespace.ts @@ -7,7 +7,7 @@ import * as t from 'io-ts'; import { Either } from 'fp-ts/lib/Either'; -const namespaceType = t.keyof({ agnostic: null, single: null }); +export const namespaceType = t.keyof({ agnostic: null, single: null }); type NamespaceType = t.TypeOf; diff --git a/x-pack/plugins/lists/common/schemas/types/entries.mock.ts b/x-pack/plugins/lists/common/schemas/types/entries.mock.ts index 1926cb09db11..8af18c970c6a 100644 --- a/x-pack/plugins/lists/common/schemas/types/entries.mock.ts +++ b/x-pack/plugins/lists/common/schemas/types/entries.mock.ts @@ -9,10 +9,12 @@ import { EXISTS, FIELD, LIST, + LIST_ID, MATCH, MATCH_ANY, NESTED, OPERATOR, + TYPE, } from '../../constants.mock'; import { @@ -40,9 +42,9 @@ export const getEntryMatchAnyMock = (): EntryMatchAny => ({ export const getEntryListMock = (): EntryList => ({ field: FIELD, + list: { id: LIST_ID, type: TYPE }, operator: OPERATOR, type: LIST, - value: [ENTRY_VALUE], }); export const getEntryExistsMock = (): EntryExists => ({ @@ -52,7 +54,7 @@ export const getEntryExistsMock = (): EntryExists => ({ }); export const getEntryNestedMock = (): EntryNested => ({ - entries: [getEntryMatchMock(), getEntryExistsMock()], + entries: [getEntryMatchMock(), getEntryMatchMock()], field: FIELD, type: NESTED, }); diff --git a/x-pack/plugins/lists/common/schemas/types/entries.test.ts b/x-pack/plugins/lists/common/schemas/types/entries.test.ts index a13d4c0347e4..01f82f12f2b2 100644 --- 
a/x-pack/plugins/lists/common/schemas/types/entries.test.ts +++ b/x-pack/plugins/lists/common/schemas/types/entries.test.ts @@ -251,16 +251,16 @@ describe('Entries', () => { expect(message.schema).toEqual(payload); }); - test('it should not validate when "value" is not string array', () => { - const payload: Omit & { value: string } = { + test('it should not validate when "list" is not expected value', () => { + const payload: Omit & { list: string } = { ...getEntryListMock(), - value: 'someListId', + list: 'someListId', }; const decoded = entriesList.decode(payload); const message = pipe(decoded, foldLeftRight); expect(getPaths(left(message.errors))).toEqual([ - 'Invalid value "someListId" supplied to "value"', + 'Invalid value "someListId" supplied to "list"', ]); expect(message.schema).toEqual({}); }); @@ -338,6 +338,20 @@ describe('Entries', () => { expect(message.schema).toEqual({}); }); + test('it should NOT validate when "entries" contains an entry item that is not type "match"', () => { + const payload: Omit & { + entries: EntryMatchAny[]; + } = { ...getEntryNestedMock(), entries: [getEntryMatchAnyMock()] }; + const decoded = entriesNested.decode(payload); + const message = pipe(decoded, foldLeftRight); + + expect(getPaths(left(message.errors))).toEqual([ + 'Invalid value "match_any" supplied to "entries,type"', + 'Invalid value "["some host name"]" supplied to "entries,value"', + ]); + expect(message.schema).toEqual({}); + }); + test('it should strip out extra keys', () => { const payload: EntryNested & { extraKey?: string; diff --git a/x-pack/plugins/lists/common/schemas/types/entries.ts b/x-pack/plugins/lists/common/schemas/types/entries.ts index e3625dbe0833..c379f77b862c 100644 --- a/x-pack/plugins/lists/common/schemas/types/entries.ts +++ b/x-pack/plugins/lists/common/schemas/types/entries.ts @@ -8,7 +8,7 @@ import * as t from 'io-ts'; -import { operator } from '../common/schemas'; +import { operator, type } from '../common/schemas'; import { DefaultStringArray } from '../../siem_common_deps'; export const entriesMatch = t.exact( @@ -34,9 +34,9 @@ export type EntryMatchAny = t.TypeOf; export const entriesList = t.exact( t.type({ field: t.string, + list: t.exact(t.type({ id: t.string, type })), operator, type: t.keyof({ list: null }), - value: DefaultStringArray, }) ); export type EntryList = t.TypeOf; @@ -52,7 +52,7 @@ export type EntryExists = t.TypeOf; export const entriesNested = t.exact( t.type({ - entries: t.array(t.union([entriesMatch, entriesMatchAny, entriesList, entriesExists])), + entries: t.array(entriesMatch), field: t.string, type: t.keyof({ nested: null }), }) diff --git a/x-pack/plugins/lists/common/schemas/types/index.ts b/x-pack/plugins/lists/common/schemas/types/index.ts index 8e4b28b31d95..97f2b0f59a5f 100644 --- a/x-pack/plugins/lists/common/schemas/types/index.ts +++ b/x-pack/plugins/lists/common/schemas/types/index.ts @@ -5,5 +5,6 @@ */ export * from './default_comments_array'; export * from './default_entries_array'; +export * from './default_namespace'; export * from './comments'; export * from './entries'; diff --git a/x-pack/plugins/lists/server/index.ts b/x-pack/plugins/lists/server/index.ts index 33f58ba65d3c..31f22108028a 100644 --- a/x-pack/plugins/lists/server/index.ts +++ b/x-pack/plugins/lists/server/index.ts @@ -11,6 +11,7 @@ import { ListPlugin } from './plugin'; // exporting these since its required at top level in siem plugin export { ListClient } from './services/lists/list_client'; +export { ExceptionListClient } from 
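The reshaped `entriesList` codec above replaces the old `value: string[]` field with a reference to a value list (`list: { id, type }`), and `entriesNested` now only accepts `match` entries. A small decode sketch, using local stand-ins for the shared `operator` and list `type` codecs:

```ts
import * as t from 'io-ts';
import { isRight } from 'fp-ts/lib/Either';

// Local stand-ins for the shared codecs; the real ones live in common/schemas.
const operator = t.keyof({ excluded: null, included: null });
const listType = t.keyof({ ip: null, keyword: null });

// Mirrors the updated entriesList codec: the entry now references a list, not a string array.
const entriesList = t.exact(
  t.type({
    field: t.string,
    list: t.exact(t.type({ id: t.string, type: listType })),
    operator,
    type: t.keyof({ list: null }),
  })
);

const valid = entriesList.decode({
  field: 'source.ip',
  list: { id: 'list-ip', type: 'ip' },
  operator: 'excluded',
  type: 'list',
});
console.log(isRight(valid)); // true

// The old shape (value: string[]) no longer decodes because "list" is missing.
const stale = entriesList.decode({
  field: 'source.ip',
  value: ['127.0.0.1'],
  operator: 'excluded',
  type: 'list',
});
console.log(isRight(stale)); // false
```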
'./services/exception_lists/exception_list_client'; export { ListPluginSetup } from './types'; export const config = { schema: ConfigSchema }; diff --git a/x-pack/plugins/lists/server/saved_objects/exception_list.ts b/x-pack/plugins/lists/server/saved_objects/exception_list.ts index 10f9b1f4383f..57bc63e6f7e3 100644 --- a/x-pack/plugins/lists/server/saved_objects/exception_list.ts +++ b/x-pack/plugins/lists/server/saved_objects/exception_list.ts @@ -105,6 +105,16 @@ export const exceptionListItemMapping: SavedObjectsType['mappings'] = { field: { type: 'keyword', }, + list: { + properties: { + id: { + type: 'keyword', + }, + type: { + type: 'keyword', + }, + }, + }, operator: { type: 'keyword', }, diff --git a/x-pack/plugins/lists/server/scripts/exception_lists/new/exception_list_item_with_list.json b/x-pack/plugins/lists/server/scripts/exception_lists/new/exception_list_item_with_list.json new file mode 100644 index 000000000000..e1dab72c1c7f --- /dev/null +++ b/x-pack/plugins/lists/server/scripts/exception_lists/new/exception_list_item_with_list.json @@ -0,0 +1,24 @@ +{ + "list_id": "endpoint_list", + "item_id": "endpoint_list_item_lg_val_list", + "_tags": ["endpoint", "process", "malware", "os:windows"], + "tags": ["user added string for a tag", "malware"], + "type": "simple", + "description": "This is a sample exception list item with a large value list included", + "name": "Sample Endpoint Exception List Item with large value list", + "comments": [], + "entries": [ + { + "field": "event.module", + "operator": "excluded", + "type": "match_any", + "value": ["zeek"] + }, + { + "field": "source.ip", + "operator": "excluded", + "type": "list", + "list": { "id": "list-ip", "type": "ip" } + } + ] +} diff --git a/x-pack/plugins/lists/server/scripts/lists/new/list_ip_item.json b/x-pack/plugins/lists/server/scripts/lists/new/list_ip_item.json index 1516fa5057e5..1ece2268f3cf 100644 --- a/x-pack/plugins/lists/server/scripts/lists/new/list_ip_item.json +++ b/x-pack/plugins/lists/server/scripts/lists/new/list_ip_item.json @@ -1,5 +1,5 @@ { "id": "hand_inserted_item_id", "list_id": "list-ip", - "value": "127.0.0.1" + "value": "10.4.2.140" } diff --git a/x-pack/plugins/maps/public/connected_components/layer_panel/__snapshots__/view.test.js.snap b/x-pack/plugins/maps/public/connected_components/layer_panel/__snapshots__/view.test.js.snap index a9216e481776..1620e3058be6 100644 --- a/x-pack/plugins/maps/public/connected_components/layer_panel/__snapshots__/view.test.js.snap +++ b/x-pack/plugins/maps/public/connected_components/layer_panel/__snapshots__/view.test.js.snap @@ -32,17 +32,9 @@ exports[`LayerPanel is rendered 1`] = ` - - - + { - dispatch(fitToLayerExtent(layerId)); - }, updateSourceProp: (id, propName, value, newLayerType) => dispatch(updateSourceProp(id, propName, value, newLayerType)), }; diff --git a/x-pack/plugins/maps/public/connected_components/layer_panel/view.js b/x-pack/plugins/maps/public/connected_components/layer_panel/view.js index f34c402a4d41..14252dcfc067 100644 --- a/x-pack/plugins/maps/public/connected_components/layer_panel/view.js +++ b/x-pack/plugins/maps/public/connected_components/layer_panel/view.js @@ -13,7 +13,7 @@ import { LayerErrors } from './layer_errors'; import { LayerSettings } from './layer_settings'; import { StyleSettings } from './style_settings'; import { - EuiButtonIcon, + EuiIcon, EuiFlexItem, EuiTitle, EuiPanel, @@ -27,7 +27,6 @@ import { } from '@elastic/eui'; import { i18n } from '@kbn/i18n'; -import { FormattedMessage } from '@kbn/i18n/react'; 
import { KibanaContextProvider } from '../../../../../../src/plugins/kibana_react/public'; import { Storage } from '../../../../../../src/plugins/kibana_utils/public'; @@ -175,18 +174,7 @@ export class LayerPanel extends React.Component { - - - + diff --git a/x-pack/plugins/ml/public/application/data_frame_analytics/common/analytics.ts b/x-pack/plugins/ml/public/application/data_frame_analytics/common/analytics.ts index 16d888a9da27..ac455120dca8 100644 --- a/x-pack/plugins/ml/public/application/data_frame_analytics/common/analytics.ts +++ b/x-pack/plugins/ml/public/application/data_frame_analytics/common/analytics.ts @@ -25,11 +25,18 @@ export enum ANALYSIS_CONFIG_TYPE { } export enum ANALYSIS_ADVANCED_FIELDS { + ETA = 'eta', + FEATURE_BAG_FRACTION = 'feature_bag_fraction', FEATURE_INFLUENCE_THRESHOLD = 'feature_influence_threshold', GAMMA = 'gamma', LAMBDA = 'lambda', MAX_TREES = 'max_trees', + METHOD = 'method', + N_NEIGHBORS = 'n_neighbors', + NUM_TOP_CLASSES = 'num_top_classes', NUM_TOP_FEATURE_IMPORTANCE_VALUES = 'num_top_feature_importance_values', + OUTLIER_FRACTION = 'outlier_fraction', + RANDOMIZE_SEED = 'randomize_seed', } export enum OUTLIER_ANALYSIS_METHOD { diff --git a/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_creation/components/advanced_step/advanced_step_form.tsx b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_creation/components/advanced_step/advanced_step_form.tsx index 8b137ac72361..bc9bb0cce5ae 100644 --- a/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_creation/components/advanced_step/advanced_step_form.tsx +++ b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_creation/components/advanced_step/advanced_step_form.tsx @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -import React, { FC, Fragment, useMemo } from 'react'; +import React, { FC, Fragment, useMemo, useEffect, useState } from 'react'; import { EuiAccordion, EuiFieldNumber, @@ -23,9 +23,11 @@ import { getModelMemoryLimitErrors } from '../../../analytics_management/hooks/u import { ANALYSIS_CONFIG_TYPE, NUM_TOP_FEATURE_IMPORTANCE_VALUES_MIN, + ANALYSIS_ADVANCED_FIELDS, } from '../../../../common/analytics'; import { DEFAULT_MODEL_MEMORY_LIMIT } from '../../../analytics_management/hooks/use_create_analytics_form/state'; import { ANALYTICS_STEPS } from '../../page'; +import { fetchExplainData } from '../shared'; import { ContinueButton } from '../continue_button'; import { OutlierHyperParameters } from './outlier_hyper_parameters'; @@ -33,23 +35,39 @@ export function getNumberValue(value?: number) { return value === undefined ? 
'' : +value; } +export type AdvancedParamErrors = { + [key in ANALYSIS_ADVANCED_FIELDS]?: string; +}; + export const AdvancedStepForm: FC = ({ actions, state, setCurrentStep, }) => { + const [advancedParamErrors, setAdvancedParamErrors] = useState({}); + const [fetchingAdvancedParamErrors, setFetchingAdvancedParamErrors] = useState(false); + const { setFormState } = actions; const { form, isJobCreated } = state; const { computeFeatureInfluence, + eta, + featureBagFraction, featureInfluenceThreshold, + gamma, jobType, + lambda, + maxTrees, + method, modelMemoryLimit, modelMemoryLimitValidationResult, + nNeighbors, numTopClasses, numTopFeatureImportanceValues, numTopFeatureImportanceValuesValid, + outlierFraction, predictionFieldName, + randomizeSeed, } = form; const mmlErrors = useMemo(() => getModelMemoryLimitErrors(modelMemoryLimitValidationResult), [ @@ -61,6 +79,43 @@ export const AdvancedStepForm: FC = ({ const mmlInvalid = modelMemoryLimitValidationResult !== null; + const isStepInvalid = + mmlInvalid || + Object.keys(advancedParamErrors).length > 0 || + fetchingAdvancedParamErrors === true; + + useEffect(() => { + setFetchingAdvancedParamErrors(true); + (async function () { + const { success, errorMessage } = await fetchExplainData(form); + const paramErrors: AdvancedParamErrors = {}; + + if (!success) { + // Check which field is invalid + Object.values(ANALYSIS_ADVANCED_FIELDS).forEach((param) => { + if (errorMessage.includes(`[${param}]`)) { + paramErrors[param] = errorMessage; + } + }); + } + setFetchingAdvancedParamErrors(false); + setAdvancedParamErrors(paramErrors); + })(); + }, [ + eta, + featureBagFraction, + featureInfluenceThreshold, + gamma, + lambda, + maxTrees, + method, + nNeighbors, + numTopClasses, + numTopFeatureImportanceValues, + outlierFraction, + randomizeSeed, + ]); + const outlierDetectionAdvancedConfig = ( @@ -126,6 +181,10 @@ export const AdvancedStepForm: FC = ({ 'The minimum outlier score that a document needs to have in order to calculate its feature influence score. Value range: 0-1. 
Defaults to 0.1.', } )} + isInvalid={ + advancedParamErrors[ANALYSIS_ADVANCED_FIELDS.FEATURE_INFLUENCE_THRESHOLD] !== undefined + } + error={advancedParamErrors[ANALYSIS_ADVANCED_FIELDS.FEATURE_INFLUENCE_THRESHOLD]} > @@ -315,14 +374,24 @@ export const AdvancedStepForm: FC = ({ > {jobType === ANALYSIS_CONFIG_TYPE.OUTLIER_DETECTION && ( - + + )} + {isRegOrClassJob && ( + )} - {isRegOrClassJob && } { setCurrentStep(ANALYTICS_STEPS.DETAILS); }} diff --git a/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_creation/components/advanced_step/hyper_parameters.tsx b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_creation/components/advanced_step/hyper_parameters.tsx index 144a06210600..620e81e30a0c 100644 --- a/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_creation/components/advanced_step/hyper_parameters.tsx +++ b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_creation/components/advanced_step/hyper_parameters.tsx @@ -8,11 +8,16 @@ import React, { FC, Fragment } from 'react'; import { EuiFieldNumber, EuiFlexItem, EuiFormRow } from '@elastic/eui'; import { i18n } from '@kbn/i18n'; import { CreateAnalyticsFormProps } from '../../../analytics_management/hooks/use_create_analytics_form'; -import { getNumberValue } from './advanced_step_form'; +import { AdvancedParamErrors, getNumberValue } from './advanced_step_form'; +import { ANALYSIS_ADVANCED_FIELDS } from '../../../../common/analytics'; const MAX_TREES_LIMIT = 2000; -export const HyperParameters: FC = ({ actions, state }) => { +interface Props extends CreateAnalyticsFormProps { + advancedParamErrors: AdvancedParamErrors; +} + +export const HyperParameters: FC = ({ actions, state, advancedParamErrors }) => { const { setFormState } = actions; const { eta, featureBagFraction, gamma, lambda, maxTrees, randomizeSeed } = state.form; @@ -28,6 +33,8 @@ export const HyperParameters: FC = ({ actions, state } defaultMessage: 'Regularization parameter to prevent overfitting on the training data set. Must be a non negative value.', })} + isInvalid={advancedParamErrors[ANALYSIS_ADVANCED_FIELDS.LAMBDA] !== undefined} + error={advancedParamErrors[ANALYSIS_ADVANCED_FIELDS.LAMBDA]} > = ({ actions, state } helpText={i18n.translate('xpack.ml.dataframe.analytics.create.maxTreesText', { defaultMessage: 'The maximum number of trees the forest is allowed to contain.', })} + isInvalid={advancedParamErrors[ANALYSIS_ADVANCED_FIELDS.MAX_TREES] !== undefined} + error={advancedParamErrors[ANALYSIS_ADVANCED_FIELDS.MAX_TREES]} > = ({ actions, state } defaultMessage: 'Multiplies a linear penalty associated with the size of individual trees in the forest. Must be non-negative value.', })} + isInvalid={advancedParamErrors[ANALYSIS_ADVANCED_FIELDS.GAMMA] !== undefined} + error={advancedParamErrors[ANALYSIS_ADVANCED_FIELDS.GAMMA]} > = ({ actions, state } helpText={i18n.translate('xpack.ml.dataframe.analytics.create.etaText', { defaultMessage: 'The shrinkage applied to the weights. 
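The `useEffect` above attributes a single explain-API failure message back to the offending form control by checking for the parameter name wrapped in square brackets. A self-contained sketch of that attribution step, using a made-up error string and only a subset of the enum values:

```ts
// Simplified version of the error attribution done in AdvancedStepForm's useEffect.
enum ANALYSIS_ADVANCED_FIELDS {
  ETA = 'eta',
  GAMMA = 'gamma',
  LAMBDA = 'lambda',
  MAX_TREES = 'max_trees',
}

type AdvancedParamErrors = { [key in ANALYSIS_ADVANCED_FIELDS]?: string };

function mapErrorToParams(errorMessage: string): AdvancedParamErrors {
  const paramErrors: AdvancedParamErrors = {};
  // Bad hyperparameters come back as "[param_name] must ..." style messages,
  // so a substring check is enough to decide which form row to flag.
  Object.values(ANALYSIS_ADVANCED_FIELDS).forEach((param) => {
    if (errorMessage.includes(`[${param}]`)) {
      paramErrors[param] = errorMessage;
    }
  });
  return paramErrors;
}

// Hypothetical error string for illustration:
const errors = mapErrorToParams('[max_trees] must be a positive integer');
console.log(errors); // { max_trees: '[max_trees] must be a positive integer' }
```

Each flagged parameter then feeds the `isInvalid`/`error` props on its `EuiFormRow`, which is what the wiring above and below this point adds.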
Must be between 0.001 and 1.', })} + isInvalid={advancedParamErrors[ANALYSIS_ADVANCED_FIELDS.ETA] !== undefined} + error={advancedParamErrors[ANALYSIS_ADVANCED_FIELDS.ETA]} > = ({ actions, state } defaultMessage: 'The fraction of features used when selecting a random bag for each candidate split.', })} + isInvalid={ + advancedParamErrors[ANALYSIS_ADVANCED_FIELDS.FEATURE_BAG_FRACTION] !== undefined + } + error={advancedParamErrors[ANALYSIS_ADVANCED_FIELDS.FEATURE_BAG_FRACTION]} > = ({ actions, state } = ({ actions, state }) => { +interface Props extends CreateAnalyticsFormProps { + advancedParamErrors: AdvancedParamErrors; +} + +export const OutlierHyperParameters: FC = ({ actions, state, advancedParamErrors }) => { const { setFormState } = actions; const { method, nNeighbors, outlierFraction, standardizationEnabled } = state.form; @@ -27,6 +31,8 @@ export const OutlierHyperParameters: FC = ({ actions, defaultMessage: 'Sets the method that outlier detection uses. If not set, uses an ensemble of different methods and normalises and combines their individual outlier scores to obtain the overall outlier score. We recommend to use the ensemble method', })} + isInvalid={advancedParamErrors[ANALYSIS_ADVANCED_FIELDS.METHOD] !== undefined} + error={advancedParamErrors[ANALYSIS_ADVANCED_FIELDS.METHOD]} > ({ @@ -51,6 +57,8 @@ export const OutlierHyperParameters: FC = ({ actions, defaultMessage: 'The value for how many nearest neighbors each method of outlier detection will use to calculate its outlier score. When not set, different values will be used for different ensemble members. Must be a positive integer', })} + isInvalid={advancedParamErrors[ANALYSIS_ADVANCED_FIELDS.N_NEIGHBORS] !== undefined} + error={advancedParamErrors[ANALYSIS_ADVANCED_FIELDS.N_NEIGHBORS]} > = ({ actions, defaultMessage: 'Sets the proportion of the data set that is assumed to be outlying prior to outlier detection.', })} + isInvalid={advancedParamErrors[ANALYSIS_ADVANCED_FIELDS.OUTLIER_FRACTION] !== undefined} + error={advancedParamErrors[ANALYSIS_ADVANCED_FIELDS.OUTLIER_FRACTION]} > )} {tableItems.length > 0 && ( - + = ({ const { currentSavedSearch, currentIndexPattern } = mlContext; const { savedSearchQuery, savedSearchQueryStr } = useSavedSearch(); + const [loadingFieldOptions, setLoadingFieldOptions] = useState(false); + const [fieldOptionsFetchFail, setFieldOptionsFetchFail] = useState(false); + const [loadingDepVarOptions, setLoadingDepVarOptions] = useState(false); + const [dependentVariableFetchFail, setDependentVariableFetchFail] = useState(false); + const [dependentVariableOptions, setDependentVariableOptions] = useState< + EuiComboBoxOptionOption[] + >([]); + const [excludesTableItems, setExcludesTableItems] = useState([]); + const [maxDistinctValuesError, setMaxDistinctValuesError] = useState( + undefined + ); + const { setEstimatedModelMemoryLimit, setFormState } = actions; const { estimatedModelMemoryLimit, form, isJobCreated, requestMessages } = state; const firstUpdate = useRef(true); const { dependentVariable, - dependentVariableFetchFail, - dependentVariableOptions, excludes, - excludesTableItems, - fieldOptionsFetchFail, jobConfigQuery, jobConfigQueryString, jobType, - loadingDepVarOptions, - loadingFieldOptions, - maxDistinctValuesError, modelMemoryLimit, previousJobType, requiredFieldsError, @@ -109,30 +120,20 @@ export const ConfigurationStepForm: FC = ({ requiredFieldsError !== undefined; const loadDepVarOptions = async (formState: State['form']) => { - setFormState({ - loadingDepVarOptions: true, - 
maxDistinctValuesError: undefined, - }); + setLoadingDepVarOptions(true); + setMaxDistinctValuesError(undefined); + try { if (currentIndexPattern !== undefined) { - const formStateUpdate: { - loadingDepVarOptions: boolean; - dependentVariableFetchFail: boolean; - dependentVariableOptions: State['form']['dependentVariableOptions']; - dependentVariable?: State['form']['dependentVariable']; - } = { - loadingDepVarOptions: false, - dependentVariableFetchFail: false, - dependentVariableOptions: [] as State['form']['dependentVariableOptions'], - }; - + const depVarOptions = []; + let depVarUpdate = dependentVariable; // Get fields and filter for supported types for job type const { fields } = newJobCapsService; let resetDependentVariable = true; for (const field of fields) { if (shouldAddAsDepVarOption(field, jobType)) { - formStateUpdate.dependentVariableOptions.push({ + depVarOptions.push({ label: field.id, }); @@ -143,13 +144,16 @@ export const ConfigurationStepForm: FC = ({ } if (resetDependentVariable) { - formStateUpdate.dependentVariable = ''; + depVarUpdate = ''; } - - setFormState(formStateUpdate); + setDependentVariableOptions(depVarOptions); + setLoadingDepVarOptions(false); + setDependentVariableFetchFail(false); + setFormState({ dependentVariable: depVarUpdate }); } } catch (e) { - setFormState({ loadingDepVarOptions: false, dependentVariableFetchFail: true }); + setLoadingDepVarOptions(false); + setDependentVariableFetchFail(true); } }; @@ -165,72 +169,48 @@ export const ConfigurationStepForm: FC = ({ // Reset if jobType changes (jobType requires dependent_variable to be set - // which won't be the case if switching from outlier detection) if (jobTypeChanged) { - setFormState({ - loadingFieldOptions: true, - }); + setLoadingFieldOptions(true); } - try { - const jobConfig = getJobConfigFromFormState(form); - delete jobConfig.dest; - delete jobConfig.model_memory_limit; - const resp: DfAnalyticsExplainResponse = await ml.dataFrameAnalytics.explainDataFrameAnalytics( - jobConfig - ); - const expectedMemoryWithoutDisk = resp.memory_estimation?.expected_memory_without_disk; + const { success, expectedMemory, fieldSelection, errorMessage } = await fetchExplainData(form); + if (success) { if (shouldUpdateEstimatedMml) { - setEstimatedModelMemoryLimit(expectedMemoryWithoutDisk); + setEstimatedModelMemoryLimit(expectedMemory); } - const fieldSelection: FieldSelectionItem[] | undefined = resp.field_selection; - - let hasRequiredFields = false; - if (fieldSelection) { - for (let i = 0; i < fieldSelection.length; i++) { - const field = fieldSelection[i]; - if (field.is_included === true && field.is_required === false) { - hasRequiredFields = true; - break; - } - } - } + const hasRequiredFields = fieldSelection.some( + (field) => field.is_included === true && field.is_required === false + ); - // If job type has changed load analysis field options again if (jobTypeChanged) { + setLoadingFieldOptions(false); + setFieldOptionsFetchFail(false); + setMaxDistinctValuesError(undefined); + setExcludesTableItems(fieldSelection ? fieldSelection : []); setFormState({ - ...(shouldUpdateModelMemoryLimit ? { modelMemoryLimit: expectedMemoryWithoutDisk } : {}), - excludesTableItems: fieldSelection ? fieldSelection : [], - loadingFieldOptions: false, - fieldOptionsFetchFail: false, - maxDistinctValuesError: undefined, + ...(shouldUpdateModelMemoryLimit ? { modelMemoryLimit: expectedMemory } : {}), requiredFieldsError: !hasRequiredFields ? 
requiredFieldsErrorText : undefined, }); } else { setFormState({ - ...(shouldUpdateModelMemoryLimit ? { modelMemoryLimit: expectedMemoryWithoutDisk } : {}), + ...(shouldUpdateModelMemoryLimit ? { modelMemoryLimit: expectedMemory } : {}), requiredFieldsError: !hasRequiredFields ? requiredFieldsErrorText : undefined, }); } - } catch (e) { + } else { let maxDistinctValuesErrorMessage; - if ( jobType === ANALYSIS_CONFIG_TYPE.CLASSIFICATION && - e.body && - e.body.message !== undefined && - e.body.message.includes('status_exception') && - (e.body.message.includes('must have at most') || - e.body.message.includes('must have at least')) + errorMessage.includes('status_exception') && + (errorMessage.includes('must have at most') || errorMessage.includes('must have at least')) ) { - maxDistinctValuesErrorMessage = e.body.message; + maxDistinctValuesErrorMessage = errorMessage; } if ( - e.body && - e.body.message !== undefined && - e.body.message.includes('status_exception') && - e.body.message.includes('Unable to estimate memory usage as no documents') + errorMessage.includes('status_exception') && + errorMessage.includes('Unable to estimate memory usage as no documents') ) { toastNotifications.addWarning( i18n.translate('xpack.ml.dataframe.analytics.create.allDocsMissingFieldsErrorMessage', { @@ -241,15 +221,17 @@ export const ConfigurationStepForm: FC = ({ }) ); } + const fallbackModelMemoryLimit = jobType !== undefined ? DEFAULT_MODEL_MEMORY_LIMIT[jobType] : DEFAULT_MODEL_MEMORY_LIMIT.outlier_detection; + setEstimatedModelMemoryLimit(fallbackModelMemoryLimit); + setLoadingFieldOptions(false); + setFieldOptionsFetchFail(true); + setMaxDistinctValuesError(maxDistinctValuesErrorMessage); setFormState({ - fieldOptionsFetchFail: true, - maxDistinctValuesError: maxDistinctValuesErrorMessage, - loadingFieldOptions: false, ...(shouldUpdateModelMemoryLimit ? { modelMemoryLimit: fallbackModelMemoryLimit } : {}), }); } diff --git a/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_creation/components/shared/fetch_explain_data.ts b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_creation/components/shared/fetch_explain_data.ts new file mode 100644 index 000000000000..655a5e6a5930 --- /dev/null +++ b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_creation/components/shared/fetch_explain_data.ts @@ -0,0 +1,48 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import { ml } from '../../../../../services/ml_api_service'; +import { extractErrorMessage } from '../../../../../../../common/util/errors'; +import { DfAnalyticsExplainResponse, FieldSelectionItem } from '../../../../common/analytics'; +import { + getJobConfigFromFormState, + State, +} from '../../../analytics_management/hooks/use_create_analytics_form/state'; + +export interface FetchExplainDataReturnType { + success: boolean; + expectedMemory: string; + fieldSelection: FieldSelectionItem[]; + errorMessage: string; +} + +export const fetchExplainData = async (formState: State['form']) => { + const jobConfig = getJobConfigFromFormState(formState); + let errorMessage = ''; + let success = true; + let expectedMemory = ''; + let fieldSelection: FieldSelectionItem[] = []; + + try { + delete jobConfig.dest; + delete jobConfig.model_memory_limit; + const resp: DfAnalyticsExplainResponse = await ml.dataFrameAnalytics.explainDataFrameAnalytics( + jobConfig + ); + expectedMemory = resp.memory_estimation?.expected_memory_without_disk; + fieldSelection = resp.field_selection || []; + } catch (error) { + success = false; + errorMessage = extractErrorMessage(error); + } + + return { + success, + expectedMemory, + fieldSelection, + errorMessage, + }; +}; diff --git a/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_creation/components/shared/index.ts b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_creation/components/shared/index.ts index ed3f9ef2e938..45545cf98e0d 100644 --- a/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_creation/components/shared/index.ts +++ b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_creation/components/shared/index.ts @@ -5,3 +5,4 @@ */ export { Messages } from './messages'; +export { fetchExplainData } from './fetch_explain_data'; diff --git a/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_creation/page.tsx b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_creation/page.tsx index 966ef33a1ac8..ff718277a88a 100644 --- a/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_creation/page.tsx +++ b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_creation/page.tsx @@ -144,7 +144,7 @@ export const Page: FC = ({ jobId }) => { - +
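Both wizard steps now go through the shared `fetchExplainData` helper above instead of calling `explainDataFrameAnalytics` and unpacking errors separately. A sketch of the call-site pattern, with a simplified stand-in for the field selection items and a stand-in function name:

```ts
// Shape returned by the new shared helper (see fetch_explain_data.ts above);
// the field selection item is trimmed down for illustration.
interface FetchExplainDataReturnType {
  success: boolean;
  expectedMemory: string;
  fieldSelection: Array<{ name: string; is_included: boolean; is_required: boolean }>;
  errorMessage: string;
}

// refreshEstimates is a stand-in for the component logic that consumes fetchExplainData(form).
async function refreshEstimates(
  explain: () => Promise<FetchExplainDataReturnType>
): Promise<{ modelMemoryLimit: string; hasRequiredFields: boolean }> {
  const { success, expectedMemory, fieldSelection, errorMessage } = await explain();

  if (!success) {
    // errorMessage has already been flattened by extractErrorMessage, so callers
    // can string-match on it (e.g. the '[max_trees]'-style hyperparameter errors).
    throw new Error(errorMessage);
  }

  // Same check the configuration step performs on the explain response.
  const hasRequiredFields = fieldSelection.some(
    (field) => field.is_included === true && field.is_required === false
  );

  return { modelMemoryLimit: expectedMemory, hasRequiredFields };
}
```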

{jobId === undefined && ( { - const { - jobIdEmpty, - jobIdValid, - jobIdExists, - jobType, - createIndexPattern, - excludes, - maxDistinctValuesError, - requiredFieldsError, - } = state.form; + const { jobIdEmpty, jobIdValid, jobIdExists, jobType, createIndexPattern, excludes } = state.form; const { jobConfig } = state; state.advancedEditorMessages = []; @@ -330,8 +321,6 @@ export const validateAdvancedEditor = (state: State): State => { state.form.destinationIndexPatternTitleExists = destinationIndexPatternTitleExists; state.isValid = - maxDistinctValuesError === undefined && - requiredFieldsError === undefined && excludesValid && trainingPercentValid && state.form.modelMemoryLimitUnitValid && @@ -396,10 +385,8 @@ const validateForm = (state: State): State => { destinationIndexPatternTitleExists, createIndexPattern, dependentVariable, - maxDistinctValuesError, modelMemoryLimit, numTopFeatureImportanceValuesValid, - requiredFieldsError, } = state.form; const { estimatedModelMemoryLimit } = state; @@ -414,8 +401,6 @@ const validateForm = (state: State): State => { state.form.modelMemoryLimitValidationResult = mmlValidationResult; state.isValid = - maxDistinctValuesError === undefined && - requiredFieldsError === undefined && !jobTypeEmpty && !mmlValidationResult && !jobIdEmpty && diff --git a/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/hooks/use_create_analytics_form/state.ts b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/hooks/use_create_analytics_form/state.ts index 8a07704e3991..241866b56c5c 100644 --- a/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/hooks/use_create_analytics_form/state.ts +++ b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/hooks/use_create_analytics_form/state.ts @@ -4,14 +4,12 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { EuiComboBoxOptionOption } from '@elastic/eui'; import { DeepPartial, DeepReadonly } from '../../../../../../../common/types/common'; import { checkPermission } from '../../../../../capabilities/check_capabilities'; import { mlNodesAvailable } from '../../../../../ml_nodes_check'; import { newJobCapsService } from '../../../../../services/new_job_capabilities_service'; import { - FieldSelectionItem, isClassificationAnalysis, isRegressionAnalysis, DataFrameAnalyticsId, @@ -52,8 +50,6 @@ export interface State { computeFeatureInfluence: string; createIndexPattern: boolean; dependentVariable: DependentVariable; - dependentVariableFetchFail: boolean; - dependentVariableOptions: EuiComboBoxOptionOption[]; description: string; destinationIndex: EsIndexName; destinationIndexNameExists: boolean; @@ -62,11 +58,8 @@ export interface State { destinationIndexPatternTitleExists: boolean; eta: undefined | number; excludes: string[]; - excludesTableItems: FieldSelectionItem[]; - excludesOptions: EuiComboBoxOptionOption[]; featureBagFraction: undefined | number; featureInfluenceThreshold: undefined | number; - fieldOptionsFetchFail: boolean; gamma: undefined | number; jobId: DataFrameAnalyticsId; jobIdExists: boolean; @@ -77,9 +70,7 @@ export interface State { jobConfigQuery: any; jobConfigQueryString: string | undefined; lambda: number | undefined; - loadingDepVarOptions: boolean; loadingFieldOptions: boolean; - maxDistinctValuesError: string | undefined; maxTrees: undefined | number; method: undefined | string; modelMemoryLimit: string | undefined; @@ -124,8 +115,6 @@ export const getInitialState = (): State => ({ computeFeatureInfluence: 'true', createIndexPattern: true, dependentVariable: '', - dependentVariableFetchFail: false, - dependentVariableOptions: [], description: '', destinationIndex: '', destinationIndexNameExists: false, @@ -136,10 +125,7 @@ export const getInitialState = (): State => ({ excludes: [], featureBagFraction: undefined, featureInfluenceThreshold: undefined, - fieldOptionsFetchFail: false, gamma: undefined, - excludesTableItems: [], - excludesOptions: [], jobId: '', jobIdExists: false, jobIdEmpty: true, @@ -149,9 +135,7 @@ export const getInitialState = (): State => ({ jobConfigQuery: { match_all: {} }, jobConfigQueryString: undefined, lambda: undefined, - loadingDepVarOptions: false, loadingFieldOptions: false, - maxDistinctValuesError: undefined, maxTrees: undefined, method: undefined, modelMemoryLimit: undefined, @@ -311,6 +295,9 @@ export const getJobConfigFromFormState = ( n_neighbors: formState.nNeighbors, }, formState.outlierFraction && { outlier_fraction: formState.outlierFraction }, + formState.featureInfluenceThreshold && { + feature_influence_threshold: formState.featureInfluenceThreshold, + }, formState.standardizationEnabled && { standardization_enabled: formState.standardizationEnabled, } diff --git a/x-pack/plugins/ml/public/application/jobs/new_job/recognize/components/create_result_callout.tsx b/x-pack/plugins/ml/public/application/jobs/new_job/recognize/components/create_result_callout.tsx index 4602ceeec905..6b2048f062f0 100644 --- a/x-pack/plugins/ml/public/application/jobs/new_job/recognize/components/create_result_callout.tsx +++ b/x-pack/plugins/ml/public/application/jobs/new_job/recognize/components/create_result_callout.tsx @@ -86,12 +86,12 @@ export const CreateResultCallout: FC = memo( fill={true} href={resultsUrl} aria-label={i18n.translate('xpack.ml.newJob.recognize.viewResultsAriaLabel', { - defaultMessage: 'View Results', + 
defaultMessage: 'View results', })} > diff --git a/x-pack/plugins/ml/public/application/overview/components/analytics_panel/analytics_panel.tsx b/x-pack/plugins/ml/public/application/overview/components/analytics_panel/analytics_panel.tsx index c379cd702dae..65e7ba9e8ab5 100644 --- a/x-pack/plugins/ml/public/application/overview/components/analytics_panel/analytics_panel.tsx +++ b/x-pack/plugins/ml/public/application/overview/components/analytics_panel/analytics_panel.tsx @@ -89,7 +89,7 @@ export const AnalyticsPanel: FC = ({ jobCreationDisabled }) => { body={

{i18n.translate('xpack.ml.overview.analyticsList.emptyPromptText', { - defaultMessage: `Data frame analytics enable you to perform different analyses of your data and annotates it with the results. The job puts the annotated data and a copy of the source data in a new index.`, + defaultMessage: `Data frame analytics enables you to perform outlier detection, regression, or classification analysis on your data and annotates it with the results. The job puts the annotated data and a copy of the source data in a new index.`, })}

} diff --git a/x-pack/plugins/monitoring/public/components/logstash/listing/listing.js b/x-pack/plugins/monitoring/public/components/logstash/listing/listing.js index 8e2c43e44ee1..78eb982a95dd 100644 --- a/x-pack/plugins/monitoring/public/components/logstash/listing/listing.js +++ b/x-pack/plugins/monitoring/public/components/logstash/listing/listing.js @@ -62,7 +62,7 @@ export class Listing extends PureComponent { return (
- + {name}
diff --git a/x-pack/plugins/monitoring/public/components/logstash/pipeline_listing/pipeline_listing.js b/x-pack/plugins/monitoring/public/components/logstash/pipeline_listing/pipeline_listing.js index 1b22bc6823bb..4cacf91913ab 100644 --- a/x-pack/plugins/monitoring/public/components/logstash/pipeline_listing/pipeline_listing.js +++ b/x-pack/plugins/monitoring/public/components/logstash/pipeline_listing/pipeline_listing.js @@ -46,7 +46,7 @@ export class PipelineListing extends Component { field: 'id', sortable: true, render: (id) => ( - + {id} ), diff --git a/x-pack/plugins/monitoring/public/directives/elasticsearch/ml_job_listing/index.js b/x-pack/plugins/monitoring/public/directives/elasticsearch/ml_job_listing/index.js index bef0fce4cd08..ec325673ddfd 100644 --- a/x-pack/plugins/monitoring/public/directives/elasticsearch/ml_job_listing/index.js +++ b/x-pack/plugins/monitoring/public/directives/elasticsearch/ml_job_listing/index.js @@ -72,7 +72,9 @@ const getColumns = () => [ render: (name, node) => { if (node) { return ( - {name} + + {name} + ); } diff --git a/x-pack/plugins/monitoring/public/directives/main/index.js b/x-pack/plugins/monitoring/public/directives/main/index.js index 97ec66c9b341..eda32cd39c0d 100644 --- a/x-pack/plugins/monitoring/public/directives/main/index.js +++ b/x-pack/plugins/monitoring/public/directives/main/index.js @@ -133,7 +133,7 @@ export class MonitoringMainController { this.pipelineHashShort = shortenPipelineHash(this.pipelineHash); this.onChangePipelineHash = () => { window.location.hash = getSafeForExternalLink( - `/logstash/pipelines/${this.pipelineId}/${this.pipelineHash}` + `#/logstash/pipelines/${this.pipelineId}/${this.pipelineHash}` ); }; } diff --git a/x-pack/plugins/security_solution/common/detection_engine/lists_common_deps.ts b/x-pack/plugins/security_solution/common/detection_engine/lists_common_deps.ts new file mode 100644 index 000000000000..a8b177f587a4 --- /dev/null +++ b/x-pack/plugins/security_solution/common/detection_engine/lists_common_deps.ts @@ -0,0 +1,7 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +export { EntriesArray, namespaceType } from '../../../lists/common/schemas'; diff --git a/x-pack/plugins/security_solution/common/detection_engine/schemas/common/schemas.ts b/x-pack/plugins/security_solution/common/detection_engine/schemas/common/schemas.ts index 0c7bcdefd360..f6b732cd1f64 100644 --- a/x-pack/plugins/security_solution/common/detection_engine/schemas/common/schemas.ts +++ b/x-pack/plugins/security_solution/common/detection_engine/schemas/common/schemas.ts @@ -341,40 +341,3 @@ export type Note = t.TypeOf; export const noteOrUndefined = t.union([note, t.undefined]); export type NoteOrUndefined = t.TypeOf; - -// NOTE: Experimental list support not being shipped currently and behind a feature flag -// TODO: Remove this comment once we lists have passed testing and is ready for the release -export const list_field = t.string; -export const list_values_operator = t.keyof({ included: null, excluded: null }); -export const list_values_type = t.keyof({ match: null, match_all: null, list: null, exists: null }); -export const list_values = t.exact( - t.intersection([ - t.type({ - name: t.string, - }), - t.partial({ - id: t.string, - description: t.string, - created_at, - }), - ]) -); -export const list = t.exact( - t.intersection([ - t.type({ - field: t.string, - values_operator: list_values_operator, - values_type: list_values_type, - }), - t.partial({ values: t.array(list_values) }), - ]) -); -export const list_and = t.intersection([ - list, - t.partial({ - and: t.array(list), - }), -]); - -export const listAndOrUndefined = t.union([t.array(list_and), t.undefined]); -export type ListAndOrUndefined = t.TypeOf; diff --git a/x-pack/plugins/security_solution/common/detection_engine/schemas/request/add_prepackaged_rules_schema.ts b/x-pack/plugins/security_solution/common/detection_engine/schemas/request/add_prepackaged_rules_schema.ts index 3e7e7e5409c9..43000f6d36f4 100644 --- a/x-pack/plugins/security_solution/common/detection_engine/schemas/request/add_prepackaged_rules_schema.ts +++ b/x-pack/plugins/security_solution/common/detection_engine/schemas/request/add_prepackaged_rules_schema.ts @@ -40,16 +40,19 @@ import { } from '../common/schemas'; /* eslint-enable @typescript-eslint/camelcase */ -import { DefaultStringArray } from '../types/default_string_array'; -import { DefaultActionsArray } from '../types/default_actions_array'; -import { DefaultBooleanFalse } from '../types/default_boolean_false'; -import { DefaultFromString } from '../types/default_from_string'; -import { DefaultIntervalString } from '../types/default_interval_string'; -import { DefaultMaxSignalsNumber } from '../types/default_max_signals_number'; -import { DefaultToString } from '../types/default_to_string'; -import { DefaultThreatArray } from '../types/default_threat_array'; -import { DefaultThrottleNull } from '../types/default_throttle_null'; -import { ListsDefaultArray, ListsDefaultArraySchema } from '../types/lists_default_array'; +import { + DefaultStringArray, + DefaultActionsArray, + DefaultBooleanFalse, + DefaultFromString, + DefaultIntervalString, + DefaultMaxSignalsNumber, + DefaultToString, + DefaultThreatArray, + DefaultThrottleNull, + DefaultListArray, + ListArray, +} from '../types'; /** * Big differences between this schema and the createRulesSchema @@ -96,7 +99,7 @@ export const addPrepackagedRulesSchema = t.intersection([ throttle: DefaultThrottleNull, // defaults to "null" if not set during decode references: DefaultStringArray, // defaults to empty array of strings if not set during 
decode note, // defaults to "undefined" if not set during decode - exceptions_list: ListsDefaultArray, // defaults to empty array if not set during decode + exceptions_list: DefaultListArray, // defaults to empty array if not set during decode }) ), ]); @@ -130,5 +133,5 @@ export type AddPrepackagedRulesSchemaDecoded = Omit< to: To; threat: Threat; throttle: ThrottleOrNull; - exceptions_list: ListsDefaultArraySchema; + exceptions_list: ListArray; }; diff --git a/x-pack/plugins/security_solution/common/detection_engine/schemas/request/add_prepackged_rules_schema.test.ts b/x-pack/plugins/security_solution/common/detection_engine/schemas/request/add_prepackged_rules_schema.test.ts index f946b3ad3b39..47a98166927b 100644 --- a/x-pack/plugins/security_solution/common/detection_engine/schemas/request/add_prepackged_rules_schema.test.ts +++ b/x-pack/plugins/security_solution/common/detection_engine/schemas/request/add_prepackged_rules_schema.test.ts @@ -19,6 +19,7 @@ import { getAddPrepackagedRulesSchemaDecodedMock, } from './add_prepackaged_rules_schema.mock'; import { DEFAULT_MAX_SIGNALS } from '../../../constants'; +import { getListArrayMock } from '../types/lists.mock'; describe('add prepackaged rules schema', () => { test('empty objects do not validate', () => { @@ -1379,14 +1380,189 @@ describe('add prepackaged rules schema', () => { }); }); - // TODO: The exception_list tests are skipped and empty until we re-integrate it from the lists plugin - describe.skip('exception_list', () => { - test('[rule_id, description, from, to, index, name, severity, interval, type, filter, risk_score, note, and exceptions_list] does validate', () => {}); + describe('exception_list', () => { + test('[rule_id, description, from, to, index, name, severity, interval, type, filters, risk_score, note, version, and exceptions_list] does validate', () => { + const payload: AddPrepackagedRulesSchema = { + rule_id: 'rule-1', + description: 'some description', + from: 'now-5m', + to: 'now', + index: ['index-1'], + name: 'some-name', + severity: 'low', + interval: '5m', + type: 'query', + filters: [], + risk_score: 50, + note: '# some markdown', + version: 1, + exceptions_list: getListArrayMock(), + }; - test('[rule_id, description, from, to, index, name, severity, interval, type, filter, risk_score, note, and empty exceptions_list] does validate', () => {}); + const decoded = addPrepackagedRulesSchema.decode(payload); + const checked = exactCheck(payload, decoded); + const message = pipe(checked, foldLeftRight); + expect(getPaths(left(message.errors))).toEqual([]); + const expected: AddPrepackagedRulesSchemaDecoded = { + rule_id: 'rule-1', + description: 'some description', + from: 'now-5m', + to: 'now', + index: ['index-1'], + name: 'some-name', + severity: 'low', + interval: '5m', + type: 'query', + risk_score: 50, + note: '# some markdown', + references: [], + actions: [], + enabled: false, + false_positives: [], + max_signals: DEFAULT_MAX_SIGNALS, + tags: [], + threat: [], + throttle: null, + version: 1, + filters: [], + exceptions_list: [ + { + id: 'some_uuid', + namespace_type: 'single', + }, + { + id: 'some_uuid', + namespace_type: 'agnostic', + }, + ], + }; + expect(message.schema).toEqual(expected); + }); - test('rule_id, description, from, to, index, name, severity, interval, type, filter, risk_score, note, and invalid exceptions_list] does NOT validate', () => {}); + test('[rule_id, description, from, to, index, name, severity, interval, type, filter, risk_score, note, version, and empty exceptions_list] 
does validate', () => { + const payload: AddPrepackagedRulesSchema = { + rule_id: 'rule-1', + description: 'some description', + from: 'now-5m', + to: 'now', + index: ['index-1'], + name: 'some-name', + severity: 'low', + interval: '5m', + type: 'query', + filters: [], + risk_score: 50, + version: 1, + note: '# some markdown', + exceptions_list: [], + }; - test('[rule_id, description, from, to, index, name, severity, interval, type, filter, risk_score, note, and non-existent exceptions_list] does validate with empty exceptions_list', () => {}); + const decoded = addPrepackagedRulesSchema.decode(payload); + const checked = exactCheck(payload, decoded); + const message = pipe(checked, foldLeftRight); + expect(getPaths(left(message.errors))).toEqual([]); + const expected: AddPrepackagedRulesSchemaDecoded = { + rule_id: 'rule-1', + description: 'some description', + from: 'now-5m', + to: 'now', + index: ['index-1'], + name: 'some-name', + severity: 'low', + interval: '5m', + type: 'query', + risk_score: 50, + note: '# some markdown', + references: [], + actions: [], + enabled: false, + false_positives: [], + max_signals: DEFAULT_MAX_SIGNALS, + tags: [], + threat: [], + throttle: null, + version: 1, + filters: [], + exceptions_list: [], + }; + expect(message.schema).toEqual(expected); + }); + + test('rule_id, description, from, to, index, name, severity, interval, type, filters, risk_score, note, version, and invalid exceptions_list] does NOT validate', () => { + const payload: Omit & { + exceptions_list: Array<{ id: string; namespace_type: string }>; + } = { + rule_id: 'rule-1', + description: 'some description', + from: 'now-5m', + to: 'now', + index: ['index-1'], + name: 'some-name', + severity: 'low', + interval: '5m', + type: 'query', + filters: [], + risk_score: 50, + version: 1, + note: '# some markdown', + exceptions_list: [{ id: 'uuid_here', namespace_type: 'not a namespace type' }], + }; + + const decoded = addPrepackagedRulesSchema.decode(payload); + const checked = exactCheck(payload, decoded); + const message = pipe(checked, foldLeftRight); + expect(getPaths(left(message.errors))).toEqual([ + 'Invalid value "not a namespace type" supplied to "exceptions_list,namespace_type"', + ]); + expect(message.schema).toEqual({}); + }); + + test('[rule_id, description, from, to, index, name, severity, interval, type, filters, risk_score, note, version, and non-existent exceptions_list] does validate with empty exceptions_list', () => { + const payload: AddPrepackagedRulesSchema = { + rule_id: 'rule-1', + description: 'some description', + from: 'now-5m', + to: 'now', + index: ['index-1'], + name: 'some-name', + severity: 'low', + interval: '5m', + type: 'query', + filters: [], + risk_score: 50, + version: 1, + note: '# some markdown', + }; + + const decoded = addPrepackagedRulesSchema.decode(payload); + const checked = exactCheck(payload, decoded); + const message = pipe(checked, foldLeftRight); + expect(getPaths(left(message.errors))).toEqual([]); + const expected: AddPrepackagedRulesSchemaDecoded = { + rule_id: 'rule-1', + description: 'some description', + from: 'now-5m', + to: 'now', + index: ['index-1'], + name: 'some-name', + severity: 'low', + interval: '5m', + type: 'query', + risk_score: 50, + note: '# some markdown', + references: [], + actions: [], + enabled: false, + false_positives: [], + max_signals: DEFAULT_MAX_SIGNALS, + tags: [], + threat: [], + throttle: null, + version: 1, + exceptions_list: [], + filters: [], + }; + expect(message.schema).toEqual(expected); + }); }); }); 
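Note on the new `exceptions_list` shape exercised by the tests above and below: the hunks swap the old inline list schema for the `list`/`ListArray` codecs and the `DefaultListArray` default. The following is a minimal, self-contained sketch of that decode behaviour, not the plugin's actual module wiring; `namespaceType` is stubbed inline here on the assumption that it is the `"agnostic" | "single"` keyof codec re-exported from the lists plugin via `lists_common_deps.ts`.

```ts
import * as t from 'io-ts';
import { Either, isRight } from 'fp-ts/lib/Either';

// Assumption: mirrors the namespaceType codec re-exported from the lists plugin.
const namespaceType = t.keyof({ agnostic: null, single: null });

// Shape of a single exceptions_list reference, as in schemas/types/lists.ts.
const list = t.exact(
  t.type({
    id: t.string,
    namespace_type: namespaceType,
  })
);
type ListArray = Array<t.TypeOf<typeof list>>;

// Same pattern as DefaultListArray: null/undefined decodes to an empty array.
const DefaultListArray = new t.Type<ListArray, ListArray, unknown>(
  'DefaultListArray',
  t.array(list).is,
  (input, context): Either<t.Errors, ListArray> =>
    input == null ? t.success([]) : t.array(list).validate(input, context),
  t.identity
);

// An omitted exceptions_list decodes to [].
console.log(isRight(DefaultListArray.decode(undefined))); // true, value []

// An unknown namespace_type is rejected, matching the
// 'Invalid value "..." supplied to "exceptions_list,namespace_type"' assertions.
console.log(
  isRight(DefaultListArray.decode([{ id: 'some_uuid', namespace_type: 'bogus' }]))
); // false
```

The same null-coalescing pattern is what lets the request schemas below default a missing `exceptions_list` to `[]` during decode while still rejecting malformed entries.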
diff --git a/x-pack/plugins/security_solution/common/detection_engine/schemas/request/create_rules_schema.test.ts b/x-pack/plugins/security_solution/common/detection_engine/schemas/request/create_rules_schema.test.ts index a126b833ba46..1648044f5305 100644 --- a/x-pack/plugins/security_solution/common/detection_engine/schemas/request/create_rules_schema.test.ts +++ b/x-pack/plugins/security_solution/common/detection_engine/schemas/request/create_rules_schema.test.ts @@ -18,6 +18,7 @@ import { getCreateRulesSchemaDecodedMock, } from './create_rules_schema.mock'; import { DEFAULT_MAX_SIGNALS } from '../../../constants'; +import { getListArrayMock } from '../types/lists.mock'; describe('create rules schema', () => { test('empty objects do not validate', () => { @@ -1435,14 +1436,185 @@ describe('create rules schema', () => { ); }); - // TODO: The exception_list tests are skipped and empty until we re-integrate it from the lists plugin - describe.skip('exception_list', () => { - test('[rule_id, description, from, to, index, name, severity, interval, type, filter, risk_score, note, and exceptions_list] does validate', () => {}); + describe('exception_list', () => { + test('[rule_id, description, from, to, index, name, severity, interval, type, filters, risk_score, note, and exceptions_list] does validate', () => { + const payload: CreateRulesSchema = { + rule_id: 'rule-1', + description: 'some description', + from: 'now-5m', + to: 'now', + index: ['index-1'], + name: 'some-name', + severity: 'low', + interval: '5m', + type: 'query', + filters: [], + risk_score: 50, + note: '# some markdown', + exceptions_list: getListArrayMock(), + }; - test('[rule_id, description, from, to, index, name, severity, interval, type, filter, risk_score, note, and empty exceptions_list] does validate', () => {}); + const decoded = createRulesSchema.decode(payload); + const checked = exactCheck(payload, decoded); + const message = pipe(checked, foldLeftRight); + expect(getPaths(left(message.errors))).toEqual([]); + const expected: CreateRulesSchemaDecoded = { + rule_id: 'rule-1', + description: 'some description', + from: 'now-5m', + to: 'now', + index: ['index-1'], + name: 'some-name', + severity: 'low', + interval: '5m', + type: 'query', + risk_score: 50, + note: '# some markdown', + references: [], + actions: [], + enabled: true, + false_positives: [], + max_signals: DEFAULT_MAX_SIGNALS, + tags: [], + threat: [], + throttle: null, + version: 1, + filters: [], + exceptions_list: [ + { + id: 'some_uuid', + namespace_type: 'single', + }, + { + id: 'some_uuid', + namespace_type: 'agnostic', + }, + ], + }; + expect(message.schema).toEqual(expected); + }); - test('rule_id, description, from, to, index, name, severity, interval, type, filter, risk_score, note, and invalid exceptions_list] does NOT validate', () => {}); + test('[rule_id, description, from, to, index, name, severity, interval, type, filter, risk_score, note, and empty exceptions_list] does validate', () => { + const payload: CreateRulesSchema = { + rule_id: 'rule-1', + description: 'some description', + from: 'now-5m', + to: 'now', + index: ['index-1'], + name: 'some-name', + severity: 'low', + interval: '5m', + type: 'query', + filters: [], + risk_score: 50, + note: '# some markdown', + exceptions_list: [], + }; - test('[rule_id, description, from, to, index, name, severity, interval, type, filter, risk_score, note, and non-existent exceptions_list] does validate with empty exceptions_list', () => {}); + const decoded = createRulesSchema.decode(payload); 
+ const checked = exactCheck(payload, decoded); + const message = pipe(checked, foldLeftRight); + expect(getPaths(left(message.errors))).toEqual([]); + const expected: CreateRulesSchemaDecoded = { + rule_id: 'rule-1', + description: 'some description', + from: 'now-5m', + to: 'now', + index: ['index-1'], + name: 'some-name', + severity: 'low', + interval: '5m', + type: 'query', + risk_score: 50, + note: '# some markdown', + references: [], + actions: [], + enabled: true, + false_positives: [], + max_signals: DEFAULT_MAX_SIGNALS, + tags: [], + threat: [], + throttle: null, + version: 1, + filters: [], + exceptions_list: [], + }; + expect(message.schema).toEqual(expected); + }); + + test('rule_id, description, from, to, index, name, severity, interval, type, filters, risk_score, note, and invalid exceptions_list] does NOT validate', () => { + const payload: Omit & { + exceptions_list: Array<{ id: string; namespace_type: string }>; + } = { + rule_id: 'rule-1', + description: 'some description', + from: 'now-5m', + to: 'now', + index: ['index-1'], + name: 'some-name', + severity: 'low', + interval: '5m', + type: 'query', + filters: [], + risk_score: 50, + note: '# some markdown', + exceptions_list: [{ id: 'uuid_here', namespace_type: 'not a namespace type' }], + }; + + const decoded = createRulesSchema.decode(payload); + const checked = exactCheck(payload, decoded); + const message = pipe(checked, foldLeftRight); + expect(getPaths(left(message.errors))).toEqual([ + 'Invalid value "not a namespace type" supplied to "exceptions_list,namespace_type"', + ]); + expect(message.schema).toEqual({}); + }); + + test('[rule_id, description, from, to, index, name, severity, interval, type, filters, risk_score, note, and non-existent exceptions_list] does validate with empty exceptions_list', () => { + const payload: CreateRulesSchema = { + rule_id: 'rule-1', + description: 'some description', + from: 'now-5m', + to: 'now', + index: ['index-1'], + name: 'some-name', + severity: 'low', + interval: '5m', + type: 'query', + filters: [], + risk_score: 50, + note: '# some markdown', + }; + + const decoded = createRulesSchema.decode(payload); + const checked = exactCheck(payload, decoded); + const message = pipe(checked, foldLeftRight); + expect(getPaths(left(message.errors))).toEqual([]); + const expected: CreateRulesSchemaDecoded = { + rule_id: 'rule-1', + description: 'some description', + from: 'now-5m', + to: 'now', + index: ['index-1'], + name: 'some-name', + severity: 'low', + interval: '5m', + type: 'query', + risk_score: 50, + note: '# some markdown', + references: [], + actions: [], + enabled: true, + false_positives: [], + max_signals: DEFAULT_MAX_SIGNALS, + tags: [], + threat: [], + throttle: null, + version: 1, + exceptions_list: [], + filters: [], + }; + expect(message.schema).toEqual(expected); + }); }); }); diff --git a/x-pack/plugins/security_solution/common/detection_engine/schemas/request/create_rules_schema.ts b/x-pack/plugins/security_solution/common/detection_engine/schemas/request/create_rules_schema.ts index 4e60201b8030..d623cff8f1fc 100644 --- a/x-pack/plugins/security_solution/common/detection_engine/schemas/request/create_rules_schema.ts +++ b/x-pack/plugins/security_solution/common/detection_engine/schemas/request/create_rules_schema.ts @@ -41,18 +41,21 @@ import { } from '../common/schemas'; /* eslint-enable @typescript-eslint/camelcase */ -import { DefaultStringArray } from '../types/default_string_array'; -import { DefaultActionsArray } from '../types/default_actions_array'; 
-import { DefaultBooleanTrue } from '../types/default_boolean_true'; -import { DefaultFromString } from '../types/default_from_string'; -import { DefaultIntervalString } from '../types/default_interval_string'; -import { DefaultMaxSignalsNumber } from '../types/default_max_signals_number'; -import { DefaultToString } from '../types/default_to_string'; -import { DefaultThreatArray } from '../types/default_threat_array'; -import { DefaultThrottleNull } from '../types/default_throttle_null'; -import { DefaultVersionNumber } from '../types/default_version_number'; -import { ListsDefaultArray, ListsDefaultArraySchema } from '../types/lists_default_array'; -import { DefaultUuid } from '../types/default_uuid'; +import { + DefaultStringArray, + DefaultActionsArray, + DefaultBooleanTrue, + DefaultFromString, + DefaultIntervalString, + DefaultMaxSignalsNumber, + DefaultToString, + DefaultThreatArray, + DefaultThrottleNull, + DefaultVersionNumber, + DefaultListArray, + ListArray, + DefaultUuid, +} from '../types'; export const createRulesSchema = t.intersection([ t.exact( @@ -92,7 +95,7 @@ export const createRulesSchema = t.intersection([ references: DefaultStringArray, // defaults to empty array of strings if not set during decode note, // defaults to "undefined" if not set during decode version: DefaultVersionNumber, // defaults to 1 if not set during decode - exceptions_list: ListsDefaultArray, // defaults to empty array if not set during decode + exceptions_list: DefaultListArray, // defaults to empty array if not set during decode }) ), ]); @@ -129,6 +132,6 @@ export type CreateRulesSchemaDecoded = Omit< threat: Threat; throttle: ThrottleOrNull; version: Version; - exceptions_list: ListsDefaultArraySchema; + exceptions_list: ListArray; rule_id: RuleId; }; diff --git a/x-pack/plugins/security_solution/common/detection_engine/schemas/request/import_rules_schema.test.ts b/x-pack/plugins/security_solution/common/detection_engine/schemas/request/import_rules_schema.test.ts index 9fe3e95a2062..12a13ab1a5ed 100644 --- a/x-pack/plugins/security_solution/common/detection_engine/schemas/request/import_rules_schema.test.ts +++ b/x-pack/plugins/security_solution/common/detection_engine/schemas/request/import_rules_schema.test.ts @@ -22,6 +22,7 @@ import { getImportRulesSchemaDecodedMock, } from './import_rules_schema.mock'; import { DEFAULT_MAX_SIGNALS } from '../../../constants'; +import { getListArrayMock } from '../types/lists.mock'; describe('import rules schema', () => { test('empty objects do not validate', () => { @@ -1569,14 +1570,188 @@ describe('import rules schema', () => { }); }); - // TODO: The exception_list tests are skipped and empty until we re-integrate it from the lists plugin - describe.skip('exception_list', () => { - test('[rule_id, description, from, to, index, name, severity, interval, type, filter, risk_score, note, and exceptions_list] does validate', () => {}); + describe('exception_list', () => { + test('[rule_id, description, from, to, index, name, severity, interval, type, filters, risk_score, note, and exceptions_list] does validate', () => { + const payload: ImportRulesSchema = { + rule_id: 'rule-1', + description: 'some description', + from: 'now-5m', + to: 'now', + index: ['index-1'], + name: 'some-name', + severity: 'low', + interval: '5m', + type: 'query', + filters: [], + risk_score: 50, + note: '# some markdown', + exceptions_list: getListArrayMock(), + }; + + const decoded = importRulesSchema.decode(payload); + const checked = exactCheck(payload, decoded); + const 
message = pipe(checked, foldLeftRight); + expect(getPaths(left(message.errors))).toEqual([]); + const expected: ImportRulesSchemaDecoded = { + rule_id: 'rule-1', + description: 'some description', + from: 'now-5m', + to: 'now', + index: ['index-1'], + name: 'some-name', + severity: 'low', + interval: '5m', + type: 'query', + risk_score: 50, + note: '# some markdown', + references: [], + actions: [], + enabled: true, + false_positives: [], + max_signals: DEFAULT_MAX_SIGNALS, + tags: [], + threat: [], + throttle: null, + version: 1, + filters: [], + immutable: false, + exceptions_list: [ + { + id: 'some_uuid', + namespace_type: 'single', + }, + { + id: 'some_uuid', + namespace_type: 'agnostic', + }, + ], + }; + expect(message.schema).toEqual(expected); + }); + + test('[rule_id, description, from, to, index, name, severity, interval, type, filter, risk_score, note, and empty exceptions_list] does validate', () => { + const payload: ImportRulesSchema = { + rule_id: 'rule-1', + description: 'some description', + from: 'now-5m', + to: 'now', + index: ['index-1'], + name: 'some-name', + severity: 'low', + interval: '5m', + type: 'query', + filters: [], + risk_score: 50, + note: '# some markdown', + exceptions_list: [], + }; + + const decoded = importRulesSchema.decode(payload); + const checked = exactCheck(payload, decoded); + const message = pipe(checked, foldLeftRight); + expect(getPaths(left(message.errors))).toEqual([]); + const expected: ImportRulesSchemaDecoded = { + rule_id: 'rule-1', + description: 'some description', + from: 'now-5m', + to: 'now', + index: ['index-1'], + name: 'some-name', + severity: 'low', + interval: '5m', + type: 'query', + risk_score: 50, + note: '# some markdown', + references: [], + actions: [], + enabled: true, + false_positives: [], + max_signals: DEFAULT_MAX_SIGNALS, + tags: [], + threat: [], + throttle: null, + version: 1, + immutable: false, + filters: [], + exceptions_list: [], + }; + expect(message.schema).toEqual(expected); + }); - test('[rule_id, description, from, to, index, name, severity, interval, type, filter, risk_score, note, and empty exceptions_list] does validate', () => {}); + test('rule_id, description, from, to, index, name, severity, interval, type, filters, risk_score, note, and invalid exceptions_list] does NOT validate', () => { + const payload: Omit & { + exceptions_list: Array<{ id: string; namespace_type: string }>; + } = { + rule_id: 'rule-1', + description: 'some description', + from: 'now-5m', + to: 'now', + index: ['index-1'], + name: 'some-name', + severity: 'low', + interval: '5m', + type: 'query', + filters: [], + risk_score: 50, + note: '# some markdown', + exceptions_list: [{ id: 'uuid_here', namespace_type: 'not a namespace type' }], + }; - test('rule_id, description, from, to, index, name, severity, interval, type, filter, risk_score, note, and invalid exceptions_list] does NOT validate', () => {}); + const decoded = importRulesSchema.decode(payload); + const checked = exactCheck(payload, decoded); + const message = pipe(checked, foldLeftRight); + expect(getPaths(left(message.errors))).toEqual([ + 'Invalid value "not a namespace type" supplied to "exceptions_list,namespace_type"', + ]); + expect(message.schema).toEqual({}); + }); - test('[rule_id, description, from, to, index, name, severity, interval, type, filter, risk_score, note, and non-existent exceptions_list] does validate with empty exceptions_list', () => {}); + test('[rule_id, description, from, to, index, name, severity, interval, type, filters, risk_score, note, 
and non-existent exceptions_list] does validate with empty exceptions_list', () => { + const payload: ImportRulesSchema = { + rule_id: 'rule-1', + description: 'some description', + from: 'now-5m', + to: 'now', + index: ['index-1'], + name: 'some-name', + severity: 'low', + interval: '5m', + type: 'query', + filters: [], + risk_score: 50, + note: '# some markdown', + }; + + const decoded = importRulesSchema.decode(payload); + const checked = exactCheck(payload, decoded); + const message = pipe(checked, foldLeftRight); + expect(getPaths(left(message.errors))).toEqual([]); + const expected: ImportRulesSchemaDecoded = { + rule_id: 'rule-1', + description: 'some description', + from: 'now-5m', + to: 'now', + index: ['index-1'], + name: 'some-name', + severity: 'low', + interval: '5m', + type: 'query', + risk_score: 50, + note: '# some markdown', + references: [], + actions: [], + enabled: true, + false_positives: [], + max_signals: DEFAULT_MAX_SIGNALS, + tags: [], + threat: [], + throttle: null, + version: 1, + immutable: false, + exceptions_list: [], + filters: [], + }; + expect(message.schema).toEqual(expected); + }); }); }); diff --git a/x-pack/plugins/security_solution/common/detection_engine/schemas/request/import_rules_schema.ts b/x-pack/plugins/security_solution/common/detection_engine/schemas/request/import_rules_schema.ts index a2110263e8e5..7d79861aacf3 100644 --- a/x-pack/plugins/security_solution/common/detection_engine/schemas/request/import_rules_schema.ts +++ b/x-pack/plugins/security_solution/common/detection_engine/schemas/request/import_rules_schema.ts @@ -47,19 +47,22 @@ import { } from '../common/schemas'; /* eslint-enable @typescript-eslint/camelcase */ -import { DefaultStringArray } from '../types/default_string_array'; -import { DefaultActionsArray } from '../types/default_actions_array'; -import { DefaultBooleanTrue } from '../types/default_boolean_true'; -import { DefaultFromString } from '../types/default_from_string'; -import { DefaultIntervalString } from '../types/default_interval_string'; -import { DefaultMaxSignalsNumber } from '../types/default_max_signals_number'; -import { DefaultToString } from '../types/default_to_string'; -import { DefaultThreatArray } from '../types/default_threat_array'; -import { DefaultThrottleNull } from '../types/default_throttle_null'; -import { DefaultVersionNumber } from '../types/default_version_number'; -import { ListsDefaultArray, ListsDefaultArraySchema } from '../types/lists_default_array'; -import { OnlyFalseAllowed } from '../types/only_false_allowed'; -import { DefaultStringBooleanFalse } from '../types/default_string_boolean_false'; +import { + DefaultStringArray, + DefaultActionsArray, + DefaultBooleanTrue, + DefaultFromString, + DefaultIntervalString, + DefaultMaxSignalsNumber, + DefaultToString, + DefaultThreatArray, + DefaultThrottleNull, + DefaultVersionNumber, + OnlyFalseAllowed, + DefaultStringBooleanFalse, + DefaultListArray, + ListArray, +} from '../types'; /** * Differences from this and the createRulesSchema are @@ -111,7 +114,7 @@ export const importRulesSchema = t.intersection([ references: DefaultStringArray, // defaults to empty array of strings if not set during decode note, // defaults to "undefined" if not set during decode version: DefaultVersionNumber, // defaults to 1 if not set during decode - exceptions_list: ListsDefaultArray, // defaults to empty array if not set during decode + exceptions_list: DefaultListArray, // defaults to empty array if not set during decode created_at, // defaults 
"undefined" if not set during decode updated_at, // defaults "undefined" if not set during decode created_by, // defaults "undefined" if not set during decode @@ -153,7 +156,7 @@ export type ImportRulesSchemaDecoded = Omit< threat: Threat; throttle: ThrottleOrNull; version: Version; - exceptions_list: ListsDefaultArraySchema; + exceptions_list: ListArray; rule_id: RuleId; immutable: false; }; diff --git a/x-pack/plugins/security_solution/common/detection_engine/schemas/request/patch_rules_schema.test.ts b/x-pack/plugins/security_solution/common/detection_engine/schemas/request/patch_rules_schema.test.ts index 55363ffb1830..81a17df43daf 100644 --- a/x-pack/plugins/security_solution/common/detection_engine/schemas/request/patch_rules_schema.test.ts +++ b/x-pack/plugins/security_solution/common/detection_engine/schemas/request/patch_rules_schema.test.ts @@ -10,6 +10,7 @@ import { exactCheck } from '../../../exact_check'; import { pipe } from 'fp-ts/lib/pipeable'; import { foldLeftRight, getPaths } from '../../../test_utils'; import { left } from 'fp-ts/lib/Either'; +import { getListArrayMock } from '../types/lists.mock'; describe('patch_rules_schema', () => { test('made up values do not validate', () => { @@ -1139,14 +1140,156 @@ describe('patch_rules_schema', () => { expect(message.schema).toEqual({}); }); - // TODO: The exception_list tests are skipped and empty until we re-integrate it from the lists plugin - describe.skip('exception_list', () => { - test('[rule_id, description, from, to, index, name, severity, interval, type, filter, risk_score, note, and exceptions_list] does validate', () => {}); + describe('exception_list', () => { + test('[rule_id, description, from, to, index, name, severity, interval, type, filters, note, and exceptions_list] does validate', () => { + const payload: PatchRulesSchema = { + rule_id: 'rule-1', + description: 'some description', + from: 'now-5m', + to: 'now', + index: ['index-1'], + name: 'some-name', + severity: 'low', + interval: '5m', + type: 'query', + filters: [], + note: '# some documentation markdown', + exceptions_list: getListArrayMock(), + }; + + const decoded = patchRulesSchema.decode(payload); + const checked = exactCheck(payload, decoded); + const message = pipe(checked, foldLeftRight); + expect(getPaths(left(message.errors))).toEqual([]); + const expected: PatchRulesSchemaDecoded = { + rule_id: 'rule-1', + description: 'some description', + from: 'now-5m', + to: 'now', + index: ['index-1'], + name: 'some-name', + severity: 'low', + interval: '5m', + type: 'query', + filters: [], + note: '# some documentation markdown', + exceptions_list: [ + { + id: 'some_uuid', + namespace_type: 'single', + }, + { + id: 'some_uuid', + namespace_type: 'agnostic', + }, + ], + }; + expect(message.schema).toEqual(expected); + }); + + test('[rule_id, description, from, to, index, name, severity, interval, type, filter, risk_score, note, and empty exceptions_list] does validate', () => { + const payload: PatchRulesSchema = { + rule_id: 'rule-1', + description: 'some description', + from: 'now-5m', + to: 'now', + index: ['index-1'], + name: 'some-name', + severity: 'low', + interval: '5m', + type: 'query', + filters: [], + risk_score: 50, + note: '# some markdown', + exceptions_list: [], + }; + + const decoded = patchRulesSchema.decode(payload); + const checked = exactCheck(payload, decoded); + const message = pipe(checked, foldLeftRight); + expect(getPaths(left(message.errors))).toEqual([]); + const expected: PatchRulesSchemaDecoded = { + rule_id: 'rule-1', + 
description: 'some description', + from: 'now-5m', + to: 'now', + index: ['index-1'], + name: 'some-name', + severity: 'low', + interval: '5m', + type: 'query', + filters: [], + risk_score: 50, + note: '# some markdown', + exceptions_list: [], + }; + expect(message.schema).toEqual(expected); + }); + + test('rule_id, description, from, to, index, name, severity, interval, type, filters, risk_score, note, and invalid exceptions_list] does NOT validate', () => { + const payload: Omit & { + exceptions_list: Array<{ id: string; namespace_type: string }>; + } = { + rule_id: 'rule-1', + description: 'some description', + from: 'now-5m', + to: 'now', + index: ['index-1'], + name: 'some-name', + severity: 'low', + interval: '5m', + type: 'query', + filters: [], + risk_score: 50, + note: '# some markdown', + exceptions_list: [{ id: 'uuid_here', namespace_type: 'not a namespace type' }], + }; - test('[rule_id, description, from, to, index, name, severity, interval, type, filter, risk_score, note, and empty exceptions_list] does validate', () => {}); + const decoded = patchRulesSchema.decode(payload); + const checked = exactCheck(payload, decoded); + const message = pipe(checked, foldLeftRight); + expect(getPaths(left(message.errors))).toEqual([ + 'Invalid value "not a namespace type" supplied to "exceptions_list,namespace_type"', + 'Invalid value "[{"id":"uuid_here","namespace_type":"not a namespace type"}]" supplied to "exceptions_list"', + ]); + expect(message.schema).toEqual({}); + }); - test('rule_id, description, from, to, index, name, severity, interval, type, filter, risk_score, note, and invalid exceptions_list] does NOT validate', () => {}); + test('[rule_id, description, from, to, index, name, severity, interval, type, filters, risk_score, note, and non-existent exceptions_list] does validate with empty exceptions_list', () => { + const payload: PatchRulesSchema = { + rule_id: 'rule-1', + description: 'some description', + from: 'now-5m', + to: 'now', + index: ['index-1'], + name: 'some-name', + severity: 'low', + interval: '5m', + type: 'query', + filters: [], + risk_score: 50, + note: '# some markdown', + }; - test('[rule_id, description, from, to, index, name, severity, interval, type, filter, risk_score, note, and non-existent exceptions_list] does validate with empty exceptions_list', () => {}); + const decoded = patchRulesSchema.decode(payload); + const checked = exactCheck(payload, decoded); + const message = pipe(checked, foldLeftRight); + expect(getPaths(left(message.errors))).toEqual([]); + const expected: PatchRulesSchemaDecoded = { + rule_id: 'rule-1', + description: 'some description', + from: 'now-5m', + to: 'now', + index: ['index-1'], + name: 'some-name', + severity: 'low', + interval: '5m', + type: 'query', + filters: [], + risk_score: 50, + note: '# some markdown', + }; + expect(message.schema).toEqual(expected); + }); }); }); diff --git a/x-pack/plugins/security_solution/common/detection_engine/schemas/request/patch_rules_schema.ts b/x-pack/plugins/security_solution/common/detection_engine/schemas/request/patch_rules_schema.ts index 605e0272bbb4..29d5467071a3 100644 --- a/x-pack/plugins/security_solution/common/detection_engine/schemas/request/patch_rules_schema.ts +++ b/x-pack/plugins/security_solution/common/detection_engine/schemas/request/patch_rules_schema.ts @@ -37,10 +37,10 @@ import { references, to, language, - listAndOrUndefined, query, id, } from '../common/schemas'; +import { listArrayOrUndefined } from '../types/lists'; /* eslint-enable 
@typescript-eslint/camelcase */ /** @@ -80,7 +80,7 @@ export const patchRulesSchema = t.exact( references, note, version, - exceptions_list: listAndOrUndefined, + exceptions_list: listArrayOrUndefined, }) ); diff --git a/x-pack/plugins/security_solution/common/detection_engine/schemas/request/update_rules_schema.test.ts b/x-pack/plugins/security_solution/common/detection_engine/schemas/request/update_rules_schema.test.ts index 1ff38f1351f5..02f8e7bbeb59 100644 --- a/x-pack/plugins/security_solution/common/detection_engine/schemas/request/update_rules_schema.test.ts +++ b/x-pack/plugins/security_solution/common/detection_engine/schemas/request/update_rules_schema.test.ts @@ -18,6 +18,7 @@ import { getUpdateRulesSchemaDecodedMock, } from './update_rules_schema.mock'; import { DEFAULT_MAX_SIGNALS } from '../../../constants'; +import { getListArrayMock } from '../types/lists.mock'; describe('update rules schema', () => { test('empty objects do not validate', () => { @@ -1377,14 +1378,182 @@ describe('update rules schema', () => { }); }); - // TODO: The exception_list tests are skipped and empty until we re-integrate it from the lists plugin - describe.skip('exception_list', () => { - test('[rule_id, description, from, to, index, name, severity, interval, type, filter, risk_score, note, and exceptions_list] does validate', () => {}); + describe('exception_list', () => { + test('[rule_id, description, from, to, index, name, severity, interval, type, filters, risk_score, note, and exceptions_list] does validate', () => { + const payload: UpdateRulesSchema = { + rule_id: 'rule-1', + description: 'some description', + from: 'now-5m', + to: 'now', + index: ['index-1'], + name: 'some-name', + severity: 'low', + interval: '5m', + type: 'query', + risk_score: 50, + filters: [], + note: '# some markdown', + exceptions_list: getListArrayMock(), + }; + + const decoded = updateRulesSchema.decode(payload); + const checked = exactCheck(payload, decoded); + const message = pipe(checked, foldLeftRight); + expect(getPaths(left(message.errors))).toEqual([]); + const expected: UpdateRulesSchemaDecoded = { + rule_id: 'rule-1', + description: 'some description', + from: 'now-5m', + to: 'now', + index: ['index-1'], + name: 'some-name', + severity: 'low', + interval: '5m', + type: 'query', + risk_score: 50, + note: '# some markdown', + references: [], + actions: [], + enabled: true, + false_positives: [], + max_signals: DEFAULT_MAX_SIGNALS, + tags: [], + threat: [], + throttle: null, + filters: [], + exceptions_list: [ + { + id: 'some_uuid', + namespace_type: 'single', + }, + { + id: 'some_uuid', + namespace_type: 'agnostic', + }, + ], + }; + expect(message.schema).toEqual(expected); + }); + + test('[rule_id, description, from, to, index, name, severity, interval, type, filter, risk_score, note, and empty exceptions_list] does validate', () => { + const payload: UpdateRulesSchema = { + rule_id: 'rule-1', + description: 'some description', + from: 'now-5m', + to: 'now', + index: ['index-1'], + name: 'some-name', + severity: 'low', + interval: '5m', + type: 'query', + risk_score: 50, + filters: [], + note: '# some markdown', + exceptions_list: [], + }; + + const decoded = updateRulesSchema.decode(payload); + const checked = exactCheck(payload, decoded); + const message = pipe(checked, foldLeftRight); + expect(getPaths(left(message.errors))).toEqual([]); + const expected: UpdateRulesSchemaDecoded = { + rule_id: 'rule-1', + description: 'some description', + from: 'now-5m', + to: 'now', + index: ['index-1'], + name: 
'some-name', + severity: 'low', + interval: '5m', + type: 'query', + risk_score: 50, + note: '# some markdown', + references: [], + actions: [], + enabled: true, + false_positives: [], + max_signals: DEFAULT_MAX_SIGNALS, + tags: [], + threat: [], + throttle: null, + filters: [], + exceptions_list: [], + }; + expect(message.schema).toEqual(expected); + }); - test('[rule_id, description, from, to, index, name, severity, interval, type, filter, risk_score, note, and empty exceptions_list] does validate', () => {}); + test('rule_id, description, from, to, index, name, severity, interval, type, filters, risk_score, note, and invalid exceptions_list] does NOT validate', () => { + const payload: Omit & { + exceptions_list: Array<{ id: string; namespace_type: string }>; + } = { + rule_id: 'rule-1', + description: 'some description', + from: 'now-5m', + to: 'now', + index: ['index-1'], + name: 'some-name', + severity: 'low', + interval: '5m', + type: 'query', + risk_score: 50, + filters: [], + note: '# some markdown', + exceptions_list: [{ id: 'uuid_here', namespace_type: 'not a namespace type' }], + }; + + const decoded = updateRulesSchema.decode(payload); + const checked = exactCheck(payload, decoded); + const message = pipe(checked, foldLeftRight); + expect(getPaths(left(message.errors))).toEqual([ + 'Invalid value "not a namespace type" supplied to "exceptions_list,namespace_type"', + ]); + expect(message.schema).toEqual({}); + }); - test('rule_id, description, from, to, index, name, severity, interval, type, filter, risk_score, note, and invalid exceptions_list] does NOT validate', () => {}); + test('[rule_id, description, from, to, index, name, severity, interval, type, filters, risk_score, note, and non-existent exceptions_list] does validate with empty exceptions_list', () => { + const payload: UpdateRulesSchema = { + rule_id: 'rule-1', + description: 'some description', + from: 'now-5m', + to: 'now', + index: ['index-1'], + name: 'some-name', + severity: 'low', + interval: '5m', + type: 'query', + risk_score: 50, + filters: [], + note: '# some markdown', + }; - test('[rule_id, description, from, to, index, name, severity, interval, type, filter, risk_score, note, and non-existent exceptions_list] does validate with empty exceptions_list', () => {}); + const decoded = updateRulesSchema.decode(payload); + const checked = exactCheck(payload, decoded); + const message = pipe(checked, foldLeftRight); + expect(getPaths(left(message.errors))).toEqual([]); + const expected: UpdateRulesSchemaDecoded = { + rule_id: 'rule-1', + description: 'some description', + from: 'now-5m', + to: 'now', + index: ['index-1'], + name: 'some-name', + severity: 'low', + interval: '5m', + type: 'query', + risk_score: 50, + note: '# some markdown', + references: [], + actions: [], + enabled: true, + false_positives: [], + max_signals: DEFAULT_MAX_SIGNALS, + tags: [], + threat: [], + throttle: null, + exceptions_list: [], + filters: [], + }; + expect(message.schema).toEqual(expected); + }); }); }); diff --git a/x-pack/plugins/security_solution/common/detection_engine/schemas/request/update_rules_schema.ts b/x-pack/plugins/security_solution/common/detection_engine/schemas/request/update_rules_schema.ts index 504233f95986..73078e617efc 100644 --- a/x-pack/plugins/security_solution/common/detection_engine/schemas/request/update_rules_schema.ts +++ b/x-pack/plugins/security_solution/common/detection_engine/schemas/request/update_rules_schema.ts @@ -43,16 +43,19 @@ import { } from '../common/schemas'; /* eslint-enable 
@typescript-eslint/camelcase */ -import { DefaultStringArray } from '../types/default_string_array'; -import { DefaultActionsArray } from '../types/default_actions_array'; -import { DefaultBooleanTrue } from '../types/default_boolean_true'; -import { DefaultFromString } from '../types/default_from_string'; -import { DefaultIntervalString } from '../types/default_interval_string'; -import { DefaultMaxSignalsNumber } from '../types/default_max_signals_number'; -import { DefaultToString } from '../types/default_to_string'; -import { DefaultThreatArray } from '../types/default_threat_array'; -import { DefaultThrottleNull } from '../types/default_throttle_null'; -import { ListsDefaultArray, ListsDefaultArraySchema } from '../types/lists_default_array'; +import { + DefaultStringArray, + DefaultActionsArray, + DefaultBooleanTrue, + DefaultFromString, + DefaultIntervalString, + DefaultMaxSignalsNumber, + DefaultToString, + DefaultThreatArray, + DefaultThrottleNull, + DefaultListArray, + ListArray, +} from '../types'; /** * This almost identical to the create_rules_schema except for a few details. @@ -100,7 +103,7 @@ export const updateRulesSchema = t.intersection([ references: DefaultStringArray, // defaults to empty array of strings if not set during decode note, // defaults to "undefined" if not set during decode version, // defaults to "undefined" if not set during decode - exceptions_list: ListsDefaultArray, // defaults to empty array if not set during decode + exceptions_list: DefaultListArray, // defaults to empty array if not set during decode }) ), ]); @@ -135,6 +138,6 @@ export type UpdateRulesSchemaDecoded = Omit< to: To; threat: Threat; throttle: ThrottleOrNull; - exceptions_list: ListsDefaultArraySchema; + exceptions_list: ListArray; rule_id: RuleId; }; diff --git a/x-pack/plugins/security_solution/common/detection_engine/schemas/response/rules_schema.mocks.ts b/x-pack/plugins/security_solution/common/detection_engine/schemas/response/rules_schema.mocks.ts index ecbf0321cdc6..e63a7ad981e1 100644 --- a/x-pack/plugins/security_solution/common/detection_engine/schemas/response/rules_schema.mocks.ts +++ b/x-pack/plugins/security_solution/common/detection_engine/schemas/response/rules_schema.mocks.ts @@ -3,6 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
*/ +import { getListArrayMock } from '../types/lists.mock'; import { RulesSchema } from './rules_schema'; @@ -64,38 +65,7 @@ export const getRulesSchemaMock = (anchorDate: string = ANCHOR_DATE): RulesSchem language: 'kuery', rule_id: 'query-rule-id', interval: '5m', - exceptions_list: [ - { - field: 'source.ip', - values_operator: 'included', - values_type: 'exists', - }, - { - field: 'host.name', - values_operator: 'excluded', - values_type: 'match', - values: [ - { - name: 'rock01', - }, - ], - and: [ - { - field: 'host.id', - values_operator: 'included', - values_type: 'match_all', - values: [ - { - name: '123', - }, - { - name: '678', - }, - ], - }, - ], - }, - ], + exceptions_list: getListArrayMock(), }); export const getRulesMlSchemaMock = (anchorDate: string = ANCHOR_DATE): RulesSchema => { diff --git a/x-pack/plugins/security_solution/common/detection_engine/schemas/response/rules_schema.test.ts b/x-pack/plugins/security_solution/common/detection_engine/schemas/response/rules_schema.test.ts index 90aef656db36..b3f9096b5148 100644 --- a/x-pack/plugins/security_solution/common/detection_engine/schemas/response/rules_schema.test.ts +++ b/x-pack/plugins/security_solution/common/detection_engine/schemas/response/rules_schema.test.ts @@ -22,6 +22,7 @@ import { exactCheck } from '../../../exact_check'; import { foldLeftRight, getPaths } from '../../../test_utils'; import { TypeAndTimelineOnly } from './type_timeline_only_schema'; import { getRulesSchemaMock, getRulesMlSchemaMock } from './rules_schema.mocks'; +import { ListArray } from '../types/lists'; export const ANCHOR_DATE = '2020-02-20T03:57:54.037Z'; @@ -650,4 +651,47 @@ describe('rules_schema', () => { expect(fields.length).toEqual(2); }); }); + + describe('exceptions_list', () => { + test('it should validate an empty array for "exceptions_list"', () => { + const payload = getRulesSchemaMock(); + payload.exceptions_list = []; + const decoded = rulesSchema.decode(payload); + const checked = exactCheck(payload, decoded); + const message = pipe(checked, foldLeftRight); + const expected = getRulesSchemaMock(); + expected.exceptions_list = []; + expect(getPaths(left(message.errors))).toEqual([]); + expect(message.schema).toEqual(expected); + }); + + test('it should NOT validate when "exceptions_list" is not expected type', () => { + const payload: Omit & { + exceptions_list?: string; + } = { ...getRulesSchemaMock(), exceptions_list: 'invalid_data' }; + + const decoded = rulesSchema.decode(payload); + const checked = exactCheck(payload, decoded); + const message = pipe(checked, foldLeftRight); + + expect(getPaths(left(message.errors))).toEqual([ + 'Invalid value "invalid_data" supplied to "exceptions_list"', + ]); + expect(message.schema).toEqual({}); + }); + + test('it should default to empty array if "exceptions_list" is undefined ', () => { + const payload: Omit & { + exceptions_list?: ListArray; + } = getRulesSchemaMock(); + payload.exceptions_list = undefined; + + const decoded = rulesSchema.decode(payload); + const checked = exactCheck(payload, decoded); + const message = pipe(checked, foldLeftRight); + + expect(getPaths(left(message.errors))).toEqual([]); + expect(message.schema).toEqual({ ...payload, exceptions_list: [] }); + }); + }); }); diff --git a/x-pack/plugins/security_solution/common/detection_engine/schemas/response/rules_schema.ts b/x-pack/plugins/security_solution/common/detection_engine/schemas/response/rules_schema.ts index a7a31ec9e1b5..9803a80f5785 100644 --- 
a/x-pack/plugins/security_solution/common/detection_engine/schemas/response/rules_schema.ts +++ b/x-pack/plugins/security_solution/common/detection_engine/schemas/response/rules_schema.ts @@ -56,7 +56,7 @@ import { meta, note, } from '../common/schemas'; -import { ListsDefaultArray } from '../types/lists_default_array'; +import { DefaultListArray } from '../types/lists_default_array'; /** * This is the required fields for the rules schema response. Put all required properties on @@ -87,7 +87,7 @@ export const requiredRulesSchema = t.type({ updated_at, created_by, version, - exceptions_list: ListsDefaultArray, + exceptions_list: DefaultListArray, }); export type RequiredRulesSchema = t.TypeOf; diff --git a/x-pack/plugins/security_solution/common/detection_engine/schemas/types/index.ts b/x-pack/plugins/security_solution/common/detection_engine/schemas/types/index.ts new file mode 100644 index 000000000000..368dd4922eec --- /dev/null +++ b/x-pack/plugins/security_solution/common/detection_engine/schemas/types/index.ts @@ -0,0 +1,34 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +export * from './default_actions_array'; +export * from './default_boolean_false'; +export * from './default_boolean_true'; +export * from './default_empty_string'; +export * from './default_export_file_name'; +export * from './default_from_string'; +export * from './default_interval_string'; +export * from './default_language_string'; +export * from './default_max_signals_number'; +export * from './default_page'; +export * from './default_per_page'; +export * from './default_string_array'; +export * from './default_string_boolean_false'; +export * from './default_threat_array'; +export * from './default_throttle_null'; +export * from './default_to_string'; +export * from './default_uuid'; +export * from './default_version_number'; +export * from './iso_date_string'; +export * from './lists'; +export * from './lists_default_array'; +export * from './non_empty_string'; +export * from './only_false_allowed'; +export * from './positive_integer'; +export * from './positive_integer_greater_than_zero'; +export * from './references_default_array'; +export * from './risk_score'; +export * from './uuid'; diff --git a/x-pack/plugins/security_solution/common/detection_engine/schemas/types/lists.mock.ts b/x-pack/plugins/security_solution/common/detection_engine/schemas/types/lists.mock.ts new file mode 100644 index 000000000000..d76e2ac78f3d --- /dev/null +++ b/x-pack/plugins/security_solution/common/detection_engine/schemas/types/lists.mock.ts @@ -0,0 +1,18 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +import { List, ListArray } from './lists'; + +export const getListMock = (): List => ({ + id: 'some_uuid', + namespace_type: 'single', +}); + +export const getListAgnosticMock = (): List => ({ + id: 'some_uuid', + namespace_type: 'agnostic', +}); + +export const getListArrayMock = (): ListArray => [getListMock(), getListAgnosticMock()]; diff --git a/x-pack/plugins/security_solution/common/detection_engine/schemas/types/lists.test.ts b/x-pack/plugins/security_solution/common/detection_engine/schemas/types/lists.test.ts new file mode 100644 index 000000000000..657a4b479f16 --- /dev/null +++ b/x-pack/plugins/security_solution/common/detection_engine/schemas/types/lists.test.ts @@ -0,0 +1,131 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { pipe } from 'fp-ts/lib/pipeable'; +import { left } from 'fp-ts/lib/Either'; + +import { foldLeftRight, getPaths } from '../../../test_utils'; + +import { getListAgnosticMock, getListMock, getListArrayMock } from './lists.mock'; +import { + List, + ListArray, + ListArrayOrUndefined, + list, + listArray, + listArrayOrUndefined, +} from './lists'; + +describe('Lists', () => { + describe('list', () => { + test('it should validate a list', () => { + const payload = getListMock(); + const decoded = list.decode(payload); + const message = pipe(decoded, foldLeftRight); + + expect(getPaths(left(message.errors))).toEqual([]); + expect(message.schema).toEqual(payload); + }); + + test('it should validate a list with "namespace_type" of"agnostic"', () => { + const payload = getListAgnosticMock(); + const decoded = list.decode(payload); + const message = pipe(decoded, foldLeftRight); + + expect(getPaths(left(message.errors))).toEqual([]); + expect(message.schema).toEqual(payload); + }); + + test('it should NOT validate a list without an "id"', () => { + const payload = getListMock(); + delete payload.id; + const decoded = list.decode(payload); + const message = pipe(decoded, foldLeftRight); + + expect(getPaths(left(message.errors))).toEqual([ + 'Invalid value "undefined" supplied to "id"', + ]); + expect(message.schema).toEqual({}); + }); + + test('it should NOT validate a list without "namespace_type"', () => { + const payload = getListMock(); + delete payload.namespace_type; + const decoded = list.decode(payload); + const message = pipe(decoded, foldLeftRight); + expect(getPaths(left(message.errors))).toEqual([ + 'Invalid value "undefined" supplied to "namespace_type"', + ]); + expect(message.schema).toEqual({}); + }); + + test('it should strip out extra keys', () => { + const payload: List & { + extraKey?: string; + } = getListMock(); + payload.extraKey = 'some value'; + const decoded = list.decode(payload); + const message = pipe(decoded, foldLeftRight); + + expect(getPaths(left(message.errors))).toEqual([]); + expect(message.schema).toEqual(getListMock()); + }); + }); + + describe('listArray', () => { + test('it should validate an array of lists', () => { + const payload = getListArrayMock(); + const decoded = listArray.decode(payload); + const message = pipe(decoded, foldLeftRight); + + expect(getPaths(left(message.errors))).toEqual([]); + expect(message.schema).toEqual(payload); + }); + + test('it should not validate when unexpected type found in array', () => { + const payload = ([1] as unknown) as ListArray; + const decoded = 
listArray.decode(payload); + const message = pipe(decoded, foldLeftRight); + + expect(getPaths(left(message.errors))).toEqual([ + 'Invalid value "1" supplied to "Array<{| id: string, namespace_type: "agnostic" | "single" |}>"', + ]); + expect(message.schema).toEqual({}); + }); + }); + + describe('listArrayOrUndefined', () => { + test('it should validate an array of lists', () => { + const payload = getListArrayMock(); + const decoded = listArrayOrUndefined.decode(payload); + const message = pipe(decoded, foldLeftRight); + + expect(getPaths(left(message.errors))).toEqual([]); + expect(message.schema).toEqual(payload); + }); + + test('it should validate when undefined', () => { + const payload = undefined; + const decoded = listArrayOrUndefined.decode(payload); + const message = pipe(decoded, foldLeftRight); + + expect(getPaths(left(message.errors))).toEqual([]); + expect(message.schema).toEqual(payload); + }); + + test('it should not allow an item that is not of type "list" in array', () => { + const payload = ([1] as unknown) as ListArrayOrUndefined; + const decoded = listArrayOrUndefined.decode(payload); + const message = pipe(decoded, foldLeftRight); + + expect(getPaths(left(message.errors))).toEqual([ + 'Invalid value "1" supplied to "(Array<{| id: string, namespace_type: "agnostic" | "single" |}> | undefined)"', + 'Invalid value "[1]" supplied to "(Array<{| id: string, namespace_type: "agnostic" | "single" |}> | undefined)"', + ]); + expect(message.schema).toEqual({}); + }); + }); +}); diff --git a/x-pack/plugins/security_solution/common/detection_engine/schemas/types/lists.ts b/x-pack/plugins/security_solution/common/detection_engine/schemas/types/lists.ts new file mode 100644 index 000000000000..07be038ff352 --- /dev/null +++ b/x-pack/plugins/security_solution/common/detection_engine/schemas/types/lists.ts @@ -0,0 +1,22 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import * as t from 'io-ts'; + +import { namespaceType } from '../../lists_common_deps'; + +export const list = t.exact( + t.type({ + id: t.string, + namespace_type: namespaceType, + }) +); + +export type List = t.TypeOf; +export const listArray = t.array(list); +export type ListArray = t.TypeOf; +export const listArrayOrUndefined = t.union([listArray, t.undefined]); +export type ListArrayOrUndefined = t.TypeOf; diff --git a/x-pack/plugins/security_solution/common/detection_engine/schemas/types/lists_default_array.test.ts b/x-pack/plugins/security_solution/common/detection_engine/schemas/types/lists_default_array.test.ts index 9eb55c22756f..2268e47bd114 100644 --- a/x-pack/plugins/security_solution/common/detection_engine/schemas/types/lists_default_array.test.ts +++ b/x-pack/plugins/security_solution/common/detection_engine/schemas/types/lists_default_array.test.ts @@ -4,187 +4,60 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { ListsDefaultArray } from './lists_default_array'; import { pipe } from 'fp-ts/lib/pipeable'; import { left } from 'fp-ts/lib/Either'; -import { foldLeftRight, getPaths } from '../../../test_utils'; - -describe('lists_default_array', () => { - test('it should validate an empty array', () => { - const payload: string[] = []; - const decoded = ListsDefaultArray.decode(payload); - const message = pipe(decoded, foldLeftRight); - - expect(getPaths(left(message.errors))).toEqual([]); - expect(message.schema).toEqual(payload); - }); - test('it should validate an array of lists', () => { - const payload = [ - { - field: 'source.ip', - values_operator: 'included', - values_type: 'exists', - }, - { - field: 'host.name', - values_operator: 'excluded', - values_type: 'match', - values: [ - { - name: 'rock01', - }, - ], - and: [ - { - field: 'host.id', - values_operator: 'included', - values_type: 'match_all', - values: [ - { - name: '123', - }, - { - name: '678', - }, - ], - }, - ], - }, - ]; - const decoded = ListsDefaultArray.decode(payload); - const message = pipe(decoded, foldLeftRight); - - expect(getPaths(left(message.errors))).toEqual([]); - expect(message.schema).toEqual(payload); - }); +import { foldLeftRight, getPaths } from '../../../test_utils'; - test('it should not validate an array of lists that includes a values_operator other than included or excluded', () => { - const payload = [ - { - field: 'source.ip', - values_operator: 'included', - values_type: 'exists', - }, - { - field: 'host.name', - values_operator: 'excluded', - values_type: 'exists', - }, - { - field: 'host.hostname', - values_operator: 'jibber jabber', - values_type: 'exists', - }, - ]; - const decoded = ListsDefaultArray.decode(payload); - const message = pipe(decoded, foldLeftRight); +import { DefaultListArray, DefaultListArrayC } from './lists_default_array'; +import { getListArrayMock } from './lists.mock'; - expect(getPaths(left(message.errors))).toEqual([ - 'Invalid value "jibber jabber" supplied to "values_operator"', - ]); - expect(message.schema).toEqual({}); - }); - - // TODO - this scenario should never come up, as the values key is forbidden when values_type is "exists" in the incoming schema - need to find a good way to do this in io-ts - test('it will validate an array of lists that includes "values" when "values_type" is "exists"', () => { - const payload = [ - { - field: 'host.name', - values_operator: 'excluded', - values_type: 'exists', - values: [ - { - name: '127.0.0.1', - }, - ], - }, - ]; - const decoded = ListsDefaultArray.decode(payload); +describe('lists_default_array', () => { + test('it should return a default array when null', () => { + const payload = null; + const decoded = DefaultListArray.decode(payload); const message = pipe(decoded, foldLeftRight); expect(getPaths(left(message.errors))).toEqual([]); - expect(message.schema).toEqual(payload); + expect(message.schema).toEqual([]); }); - // TODO - this scenario should never come up, as the values key is required when values_type is "match" in the incoming schema - need to find a good way to do this in io-ts - test('it will validate an array of lists that does not include "values" when "values_type" is "match"', () => { - const payload = [ - { - field: 'host.name', - values_operator: 'excluded', - values_type: 'match', - }, - ]; - const decoded = ListsDefaultArray.decode(payload); + test('it should return a default array when undefined', () => { + const payload = undefined; + const decoded = DefaultListArray.decode(payload); 
const message = pipe(decoded, foldLeftRight); expect(getPaths(left(message.errors))).toEqual([]); - expect(message.schema).toEqual(payload); + expect(message.schema).toEqual([]); }); - // TODO - this scenario should never come up, as the values key is required when values_type is "match_all" in the incoming schema - need to find a good way to do this in io-ts - test('it will validate an array of lists that does not include "values" when "values_type" is "match_all"', () => { - const payload = [ - { - field: 'host.name', - values_operator: 'excluded', - values_type: 'match_all', - }, - ]; - const decoded = ListsDefaultArray.decode(payload); + test('it should validate an empty array', () => { + const payload: string[] = []; + const decoded = DefaultListArray.decode(payload); const message = pipe(decoded, foldLeftRight); expect(getPaths(left(message.errors))).toEqual([]); expect(message.schema).toEqual(payload); }); - // TODO - this scenario should never come up, as the values key is required when values_type is "list" in the incoming schema - need to find a good way to do this in io-ts - test('it should not validate an array of lists that does not include "values" when "values_type" is "list"', () => { - const payload = [ - { - field: 'host.name', - values_operator: 'excluded', - values_type: 'list', - }, - ]; - const decoded = ListsDefaultArray.decode(payload); + test('it should validate an array of lists', () => { + const payload = getListArrayMock(); + const decoded = DefaultListArray.decode(payload); const message = pipe(decoded, foldLeftRight); expect(getPaths(left(message.errors))).toEqual([]); expect(message.schema).toEqual(payload); }); - test('it should not validate an array with a number', () => { - const payload = [ - { - field: 'source.ip', - values_operator: 'included', - values_type: 'exists', - values: [ - { - name: '127.0.0.1', - }, - ], - }, - 5, - ]; - const decoded = ListsDefaultArray.decode(payload); + test('it should not validate an array of non accepted types', () => { + // Terrible casting for purpose of tests + const payload = ([1] as unknown) as DefaultListArrayC; + const decoded = DefaultListArray.decode(payload); const message = pipe(decoded, foldLeftRight); expect(getPaths(left(message.errors))).toEqual([ - 'Invalid value "5" supplied to "listsWithDefaultArray"', - 'Invalid value "5" supplied to "listsWithDefaultArray"', + 'Invalid value "1" supplied to "DefaultListArray"', ]); expect(message.schema).toEqual({}); }); - - test('it should return a default array entry', () => { - const payload = null; - const decoded = ListsDefaultArray.decode(payload); - const message = pipe(decoded, foldLeftRight); - - expect(getPaths(left(message.errors))).toEqual([]); - expect(message.schema).toEqual([]); - }); }); diff --git a/x-pack/plugins/security_solution/common/detection_engine/schemas/types/lists_default_array.ts b/x-pack/plugins/security_solution/common/detection_engine/schemas/types/lists_default_array.ts index 7fe98cdc300e..ac5666cad23a 100644 --- a/x-pack/plugins/security_solution/common/detection_engine/schemas/types/lists_default_array.ts +++ b/x-pack/plugins/security_solution/common/detection_engine/schemas/types/lists_default_array.ts @@ -7,28 +7,18 @@ import * as t from 'io-ts'; import { Either } from 'fp-ts/lib/Either'; -import { - list_and as listAnd, - list_values as listValues, - list_values_operator as listOperator, -} from '../common/schemas'; +import { ListArray, list } from './lists'; -export type List = t.TypeOf; -export type ListValues = t.TypeOf; 
-export type ListOperator = t.TypeOf; +export type DefaultListArrayC = t.Type; /** - * Types the ListsDefaultArray as: - * - If null or undefined, then a default array will be set for the list + * Types the DefaultListArray as: + * - If null or undefined, then a default array of type list will be set */ -export const ListsDefaultArray = new t.Type( - 'listsWithDefaultArray', - t.array(listAnd).is, - (input, context): Either => - input == null ? t.success([]) : t.array(listAnd).validate(input, context), +export const DefaultListArray: DefaultListArrayC = new t.Type( + 'DefaultListArray', + t.array(list).is, + (input, context): Either => + input == null ? t.success([]) : t.array(list).validate(input, context), t.identity ); - -export type ListsDefaultArrayC = typeof ListsDefaultArray; - -export type ListsDefaultArraySchema = t.TypeOf; diff --git a/x-pack/plugins/security_solution/cypress/integration/events_viewer.spec.ts b/x-pack/plugins/security_solution/cypress/integration/events_viewer.spec.ts index cd4573817cc2..84ca1e20e957 100644 --- a/x-pack/plugins/security_solution/cypress/integration/events_viewer.spec.ts +++ b/x-pack/plugins/security_solution/cypress/integration/events_viewer.spec.ts @@ -153,7 +153,7 @@ describe('Events Viewer', () => { }); }); - context.skip('Events columns', () => { + context('Events columns', () => { before(() => { loginAndWaitForPage(HOSTS_URL); openEvents(); diff --git a/x-pack/plugins/security_solution/public/common/components/exceptions/helpers.test.tsx b/x-pack/plugins/security_solution/public/common/components/exceptions/helpers.test.tsx index 2239de376432..244819080c93 100644 --- a/x-pack/plugins/security_solution/public/common/components/exceptions/helpers.test.tsx +++ b/x-pack/plugins/security_solution/public/common/components/exceptions/helpers.test.tsx @@ -215,7 +215,7 @@ describe('Exception helpers', () => { fieldName: 'host.name', isNested: false, operator: 'is in list', - value: ['some host name'], + value: 'some-list-id', }, { fieldName: 'host.name', @@ -238,8 +238,8 @@ describe('Exception helpers', () => { { fieldName: 'host.name.host.name', isNested: true, - operator: 'exists', - value: null, + operator: 'is', + value: 'some host name', }, ]; expect(result).toEqual(expected); diff --git a/x-pack/plugins/security_solution/public/common/components/exceptions/helpers.tsx b/x-pack/plugins/security_solution/public/common/components/exceptions/helpers.tsx index f8b9c39801ae..164940db619f 100644 --- a/x-pack/plugins/security_solution/public/common/components/exceptions/helpers.tsx +++ b/x-pack/plugins/security_solution/public/common/components/exceptions/helpers.tsx @@ -19,6 +19,7 @@ import { OperatorTypeEnum, entriesNested, entriesExists, + entriesList, } from '../../../lists_plugin_deps'; /** @@ -87,6 +88,16 @@ export const getFormattedEntries = (entries: EntriesArray): FormattedEntry[] => return formattedEntries.flat(); }; +export const getEntryValue = (entry: Entry): string | string[] | null => { + if (entriesList.is(entry)) { + return entry.list.id; + } else if (entriesExists.is(entry)) { + return null; + } else { + return entry.value; + } +}; + /** * Helper method for `getFormattedEntries` */ @@ -100,7 +111,7 @@ export const formatEntry = ({ item: Entry; }): FormattedEntry => { const operator = getExceptionOperatorSelect(item); - const value = !entriesExists.is(item) ? item.value : null; + const value = getEntryValue(item); return { fieldName: isNested ? 
`${parent}.${item.field}` : item.field, diff --git a/x-pack/plugins/security_solution/public/common/lib/connectors/jira/flyout.tsx b/x-pack/plugins/security_solution/public/common/lib/connectors/jira/flyout.tsx index c9953fdb30e0..0737db3cd08e 100644 --- a/x-pack/plugins/security_solution/public/common/lib/connectors/jira/flyout.tsx +++ b/x-pack/plugins/security_solution/public/common/lib/connectors/jira/flyout.tsx @@ -63,7 +63,7 @@ const JiraConnectorForm: React.FC> fullWidth error={errors.email} isInvalid={isEmailInvalid} - label={i18n.EMAIL_LABEL} + label={i18n.JIRA_EMAIL_LABEL} > > fullWidth error={errors.apiToken} isInvalid={isApiTokenInvalid} - label={i18n.API_TOKEN_LABEL} + label={i18n.JIRA_API_TOKEN_LABEL} > { } if (!action.secrets.email) { - errors.email = [...errors.email, i18n.EMAIL_REQUIRED]; + errors.email = [...errors.email, i18n.JIRA_EMAIL_REQUIRED]; } if (!action.secrets.apiToken) { - errors.apiToken = [...errors.apiToken, i18n.API_TOKEN_REQUIRED]; + errors.apiToken = [...errors.apiToken, i18n.JIRA_API_TOKEN_REQUIRED]; } return { errors }; diff --git a/x-pack/plugins/security_solution/public/common/lib/connectors/jira/translations.ts b/x-pack/plugins/security_solution/public/common/lib/connectors/jira/translations.ts index 286f81842411..bcb2c49a0de7 100644 --- a/x-pack/plugins/security_solution/public/common/lib/connectors/jira/translations.ts +++ b/x-pack/plugins/security_solution/public/common/lib/connectors/jira/translations.ts @@ -36,6 +36,34 @@ export const JIRA_PROJECT_KEY_REQUIRED = i18n.translate( } ); +export const JIRA_EMAIL_LABEL = i18n.translate( + 'xpack.securitySolution.case.connectors.jira.emailTextFieldLabel', + { + defaultMessage: 'Email or Username', + } +); + +export const JIRA_EMAIL_REQUIRED = i18n.translate( + 'xpack.securitySolution.case.connectors.jira.requiredEmailTextField', + { + defaultMessage: 'Email or Username is required', + } +); + +export const JIRA_API_TOKEN_LABEL = i18n.translate( + 'xpack.securitySolution.case.connectors.jira.apiTokenTextFieldLabel', + { + defaultMessage: 'API token or Password', + } +); + +export const JIRA_API_TOKEN_REQUIRED = i18n.translate( + 'xpack.securitySolution.case.connectors.jira.requiredApiTokenTextField', + { + defaultMessage: 'API token or Password is required', + } +); + export const MAPPING_FIELD_SUMMARY = i18n.translate( 'xpack.securitySolution.case.configureCases.mappingFieldSummary', { diff --git a/x-pack/plugins/security_solution/public/common/lib/connectors/translations.ts b/x-pack/plugins/security_solution/public/common/lib/connectors/translations.ts index 40848ea76900..6dd1247d40fc 100644 --- a/x-pack/plugins/security_solution/public/common/lib/connectors/translations.ts +++ b/x-pack/plugins/security_solution/public/common/lib/connectors/translations.ts @@ -58,14 +58,14 @@ export const PASSWORD_REQUIRED = i18n.translate( export const API_TOKEN_LABEL = i18n.translate( 'xpack.securitySolution.case.connectors.common.apiTokenTextFieldLabel', { - defaultMessage: 'Api token', + defaultMessage: 'API token', } ); export const API_TOKEN_REQUIRED = i18n.translate( 'xpack.securitySolution.case.connectors.common.requiredApiTokenTextField', { - defaultMessage: 'Api token is required', + defaultMessage: 'API token is required', } ); diff --git a/x-pack/plugins/security_solution/public/lists_plugin_deps.ts b/x-pack/plugins/security_solution/public/lists_plugin_deps.ts index 22732c86bd9a..575ff26330a4 100644 --- a/x-pack/plugins/security_solution/public/lists_plugin_deps.ts +++ 
b/x-pack/plugins/security_solution/public/lists_plugin_deps.ts @@ -27,4 +27,5 @@ export { OperatorTypeEnum, entriesNested, entriesExists, + entriesList, } from '../../lists/common/schemas'; diff --git a/x-pack/plugins/security_solution/public/management/pages/policy/store/policy_details/middleware.ts b/x-pack/plugins/security_solution/public/management/pages/policy/store/policy_details/middleware.ts index ec0c526482b4..899f85ecdea3 100644 --- a/x-pack/plugins/security_solution/public/management/pages/policy/store/policy_details/middleware.ts +++ b/x-pack/plugins/security_solution/public/management/pages/policy/store/policy_details/middleware.ts @@ -17,7 +17,6 @@ import { sendPutDatasource, } from '../policy_list/services/ingest'; import { NewPolicyData, PolicyData } from '../../../../../../common/endpoint/types'; -import { factory as policyConfigFactory } from '../../../../../../common/endpoint/models/policy_config'; import { ImmutableMiddlewareFactory } from '../../../../../common/store'; export const policyDetailsMiddlewareFactory: ImmutableMiddlewareFactory = ( @@ -43,23 +42,6 @@ export const policyDetailsMiddlewareFactory: ImmutableMiddlewareFactory; + readonly payload: { + readonly events: Readonly; + readonly stats: Readonly>; + readonly lineageLimits: { readonly children: string | null; readonly ancestors: string | null }; + }; } interface ServerFailedToReturnResolverData { diff --git a/x-pack/plugins/security_solution/public/resolver/store/data/graphing.test.ts b/x-pack/plugins/security_solution/public/resolver/store/data/graphing.test.ts index d120adb72cd8..163846e0414d 100644 --- a/x-pack/plugins/security_solution/public/resolver/store/data/graphing.test.ts +++ b/x-pack/plugins/security_solution/public/resolver/store/data/graphing.test.ts @@ -9,8 +9,13 @@ import { DataAction } from './action'; import { dataReducer } from './reducer'; import { DataState } from '../../types'; import { LegacyEndpointEvent, ResolverEvent } from '../../../../common/endpoint/types'; -import { graphableProcesses, processNodePositionsAndEdgeLineSegments } from './selectors'; +import { + graphableProcesses, + processNodePositionsAndEdgeLineSegments, + limitsReached, +} from './selectors'; import { mockProcessEvent } from '../../models/process_event_test_helpers'; +import { EndpointDocGenerator } from '../../../../common/endpoint/generate_data'; describe('resolver graph layout', () => { let processA: LegacyEndpointEvent; @@ -114,7 +119,10 @@ describe('resolver graph layout', () => { describe('when rendering no nodes', () => { beforeEach(() => { const events: ResolverEvent[] = []; - const action: DataAction = { type: 'serverReturnedResolverData', events, stats: new Map() }; + const action: DataAction = { + type: 'serverReturnedResolverData', + payload: { events, stats: new Map(), lineageLimits: { children: null, ancestors: null } }, + }; store.dispatch(action); }); it('the graphableProcesses list should only include nothing', () => { @@ -128,7 +136,10 @@ describe('resolver graph layout', () => { describe('when rendering one node', () => { beforeEach(() => { const events = [processA]; - const action: DataAction = { type: 'serverReturnedResolverData', events, stats: new Map() }; + const action: DataAction = { + type: 'serverReturnedResolverData', + payload: { events, stats: new Map(), lineageLimits: { children: null, ancestors: null } }, + }; store.dispatch(action); }); it('the graphableProcesses list should only include nothing', () => { @@ -142,7 +153,10 @@ describe('resolver graph layout', () => { 
describe('when rendering two nodes, one being the parent of the other', () => { beforeEach(() => { const events = [processA, processB]; - const action: DataAction = { type: 'serverReturnedResolverData', events, stats: new Map() }; + const action: DataAction = { + type: 'serverReturnedResolverData', + payload: { events, stats: new Map(), lineageLimits: { children: null, ancestors: null } }, + }; store.dispatch(action); }); it('the graphableProcesses list should only include nothing', () => { @@ -166,7 +180,10 @@ describe('resolver graph layout', () => { processH, processI, ]; - const action: DataAction = { type: 'serverReturnedResolverData', events, stats: new Map() }; + const action: DataAction = { + type: 'serverReturnedResolverData', + payload: { events, stats: new Map(), lineageLimits: { children: null, ancestors: null } }, + }; store.dispatch(action); }); it("the graphableProcesses list should only include events with 'processCreated' an 'processRan' eventType", () => { @@ -187,3 +204,48 @@ describe('resolver graph layout', () => { }); }); }); + +describe('resolver graph with too much lineage', () => { + let generator: EndpointDocGenerator; + let store: Store; + let allEvents: ResolverEvent[]; + let childrenCursor: string; + let ancestorCursor: string; + + beforeEach(() => { + generator = new EndpointDocGenerator('seed'); + allEvents = generator.generateTree({ ancestors: 1, generations: 2, children: 2 }).allEvents; + childrenCursor = 'aValidChildursor'; + ancestorCursor = 'aValidAncestorCursor'; + store = createStore(dataReducer, undefined); + }); + + describe('should select from state properly', () => { + it('should indicate there are too many ancestors', () => { + const action: DataAction = { + type: 'serverReturnedResolverData', + payload: { + events: allEvents, + stats: new Map(), + lineageLimits: { children: childrenCursor, ancestors: ancestorCursor }, + }, + }; + store.dispatch(action); + const { ancestors } = limitsReached(store.getState()); + expect(ancestors).toEqual(true); + }); + it('should indicate there are too many children', () => { + const action: DataAction = { + type: 'serverReturnedResolverData', + payload: { + events: allEvents, + stats: new Map(), + lineageLimits: { children: childrenCursor, ancestors: ancestorCursor }, + }, + }; + store.dispatch(action); + const { children } = limitsReached(store.getState()); + expect(children).toEqual(true); + }); + }); +}); diff --git a/x-pack/plugins/security_solution/public/resolver/store/data/reducer.ts b/x-pack/plugins/security_solution/public/resolver/store/data/reducer.ts index 3e897a91a74c..a36d43b70b87 100644 --- a/x-pack/plugins/security_solution/public/resolver/store/data/reducer.ts +++ b/x-pack/plugins/security_solution/public/resolver/store/data/reducer.ts @@ -13,6 +13,7 @@ function initialState(): DataState { relatedEventsStats: new Map(), relatedEvents: new Map(), relatedEventsReady: new Map(), + lineageLimits: { children: null, ancestors: null }, isLoading: false, hasError: false, }; @@ -22,8 +23,9 @@ export const dataReducer: Reducer = (state = initialS if (action.type === 'serverReturnedResolverData') { return { ...state, - results: action.events, - relatedEventsStats: action.stats, + results: action.payload.events, + relatedEventsStats: action.payload.stats, + lineageLimits: action.payload.lineageLimits, isLoading: false, hasError: false, }; diff --git a/x-pack/plugins/security_solution/public/resolver/store/data/selectors.ts b/x-pack/plugins/security_solution/public/resolver/store/data/selectors.ts index 
2873993cc645..ba415e6d83c8 100644 --- a/x-pack/plugins/security_solution/public/resolver/store/data/selectors.ts +++ b/x-pack/plugins/security_solution/public/resolver/store/data/selectors.ts @@ -529,3 +529,15 @@ export const processNodePositionsAndEdgeLineSegments = createSelector( }; } ); + +/** + * Returns the `children` and `ancestors` limits for the current graph, if any. + * + * @param state {DataState} the DataState from the reducer + */ +export const limitsReached = (state: DataState): { children: boolean; ancestors: boolean } => { + return { + children: state.lineageLimits.children !== null, + ancestors: state.lineageLimits.ancestors !== null, + }; +}; diff --git a/x-pack/plugins/security_solution/public/resolver/store/middleware.ts b/x-pack/plugins/security_solution/public/resolver/store/middleware.ts index 7f6f58dac715..a352a076e5a9 100644 --- a/x-pack/plugins/security_solution/public/resolver/store/middleware.ts +++ b/x-pack/plugins/security_solution/public/resolver/store/middleware.ts @@ -77,6 +77,8 @@ export const resolverMiddlewareFactory: MiddlewareFactory = (context) => { } const nodeStats: Map = new Map(); nodeStats.set(entityId, stats); + const lineageLimits = { children: children.nextChild, ancestors: ancestry.nextAncestor }; + const events = [ ...lifecycle, ...getLifecycleEventsAndStats(children.childNodes, nodeStats), @@ -84,8 +86,11 @@ export const resolverMiddlewareFactory: MiddlewareFactory = (context) => { ]; api.dispatch({ type: 'serverReturnedResolverData', - events, - stats: nodeStats, + payload: { + events, + stats: nodeStats, + lineageLimits, + }, }); } catch (error) { api.dispatch({ diff --git a/x-pack/plugins/security_solution/public/resolver/store/selectors.ts b/x-pack/plugins/security_solution/public/resolver/store/selectors.ts index bff30c62864f..3a5c48009e5b 100644 --- a/x-pack/plugins/security_solution/public/resolver/store/selectors.ts +++ b/x-pack/plugins/security_solution/public/resolver/store/selectors.ts @@ -152,6 +152,15 @@ export const graphableProcesses = composeSelectors( dataSelectors.graphableProcesses ); +/** + * Select the `ancestors` and `children` limits that were reached or exceeded + * during the request for the current tree. + */ +export const lineageLimitsReached = composeSelectors( + dataStateSelector, + dataSelectors.limitsReached +); + /** * Calls the `secondSelector` with the result of the `selector`. Use this when re-exporting a * concern-specific selector. `selector` should return the concern-specific state. 
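(Editorial aside, not part of the diff itself: below is a minimal sketch of how a view component might consume the new `lineageLimitsReached` selector. The component, the `ResolverState` import, and the use of `react-redux`'s `useSelector` are assumptions for illustration only, not code introduced by this change.)

```tsx
// Hypothetical usage sketch — not part of this change.
// Assumes the resolver exposes a combined `ResolverState` type and that the
// view layer is wired up with react-redux.
import React from 'react';
import { useSelector } from 'react-redux';

import * as selectors from '../store/selectors';
import { ResolverState } from '../types';

/**
 * Shows a short note when the server indicated that the ancestor or child
 * lineage was truncated (i.e. `lineageLimits` contains a non-null cursor).
 */
export const LineageLimitNotice = React.memo(() => {
  const { ancestors, children } = useSelector((state: ResolverState) =>
    selectors.lineageLimitsReached(state)
  );

  if (!ancestors && !children) {
    return null;
  }

  // Real UI copy would go through i18n; plain strings keep the sketch short.
  return (
    <p>
      {ancestors ? 'Not all ancestors of this process were loaded. ' : null}
      {children ? 'Not all children of this process were loaded.' : null}
    </p>
  );
});
```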
diff --git a/x-pack/plugins/security_solution/public/resolver/types.ts b/x-pack/plugins/security_solution/public/resolver/types.ts index a48f3b59b0f6..f0e401dd2e89 100644 --- a/x-pack/plugins/security_solution/public/resolver/types.ts +++ b/x-pack/plugins/security_solution/public/resolver/types.ts @@ -147,9 +147,10 @@ export type CameraState = { */ export interface DataState { readonly results: readonly ResolverEvent[]; - readonly relatedEventsStats: Map; + readonly relatedEventsStats: Readonly>; readonly relatedEvents: Map; readonly relatedEventsReady: Map; + readonly lineageLimits: Readonly<{ children: string | null; ancestors: string | null }>; isLoading: boolean; hasError: boolean; } diff --git a/x-pack/plugins/security_solution/public/resolver/view/use_camera.test.tsx b/x-pack/plugins/security_solution/public/resolver/view/use_camera.test.tsx index 8ed9f00d51af..dc7cb9a2ab19 100644 --- a/x-pack/plugins/security_solution/public/resolver/view/use_camera.test.tsx +++ b/x-pack/plugins/security_solution/public/resolver/view/use_camera.test.tsx @@ -176,8 +176,11 @@ describe('useCamera on an unpainted element', () => { } const serverResponseAction: ResolverAction = { type: 'serverReturnedResolverData', - events, - stats: new Map(), + payload: { + events, + stats: new Map(), + lineageLimits: { children: null, ancestors: null }, + }, }; act(() => { store.dispatch(serverResponseAction); diff --git a/x-pack/plugins/security_solution/scripts/endpoint/README.md b/x-pack/plugins/security_solution/scripts/endpoint/README.md index 0c36a4730723..bd9502f2f59e 100644 --- a/x-pack/plugins/security_solution/scripts/endpoint/README.md +++ b/x-pack/plugins/security_solution/scripts/endpoint/README.md @@ -13,52 +13,10 @@ Example command sequence to get ES and kibana running with sample data after ins `yarn es snapshot` -> starts ES -`npx yarn start --xpack.securitySolution.enabled=true --no-base-path` -> starts kibana +`npx yarn start --no-base-path` -> starts kibana. Note: you may need other configurations steps to start the security solution with endpoint support. 
-`cd ~/path/to/kibana/x-pack/plugins/endpoint` +`cd x-pack/plugins/security_solution/scripts/endpoint` -`yarn test:generate --auth elastic:changeme` -> run the resolver_generator.ts script +`yarn test:generate` -> run the resolver_generator.ts script -Resolver generator CLI options: - -```bash -Options: - --help Show help [boolean] - --seed, -s random seed to use for document generator - [string] - --node, -n elasticsearch node url - [string] [default: "http://elastic:changeme@localhost:9200"] - --kibana, -k kibana url - [string] [default: "http://elastic:changeme@localhost:5601"] - --eventIndex, --ei index to store events in - [string] [default: "events-endpoint-1"] - --metadataIndex, --mi index to store host metadata in - [string] [default: "metrics-endpoint.metadata-default-1"] - --policyIndex, --pi index to store host policy in - [string] [default: "metrics-endpoint.policy-default-1"] - --ancestors, --anc number of ancestors of origin to create - [number] [default: 3] - --generations, --gen number of child generations to create - [number] [default: 3] - --children, --ch maximum number of children per node - [number] [default: 3] - --relatedEvents, --related number of related events to create for each - process event [number] [default: 5] - --relatedAlerts, --relAlerts number of related alerts to create for each - process event [number] [default: 5] - --percentWithRelated, --pr percent of process events to add related events - and related alerts to [number] [default: 30] - --percentTerminated, --pt percent of process events to add termination - event for [number] [default: 30] - --maxChildrenPerNode, --maxCh always generate the max number of children per - node instead of it being random up to the max - children [boolean] [default: false] - --numHosts, --ne number of different hosts to generate alerts - for [number] [default: 1] - --numDocs, --nd number of metadata and policy response doc to - generate per host [number] [default: 5] - --alertsPerHost, --ape number of resolver trees to make for each host - [number] [default: 1] - --delete, -d delete indices and remake them - [boolean] [default: false] -``` +To see Resolver generator CLI options, run `yarn test:generate --help`. 
diff --git a/x-pack/plugins/security_solution/server/endpoint/alerts/handlers/alerts.test.ts b/x-pack/plugins/security_solution/server/endpoint/alerts/handlers/alerts.test.ts index 0134f9e72ab5..6c0ff9fcdc66 100644 --- a/x-pack/plugins/security_solution/server/endpoint/alerts/handlers/alerts.test.ts +++ b/x-pack/plugins/security_solution/server/endpoint/alerts/handlers/alerts.test.ts @@ -11,7 +11,7 @@ import { } from '../../../../../../../src/core/server/mocks'; import { registerAlertRoutes } from '../routes'; import { alertingIndexGetQuerySchema } from '../../../../common/endpoint_alerts/schema/alert_index'; -import { createMockAgentService } from '../../mocks'; +import { createMockEndpointAppContextServiceStartContract } from '../../mocks'; import { EndpointAppContextService } from '../../endpoint_app_context_services'; import { createMockConfig } from '../../../lib/detection_engine/routes/__mocks__'; @@ -28,9 +28,7 @@ describe('test alerts route', () => { routerMock = httpServiceMock.createRouter(); endpointAppContextService = new EndpointAppContextService(); - endpointAppContextService.start({ - agentService: createMockAgentService(), - }); + endpointAppContextService.start(createMockEndpointAppContextServiceStartContract()); registerAlertRoutes(routerMock, { logFactory: loggingSystemMock.create(), diff --git a/x-pack/plugins/security_solution/server/endpoint/endpoint_app_context_services.ts b/x-pack/plugins/security_solution/server/endpoint/endpoint_app_context_services.ts index cb8c913a73b8..7b8a368b6c97 100644 --- a/x-pack/plugins/security_solution/server/endpoint/endpoint_app_context_services.ts +++ b/x-pack/plugins/security_solution/server/endpoint/endpoint_app_context_services.ts @@ -3,7 +3,15 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -import { AgentService } from '../../../ingest_manager/server'; +import { AgentService, IngestManagerStartContract } from '../../../ingest_manager/server'; +import { handleDatasourceCreate } from './ingest_integration'; + +export type EndpointAppContextServiceStartContract = Pick< + IngestManagerStartContract, + 'agentService' +> & { + registerIngestCallback: IngestManagerStartContract['registerExternalCallback']; +}; /** * A singleton that holds shared services that are initialized during the start up phase @@ -12,8 +20,9 @@ import { AgentService } from '../../../ingest_manager/server'; export class EndpointAppContextService { private agentService: AgentService | undefined; - public start(dependencies: { agentService: AgentService }) { + public start(dependencies: EndpointAppContextServiceStartContract) { this.agentService = dependencies.agentService; + dependencies.registerIngestCallback('datasourceCreate', handleDatasourceCreate); } public stop() {} diff --git a/x-pack/plugins/security_solution/server/endpoint/ingest_integration.ts b/x-pack/plugins/security_solution/server/endpoint/ingest_integration.ts new file mode 100644 index 000000000000..6ff094931158 --- /dev/null +++ b/x-pack/plugins/security_solution/server/endpoint/ingest_integration.ts @@ -0,0 +1,49 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import { factory as policyConfigFactory } from '../../common/endpoint/models/policy_config'; +import { NewPolicyData } from '../../common/endpoint/types'; +import { NewDatasource } from '../../../ingest_manager/common/types/models'; + +/** + * Callback to handle creation of Datasources in Ingest Manager + * @param newDatasource + */ +export const handleDatasourceCreate = async ( + newDatasource: NewDatasource +): Promise => { + // We only care about Endpoint datasources + if (newDatasource.package?.name !== 'endpoint') { + return newDatasource; + } + + // We cast the type here so that any changes to the Endpoint specific data + // follow the types/schema expected + let updatedDatasource = newDatasource as NewPolicyData; + + // Until we get the Default Policy Configuration in the Endpoint package, + // we will add it here manually at creation time. + // @ts-ignore + if (newDatasource.inputs.length === 0) { + updatedDatasource = { + ...newDatasource, + inputs: [ + { + type: 'endpoint', + enabled: true, + streams: [], + config: { + policy: { + value: policyConfigFactory(), + }, + }, + }, + ], + }; + } + + return updatedDatasource; +}; diff --git a/x-pack/plugins/security_solution/server/endpoint/mocks.ts b/x-pack/plugins/security_solution/server/endpoint/mocks.ts index b10e9e4dc90e..5435eff4ef15 100644 --- a/x-pack/plugins/security_solution/server/endpoint/mocks.ts +++ b/x-pack/plugins/security_solution/server/endpoint/mocks.ts @@ -6,7 +6,28 @@ import { IScopedClusterClient, SavedObjectsClientContract } from 'kibana/server'; import { xpackMocks } from '../../../../mocks'; -import { AgentService, IngestManagerStartContract } from '../../../ingest_manager/server'; +import { + AgentService, + IngestManagerStartContract, + ExternalCallback, +} from '../../../ingest_manager/server'; +import { EndpointAppContextServiceStartContract } from './endpoint_app_context_services'; +import { createDatasourceServiceMock } from '../../../ingest_manager/server/mocks'; + +/** + * Crates a mocked input contract for the `EndpointAppContextService#start()` method + */ +export const createMockEndpointAppContextServiceStartContract = (): jest.Mocked< + EndpointAppContextServiceStartContract +> => { + return { + agentService: createMockAgentService(), + registerIngestCallback: jest.fn< + ReturnType, + Parameters + >(), + }; +}; /** * Creates a mock AgentService @@ -32,6 +53,8 @@ export const createMockIngestManagerStartContract = ( getESIndexPattern: jest.fn().mockResolvedValue(indexPattern), }, agentService: createMockAgentService(), + registerExternalCallback: jest.fn((...args: ExternalCallback) => {}), + datasourceService: createDatasourceServiceMock(), }; }; diff --git a/x-pack/plugins/security_solution/server/endpoint/routes/metadata/metadata.test.ts b/x-pack/plugins/security_solution/server/endpoint/routes/metadata/metadata.test.ts index ba51a3b6aa92..c04975fa8b28 100644 --- a/x-pack/plugins/security_solution/server/endpoint/routes/metadata/metadata.test.ts +++ b/x-pack/plugins/security_solution/server/endpoint/routes/metadata/metadata.test.ts @@ -27,8 +27,10 @@ import { } from '../../../../common/endpoint/types'; import { SearchResponse } from 'elasticsearch'; import { registerEndpointRoutes } from './index'; -import { createMockAgentService, createRouteHandlerContext } from '../../mocks'; -import { AgentService } from '../../../../../ingest_manager/server'; +import { + createMockEndpointAppContextServiceStartContract, + createRouteHandlerContext, +} from '../../mocks'; import Boom from 'boom'; import 
{ EndpointAppContextService } from '../../endpoint_app_context_services'; import { createMockConfig } from '../../../lib/detection_engine/routes/__mocks__'; @@ -44,7 +46,9 @@ describe('test endpoint route', () => { let routeHandler: RequestHandler; // eslint-disable-next-line @typescript-eslint/no-explicit-any let routeConfig: RouteConfig; - let mockAgentService: jest.Mocked; + let mockAgentService: ReturnType< + typeof createMockEndpointAppContextServiceStartContract + >['agentService']; let endpointAppContextService: EndpointAppContextService; beforeEach(() => { @@ -56,11 +60,10 @@ describe('test endpoint route', () => { mockClusterClient.asScoped.mockReturnValue(mockScopedClient); routerMock = httpServiceMock.createRouter(); mockResponse = httpServerMock.createResponseFactory(); - mockAgentService = createMockAgentService(); endpointAppContextService = new EndpointAppContextService(); - endpointAppContextService.start({ - agentService: mockAgentService, - }); + const startContract = createMockEndpointAppContextServiceStartContract(); + endpointAppContextService.start(startContract); + mockAgentService = startContract.agentService; registerEndpointRoutes(routerMock, { logFactory: loggingSystemMock.create(), diff --git a/x-pack/plugins/security_solution/server/endpoint/routes/policy/handlers.test.ts b/x-pack/plugins/security_solution/server/endpoint/routes/policy/handlers.test.ts index 6c1f0a206ffa..16af3a95bc72 100644 --- a/x-pack/plugins/security_solution/server/endpoint/routes/policy/handlers.test.ts +++ b/x-pack/plugins/security_solution/server/endpoint/routes/policy/handlers.test.ts @@ -4,7 +4,10 @@ * you may not use this file except in compliance with the Elastic License. */ import { EndpointAppContextService } from '../../endpoint_app_context_services'; -import { createMockAgentService, createRouteHandlerContext } from '../../mocks'; +import { + createMockEndpointAppContextServiceStartContract, + createRouteHandlerContext, +} from '../../mocks'; import { getHostPolicyResponseHandler } from './handlers'; import { IScopedClusterClient, @@ -17,7 +20,6 @@ import { loggingSystemMock, savedObjectsClientMock, } from '../../../../../../../src/core/server/mocks'; -import { AgentService } from '../../../../../ingest_manager/server/services'; import { SearchResponse } from 'elasticsearch'; import { GetHostPolicyResponse, HostPolicyResponse } from '../../../../common/endpoint/types'; import { EndpointDocGenerator } from '../../../../common/endpoint/generate_data'; @@ -28,17 +30,13 @@ describe('test policy response handler', () => { let mockScopedClient: jest.Mocked; let mockSavedObjectClient: jest.Mocked; let mockResponse: jest.Mocked; - let mockAgentService: jest.Mocked; beforeEach(() => { mockScopedClient = elasticsearchServiceMock.createScopedClusterClient(); mockSavedObjectClient = savedObjectsClientMock.create(); mockResponse = httpServerMock.createResponseFactory(); endpointAppContextService = new EndpointAppContextService(); - mockAgentService = createMockAgentService(); - endpointAppContextService.start({ - agentService: mockAgentService, - }); + endpointAppContextService.start(createMockEndpointAppContextServiceStartContract()); }); afterEach(() => endpointAppContextService.stop()); diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/routes/__mocks__/request_responses.ts b/x-pack/plugins/security_solution/server/lib/detection_engine/routes/__mocks__/request_responses.ts index 9928ce4807da..581946f2300b 100644 --- 
a/x-pack/plugins/security_solution/server/lib/detection_engine/routes/__mocks__/request_responses.ts +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/routes/__mocks__/request_responses.ts @@ -27,6 +27,7 @@ import { RuleNotificationAlertType } from '../../notifications/types'; import { QuerySignalsSchemaDecoded } from '../../../../../common/detection_engine/schemas/request/query_signals_index_schema'; import { SetSignalsStatusSchemaDecoded } from '../../../../../common/detection_engine/schemas/request/set_signal_status_schema'; import { getCreateRulesSchemaMock } from '../../../../../common/detection_engine/schemas/request/create_rules_schema.mock'; +import { getListArrayMock } from '../../../../../common/detection_engine/schemas/types/lists.mock'; export const typicalSetStatusSignalByIdsPayload = (): SetSignalsStatusSchemaDecoded => ({ signal_ids: ['somefakeid1', 'somefakeid2'], @@ -390,38 +391,7 @@ export const getResult = (): RuleAlertType => ({ references: ['http://www.example.com', 'https://ww.example.com'], note: '# Investigative notes', version: 1, - exceptionsList: [ - { - field: 'source.ip', - values_operator: 'included', - values_type: 'exists', - }, - { - field: 'host.name', - values_operator: 'excluded', - values_type: 'match', - values: [ - { - name: 'rock01', - }, - ], - and: [ - { - field: 'host.id', - values_operator: 'included', - values_type: 'match_all', - values: [ - { - name: '123', - }, - { - name: '678', - }, - ], - }, - ], - }, - ], + exceptionsList: getListArrayMock(), }, createdAt: new Date('2019-12-13T16:40:33.400Z'), updatedAt: new Date('2019-12-13T16:40:33.400Z'), diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/routes/__mocks__/utils.ts b/x-pack/plugins/security_solution/server/lib/detection_engine/routes/__mocks__/utils.ts index 063c9dffd66d..7b7d3fbdea0b 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/routes/__mocks__/utils.ts +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/routes/__mocks__/utils.ts @@ -8,6 +8,7 @@ import { Readable } from 'stream'; import { HapiReadableStream } from '../../rules/types'; import { RulesSchema } from '../../../../../common/detection_engine/schemas/response/rules_schema'; +import { getListArrayMock } from '../../../../../common/detection_engine/schemas/types/lists.mock'; /** * Given a string, builds a hapi stream as our @@ -76,38 +77,7 @@ export const getOutputRuleAlertForRest = (): Omit< ], }, ], - exceptions_list: [ - { - field: 'source.ip', - values_operator: 'included', - values_type: 'exists', - }, - { - field: 'host.name', - values_operator: 'excluded', - values_type: 'match', - values: [ - { - name: 'rock01', - }, - ], - and: [ - { - field: 'host.id', - values_operator: 'included', - values_type: 'match_all', - values: [ - { - name: '123', - }, - { - name: '678', - }, - ], - }, - ], - }, - ], + exceptions_list: getListArrayMock(), filters: [ { query: { diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/routes/rules/validate.test.ts b/x-pack/plugins/security_solution/server/lib/detection_engine/routes/rules/validate.test.ts index 1f5442e23d88..006569671262 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/routes/rules/validate.test.ts +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/routes/rules/validate.test.ts @@ -14,6 +14,7 @@ import { FindResult } from '../../../../../../alerts/server'; import { BulkError } from '../utils'; import { setFeatureFlagsForTestsOnly, 
unSetFeatureFlagsForTestsOnly } from '../../feature_flags'; import { RulesSchema } from '../../../../../common/detection_engine/schemas/response/rules_schema'; +import { getListArrayMock } from '../../../../../common/detection_engine/schemas/types/lists.mock'; export const ruleOutput: RulesSchema = { actions: [], @@ -68,38 +69,7 @@ export const ruleOutput: RulesSchema = { }, }, ], - exceptions_list: [ - { - field: 'source.ip', - values_operator: 'included', - values_type: 'exists', - }, - { - field: 'host.name', - values_operator: 'excluded', - values_type: 'match', - values: [ - { - name: 'rock01', - }, - ], - and: [ - { - field: 'host.id', - values_operator: 'included', - values_type: 'match_all', - values: [ - { - name: '123', - }, - { - name: '678', - }, - ], - }, - ], - }, - ], + exceptions_list: getListArrayMock(), index: ['auditbeat-*', 'filebeat-*', 'packetbeat-*', 'winlogbeat-*'], meta: { someMeta: 'someField', diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/get_export_all.test.ts b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/get_export_all.test.ts index ee21c3354002..7d4bbfdced43 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/get_export_all.test.ts +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/get_export_all.test.ts @@ -80,36 +80,8 @@ describe('getExportAll', () => { note: '# Investigative notes', version: 1, exceptions_list: [ - { - field: 'source.ip', - values_operator: 'included', - values_type: 'exists', - }, - { - field: 'host.name', - values_operator: 'excluded', - values_type: 'match', - values: [ - { - name: 'rock01', - }, - ], - and: [ - { - field: 'host.id', - values_operator: 'included', - values_type: 'match_all', - values: [ - { - name: '123', - }, - { - name: '678', - }, - ], - }, - ], - }, + { id: 'some_uuid', namespace_type: 'single' }, + { id: 'some_uuid', namespace_type: 'agnostic' }, ], })}\n`, exportDetails: `${JSON.stringify({ diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/get_export_by_object_ids.test.ts b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/get_export_by_object_ids.test.ts index b00b7353a370..043e563a4c8b 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/get_export_by_object_ids.test.ts +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/get_export_by_object_ids.test.ts @@ -88,36 +88,8 @@ describe('get_export_by_object_ids', () => { note: '# Investigative notes', version: 1, exceptions_list: [ - { - field: 'source.ip', - values_operator: 'included', - values_type: 'exists', - }, - { - field: 'host.name', - values_operator: 'excluded', - values_type: 'match', - values: [ - { - name: 'rock01', - }, - ], - and: [ - { - field: 'host.id', - values_operator: 'included', - values_type: 'match_all', - values: [ - { - name: '123', - }, - { - name: '678', - }, - ], - }, - ], - }, + { id: 'some_uuid', namespace_type: 'single' }, + { id: 'some_uuid', namespace_type: 'agnostic' }, ], })}\n`, exportDetails: `${JSON.stringify({ @@ -216,36 +188,8 @@ describe('get_export_by_object_ids', () => { note: '# Investigative notes', version: 1, exceptions_list: [ - { - field: 'source.ip', - values_operator: 'included', - values_type: 'exists', - }, - { - field: 'host.name', - values_operator: 'excluded', - values_type: 'match', - values: [ - { - name: 'rock01', - }, - ], - and: [ - { - field: 'host.id', - values_operator: 'included', - values_type: 
'match_all', - values: [ - { - name: '123', - }, - { - name: '678', - }, - ], - }, - ], - }, + { id: 'some_uuid', namespace_type: 'single' }, + { id: 'some_uuid', namespace_type: 'agnostic' }, ], }, ], diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/types.ts b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/types.ts index 4b84057f6d79..fc95f0cfeb78 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/types.ts +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/types.ts @@ -14,7 +14,6 @@ import { SavedObjectsClientContract, } from 'kibana/server'; import { RuleAlertAction } from '../../../../common/detection_engine/types'; -import { ListsDefaultArraySchema } from '../../../../common/detection_engine/schemas/types/lists_default_array'; import { FalsePositives, From, @@ -62,7 +61,6 @@ import { ThreatOrUndefined, TypeOrUndefined, ReferencesOrUndefined, - ListAndOrUndefined, PerPageOrUndefined, PageOrUndefined, SortFieldOrUndefined, @@ -80,6 +78,7 @@ import { AlertsClient, PartialAlert } from '../../../../../alerts/server'; import { Alert, SanitizedAlert } from '../../../../../alerts/common'; import { SIGNALS_ID } from '../../../../common/constants'; import { RuleTypeParams, PartialFilter } from '../types'; +import { ListArrayOrUndefined, ListArray } from '../../../../common/detection_engine/schemas/types'; export interface RuleAlertType extends Alert { params: RuleTypeParams; @@ -194,7 +193,7 @@ export interface CreateRulesOptions { references: References; note: NoteOrUndefined; version: Version; - exceptionsList: ListsDefaultArraySchema; + exceptionsList: ListArray; actions: RuleAlertAction[]; } @@ -230,7 +229,7 @@ export interface UpdateRulesOptions { references: References; note: NoteOrUndefined; version: VersionOrUndefined; - exceptionsList: ListsDefaultArraySchema; + exceptionsList: ListArray; actions: RuleAlertAction[]; } @@ -264,7 +263,7 @@ export interface PatchRulesOptions { references: ReferencesOrUndefined; note: NoteOrUndefined; version: VersionOrUndefined; - exceptionsList: ListAndOrUndefined; + exceptionsList: ListArrayOrUndefined; actions: RuleAlertAction[] | undefined; rule: SanitizedAlert | null; } diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/utils.ts b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/utils.ts index d40cb5d96669..5c620a5df61f 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/utils.ts +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/utils.ts @@ -31,9 +31,9 @@ import { ThreatOrUndefined, TypeOrUndefined, ReferencesOrUndefined, - ListAndOrUndefined, } from '../../../../common/detection_engine/schemas/common/schemas'; import { PartialFilter } from '../types'; +import { ListArrayOrUndefined } from '../../../../common/detection_engine/schemas/types'; export const calculateInterval = ( interval: string | undefined, @@ -74,7 +74,7 @@ export interface UpdateProperties { references: ReferencesOrUndefined; note: NoteOrUndefined; version: VersionOrUndefined; - exceptionsList: ListAndOrUndefined; + exceptionsList: ListArrayOrUndefined; anomalyThreshold: AnomalyThresholdOrUndefined; } diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/scripts/rules/patches/update_list.json b/x-pack/plugins/security_solution/server/lib/detection_engine/scripts/rules/patches/update_list.json index 8d831f3a961d..6323597fc094 100644 --- 
a/x-pack/plugins/security_solution/server/lib/detection_engine/scripts/rules/patches/update_list.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/scripts/rules/patches/update_list.json @@ -2,31 +2,8 @@ "rule_id": "query-with-list", "exceptions_list": [ { - "field": "source.ip", - "values_operator": "excluded", - "values_type": "exists" - }, - { - "field": "host.name", - "values_operator": "included", - "values_type": "match", - "values": [ - { - "name": "rock01" - } - ], - "and": [ - { - "field": "host.id", - "values_operator": "included", - "values_type": "match_all", - "values": [ - { - "name": "123456" - } - ] - } - ] + "id": "some_updated_fake_id", + "namespace_type": "single" } ] } diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/scripts/rules/queries/lists/query_with_and.json b/x-pack/plugins/security_solution/server/lib/detection_engine/scripts/rules/queries/lists/query_with_and.json deleted file mode 100644 index 1575a712e2cb..000000000000 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/scripts/rules/queries/lists/query_with_and.json +++ /dev/null @@ -1,35 +0,0 @@ -{ - "name": "List - and", - "description": "Query with a list that includes and. This rule should only produce signals when host.name exists and when both event.module is endgame and event.category is anything other than file", - "rule_id": "query-with-list-and", - "risk_score": 1, - "severity": "high", - "type": "query", - "query": "host.name: *", - "interval": "30s", - "language": "kuery", - "exceptions_list": [ - { - "field": "event.module", - "values_operator": "excluded", - "values_type": "match", - "values": [ - { - "name": "endgame" - } - ], - "and": [ - { - "field": "event.category", - "values_operator": "included", - "values_type": "match", - "values": [ - { - "name": "file" - } - ] - } - ] - } - ] -} diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/scripts/rules/queries/lists/query_with_excluded.json b/x-pack/plugins/security_solution/server/lib/detection_engine/scripts/rules/queries/lists/query_with_excluded.json deleted file mode 100644 index 4e6d9403a276..000000000000 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/scripts/rules/queries/lists/query_with_excluded.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "name": "List - excluded", - "description": "Query with a list of values_operator excluded. This rule should only produce signals when host.name exists and event.module is suricata", - "rule_id": "query-with-list-excluded", - "risk_score": 1, - "severity": "high", - "type": "query", - "query": "host.name: *", - "interval": "30s", - "language": "kuery", - "exceptions_list": [ - { - "field": "event.module", - "values_operator": "excluded", - "values_type": "match", - "values": [ - { - "name": "suricata" - } - ] - } - ] -} diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/scripts/rules/queries/lists/query_with_exists.json b/x-pack/plugins/security_solution/server/lib/detection_engine/scripts/rules/queries/lists/query_with_exists.json deleted file mode 100644 index 97beace37633..000000000000 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/scripts/rules/queries/lists/query_with_exists.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "name": "List - exists", - "description": "Query with a list that includes exists. 
This rule should only produce signals when host.name exists and event.action does not exist", - "rule_id": "query-with-list-exists", - "risk_score": 1, - "severity": "high", - "type": "query", - "query": "host.name: *", - "interval": "30s", - "language": "kuery", - "exceptions_list": [ - { - "field": "event.action", - "values_operator": "included", - "values_type": "exists" - } - ] -} diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/scripts/rules/queries/lists/query_with_list.json b/x-pack/plugins/security_solution/server/lib/detection_engine/scripts/rules/queries/lists/query_with_list.json deleted file mode 100644 index ad0585b5a2ec..000000000000 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/scripts/rules/queries/lists/query_with_list.json +++ /dev/null @@ -1,54 +0,0 @@ -{ - "name": "Query with a list", - "description": "Query with a list. This rule should only produce signals when either host.name exists and event.module is system and user.name is zeek or gdm OR when host.name exists and event.module is not endgame or zeek or system.", - "rule_id": "query-with-list", - "risk_score": 2, - "severity": "high", - "type": "query", - "query": "host.name: *", - "interval": "30s", - "language": "kuery", - "exceptions_list": [ - { - "field": "event.module", - "values_operator": "excluded", - "values_type": "match", - "values": [ - { - "name": "system" - } - ], - "and": [ - { - "field": "user.name", - "values_operator": "excluded", - "values_type": "match_all", - "values": [ - { - "name": "zeek" - }, - { - "name": "gdm" - } - ] - } - ] - }, - { - "field": "event.module", - "values_operator": "included", - "values_type": "match_all", - "values": [ - { - "name": "endgame" - }, - { - "name": "zeek" - }, - { - "name": "system" - } - ] - } - ] -} diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/scripts/rules/queries/lists/query_with_list_plugin.json b/x-pack/plugins/security_solution/server/lib/detection_engine/scripts/rules/queries/lists/query_with_list_plugin.json deleted file mode 100644 index fa6fe6ac7111..000000000000 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/scripts/rules/queries/lists/query_with_list_plugin.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "name": "Query with a list", - "description": "Query with a list only generate signals if source.ip is not in list", - "rule_id": "query-with-list", - "risk_score": 2, - "severity": "high", - "type": "query", - "query": "host.name: *", - "interval": "30s", - "language": "kuery", - "exceptions_list": [ - { - "field": "source.ip", - "values_operator": "excluded", - "values_type": "list", - "values": [ - { - "id": "ci-badguys.txt", - "name": "ip" - } - ] - } - ] -} diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/scripts/rules/queries/lists/query_with_match.json b/x-pack/plugins/security_solution/server/lib/detection_engine/scripts/rules/queries/lists/query_with_match.json deleted file mode 100644 index 6e6880cc28f2..000000000000 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/scripts/rules/queries/lists/query_with_match.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "name": "List - match", - "description": "Query with a list that includes match. 
This rule should only produce signals when host.name exists and event.module is not suricata", - "rule_id": "query-with-list-match", - "risk_score": 1, - "severity": "high", - "type": "query", - "query": "host.name: *", - "interval": "30s", - "language": "kuery", - "exceptions_list": [ - { - "field": "event.module", - "values_operator": "included", - "values_type": "match", - "values": [ - { - "name": "suricata" - } - ] - } - ] -} diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/scripts/rules/queries/lists/query_with_match_all.json b/x-pack/plugins/security_solution/server/lib/detection_engine/scripts/rules/queries/lists/query_with_match_all.json deleted file mode 100644 index 44cc26ac3315..000000000000 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/scripts/rules/queries/lists/query_with_match_all.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "name": "List - match_all", - "description": "Query with a list that includes match_all. This rule should only produce signals when host.name exists and event.module is not suricata or auditd", - "rule_id": "query-with-list-match-all", - "risk_score": 1, - "severity": "high", - "type": "query", - "query": "host.name: *", - "interval": "30s", - "language": "kuery", - "exceptions_list": [ - { - "field": "event.module", - "values_operator": "included", - "values_type": "match_all", - "values": [ - { - "name": "suricata" - }, - { - "name": "auditd" - } - ] - } - ] -} diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/scripts/rules/queries/lists/query_with_or.json b/x-pack/plugins/security_solution/server/lib/detection_engine/scripts/rules/queries/lists/query_with_or.json deleted file mode 100644 index 9c4eda559d5b..000000000000 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/scripts/rules/queries/lists/query_with_or.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "name": "List - or", - "description": "Query with a list that includes or. 
This rule should only produce signals when host.name exists and event.module is suricata OR when host.name exists and event.category is file", - "rule_id": "query-with-list-or", - "risk_score": 1, - "severity": "high", - "type": "query", - "query": "host.name: *", - "interval": "30s", - "exceptions_list": [ - { - "field": "event.module", - "values_operator": "excluded", - "values_type": "match", - "values": [ - { - "name": "suricata" - } - ] - }, - { - "field": "event.category", - "values_operator": "excluded", - "values_type": "match", - "values": [ - { - "name": "file" - } - ] - } - ] -} diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/scripts/rules/queries/query_with_list.json b/x-pack/plugins/security_solution/server/lib/detection_engine/scripts/rules/queries/query_with_list.json new file mode 100644 index 000000000000..1cb4c144aa29 --- /dev/null +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/scripts/rules/queries/query_with_list.json @@ -0,0 +1,10 @@ +{ + "name": "Rule w exceptions", + "description": "Sample rule with exception list", + "risk_score": 1, + "severity": "high", + "type": "query", + "query": "host.name: *", + "interval": "30s", + "exceptions_list": [{ "id": "endpoint_list", "namespace_type": "single" }] +} diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/scripts/rules/updates/update_list.json b/x-pack/plugins/security_solution/server/lib/detection_engine/scripts/rules/updates/update_list.json index df22dff5c046..f7359d586bd8 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/scripts/rules/updates/update_list.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/scripts/rules/updates/update_list.json @@ -6,33 +6,5 @@ "severity": "high", "type": "query", "query": "user.name: root or user.name: admin", - "exceptions_list": [ - { - "field": "source.ip", - "values_operator": "excluded", - "values_type": "exists" - }, - { - "field": "host.name", - "values_operator": "included", - "values_type": "match", - "values": [ - { - "name": "rock01" - } - ], - "and": [ - { - "field": "host.id", - "values_operator": "included", - "values_type": "match_all", - "values": [ - { - "name": "123456" - } - ] - } - ] - } - ] + "exceptions_list": [{ "id": "some_updated_fake_id", "namespace_type": "single" }] } diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/signals/__mocks__/es_results.ts b/x-pack/plugins/security_solution/server/lib/detection_engine/signals/__mocks__/es_results.ts index 101c998efa24..50f6e7d9e9c1 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/signals/__mocks__/es_results.ts +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/signals/__mocks__/es_results.ts @@ -14,6 +14,7 @@ import { loggingSystemMock } from '../../../../../../../../src/core/server/mocks import { RuleTypeParams } from '../../types'; import { IRuleStatusAttributes } from '../../rules/types'; import { ruleStatusSavedObjectType } from '../../rules/saved_object_mappings'; +import { getListArrayMock } from '../../../../../common/detection_engine/schemas/types/lists.mock'; export const sampleRuleAlertParams = ( maxSignals?: number | undefined, @@ -44,38 +45,7 @@ export const sampleRuleAlertParams = ( meta: undefined, threat: undefined, version: 1, - exceptionsList: [ - { - field: 'source.ip', - values_operator: 'included', - values_type: 'exists', - }, - { - field: 'host.name', - values_operator: 'excluded', - values_type: 'match', - values: [ 
- { - name: 'rock01', - }, - ], - and: [ - { - field: 'host.id', - values_operator: 'included', - values_type: 'match_all', - values: [ - { - name: '123', - }, - { - name: '678', - }, - ], - }, - ], - }, - ], + exceptionsList: getListArrayMock(), }); export const sampleDocNoSortId = (someUuid: string = sampleIdGuid): SignalSourceHit => ({ diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/signals/build_bulk_body.test.ts b/x-pack/plugins/security_solution/server/lib/detection_engine/signals/build_bulk_body.test.ts index 80c2441193a0..ad4393281883 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/signals/build_bulk_body.test.ts +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/signals/build_bulk_body.test.ts @@ -12,6 +12,7 @@ import { } from './__mocks__/es_results'; import { buildBulkBody } from './build_bulk_body'; import { SignalHit } from './types'; +import { getListArrayMock } from '../../../../common/detection_engine/schemas/types/lists.mock'; describe('buildBulkBody', () => { beforeEach(() => { @@ -91,38 +92,7 @@ describe('buildBulkBody', () => { version: 1, created_at: fakeSignalSourceHit.signal.rule?.created_at, updated_at: fakeSignalSourceHit.signal.rule?.updated_at, - exceptions_list: [ - { - field: 'source.ip', - values_operator: 'included', - values_type: 'exists', - }, - { - field: 'host.name', - values_operator: 'excluded', - values_type: 'match', - values: [ - { - name: 'rock01', - }, - ], - and: [ - { - field: 'host.id', - values_operator: 'included', - values_type: 'match_all', - values: [ - { - name: '123', - }, - { - name: '678', - }, - ], - }, - ], - }, - ], + exceptions_list: getListArrayMock(), }, }, }; @@ -218,38 +188,7 @@ describe('buildBulkBody', () => { updated_at: fakeSignalSourceHit.signal.rule?.updated_at, throttle: 'no_actions', threat: [], - exceptions_list: [ - { - field: 'source.ip', - values_operator: 'included', - values_type: 'exists', - }, - { - field: 'host.name', - values_operator: 'excluded', - values_type: 'match', - values: [ - { - name: 'rock01', - }, - ], - and: [ - { - field: 'host.id', - values_operator: 'included', - values_type: 'match_all', - values: [ - { - name: '123', - }, - { - name: '678', - }, - ], - }, - ], - }, - ], + exceptions_list: getListArrayMock(), }, }, }; @@ -343,38 +282,7 @@ describe('buildBulkBody', () => { created_at: fakeSignalSourceHit.signal.rule?.created_at, updated_at: fakeSignalSourceHit.signal.rule?.updated_at, throttle: 'no_actions', - exceptions_list: [ - { - field: 'source.ip', - values_operator: 'included', - values_type: 'exists', - }, - { - field: 'host.name', - values_operator: 'excluded', - values_type: 'match', - values: [ - { - name: 'rock01', - }, - ], - and: [ - { - field: 'host.id', - values_operator: 'included', - values_type: 'match_all', - values: [ - { - name: '123', - }, - { - name: '678', - }, - ], - }, - ], - }, - ], + exceptions_list: getListArrayMock(), }, }, }; @@ -461,38 +369,7 @@ describe('buildBulkBody', () => { updated_at: fakeSignalSourceHit.signal.rule?.updated_at, created_at: fakeSignalSourceHit.signal.rule?.created_at, throttle: 'no_actions', - exceptions_list: [ - { - field: 'source.ip', - values_operator: 'included', - values_type: 'exists', - }, - { - field: 'host.name', - values_operator: 'excluded', - values_type: 'match', - values: [ - { - name: 'rock01', - }, - ], - and: [ - { - field: 'host.id', - values_operator: 'included', - values_type: 'match_all', - values: [ - { - name: '123', - }, - { - name: '678', - }, - 
], - }, - ], - }, - ], + exceptions_list: getListArrayMock(), }, }, }; diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/signals/build_exceptions_query.test.ts b/x-pack/plugins/security_solution/server/lib/detection_engine/signals/build_exceptions_query.test.ts index 07adfde71c1a..ce7cc50e81d6 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/signals/build_exceptions_query.test.ts +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/signals/build_exceptions_query.test.ts @@ -6,16 +6,24 @@ import { buildQueryExceptions, - buildExceptions, + buildExceptionItemEntries, operatorBuilder, buildExists, buildMatch, - buildMatchAll, + buildMatchAny, evaluateValues, formatQuery, getLanguageBooleanOperator, + buildNested, } from './build_exceptions_query'; -import { List } from '../../../../common/detection_engine/schemas/types/lists_default_array'; +import { + EntriesArray, + EntryExists, + EntryMatch, + EntryMatchAny, + EntryNested, +} from '../../../../../lists/common/schemas'; +import { getExceptionListItemSchemaMock } from '../../../../../lists/common/schemas/response/exception_list_item_schema.mock'; describe('build_exceptions_query', () => { describe('getLanguageBooleanOperator', () => { @@ -34,30 +42,30 @@ describe('build_exceptions_query', () => { describe('operatorBuilder', () => { describe('kuery', () => { - test('it returns "not " when operator is "excluded"', () => { - const operator = operatorBuilder({ operator: 'excluded', language: 'kuery' }); + test('it returns "not " when operator is "included"', () => { + const operator = operatorBuilder({ operator: 'included', language: 'kuery' }); - expect(operator).toEqual(' and '); + expect(operator).toEqual('not '); }); - test('it returns empty string when operator is "included"', () => { - const operator = operatorBuilder({ operator: 'included', language: 'kuery' }); + test('it returns empty string when operator is "excluded"', () => { + const operator = operatorBuilder({ operator: 'excluded', language: 'kuery' }); - expect(operator).toEqual(' and not '); + expect(operator).toEqual(''); }); }); describe('lucene', () => { - test('it returns "NOT " when operator is "excluded"', () => { - const operator = operatorBuilder({ operator: 'excluded', language: 'lucene' }); + test('it returns "NOT " when operator is "included"', () => { + const operator = operatorBuilder({ operator: 'included', language: 'lucene' }); - expect(operator).toEqual(' AND '); + expect(operator).toEqual('NOT '); }); - test('it returns empty string when operator is "included"', () => { - const operator = operatorBuilder({ operator: 'included', language: 'lucene' }); + test('it returns empty string when operator is "excluded"', () => { + const operator = operatorBuilder({ operator: 'excluded', language: 'lucene' }); - expect(operator).toEqual(' AND NOT '); + expect(operator).toEqual(''); }); }); }); @@ -65,161 +73,117 @@ describe('build_exceptions_query', () => { describe('buildExists', () => { describe('kuery', () => { test('it returns formatted wildcard string when operator is "excluded"', () => { - const query = buildExists({ operator: 'excluded', field: 'host.name', language: 'kuery' }); + const query = buildExists({ + item: { type: 'exists', operator: 'excluded', field: 'host.name' }, + language: 'kuery', + }); - expect(query).toEqual(' and host.name:*'); + expect(query).toEqual('host.name:*'); }); test('it returns formatted wildcard string when operator is "included"', () => { - const query = buildExists({ 
operator: 'included', field: 'host.name', language: 'kuery' }); + const query = buildExists({ + item: { type: 'exists', operator: 'included', field: 'host.name' }, + language: 'kuery', + }); - expect(query).toEqual(' and not host.name:*'); + expect(query).toEqual('not host.name:*'); }); }); describe('lucene', () => { test('it returns formatted wildcard string when operator is "excluded"', () => { - const query = buildExists({ operator: 'excluded', field: 'host.name', language: 'lucene' }); + const query = buildExists({ + item: { type: 'exists', operator: 'excluded', field: 'host.name' }, + language: 'lucene', + }); - expect(query).toEqual(' AND _exists_host.name'); + expect(query).toEqual('_exists_host.name'); }); test('it returns formatted wildcard string when operator is "included"', () => { - const query = buildExists({ operator: 'included', field: 'host.name', language: 'lucene' }); + const query = buildExists({ + item: { type: 'exists', operator: 'included', field: 'host.name' }, + language: 'lucene', + }); - expect(query).toEqual(' AND NOT _exists_host.name'); + expect(query).toEqual('NOT _exists_host.name'); }); }); }); describe('buildMatch', () => { describe('kuery', () => { - test('it returns empty string if no items in "values"', () => { - const query = buildMatch({ - operator: 'included', - field: 'host.name', - values: [], - language: 'kuery', - }); - - expect(query).toEqual(''); - }); - test('it returns formatted string when operator is "included"', () => { - const values = [ - { - name: 'suricata', - }, - ]; const query = buildMatch({ - operator: 'included', - field: 'host.name', - values, + item: { + type: 'match', + operator: 'included', + field: 'host.name', + value: 'suricata', + }, language: 'kuery', }); - expect(query).toEqual(' and not host.name:suricata'); + expect(query).toEqual('not host.name:suricata'); }); test('it returns formatted string when operator is "excluded"', () => { - const values = [ - { - name: 'suricata', - }, - ]; const query = buildMatch({ - operator: 'excluded', - field: 'host.name', - values, - language: 'kuery', - }); - - expect(query).toEqual(' and host.name:suricata'); - }); - - // TODO: need to clean up types and maybe restrict values to one if type is 'match' - test('it returns formatted string when "values" includes more than one item', () => { - const values = [ - { - name: 'suricata', - }, - { - name: 'auditd', + item: { + type: 'match', + operator: 'excluded', + field: 'host.name', + value: 'suricata', }, - ]; - const query = buildMatch({ - operator: 'included', - field: 'host.name', - values, language: 'kuery', }); - expect(query).toEqual(' and not host.name:suricata'); + expect(query).toEqual('host.name:suricata'); }); }); describe('lucene', () => { test('it returns formatted string when operator is "included"', () => { - const values = [ - { - name: 'suricata', - }, - ]; const query = buildMatch({ - operator: 'included', - field: 'host.name', - values, + item: { + type: 'match', + operator: 'included', + field: 'host.name', + value: 'suricata', + }, language: 'lucene', }); - expect(query).toEqual(' AND NOT host.name:suricata'); + expect(query).toEqual('NOT host.name:suricata'); }); test('it returns formatted string when operator is "excluded"', () => { - const values = [ - { - name: 'suricata', - }, - ]; const query = buildMatch({ - operator: 'excluded', - field: 'host.name', - values, - language: 'lucene', - }); - - expect(query).toEqual(' AND host.name:suricata'); - }); - - // TODO: need to clean up types and maybe restrict values 
to one if type is 'match' - test('it returns formatted string when "values" includes more than one item', () => { - const values = [ - { - name: 'suricata', - }, - { - name: 'auditd', + item: { + type: 'match', + operator: 'excluded', + field: 'host.name', + value: 'suricata', }, - ]; - const query = buildMatch({ - operator: 'included', - field: 'host.name', - values, language: 'lucene', }); - expect(query).toEqual(' AND NOT host.name:suricata'); + expect(query).toEqual('host.name:suricata'); }); }); }); - describe('buildMatchAll', () => { + describe('buildMatchAny', () => { describe('kuery', () => { test('it returns empty string if given an empty array for "values"', () => { - const exceptionSegment = buildMatchAll({ - operator: 'included', - field: 'host.name', - values: [], + const exceptionSegment = buildMatchAny({ + item: { + operator: 'included', + field: 'host.name', + value: [], + type: 'match_any', + }, language: 'kuery', }); @@ -227,113 +191,180 @@ describe('build_exceptions_query', () => { }); test('it returns formatted string when "values" includes only one item', () => { - const values = [ - { - name: 'suricata', + const exceptionSegment = buildMatchAny({ + item: { + operator: 'included', + field: 'host.name', + value: ['suricata'], + type: 'match_any', }, - ]; - const exceptionSegment = buildMatchAll({ - operator: 'included', - field: 'host.name', - values, language: 'kuery', }); - expect(exceptionSegment).toEqual(' and not host.name:suricata'); + expect(exceptionSegment).toEqual('not host.name:(suricata)'); }); test('it returns formatted string when operator is "included"', () => { - const values = [ - { - name: 'suricata', - }, - { - name: 'auditd', + const exceptionSegment = buildMatchAny({ + item: { + operator: 'included', + field: 'host.name', + value: ['suricata', 'auditd'], + type: 'match_any', }, - ]; - const exceptionSegment = buildMatchAll({ - operator: 'included', - field: 'host.name', - values, language: 'kuery', }); - expect(exceptionSegment).toEqual(' and not host.name:(suricata or auditd)'); + expect(exceptionSegment).toEqual('not host.name:(suricata or auditd)'); }); test('it returns formatted string when operator is "excluded"', () => { - const values = [ - { - name: 'suricata', - }, - { - name: 'auditd', + const exceptionSegment = buildMatchAny({ + item: { + operator: 'excluded', + field: 'host.name', + value: ['suricata', 'auditd'], + type: 'match_any', }, - ]; - const exceptionSegment = buildMatchAll({ - operator: 'excluded', - field: 'host.name', - values, language: 'kuery', }); - expect(exceptionSegment).toEqual(' and host.name:(suricata or auditd)'); + expect(exceptionSegment).toEqual('host.name:(suricata or auditd)'); }); }); describe('lucene', () => { test('it returns formatted string when operator is "included"', () => { - const values = [ - { - name: 'suricata', - }, - { - name: 'auditd', + const exceptionSegment = buildMatchAny({ + item: { + operator: 'included', + field: 'host.name', + value: ['suricata', 'auditd'], + type: 'match_any', }, - ]; - const exceptionSegment = buildMatchAll({ - operator: 'included', - field: 'host.name', - values, language: 'lucene', }); - expect(exceptionSegment).toEqual(' AND NOT host.name:(suricata OR auditd)'); + expect(exceptionSegment).toEqual('NOT host.name:(suricata OR auditd)'); }); test('it returns formatted string when operator is "excluded"', () => { - const values = [ - { - name: 'suricata', - }, - { - name: 'auditd', + const exceptionSegment = buildMatchAny({ + item: { + operator: 'excluded', + field: 
'host.name', + value: ['suricata', 'auditd'], + type: 'match_any', }, - ]; - const exceptionSegment = buildMatchAll({ - operator: 'excluded', - field: 'host.name', - values, language: 'lucene', }); - expect(exceptionSegment).toEqual(' AND host.name:(suricata OR auditd)'); + expect(exceptionSegment).toEqual('host.name:(suricata OR auditd)'); }); test('it returns formatted string when "values" includes only one item', () => { - const values = [ - { - name: 'suricata', + const exceptionSegment = buildMatchAny({ + item: { + operator: 'included', + field: 'host.name', + value: ['suricata'], + type: 'match_any', }, - ]; - const exceptionSegment = buildMatchAll({ - operator: 'included', - field: 'host.name', - values, language: 'lucene', }); - expect(exceptionSegment).toEqual(' AND NOT host.name:suricata'); + expect(exceptionSegment).toEqual('NOT host.name:(suricata)'); + }); + }); + }); + + describe('buildNested', () => { + describe('kuery', () => { + test('it returns formatted query when one item in nested entry', () => { + const item: EntryNested = { + field: 'parent', + type: 'nested', + entries: [ + { + field: 'nestedField', + operator: 'excluded', + type: 'match', + value: 'value-3', + }, + ], + }; + const result = buildNested({ item, language: 'kuery' }); + + expect(result).toEqual('parent:{ nestedField:value-3 }'); + }); + + test('it returns formatted query when multiple items in nested entry', () => { + const item: EntryNested = { + field: 'parent', + type: 'nested', + entries: [ + { + field: 'nestedField', + operator: 'excluded', + type: 'match', + value: 'value-3', + }, + { + field: 'nestedFieldB', + operator: 'excluded', + type: 'match', + value: 'value-4', + }, + ], + }; + const result = buildNested({ item, language: 'kuery' }); + + expect(result).toEqual('parent:{ nestedField:value-3 and nestedFieldB:value-4 }'); + }); + }); + + // TODO: Does lucene support nested query syntax? 
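+      // The lucene suite below is skipped pending the TODO above: the expected
+      // strings simply mirror the kuery output with upper-case AND/OR/NOT from
+      // getLanguageBooleanOperator, and have not been verified against Lucene's
+      // actual nested-query support (assumption noted here for clarity).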
+ describe.skip('lucene', () => { + test('it returns formatted query when one item in nested entry', () => { + const item: EntryNested = { + field: 'parent', + type: 'nested', + entries: [ + { + field: 'nestedField', + operator: 'excluded', + type: 'match', + value: 'value-3', + }, + ], + }; + const result = buildNested({ item, language: 'lucene' }); + + expect(result).toEqual('parent:{ nestedField:value-3 }'); + }); + + test('it returns formatted query when multiple items in nested entry', () => { + const item: EntryNested = { + field: 'parent', + type: 'nested', + entries: [ + { + field: 'nestedField', + operator: 'excluded', + type: 'match', + value: 'value-3', + }, + { + field: 'nestedFieldB', + operator: 'excluded', + type: 'match', + value: 'value-4', + }, + ], + }; + const result = buildNested({ item, language: 'lucene' }); + + expect(result).toEqual('parent:{ nestedField:value-3 AND nestedFieldB:value-4 }'); }); }); }); @@ -341,110 +372,96 @@ describe('build_exceptions_query', () => { describe('evaluateValues', () => { describe('kuery', () => { test('it returns formatted wildcard string when "type" is "exists"', () => { - const list: List = { - values_operator: 'included', - values_type: 'exists', + const list: EntryExists = { + operator: 'included', + type: 'exists', field: 'host.name', }; const result = evaluateValues({ - list, + item: list, language: 'kuery', }); - expect(result).toEqual(' and not host.name:*'); + expect(result).toEqual('not host.name:*'); }); test('it returns formatted string when "type" is "match"', () => { - const list: List = { - values_operator: 'included', - values_type: 'match', + const list: EntryMatch = { + operator: 'included', + type: 'match', field: 'host.name', - values: [{ name: 'suricata' }], + value: 'suricata', }; const result = evaluateValues({ - list, + item: list, language: 'kuery', }); - expect(result).toEqual(' and not host.name:suricata'); + expect(result).toEqual('not host.name:suricata'); }); - test('it returns formatted string when "type" is "match_all"', () => { - const list: List = { - values_operator: 'included', - values_type: 'match_all', + test('it returns formatted string when "type" is "match_any"', () => { + const list: EntryMatchAny = { + operator: 'included', + type: 'match_any', field: 'host.name', - values: [ - { - name: 'suricata', - }, - { - name: 'auditd', - }, - ], + value: ['suricata', 'auditd'], }; const result = evaluateValues({ - list, + item: list, language: 'kuery', }); - expect(result).toEqual(' and not host.name:(suricata or auditd)'); + expect(result).toEqual('not host.name:(suricata or auditd)'); }); }); describe('lucene', () => { describe('kuery', () => { test('it returns formatted wildcard string when "type" is "exists"', () => { - const list: List = { - values_operator: 'included', - values_type: 'exists', + const list: EntryExists = { + operator: 'included', + type: 'exists', field: 'host.name', }; const result = evaluateValues({ - list, + item: list, language: 'lucene', }); - expect(result).toEqual(' AND NOT _exists_host.name'); + expect(result).toEqual('NOT _exists_host.name'); }); test('it returns formatted string when "type" is "match"', () => { - const list: List = { - values_operator: 'included', - values_type: 'match', + const list: EntryMatch = { + operator: 'included', + type: 'match', field: 'host.name', - values: [{ name: 'suricata' }], + value: 'suricata', }; const result = evaluateValues({ - list, + item: list, language: 'lucene', }); - expect(result).toEqual(' AND NOT host.name:suricata'); + 
expect(result).toEqual('NOT host.name:suricata'); }); - test('it returns formatted string when "type" is "match_all"', () => { - const list: List = { - values_operator: 'included', - values_type: 'match_all', + test('it returns formatted string when "type" is "match_any"', () => { + const list: EntryMatchAny = { + operator: 'included', + type: 'match_any', field: 'host.name', - values: [ - { - name: 'suricata', - }, - { - name: 'auditd', - }, - ], + value: ['suricata', 'auditd'], }; const result = evaluateValues({ - list, + item: list, language: 'lucene', }); - expect(result).toEqual(' AND NOT host.name:(suricata OR auditd)'); + expect(result).toEqual('NOT host.name:(suricata OR auditd)'); }); }); }); @@ -459,7 +476,7 @@ describe('build_exceptions_query', () => { test('it returns expected query string when single exception in array', () => { const formattedQuery = formatQuery({ - exceptions: [' and b:(value-1 or value-2) and not c:*'], + exceptions: ['b:(value-1 or value-2) and not c:*'], query: 'a:*', language: 'kuery', }); @@ -469,7 +486,7 @@ describe('build_exceptions_query', () => { test('it returns expected query string when multiple exceptions in array', () => { const formattedQuery = formatQuery({ - exceptions: [' and b:(value-1 or value-2) and not c:*', ' and not d:*'], + exceptions: ['b:(value-1 or value-2) and not c:*', 'not d:*'], query: 'a:*', language: 'kuery', }); @@ -480,149 +497,70 @@ describe('build_exceptions_query', () => { }); }); - describe('buildExceptions', () => { - test('it returns empty array if empty lists array passed in', () => { - const query = buildExceptions({ - query: 'a:*', + describe('buildExceptionItemEntries', () => { + test('it returns empty string if empty lists array passed in', () => { + const query = buildExceptionItemEntries({ language: 'kuery', lists: [], }); - expect(query).toEqual([]); + expect(query).toEqual(''); }); test('it returns expected query when more than one item in list', () => { // Equal to query && !(b && !c) -> (query AND NOT b) OR (query AND c) // https://www.dcode.fr/boolean-expressions-calculator - const lists: List[] = [ + const payload: EntriesArray = [ { field: 'b', - values_operator: 'included', - values_type: 'match_all', - values: [ - { - name: 'value-1', - }, - { - name: 'value-2', - }, - ], + operator: 'included', + type: 'match_any', + value: ['value-1', 'value-2'], }, { field: 'c', - values_operator: 'excluded', - values_type: 'match', - values: [ - { - name: 'value-3', - }, - ], - }, - ]; - const query = buildExceptions({ - query: 'a:*', - language: 'kuery', - lists, - }); - const expectedQuery = [' and not b:(value-1 or value-2)', ' and c:value-3']; - - expect(query).toEqual(expectedQuery); - }); - - test('it returns expected query when list item includes nested "and" value', () => { - // Equal to query && !(b || !c) -> (query AND NOT b AND c) - // https://www.dcode.fr/boolean-expressions-calculator - const lists: List[] = [ - { - field: 'b', - values_operator: 'included', - values_type: 'match_all', - values: [ - { - name: 'value-1', - }, - { - name: 'value-2', - }, - ], - and: [ - { - field: 'c', - values_operator: 'excluded', - values_type: 'match', - values: [ - { - name: 'value-3', - }, - ], - }, - ], + operator: 'excluded', + type: 'match', + value: 'value-3', }, ]; - const query = buildExceptions({ - query: 'a:*', + const query = buildExceptionItemEntries({ language: 'kuery', - lists, + lists: payload, }); - const expectedQuery = [' and not b:(value-1 or value-2) and c:value-3']; + const expectedQuery = 'not 
b:(value-1 or value-2) and c:value-3'; expect(query).toEqual(expectedQuery); }); - test('it returns expected query when list item includes nested "and" value of empty array', () => { + test('it returns expected query when list item includes nested value', () => { // Equal to query && !(b || !c) -> (query AND NOT b AND c) // https://www.dcode.fr/boolean-expressions-calculator - const lists: List[] = [ + const lists: EntriesArray = [ { field: 'b', - values_operator: 'included', - values_type: 'match_all', - values: [ - { - name: 'value-1', - }, - { - name: 'value-2', - }, - ], - and: [], + operator: 'included', + type: 'match_any', + value: ['value-1', 'value-2'], }, - ]; - const query = buildExceptions({ - query: 'a:*', - language: 'kuery', - lists, - }); - const expectedQuery = [' and not b:(value-1 or value-2)']; - - expect(query).toEqual(expectedQuery); - }); - - test('it returns expected query when list item includes nested "and" value of null', () => { - // Equal to query && !(b || !c) -> (query AND NOT b AND c) - // https://www.dcode.fr/boolean-expressions-calculator - const lists: List[] = [ { - field: 'b', - values_operator: 'included', - values_type: 'match_all', - values: [ + field: 'parent', + type: 'nested', + entries: [ { - name: 'value-1', - }, - { - name: 'value-2', + field: 'nestedField', + operator: 'excluded', + type: 'match', + value: 'value-3', }, ], - and: undefined, }, ]; - const query = buildExceptions({ - query: 'a:*', + const query = buildExceptionItemEntries({ language: 'kuery', lists, }); - const expectedQuery = [' and not b:(value-1 or value-2)']; + const expectedQuery = 'not b:(value-1 or value-2) and parent:{ nestedField:value-3 }'; expect(query).toEqual(expectedQuery); }); @@ -630,130 +568,112 @@ describe('build_exceptions_query', () => { test('it returns expected query when list includes multiple items and nested "and" values', () => { // Equal to query && !((b || !c) && d) -> (query AND NOT b AND c) OR (query AND NOT d) // https://www.dcode.fr/boolean-expressions-calculator - const lists: List[] = [ + const lists: EntriesArray = [ { field: 'b', - values_operator: 'included', - values_type: 'match_all', - values: [ - { - name: 'value-1', - }, - { - name: 'value-2', - }, - ], - and: [ + operator: 'included', + type: 'match_any', + value: ['value-1', 'value-2'], + }, + { + field: 'parent', + type: 'nested', + entries: [ { - field: 'c', - values_operator: 'excluded', - values_type: 'match', - values: [ - { - name: 'value-3', - }, - ], + field: 'nestedField', + operator: 'excluded', + type: 'match', + value: 'value-3', }, ], }, { field: 'd', - values_operator: 'included', - values_type: 'exists', + operator: 'included', + type: 'exists', }, ]; - const query = buildExceptions({ - query: 'a:*', + const query = buildExceptionItemEntries({ language: 'kuery', lists, }); - const expectedQuery = [' and not b:(value-1 or value-2) and c:value-3', ' and not d:*']; - + const expectedQuery = + 'not b:(value-1 or value-2) and parent:{ nestedField:value-3 } and not d:*'; expect(query).toEqual(expectedQuery); }); test('it returns expected query when language is "lucene"', () => { // Equal to query && !((b || !c) && !d) -> (query AND NOT b AND c) OR (query AND d) // https://www.dcode.fr/boolean-expressions-calculator - const lists: List[] = [ + const lists: EntriesArray = [ { field: 'b', - values_operator: 'included', - values_type: 'match_all', - values: [ - { - name: 'value-1', - }, - { - name: 'value-2', - }, - ], - and: [ + operator: 'included', + type: 'match_any', + value: 
['value-1', 'value-2'], + }, + { + field: 'parent', + type: 'nested', + entries: [ { - field: 'c', - values_operator: 'excluded', - values_type: 'match', - values: [ - { - name: 'value-3', - }, - ], + field: 'nestedField', + operator: 'excluded', + type: 'match', + value: 'value-3', }, ], }, { field: 'e', - values_operator: 'excluded', - values_type: 'exists', + operator: 'excluded', + type: 'exists', }, ]; - const query = buildExceptions({ - query: 'a:*', + const query = buildExceptionItemEntries({ language: 'lucene', lists, }); - const expectedQuery = [' AND NOT b:(value-1 OR value-2) AND c:value-3', ' AND _exists_e']; - + const expectedQuery = + 'NOT b:(value-1 OR value-2) AND parent:{ nestedField:value-3 } AND _exists_e'; expect(query).toEqual(expectedQuery); }); describe('exists', () => { - test('it returns expected query when list includes single list item with values_operator of "included"', () => { + test('it returns expected query when list includes single list item with operator of "included"', () => { // Equal to query && !(b) -> (query AND NOT b) // https://www.dcode.fr/boolean-expressions-calculator - const lists: List[] = [ + const lists: EntriesArray = [ { field: 'b', - values_operator: 'included', - values_type: 'exists', + operator: 'included', + type: 'exists', }, ]; - const query = buildExceptions({ - query: 'a:*', + const query = buildExceptionItemEntries({ language: 'kuery', lists, }); - const expectedQuery = [' and not b:*']; + const expectedQuery = 'not b:*'; expect(query).toEqual(expectedQuery); }); - test('it returns expected query when list includes single list item with values_operator of "excluded"', () => { + test('it returns expected query when list includes single list item with operator of "excluded"', () => { // Equal to query && !(!b) -> (query AND b) // https://www.dcode.fr/boolean-expressions-calculator - const lists: List[] = [ + const lists: EntriesArray = [ { field: 'b', - values_operator: 'excluded', - values_type: 'exists', + operator: 'excluded', + type: 'exists', }, ]; - const query = buildExceptions({ - query: 'a:*', + const query = buildExceptionItemEntries({ language: 'kuery', lists, }); - const expectedQuery = [' and b:*']; + const expectedQuery = 'b:*'; expect(query).toEqual(expectedQuery); }); @@ -761,26 +681,30 @@ describe('build_exceptions_query', () => { test('it returns expected query when list includes list item with "and" values', () => { // Equal to query && !(!b || !c) -> (query AND b AND c) // https://www.dcode.fr/boolean-expressions-calculator - const lists: List[] = [ + const lists: EntriesArray = [ { field: 'b', - values_operator: 'excluded', - values_type: 'exists', - and: [ + operator: 'excluded', + type: 'exists', + }, + { + field: 'parent', + type: 'nested', + entries: [ { field: 'c', - values_operator: 'excluded', - values_type: 'exists', + operator: 'excluded', + type: 'match', + value: 'value-1', }, ], }, ]; - const query = buildExceptions({ - query: 'a:*', + const query = buildExceptionItemEntries({ language: 'kuery', lists, }); - const expectedQuery = [' and b:* and c:*']; + const expectedQuery = 'b:* and parent:{ c:value-1 }'; expect(query).toEqual(expectedQuery); }); @@ -788,88 +712,83 @@ describe('build_exceptions_query', () => { test('it returns expected query when list includes multiple items', () => { // Equal to query && !((b || !c || d) && e) -> (query AND NOT b AND c AND NOT d) OR (query AND NOT e) // https://www.dcode.fr/boolean-expressions-calculator - const lists: List[] = [ + const lists: EntriesArray = [ { 
field: 'b', - values_operator: 'included', - values_type: 'exists', - and: [ + operator: 'included', + type: 'exists', + }, + { + field: 'parent', + type: 'nested', + entries: [ { field: 'c', - values_operator: 'excluded', - values_type: 'exists', + operator: 'excluded', + type: 'match', + value: 'value-1', }, { field: 'd', - values_operator: 'included', - values_type: 'exists', + operator: 'included', + type: 'match', + value: 'value-2', }, ], }, { field: 'e', - values_operator: 'included', - values_type: 'exists', + operator: 'included', + type: 'exists', }, ]; - const query = buildExceptions({ - query: 'a:*', + const query = buildExceptionItemEntries({ language: 'kuery', lists, }); - const expectedQuery = [' and not b:* and c:* and not d:*', ' and not e:*']; + const expectedQuery = 'not b:* and parent:{ c:value-1 and d:value-2 } and not e:*'; expect(query).toEqual(expectedQuery); }); }); describe('match', () => { - test('it returns expected query when list includes single list item with values_operator of "included"', () => { + test('it returns expected query when list includes single list item with operator of "included"', () => { // Equal to query && !(b) -> (query AND NOT b) // https://www.dcode.fr/boolean-expressions-calculator - const lists: List[] = [ + const lists: EntriesArray = [ { field: 'b', - values_operator: 'included', - values_type: 'match', - values: [ - { - name: 'value', - }, - ], + operator: 'included', + type: 'match', + value: 'value', }, ]; - const query = buildExceptions({ - query: 'a:*', + const query = buildExceptionItemEntries({ language: 'kuery', lists, }); - const expectedQuery = [' and not b:value']; + const expectedQuery = 'not b:value'; expect(query).toEqual(expectedQuery); }); - test('it returns expected query when list includes single list item with values_operator of "excluded"', () => { + test('it returns expected query when list includes single list item with operator of "excluded"', () => { // Equal to query && !(!b) -> (query AND b) // https://www.dcode.fr/boolean-expressions-calculator - const lists: List[] = [ + const lists: EntriesArray = [ { field: 'b', - values_operator: 'excluded', - values_type: 'match', - values: [ - { - name: 'value', - }, - ], + operator: 'excluded', + type: 'match', + value: 'value', }, ]; - const query = buildExceptions({ - query: 'a:*', + const query = buildExceptionItemEntries({ language: 'kuery', lists, }); - const expectedQuery = [' and b:value']; + const expectedQuery = 'b:value'; expect(query).toEqual(expectedQuery); }); @@ -877,36 +796,31 @@ describe('build_exceptions_query', () => { test('it returns expected query when list includes list item with "and" values', () => { // Equal to query && !(!b || !c) -> (query AND b AND c) // https://www.dcode.fr/boolean-expressions-calculator - const lists: List[] = [ + const lists: EntriesArray = [ { field: 'b', - values_operator: 'excluded', - values_type: 'match', - values: [ - { - name: 'value', - }, - ], - and: [ + operator: 'excluded', + type: 'match', + value: 'value', + }, + { + field: 'parent', + type: 'nested', + entries: [ { field: 'c', - values_operator: 'excluded', - values_type: 'match', - values: [ - { - name: 'valueC', - }, - ], + operator: 'excluded', + type: 'match', + value: 'valueC', }, ], }, ]; - const query = buildExceptions({ - query: 'a:*', + const query = buildExceptionItemEntries({ language: 'kuery', lists, }); - const expectedQuery = [' and b:value and c:valueC']; + const expectedQuery = 'b:value and parent:{ c:valueC }'; 
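+        // 'parent:{ c:valueC }' is the nested-entry syntax emitted by buildNested,
+        // and the 'excluded' match on 'b' is left un-negated by operatorBuilder.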
expect(query).toEqual(expectedQuery); }); @@ -914,160 +828,117 @@ describe('build_exceptions_query', () => { test('it returns expected query when list includes multiple items', () => { // Equal to query && !((b || !c || d) && e) -> (query AND NOT b AND c AND NOT d) OR (query AND NOT e) // https://www.dcode.fr/boolean-expressions-calculator - const lists: List[] = [ + const lists: EntriesArray = [ { field: 'b', - values_operator: 'included', - values_type: 'match', - values: [ - { - name: 'value', - }, - ], - and: [ + operator: 'included', + type: 'match', + value: 'value', + }, + { + field: 'parent', + type: 'nested', + entries: [ { field: 'c', - values_operator: 'excluded', - values_type: 'match', - values: [ - { - name: 'valueC', - }, - ], + operator: 'excluded', + type: 'match', + value: 'valueC', }, { field: 'd', - values_operator: 'included', - values_type: 'match', - values: [ - { - name: 'valueC', - }, - ], + operator: 'excluded', + type: 'match', + value: 'valueC', }, ], }, { field: 'e', - values_operator: 'included', - values_type: 'match', - values: [ - { - name: 'valueC', - }, - ], + operator: 'included', + type: 'match', + value: 'valueC', }, ]; - const query = buildExceptions({ - query: 'a:*', + const query = buildExceptionItemEntries({ language: 'kuery', lists, }); - const expectedQuery = [ - ' and not b:value and c:valueC and not d:valueC', - ' and not e:valueC', - ]; + const expectedQuery = 'not b:value and parent:{ c:valueC and d:valueC } and not e:valueC'; expect(query).toEqual(expectedQuery); }); }); - describe('match_all', () => { - test('it returns expected query when list includes single list item with values_operator of "included"', () => { + describe('match_any', () => { + test('it returns expected query when list includes single list item with operator of "included"', () => { // Equal to query && !(b) -> (query AND NOT b) // https://www.dcode.fr/boolean-expressions-calculator - const lists: List[] = [ + const lists: EntriesArray = [ { field: 'b', - values_operator: 'included', - values_type: 'match_all', - values: [ - { - name: 'value', - }, - { - name: 'value-1', - }, - ], + operator: 'included', + type: 'match_any', + value: ['value', 'value-1'], }, ]; - const query = buildExceptions({ - query: 'a:*', + const query = buildExceptionItemEntries({ language: 'kuery', lists, }); - const expectedQuery = [' and not b:(value or value-1)']; + const expectedQuery = 'not b:(value or value-1)'; expect(query).toEqual(expectedQuery); }); - test('it returns expected query when list includes single list item with values_operator of "excluded"', () => { + test('it returns expected query when list includes single list item with operator of "excluded"', () => { // Equal to query && !(!b) -> (query AND b) // https://www.dcode.fr/boolean-expressions-calculator - const lists: List[] = [ + const lists: EntriesArray = [ { field: 'b', - values_operator: 'excluded', - values_type: 'match_all', - values: [ - { - name: 'value', - }, - { - name: 'value-1', - }, - ], + operator: 'excluded', + type: 'match_any', + value: ['value', 'value-1'], }, ]; - const query = buildExceptions({ - query: 'a:*', + const query = buildExceptionItemEntries({ language: 'kuery', lists, }); - const expectedQuery = [' and b:(value or value-1)']; + const expectedQuery = 'b:(value or value-1)'; expect(query).toEqual(expectedQuery); }); - test('it returns expected query when list includes list item with "and" values', () => { + test('it returns expected query when list includes list item with nested values', () => { 
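+        // NOTE: both entries below use 'excluded' operators, so neither term is
+        // negated in the expected query; the boolean-expression comment that
+        // follows describes the earlier included/excluded mix, not these entries.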
// Equal to query && !(!b || c) -> (query AND b AND NOT c) // https://www.dcode.fr/boolean-expressions-calculator - const lists: List[] = [ + const lists: EntriesArray = [ { field: 'b', - values_operator: 'excluded', - values_type: 'match_all', - values: [ - { - name: 'value', - }, - { - name: 'value-1', - }, - ], - and: [ + operator: 'excluded', + type: 'match_any', + value: ['value', 'value-1'], + }, + { + field: 'parent', + type: 'nested', + entries: [ { field: 'c', - values_operator: 'included', - values_type: 'match_all', - values: [ - { - name: 'valueC', - }, - { - name: 'value-2', - }, - ], + operator: 'excluded', + type: 'match', + value: 'valueC', }, ], }, ]; - const query = buildExceptions({ - query: 'a:*', + const query = buildExceptionItemEntries({ language: 'kuery', lists, }); - const expectedQuery = [' and b:(value or value-1) and not c:(valueC or value-2)']; + const expectedQuery = 'b:(value or value-1) and parent:{ c:valueC }'; expect(query).toEqual(expectedQuery); }); @@ -1075,71 +946,25 @@ describe('build_exceptions_query', () => { test('it returns expected query when list includes multiple items', () => { // Equal to query && !((b || !c || d) && e) -> ((query AND NOT b AND c AND NOT d) OR (query AND NOT e) // https://www.dcode.fr/boolean-expressions-calculator - const lists: List[] = [ + const lists: EntriesArray = [ { field: 'b', - values_operator: 'included', - values_type: 'match_all', - values: [ - { - name: 'value', - }, - { - name: 'value-1', - }, - ], - and: [ - { - field: 'c', - values_operator: 'excluded', - values_type: 'match_all', - values: [ - { - name: 'valueC', - }, - { - name: 'value-2', - }, - ], - }, - { - field: 'd', - values_operator: 'included', - values_type: 'match_all', - values: [ - { - name: 'valueD', - }, - { - name: 'value-3', - }, - ], - }, - ], + operator: 'included', + type: 'match_any', + value: ['value', 'value-1'], }, { field: 'e', - values_operator: 'included', - values_type: 'match_all', - values: [ - { - name: 'valueE', - }, - { - name: 'value-4', - }, - ], + operator: 'included', + type: 'match_any', + value: ['valueE', 'value-4'], }, ]; - const query = buildExceptions({ - query: 'a:*', + const query = buildExceptionItemEntries({ language: 'kuery', lists, }); - const expectedQuery = [ - ' and not b:(value or value-1) and c:(valueC or value-2) and not d:(valueD or value-3)', - ' and not e:(valueE or value-4)', - ]; + const expectedQuery = 'not b:(value or value-1) and not e:(valueE or value-4)'; expect(query).toEqual(expectedQuery); }); @@ -1157,65 +982,47 @@ describe('build_exceptions_query', () => { test('it returns expected query when lists exist and language is "kuery"', () => { // Equal to query && !((b || !c || d) && e) -> ((query AND NOT b AND c AND NOT d) OR (query AND NOT e) // https://www.dcode.fr/boolean-expressions-calculator - const lists: List[] = [ + const payload = getExceptionListItemSchemaMock(); + const payload2 = getExceptionListItemSchemaMock(); + payload2.entries = [ { field: 'b', - values_operator: 'included', - values_type: 'match_all', - values: [ - { - name: 'value', - }, - { - name: 'value-1', - }, - ], - and: [ + operator: 'included', + type: 'match_any', + value: ['value', 'value-1'], + }, + { + field: 'parent', + type: 'nested', + entries: [ { field: 'c', - values_operator: 'excluded', - values_type: 'match_all', - values: [ - { - name: 'valueC', - }, - { - name: 'value-2', - }, - ], + operator: 'excluded', + type: 'match', + value: 'valueC', }, { field: 'd', - values_operator: 'included', - values_type: 
'match_all', - values: [ - { - name: 'valueD', - }, - { - name: 'value-3', - }, - ], + operator: 'excluded', + type: 'match', + value: 'valueD', }, ], }, { field: 'e', - values_operator: 'included', - values_type: 'match_all', - values: [ - { - name: 'valueE', - }, - { - name: 'value-4', - }, - ], + operator: 'included', + type: 'match_any', + value: ['valueE', 'value-4'], }, ]; - const query = buildQueryExceptions({ query: 'a:*', language: 'kuery', lists }); + const query = buildQueryExceptions({ + query: 'a:*', + language: 'kuery', + lists: [payload, payload2], + }); const expectedQuery = - '(a:* and not b:(value or value-1) and c:(valueC or value-2) and not d:(valueD or value-3)) or (a:* and not e:(valueE or value-4))'; + '(a:* and some.parentField:{ nested.field:some value } and not some.not.nested.field:some value) or (a:* and not b:(value or value-1) and parent:{ c:valueC and d:valueD } and not e:(valueE or value-4))'; expect(query).toEqual([{ query: expectedQuery, language: 'kuery' }]); }); @@ -1223,65 +1030,47 @@ describe('build_exceptions_query', () => { test('it returns expected query when lists exist and language is "lucene"', () => { // Equal to query && !((b || !c || d) && e) -> ((query AND NOT b AND c AND NOT d) OR (query AND NOT e) // https://www.dcode.fr/boolean-expressions-calculator - const lists: List[] = [ + const payload = getExceptionListItemSchemaMock(); + const payload2 = getExceptionListItemSchemaMock(); + payload2.entries = [ { field: 'b', - values_operator: 'included', - values_type: 'match_all', - values: [ - { - name: 'value', - }, - { - name: 'value-1', - }, - ], - and: [ + operator: 'included', + type: 'match_any', + value: ['value', 'value-1'], + }, + { + field: 'parent', + type: 'nested', + entries: [ { field: 'c', - values_operator: 'excluded', - values_type: 'match_all', - values: [ - { - name: 'valueC', - }, - { - name: 'value-2', - }, - ], + operator: 'excluded', + type: 'match', + value: 'valueC', }, { field: 'd', - values_operator: 'included', - values_type: 'match_all', - values: [ - { - name: 'valueD', - }, - { - name: 'value-3', - }, - ], + operator: 'excluded', + type: 'match', + value: 'valueD', }, ], }, { field: 'e', - values_operator: 'included', - values_type: 'match_all', - values: [ - { - name: 'valueE', - }, - { - name: 'value-4', - }, - ], + operator: 'included', + type: 'match_any', + value: ['valueE', 'value-4'], }, ]; - const query = buildQueryExceptions({ query: 'a:*', language: 'lucene', lists }); + const query = buildQueryExceptions({ + query: 'a:*', + language: 'lucene', + lists: [payload, payload2], + }); const expectedQuery = - '(a:* AND NOT b:(value OR value-1) AND c:(valueC OR value-2) AND NOT d:(valueD OR value-3)) OR (a:* AND NOT e:(valueE OR value-4))'; + '(a:* AND some.parentField:{ nested.field:some value } AND NOT some.not.nested.field:some value) OR (a:* AND NOT b:(value OR value-1) AND parent:{ c:valueC AND d:valueD } AND NOT e:(valueE OR value-4))'; expect(query).toEqual([{ query: expectedQuery, language: 'lucene' }]); }); diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/signals/build_exceptions_query.ts b/x-pack/plugins/security_solution/server/lib/detection_engine/signals/build_exceptions_query.ts index 233b20792299..ba0d9dec7d1b 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/signals/build_exceptions_query.ts +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/signals/build_exceptions_query.ts @@ -3,17 +3,23 @@ * or more contributor license agreements. 
Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -import { - ListAndOrUndefined, - Language, - Query, -} from '../../../../common/detection_engine/schemas/common/schemas'; -import { - ListOperator, - ListValues, - List, -} from '../../../../common/detection_engine/schemas/types/lists_default_array'; +import { Language, Query } from '../../../../common/detection_engine/schemas/common/schemas'; import { Query as DataQuery } from '../../../../../../../src/plugins/data/server'; +import { + Entry, + ExceptionListItemSchema, + EntryMatch, + EntryMatchAny, + EntryNested, + EntryExists, + EntriesArray, + Operator, + entriesMatchAny, + entriesExists, + entriesMatch, + entriesNested, + entriesList, +} from '../../../../../lists/common/schemas'; type Operators = 'and' | 'or' | 'not'; type LuceneOperators = 'AND' | 'OR' | 'NOT'; @@ -41,37 +47,30 @@ export const operatorBuilder = ({ operator, language, }: { - operator: ListOperator; + operator: Operator; language: Language; }): string => { - const and = getLanguageBooleanOperator({ - language, - value: 'and', - }); - const or = getLanguageBooleanOperator({ + const not = getLanguageBooleanOperator({ language, value: 'not', }); switch (operator) { - case 'excluded': - return ` ${and} `; case 'included': - return ` ${and} ${or} `; + return `${not} `; default: return ''; } }; export const buildExists = ({ - operator, - field, + item, language, }: { - operator: ListOperator; - field: string; + item: EntryExists; language: Language; }): string => { + const { operator, field } = item; const exceptionOperator = operatorBuilder({ operator, language }); switch (language) { @@ -85,64 +84,70 @@ export const buildExists = ({ }; export const buildMatch = ({ - operator, - field, - values, + item, language, }: { - operator: ListOperator; - field: string; - values: ListValues[]; + item: EntryMatch; language: Language; }): string => { - if (values.length > 0) { - const exceptionOperator = operatorBuilder({ operator, language }); - const [exception] = values; + const { value, operator, field } = item; + const exceptionOperator = operatorBuilder({ operator, language }); - return `${exceptionOperator}${field}:${exception.name}`; - } else { - return ''; - } + return `${exceptionOperator}${field}:${value}`; }; -export const buildMatchAll = ({ - operator, - field, - values, +export const buildMatchAny = ({ + item, language, }: { - operator: ListOperator; - field: string; - values: ListValues[]; + item: EntryMatchAny; language: Language; }): string => { - switch (values.length) { + const { value, operator, field } = item; + + switch (value.length) { case 0: return ''; - case 1: - return buildMatch({ operator, field, values, language }); default: const or = getLanguageBooleanOperator({ language, value: 'or' }); const exceptionOperator = operatorBuilder({ operator, language }); - const matchAllValues = values.map((value) => { - return value.name; - }); + const matchAnyValues = value.map((v) => v); - return `${exceptionOperator}${field}:(${matchAllValues.join(` ${or} `)})`; + return `${exceptionOperator}${field}:(${matchAnyValues.join(` ${or} `)})`; } }; -export const evaluateValues = ({ list, language }: { list: List; language: Language }): string => { - const { values_operator: operator, values_type: type, field, values } = list; - switch (type) { - case 'exists': - return buildExists({ operator, field, language }); - case 'match': - return buildMatch({ operator, field, values: values ?? 
[], language }); - case 'match_all': - return buildMatchAll({ operator, field, values: values ?? [], language }); - default: - return ''; +export const buildNested = ({ + item, + language, +}: { + item: EntryNested; + language: Language; +}): string => { + const { field, entries } = item; + const and = getLanguageBooleanOperator({ language, value: 'and' }); + const values = entries.map((entry) => `${entry.field}:${entry.value}`); + + return `${field}:{ ${values.join(` ${and} `)} }`; +}; + +export const evaluateValues = ({ + item, + language, +}: { + item: Entry | EntryNested; + language: Language; +}): string => { + if (entriesExists.is(item)) { + return buildExists({ item, language }); + } else if (entriesMatch.is(item)) { + return buildMatch({ item, language }); + } else if (entriesMatchAny.is(item)) { + return buildMatchAny({ item, language }); + } else if (entriesNested.is(item)) { + return buildNested({ item, language }); + } else { + return ''; } }; @@ -157,8 +162,9 @@ export const formatQuery = ({ }): string => { if (exceptions.length > 0) { const or = getLanguageBooleanOperator({ language, value: 'or' }); + const and = getLanguageBooleanOperator({ language, value: 'and' }); const formattedExceptions = exceptions.map((exception) => { - return `(${query}${exception})`; + return `(${query} ${and} ${exception})`; }); return formattedExceptions.join(` ${or} `); @@ -167,23 +173,22 @@ export const formatQuery = ({ } }; -export const buildExceptions = ({ - query, +export const buildExceptionItemEntries = ({ lists, language, }: { - query: string; - lists: List[]; + lists: EntriesArray; language: Language; -}): string[] => { - return lists.reduce((accum, listItem) => { - const { and, ...exceptionDetails } = { ...listItem }; - const andExceptionsSegments = and ? 
buildExceptions({ query, lists: and, language }) : []; - const exceptionSegment = evaluateValues({ list: exceptionDetails, language }); - const exception = [...exceptionSegment, ...andExceptionsSegments]; - - return [...accum, exception.join('')]; - }, []); +}): string => { + const and = getLanguageBooleanOperator({ language, value: 'and' }); + const exceptionItem = lists + .filter((t) => !entriesList.is(t)) + .reduce((accum, listItem) => { + const exceptionSegment = evaluateValues({ item: listItem, language }); + return [...accum, exceptionSegment]; + }, []); + + return exceptionItem.join(` ${and} `); }; export const buildQueryExceptions = ({ @@ -193,12 +198,13 @@ export const buildQueryExceptions = ({ }: { query: Query; language: Language; - lists: ListAndOrUndefined; + lists: ExceptionListItemSchema[] | undefined; }): DataQuery[] => { if (lists && lists !== null) { - const exceptions = buildExceptions({ lists, language, query }); + const exceptions = lists.map((exceptionItem) => + buildExceptionItemEntries({ lists: exceptionItem.entries, language }) + ); const formattedQuery = formatQuery({ exceptions, language, query }); - return [ { query: formattedQuery, diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/signals/build_rule.test.ts b/x-pack/plugins/security_solution/server/lib/detection_engine/signals/build_rule.test.ts index eb87976a6fba..9aef5a370b86 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/signals/build_rule.test.ts +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/signals/build_rule.test.ts @@ -7,6 +7,7 @@ import { buildRule } from './build_rule'; import { sampleRuleAlertParams, sampleRuleGuid } from './__mocks__/es_results'; import { RulesSchema } from '../../../../common/detection_engine/schemas/response/rules_schema'; +import { getListArrayMock } from '../../../../common/detection_engine/schemas/types/lists.mock'; describe('buildRule', () => { beforeEach(() => { @@ -80,38 +81,7 @@ describe('buildRule', () => { query: 'host.name: Braden', }, ], - exceptions_list: [ - { - field: 'source.ip', - values_operator: 'included', - values_type: 'exists', - }, - { - field: 'host.name', - values_operator: 'excluded', - values_type: 'match', - values: [ - { - name: 'rock01', - }, - ], - and: [ - { - field: 'host.id', - values_operator: 'included', - values_type: 'match_all', - values: [ - { - name: '123', - }, - { - name: '678', - }, - ], - }, - ], - }, - ], + exceptions_list: getListArrayMock(), version: 1, }; expect(rule).toEqual(expected); @@ -164,38 +134,7 @@ describe('buildRule', () => { updated_at: rule.updated_at, created_at: rule.created_at, throttle: 'no_actions', - exceptions_list: [ - { - field: 'source.ip', - values_operator: 'included', - values_type: 'exists', - }, - { - field: 'host.name', - values_operator: 'excluded', - values_type: 'match', - values: [ - { - name: 'rock01', - }, - ], - and: [ - { - field: 'host.id', - values_operator: 'included', - values_type: 'match_all', - values: [ - { - name: '123', - }, - { - name: '678', - }, - ], - }, - ], - }, - ], + exceptions_list: getListArrayMock(), }; expect(rule).toEqual(expected); }); @@ -247,38 +186,7 @@ describe('buildRule', () => { updated_at: rule.updated_at, created_at: rule.created_at, throttle: 'no_actions', - exceptions_list: [ - { - field: 'source.ip', - values_operator: 'included', - values_type: 'exists', - }, - { - field: 'host.name', - values_operator: 'excluded', - values_type: 'match', - values: [ - { - name: 'rock01', - }, - ], - and: [ 
- { - field: 'host.id', - values_operator: 'included', - values_type: 'match_all', - values: [ - { - name: '123', - }, - { - name: '678', - }, - ], - }, - ], - }, - ], + exceptions_list: getListArrayMock(), }; expect(rule).toEqual(expected); }); diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/signals/filter_events_with_list.test.ts b/x-pack/plugins/security_solution/server/lib/detection_engine/signals/filter_events_with_list.test.ts index 4e9eb8587484..bb56926390af 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/signals/filter_events_with_list.test.ts +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/signals/filter_events_with_list.test.ts @@ -8,6 +8,7 @@ import uuid from 'uuid'; import { filterEventsAgainstList } from './filter_events_with_list'; import { mockLogger, repeatedSearchResultsWithSortId } from './__mocks__/es_results'; +import { getExceptionListItemSchemaMock } from '../../../../../lists/common/schemas/response/exception_list_item_schema.mock'; import { getListItemResponseMock } from '../../../../../lists/common/schemas/response/list_item_schema.mock'; import { listMock } from '../../../../../lists/server/mocks'; @@ -36,92 +37,42 @@ describe('filterEventsAgainstList', () => { expect(res.hits.hits.length).toEqual(4); }); - it('should throw an error if malformed exception list present', async () => { - let message = ''; - try { - await filterEventsAgainstList({ - logger: mockLogger, - listClient, - exceptionsList: [ - { - field: 'source.ip', - values_operator: 'excluded', - values_type: 'list', - values: undefined, + describe('operator_type is included', () => { + it('should respond with same list if no items match value list', async () => { + const exceptionItem = getExceptionListItemSchemaMock(); + exceptionItem.entries = [ + { + field: 'source.ip', + operator: 'included', + type: 'list', + list: { + id: 'ci-badguys.txt', + type: 'ip', }, - ], - eventSearchResult: repeatedSearchResultsWithSortId(4, 4, someGuids.slice(0, 3), [ - '1.1.1.1', - '2.2.2.2', - '3.3.3.3', - '7.7.7.7', - ]), - }); - } catch (exc) { - message = exc.message; - } - expect(message).toEqual( - 'Failed to query lists index. Reason: Malformed exception list provided' - ); - }); + }, + ]; - it('should throw an error if unsupported exception type', async () => { - let message = ''; - try { - await filterEventsAgainstList({ - logger: mockLogger, - listClient, - exceptionsList: [ - { - field: 'source.ip', - values_operator: 'excluded', - values_type: 'list', - values: [ - { - id: 'ci-badguys.txt', - name: 'unsupportedListPluginType', - }, - ], - }, - ], - eventSearchResult: repeatedSearchResultsWithSortId(4, 4, someGuids.slice(0, 3), [ - '1.1.1.1', - '2.2.2.2', - '3.3.3.3', - '7.7.7.7', - ]), - }); - } catch (exc) { - message = exc.message; - } - expect(message).toEqual( - 'Failed to query lists index. 
Reason: Unsupported list type used, please use one of ip,keyword' - ); - }); - - describe('operator_type is includes', () => { - it('should respond with same list if no items match value list', async () => { const res = await filterEventsAgainstList({ logger: mockLogger, listClient, - exceptionsList: [ - { - field: 'source.ip', - values_operator: 'included', - values_type: 'list', - values: [ - { - id: 'ci-badguys.txt', - name: 'ip', - }, - ], - }, - ], + exceptionsList: [exceptionItem], eventSearchResult: repeatedSearchResultsWithSortId(4, 4, someGuids.slice(0, 3)), }); expect(res.hits.hits.length).toEqual(4); }); it('should respond with less items in the list if some values match', async () => { + const exceptionItem = getExceptionListItemSchemaMock(); + exceptionItem.entries = [ + { + field: 'source.ip', + operator: 'included', + type: 'list', + list: { + id: 'ci-badguys.txt', + type: 'ip', + }, + }, + ]; listClient.getListItemByValues = jest.fn(({ value }) => Promise.resolve( value.slice(0, 2).map((item) => ({ @@ -133,19 +84,7 @@ describe('filterEventsAgainstList', () => { const res = await filterEventsAgainstList({ logger: mockLogger, listClient, - exceptionsList: [ - { - field: 'source.ip', - values_operator: 'included', - values_type: 'list', - values: [ - { - id: 'ci-badguys.txt', - name: 'ip', - }, - ], - }, - ], + exceptionsList: [exceptionItem], eventSearchResult: repeatedSearchResultsWithSortId(4, 4, someGuids.slice(0, 3), [ '1.1.1.1', '2.2.2.2', @@ -162,27 +101,39 @@ describe('filterEventsAgainstList', () => { }); describe('operator type is excluded', () => { it('should respond with empty list if no items match value list', async () => { + const exceptionItem = getExceptionListItemSchemaMock(); + exceptionItem.entries = [ + { + field: 'source.ip', + operator: 'excluded', + type: 'list', + list: { + id: 'ci-badguys.txt', + type: 'ip', + }, + }, + ]; const res = await filterEventsAgainstList({ logger: mockLogger, listClient, - exceptionsList: [ - { - field: 'source.ip', - values_operator: 'excluded', - values_type: 'list', - values: [ - { - id: 'ci-badguys.txt', - name: 'ip', - }, - ], - }, - ], + exceptionsList: [exceptionItem], eventSearchResult: repeatedSearchResultsWithSortId(4, 4, someGuids.slice(0, 3)), }); expect(res.hits.hits.length).toEqual(0); }); it('should respond with less items in the list if some values match', async () => { + const exceptionItem = getExceptionListItemSchemaMock(); + exceptionItem.entries = [ + { + field: 'source.ip', + operator: 'excluded', + type: 'list', + list: { + id: 'ci-badguys.txt', + type: 'ip', + }, + }, + ]; listClient.getListItemByValues = jest.fn(({ value }) => Promise.resolve( value.slice(0, 2).map((item) => ({ @@ -194,19 +145,7 @@ describe('filterEventsAgainstList', () => { const res = await filterEventsAgainstList({ logger: mockLogger, listClient, - exceptionsList: [ - { - field: 'source.ip', - values_operator: 'excluded', - values_type: 'list', - values: [ - { - id: 'ci-badguys.txt', - name: 'ip', - }, - ], - }, - ], + exceptionsList: [exceptionItem], eventSearchResult: repeatedSearchResultsWithSortId(4, 4, someGuids.slice(0, 3), [ '1.1.1.1', '2.2.2.2', diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/signals/filter_events_with_list.ts b/x-pack/plugins/security_solution/server/lib/detection_engine/signals/filter_events_with_list.ts index 48b120d1b580..1a2f648eb856 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/signals/filter_events_with_list.ts +++ 
b/x-pack/plugins/security_solution/server/lib/detection_engine/signals/filter_events_with_list.ts @@ -6,15 +6,17 @@ import { get } from 'lodash/fp'; import { Logger } from 'src/core/server'; -import { ListAndOrUndefined } from '../../../../common/detection_engine/schemas/common/schemas'; -import { List } from '../../../../common/detection_engine/schemas/types/lists_default_array'; -import { type } from '../../../../../lists/common/schemas/common'; import { ListClient } from '../../../../../lists/server'; import { SignalSearchResponse, SearchTypes } from './types'; +import { + entriesList, + EntryList, + ExceptionListItemSchema, +} from '../../../../../lists/common/schemas'; interface FilterEventsAgainstList { listClient: ListClient; - exceptionsList: ListAndOrUndefined; + exceptionsList: ExceptionListItemSchema[]; logger: Logger; eventSearchResult: SignalSearchResponse; } @@ -34,63 +36,63 @@ export const filterEventsAgainstList = async ({ const isStringableType = (val: SearchTypes) => ['string', 'number', 'boolean'].includes(typeof val); // grab the signals with values found in the given exception lists. - const filteredHitsPromises = exceptionsList - .filter((exceptionItem: List) => exceptionItem.values_type === 'list') - .map(async (exceptionItem: List) => { - if (exceptionItem.values == null || exceptionItem.values.length === 0) { - throw new Error('Malformed exception list provided'); - } - if (!type.is(exceptionItem.values[0].name)) { - throw new Error( - `Unsupported list type used, please use one of ${Object.keys(type.keys).join()}` - ); - } - if (!exceptionItem.values[0].id) { - throw new Error(`Missing list id for exception on field ${exceptionItem.field}`); - } - // acquire the list values we are checking for. - const valuesOfGivenType = eventSearchResult.hits.hits.reduce((acc, searchResultItem) => { - const valueField = get(exceptionItem.field, searchResultItem._source); - if (valueField != null && isStringableType(valueField)) { - acc.add(valueField.toString()); - } - return acc; - }, new Set()); + const filteredHitsPromises = exceptionsList.map( + async (exceptionItem: ExceptionListItemSchema) => { + const { entries } = exceptionItem; - // matched will contain any list items that matched with the - // values passed in from the Set. - const matchedListItems = await listClient.getListItemByValues({ - listId: exceptionItem.values[0].id, - type: exceptionItem.values[0].name, - value: [...valuesOfGivenType], - }); + const filteredHitsEntries = entries + .filter((t): t is EntryList => entriesList.is(t)) + .map(async (entry) => { + // acquire the list values we are checking for. + const valuesOfGivenType = eventSearchResult.hits.hits.reduce( + (acc, searchResultItem) => { + const valueField = get(entry.field, searchResultItem._source); + if (valueField != null && isStringableType(valueField)) { + acc.add(valueField.toString()); + } + return acc; + }, + new Set() + ); - // create a set of list values that were a hit - easier to work with - const matchedListItemsSet = new Set( - matchedListItems.map((item) => item.value) - ); + // matched will contain any list items that matched with the + // values passed in from the Set. + const matchedListItems = await listClient.getListItemByValues({ + listId: entry.list.id, + type: entry.list.type, + value: [...valuesOfGivenType], + }); - // do a single search after with these values. - // painless script to do nested query in elasticsearch - // filter out the search results that match with the values found in the list. 
- const operator = exceptionItem.values_operator; - const filteredEvents = eventSearchResult.hits.hits.filter((item) => { - const eventItem = get(exceptionItem.field, item._source); - if (operator === 'included') { - if (eventItem != null) { - return !matchedListItemsSet.has(eventItem); - } - } else if (operator === 'excluded') { - if (eventItem != null) { - return matchedListItemsSet.has(eventItem); - } - } - return false; - }); - const diff = eventSearchResult.hits.hits.length - filteredEvents.length; - logger.debug(`Lists filtered out ${diff} events`); - return filteredEvents; - }); + // create a set of list values that were a hit - easier to work with + const matchedListItemsSet = new Set( + matchedListItems.map((item) => item.value) + ); + + // do a single search after with these values. + // painless script to do nested query in elasticsearch + // filter out the search results that match with the values found in the list. + const operator = entry.operator; + const filteredEvents = eventSearchResult.hits.hits.filter((item) => { + const eventItem = get(entry.field, item._source); + if (operator === 'included') { + if (eventItem != null) { + return !matchedListItemsSet.has(eventItem); + } + } else if (operator === 'excluded') { + if (eventItem != null) { + return matchedListItemsSet.has(eventItem); + } + } + return false; + }); + const diff = eventSearchResult.hits.hits.length - filteredEvents.length; + logger.debug(`Lists filtered out ${diff} events`); + return filteredEvents; + }); + + return (await Promise.all(filteredHitsEntries)).flat(); + } + ); const filteredHits = await Promise.all(filteredHitsPromises); const toReturn: SignalSearchResponse = { diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/signals/get_filter.test.ts b/x-pack/plugins/security_solution/server/lib/detection_engine/signals/get_filter.test.ts index 61cd9cfedd94..9b3a446bc666 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/signals/get_filter.test.ts +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/signals/get_filter.test.ts @@ -7,6 +7,7 @@ import { getQueryFilter, getFilter } from './get_filter'; import { PartialFilter } from '../types'; import { alertsMock, AlertServicesMock } from '../../../../../alerts/server/mocks'; +import { getExceptionListItemSchemaMock } from '../../../../../lists/common/schemas/response/exception_list_item_schema.mock'; describe('get_filter', () => { let servicesMock: AlertServicesMock; @@ -381,18 +382,7 @@ describe('get_filter', () => { 'kuery', [], ['auditbeat-*'], - [ - { - field: 'event.module', - values_operator: 'excluded', - values_type: 'match', - values: [ - { - name: 'suricata', - }, - ], - }, - ] + [getExceptionListItemSchemaMock()] ); expect(esQuery).toEqual({ bool: { @@ -414,11 +404,39 @@ describe('get_filter', () => { }, { bool: { - minimum_should_match: 1, - should: [ + filter: [ { - match: { - 'event.module': 'suricata', + nested: { + path: 'some.parentField', + query: { + bool: { + minimum_should_match: 1, + should: [ + { + match: { + 'some.parentField.nested.field': 'some value', + }, + }, + ], + }, + }, + score_mode: 'none', + }, + }, + { + bool: { + must_not: { + bool: { + minimum_should_match: 1, + should: [ + { + match: { + 'some.not.nested.field': 'some value', + }, + }, + ], + }, + }, }, }, ], @@ -450,7 +468,7 @@ describe('get_filter', () => { }); test('it should work when lists has value undefined', () => { - const esQuery = getQueryFilter('host.name: linux', 'kuery', [], ['auditbeat-*'], 
undefined); + const esQuery = getQueryFilter('host.name: linux', 'kuery', [], ['auditbeat-*'], []); expect(esQuery).toEqual({ bool: { filter: [ @@ -529,7 +547,7 @@ describe('get_filter', () => { savedId: undefined, services: servicesMock, index: ['auditbeat-*'], - lists: undefined, + lists: [], }); expect(filter).toEqual({ bool: { @@ -564,7 +582,7 @@ describe('get_filter', () => { savedId: undefined, services: servicesMock, index: ['auditbeat-*'], - lists: undefined, + lists: [], }) ).rejects.toThrow('query, filters, and index parameter should be defined'); }); @@ -579,7 +597,7 @@ describe('get_filter', () => { savedId: undefined, services: servicesMock, index: ['auditbeat-*'], - lists: undefined, + lists: [], }) ).rejects.toThrow('query, filters, and index parameter should be defined'); }); @@ -594,7 +612,7 @@ describe('get_filter', () => { savedId: undefined, services: servicesMock, index: undefined, - lists: undefined, + lists: [], }) ).rejects.toThrow('query, filters, and index parameter should be defined'); }); @@ -608,7 +626,7 @@ describe('get_filter', () => { savedId: 'some-id', services: servicesMock, index: ['auditbeat-*'], - lists: undefined, + lists: [], }); expect(filter).toEqual({ bool: { @@ -632,7 +650,7 @@ describe('get_filter', () => { savedId: undefined, services: servicesMock, index: ['auditbeat-*'], - lists: undefined, + lists: [], }) ).rejects.toThrow('savedId parameter should be defined'); }); @@ -647,7 +665,7 @@ describe('get_filter', () => { savedId: 'some-id', services: servicesMock, index: undefined, - lists: undefined, + lists: [], }) ).rejects.toThrow('savedId parameter should be defined'); }); @@ -662,7 +680,7 @@ describe('get_filter', () => { savedId: 'some-id', services: servicesMock, index: undefined, - lists: undefined, + lists: [], }) ).rejects.toThrow('Unsupported Rule of type "machine_learning" supplied to getFilter'); }); @@ -812,18 +830,7 @@ describe('get_filter', () => { savedId: undefined, services: servicesMock, index: ['auditbeat-*'], - lists: [ - { - field: 'event.module', - values_operator: 'excluded', - values_type: 'match', - values: [ - { - name: 'suricata', - }, - ], - }, - ], + lists: [getExceptionListItemSchemaMock()], }); expect(filter).toEqual({ bool: { @@ -845,11 +852,39 @@ describe('get_filter', () => { }, { bool: { - minimum_should_match: 1, - should: [ + filter: [ { - match: { - 'event.module': 'suricata', + nested: { + path: 'some.parentField', + query: { + bool: { + minimum_should_match: 1, + should: [ + { + match: { + 'some.parentField.nested.field': 'some value', + }, + }, + ], + }, + }, + score_mode: 'none', + }, + }, + { + bool: { + must_not: { + bool: { + minimum_should_match: 1, + should: [ + { + match: { + 'some.not.nested.field': 'some value', + }, + }, + ], + }, + }, }, }, ], diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/signals/get_filter.ts b/x-pack/plugins/security_solution/server/lib/detection_engine/signals/get_filter.ts index 3e9f79c67d8c..50ce01aaa6f7 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/signals/get_filter.ts +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/signals/get_filter.ts @@ -10,11 +10,11 @@ import { Type, SavedIdOrUndefined, IndexOrUndefined, - ListAndOrUndefined, Language, Index, Query, } from '../../../../common/detection_engine/schemas/common/schemas'; +import { ExceptionListItemSchema } from '../../../../../lists/common/schemas'; import { AlertServices } from '../../../../../alerts/server'; import { assertUnreachable } from 
'../../../utils/build_query'; import { @@ -33,7 +33,7 @@ export const getQueryFilter = ( language: Language, filters: PartialFilter[], index: Index, - lists: ListAndOrUndefined + lists: ExceptionListItemSchema[] ) => { const indexPattern = { fields: [], @@ -64,7 +64,7 @@ interface GetFilterArgs { savedId: SavedIdOrUndefined; services: AlertServices; index: IndexOrUndefined; - lists: ListAndOrUndefined; + lists: ExceptionListItemSchema[]; } interface QueryAttributes { diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/signals/search_after_bulk_create.test.ts b/x-pack/plugins/security_solution/server/lib/detection_engine/signals/search_after_bulk_create.test.ts index 163ed76d0c6c..1923f43c47b9 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/signals/search_after_bulk_create.test.ts +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/signals/search_after_bulk_create.test.ts @@ -17,6 +17,7 @@ import { alertsMock, AlertServicesMock } from '../../../../../alerts/server/mock import uuid from 'uuid'; import { getListItemResponseMock } from '../../../../../lists/common/schemas/response/list_item_schema.mock'; import { listMock } from '../../../../../lists/server/mocks'; +import { getExceptionListItemSchemaMock } from '../../../../../lists/common/schemas/response/exception_list_item_schema.mock'; describe('searchAfterAndBulkCreate', () => { let mockService: AlertServicesMock; @@ -94,22 +95,23 @@ describe('searchAfterAndBulkCreate', () => { }, ], }); + const exceptionItem = getExceptionListItemSchemaMock(); + exceptionItem.entries = [ + { + field: 'source.ip', + operator: 'included', + type: 'list', + list: { + id: 'ci-badguys.txt', + type: 'ip', + }, + }, + ]; + const { success, createdSignalsCount, lastLookBackDate } = await searchAfterAndBulkCreate({ ruleParams: sampleParams, listClient, - exceptionsList: [ - { - field: 'source.ip', - values_operator: 'included', - values_type: 'list', - values: [ - { - id: 'ci-badguys.txt', - name: 'ip', - }, - ], - }, - ], + exceptionsList: [exceptionItem], services: mockService, logger: mockLogger, id: sampleRuleGuid, @@ -168,22 +170,22 @@ describe('searchAfterAndBulkCreate', () => { }, ], }); + const exceptionItem = getExceptionListItemSchemaMock(); + exceptionItem.entries = [ + { + field: 'source.ip', + operator: 'included', + type: 'list', + list: { + id: 'ci-badguys.txt', + type: 'ip', + }, + }, + ]; const { success, createdSignalsCount, lastLookBackDate } = await searchAfterAndBulkCreate({ ruleParams: sampleParams, listClient, - exceptionsList: [ - { - field: 'source.ip', - values_operator: 'included', - values_type: 'list', - values: [ - { - id: 'ci-badguys.txt', - name: 'ip', - }, - ], - }, - ], + exceptionsList: [exceptionItem], services: mockService, logger: mockLogger, id: sampleRuleGuid, @@ -254,7 +256,7 @@ describe('searchAfterAndBulkCreate', () => { const { success, createdSignalsCount, lastLookBackDate } = await searchAfterAndBulkCreate({ ruleParams: sampleParams, listClient, - exceptionsList: undefined, + exceptionsList: [], services: mockService, logger: mockLogger, id: sampleRuleGuid, @@ -281,25 +283,25 @@ describe('searchAfterAndBulkCreate', () => { }); test('if unsuccessful first bulk create', async () => { + const exceptionItem = getExceptionListItemSchemaMock(); + exceptionItem.entries = [ + { + field: 'source.ip', + operator: 'included', + type: 'list', + list: { + id: 'ci-badguys.txt', + type: 'ip', + }, + }, + ]; const sampleParams = sampleRuleAlertParams(10); 
mockService.callCluster .mockResolvedValueOnce(repeatedSearchResultsWithSortId(4, 1, someGuids.slice(0, 3))) .mockRejectedValue(new Error('bulk failed')); // Added this recently const { success, createdSignalsCount, lastLookBackDate } = await searchAfterAndBulkCreate({ listClient, - exceptionsList: [ - { - field: 'source.ip', - values_operator: 'included', - values_type: 'list', - values: [ - { - id: 'ci-badguys.txt', - name: 'ip', - }, - ], - }, - ], + exceptionsList: [exceptionItem], ruleParams: sampleParams, services: mockService, logger: mockLogger, @@ -327,6 +329,18 @@ describe('searchAfterAndBulkCreate', () => { }); test('should return success with 0 total hits', async () => { + const exceptionItem = getExceptionListItemSchemaMock(); + exceptionItem.entries = [ + { + field: 'source.ip', + operator: 'included', + type: 'list', + list: { + id: 'ci-badguys.txt', + type: 'ip', + }, + }, + ]; const sampleParams = sampleRuleAlertParams(); mockService.callCluster.mockResolvedValueOnce(sampleEmptyDocSearchResults()); listClient.getListItemByValues = jest.fn(({ value }) => @@ -339,19 +353,7 @@ describe('searchAfterAndBulkCreate', () => { ); const { success, createdSignalsCount, lastLookBackDate } = await searchAfterAndBulkCreate({ listClient, - exceptionsList: [ - { - field: 'source.ip', - values_operator: 'included', - values_type: 'list', - values: [ - { - id: 'ci-badguys.txt', - name: 'ip', - }, - ], - }, - ], + exceptionsList: [exceptionItem], ruleParams: sampleParams, services: mockService, logger: mockLogger, @@ -405,21 +407,21 @@ describe('searchAfterAndBulkCreate', () => { })) ) ); + const exceptionItem = getExceptionListItemSchemaMock(); + exceptionItem.entries = [ + { + field: 'source.ip', + operator: 'included', + type: 'list', + list: { + id: 'ci-badguys.txt', + type: 'ip', + }, + }, + ]; const { success, createdSignalsCount, lastLookBackDate } = await searchAfterAndBulkCreate({ listClient, - exceptionsList: [ - { - field: 'source.ip', - values_operator: 'included', - values_type: 'list', - values: [ - { - id: 'ci-badguys.txt', - name: 'ip', - }, - ], - }, - ], + exceptionsList: [exceptionItem], ruleParams: sampleParams, services: mockService, logger: mockLogger, diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/signals/search_after_bulk_create.ts b/x-pack/plugins/security_solution/server/lib/detection_engine/signals/search_after_bulk_create.ts index 65679dc23e64..747525712155 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/signals/search_after_bulk_create.ts +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/signals/search_after_bulk_create.ts @@ -4,7 +4,6 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { ListAndOrUndefined } from '../../../../common/detection_engine/schemas/common/schemas'; import { AlertServices } from '../../../../../alerts/server'; import { ListClient } from '../../../../../lists/server'; import { RuleAlertAction } from '../../../../common/detection_engine/types'; @@ -14,12 +13,13 @@ import { singleSearchAfter } from './single_search_after'; import { singleBulkCreate } from './single_bulk_create'; import { SignalSearchResponse } from './types'; import { filterEventsAgainstList } from './filter_events_with_list'; +import { ExceptionListItemSchema } from '../../../../../lists/common/schemas'; interface SearchAfterAndBulkCreateParams { ruleParams: RuleTypeParams; services: AlertServices; listClient: ListClient | undefined; // TODO: undefined is for temporary development, remove before merged - exceptionsList: ListAndOrUndefined; + exceptionsList: ExceptionListItemSchema[]; logger: Logger; id: string; inputIndexPattern: string[]; diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/signals/signal_rule_alert_type.test.ts b/x-pack/plugins/security_solution/server/lib/detection_engine/signals/signal_rule_alert_type.test.ts index 23c2d6068c09..5832b4075a40 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/signals/signal_rule_alert_type.test.ts +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/signals/signal_rule_alert_type.test.ts @@ -10,7 +10,7 @@ import { getResult, getMlResult } from '../routes/__mocks__/request_responses'; import { signalRulesAlertType } from './signal_rule_alert_type'; import { alertsMock, AlertServicesMock } from '../../../../../alerts/server/mocks'; import { ruleStatusServiceFactory } from './rule_status_service'; -import { getGapBetweenRuns } from './utils'; +import { getGapBetweenRuns, getListsClient, getExceptions, sortExceptionItems } from './utils'; import { RuleExecutorOptions } from './types'; import { searchAfterAndBulkCreate } from './search_after_bulk_create'; import { scheduleNotificationActions } from '../notifications/schedule_notification_actions'; @@ -18,6 +18,9 @@ import { RuleAlertType } from '../rules/types'; import { findMlSignals } from './find_ml_signals'; import { bulkCreateMlSignals } from './bulk_create_ml_signals'; import { listMock } from '../../../../../lists/server/mocks'; +import { getListClientMock } from '../../../../../lists/server/services/lists/list_client.mock'; +import { getExceptionListClientMock } from '../../../../../lists/server/services/exception_lists/exception_list_client.mock'; +import { getExceptionListItemSchemaMock } from '../../../../../lists/common/schemas/response/exception_list_item_schema.mock'; jest.mock('./rule_status_saved_objects_client'); jest.mock('./rule_status_service'); @@ -84,6 +87,15 @@ describe('rules_notification_alert_type', () => { }; (ruleStatusServiceFactory as jest.Mock).mockReturnValue(ruleStatusService); (getGapBetweenRuns as jest.Mock).mockReturnValue(moment.duration(0)); + (getListsClient as jest.Mock).mockReturnValue({ + listClient: getListClientMock(), + exceptionsClient: getExceptionListClientMock(), + }); + (getExceptions as jest.Mock).mockReturnValue([getExceptionListItemSchemaMock()]); + (sortExceptionItems as jest.Mock).mockReturnValue({ + exceptionsWithoutValueLists: [getExceptionListItemSchemaMock()], + exceptionsWithValueLists: [], + }); (searchAfterAndBulkCreate as jest.Mock).mockClear(); (searchAfterAndBulkCreate as jest.Mock).mockResolvedValue({ success: true, diff --git 
a/x-pack/plugins/security_solution/server/lib/detection_engine/signals/signal_rule_alert_type.ts b/x-pack/plugins/security_solution/server/lib/detection_engine/signals/signal_rule_alert_type.ts index 728bd66b7d65..1bf27dc6e26b 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/signals/signal_rule_alert_type.ts +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/signals/signal_rule_alert_type.ts @@ -15,9 +15,6 @@ import { } from '../../../../common/constants'; import { isJobStarted, isMlRule } from '../../../../common/machine_learning/helpers'; import { SetupPlugins } from '../../../plugin'; - -import { ListClient } from '../../../../../lists/server'; - import { getInputIndex } from './get_input_output_index'; import { searchAfterAndBulkCreate, @@ -25,7 +22,7 @@ import { } from './search_after_bulk_create'; import { getFilter } from './get_filter'; import { SignalRuleAlertTypeDefinition, RuleAlertAttributes } from './types'; -import { getGapBetweenRuns, parseScheduleDates } from './utils'; +import { getGapBetweenRuns, parseScheduleDates, getListsClient, getExceptions } from './utils'; import { signalParamsSchema } from './signal_params_schema'; import { siemRuleActionGroups } from './siem_rule_action_groups'; import { findMlSignals } from './find_ml_signals'; @@ -38,7 +35,6 @@ import { ruleStatusServiceFactory } from './rule_status_service'; import { buildRuleMessageFactory } from './rule_messages'; import { ruleStatusSavedObjectsClientFactory } from './rule_status_saved_objects_client'; import { getNotificationResultsLink } from '../notifications/utils'; -import { hasListsFeature } from '../feature_flags'; export const signalRulesAlertType = ({ logger, @@ -140,6 +136,18 @@ export const signalRulesAlertType = ({ await ruleStatusService.error(gapMessage, { gap: gapString }); } try { + const { listClient, exceptionsClient } = await getListsClient({ + services, + updatedByUser, + spaceId, + lists, + savedObjectClient: services.savedObjectsClient, + }); + const exceptionItems = await getExceptions({ + client: exceptionsClient, + lists: exceptionsList, + }); + if (isMlRule(type)) { if (ml == null) { throw new Error('ML plugin unavailable during rule execution'); @@ -214,18 +222,6 @@ export const signalRulesAlertType = ({ result.bulkCreateTimes.push(bulkCreateDuration); } } else { - let listClient: ListClient | undefined; - if (hasListsFeature()) { - if (lists == null) { - throw new Error('lists plugin unavailable during rule execution'); - } - listClient = await lists.getListClient( - services.callCluster, - spaceId, - updatedByUser ?? 'elastic' - ); - } - const inputIndex = await getInputIndex(services, version, index); const esFilter = await getFilter({ type, @@ -235,13 +231,12 @@ export const signalRulesAlertType = ({ savedId, services, index: inputIndex, - // temporary filter out list type - lists: exceptionsList?.filter((item) => item.values_type !== 'list'), + lists: exceptionItems ?? [], }); result = await searchAfterAndBulkCreate({ listClient, - exceptionsList, + exceptionsList: exceptionItems ?? 
[], ruleParams: params, services, logger, diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/signals/utils.test.ts b/x-pack/plugins/security_solution/server/lib/detection_engine/signals/utils.test.ts index f74694df613c..24c2d24ee972 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/signals/utils.test.ts +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/signals/utils.test.ts @@ -7,6 +7,12 @@ import moment from 'moment'; import sinon from 'sinon'; +import { alertsMock, AlertServicesMock } from '../../../../../alerts/server/mocks'; +import { listMock } from '../../../../../lists/server/mocks'; +import { EntriesArray } from '../../../../common/detection_engine/lists_common_deps'; + +import * as featureFlags from '../feature_flags'; + import { generateId, parseInterval, @@ -14,10 +20,10 @@ import { getDriftTolerance, getGapBetweenRuns, errorAggregator, + getListsClient, + hasLargeValueList, } from './utils'; - import { BulkResponseErrorAggregation } from './types'; - import { sampleBulkResponse, sampleEmptyBulkResponse, @@ -529,4 +535,107 @@ describe('utils', () => { expect(aggregated).toEqual(expected); }); }); + + describe('#getListsClient', () => { + let alertServices: AlertServicesMock; + + beforeEach(() => { + alertServices = alertsMock.createAlertServices(); + }); + + afterEach(() => { + jest.clearAllMocks(); + }); + + test('it successfully returns list and exceptions list client', async () => { + jest.spyOn(featureFlags, 'hasListsFeature').mockReturnValue(true); + + const { listClient, exceptionsClient } = await getListsClient({ + services: alertServices, + savedObjectClient: alertServices.savedObjectsClient, + updatedByUser: 'some_user', + spaceId: '', + lists: listMock.createSetup(), + }); + + expect(listClient).toBeDefined(); + expect(exceptionsClient).toBeDefined(); + }); + + test('it returns list and exceptions client of "undefined" if lists feature flag is off', async () => { + jest.spyOn(featureFlags, 'hasListsFeature').mockReturnValue(false); + + const listsClient = await getListsClient({ + services: alertServices, + savedObjectClient: alertServices.savedObjectsClient, + updatedByUser: 'some_user', + spaceId: '', + lists: listMock.createSetup(), + }); + + expect(listsClient).toEqual({ listClient: undefined, exceptionsClient: undefined }); + }); + + test('it throws if "lists" is undefined', async () => { + jest.spyOn(featureFlags, 'hasListsFeature').mockReturnValue(true); + + await expect(() => + getListsClient({ + services: alertServices, + savedObjectClient: alertServices.savedObjectsClient, + updatedByUser: 'some_user', + spaceId: '', + lists: undefined, + }) + ).rejects.toThrowError('lists plugin unavailable during rule execution'); + }); + }); + + describe('#hasLargeValueList', () => { + test('it returns false if empty array', () => { + const hasLists = hasLargeValueList([]); + + expect(hasLists).toBeFalsy(); + }); + + test('it returns true if item of type EntryList exists', () => { + const entries: EntriesArray = [ + { + field: 'actingProcess.file.signer', + type: 'list', + operator: 'included', + list: { id: 'some id', type: 'ip' }, + }, + { + field: 'file.signature.signer', + type: 'match', + operator: 'excluded', + value: 'Global Signer', + }, + ]; + const hasLists = hasLargeValueList(entries); + + expect(hasLists).toBeTruthy(); + }); + + test('it returns false if item of type EntryList does not exist', () => { + const entries: EntriesArray = [ + { + field: 'actingProcess.file.signer', + type: 'match', + 
operator: 'included', + value: 'Elastic, N.V.', + }, + { + field: 'file.signature.signer', + type: 'match', + operator: 'excluded', + value: 'Global Signer', + }, + ]; + const hasLists = hasLargeValueList(entries); + + expect(hasLists).toBeFalsy(); + }); + }); }); diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/signals/utils.ts b/x-pack/plugins/security_solution/server/lib/detection_engine/signals/utils.ts index f0ca08b73fac..e431e65fad62 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/signals/utils.ts +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/signals/utils.ts @@ -7,9 +7,125 @@ import { createHash } from 'crypto'; import moment from 'moment'; import dateMath from '@elastic/datemath'; -import { parseDuration } from '../../../../../alerts/server'; +import { SavedObjectsClientContract } from '../../../../../../../src/core/server'; +import { AlertServices, parseDuration } from '../../../../../alerts/server'; +import { ExceptionListClient, ListClient, ListPluginSetup } from '../../../../../lists/server'; +import { EntriesArray, ExceptionListItemSchema } from '../../../../../lists/common/schemas'; +import { ListArrayOrUndefined } from '../../../../common/detection_engine/schemas/types/lists'; +import { hasListsFeature } from '../feature_flags'; import { BulkResponse, BulkResponseErrorAggregation } from './types'; +interface SortExceptionsReturn { + exceptionsWithValueLists: ExceptionListItemSchema[]; + exceptionsWithoutValueLists: ExceptionListItemSchema[]; +} + +export const getListsClient = async ({ + lists, + spaceId, + updatedByUser, + services, + savedObjectClient, +}: { + lists: ListPluginSetup | undefined; + spaceId: string; + updatedByUser: string | null; + services: AlertServices; + savedObjectClient: SavedObjectsClientContract; +}): Promise<{ + listClient: ListClient | undefined; + exceptionsClient: ExceptionListClient | undefined; +}> => { + // TODO Remove check once feature is no longer behind flag + if (hasListsFeature()) { + if (lists == null) { + throw new Error('lists plugin unavailable during rule execution'); + } + + const listClient = await lists.getListClient( + services.callCluster, + spaceId, + updatedByUser ?? 'elastic' + ); + const exceptionsClient = await lists.getExceptionListClient( + savedObjectClient, + updatedByUser ?? 'elastic' + ); + + return { listClient, exceptionsClient }; + } else { + return { listClient: undefined, exceptionsClient: undefined }; + } +}; + +export const hasLargeValueList = (entries: EntriesArray): boolean => { + const found = entries.filter(({ type }) => type === 'list'); + return found.length > 0; +}; + +export const getExceptions = async ({ + client, + lists, +}: { + client: ExceptionListClient | undefined; + lists: ListArrayOrUndefined; +}): Promise => { + // TODO Remove check once feature is no longer behind flag + if (hasListsFeature()) { + if (client == null) { + throw new Error('lists plugin unavailable during rule execution'); + } + + if (lists != null) { + try { + // Gather all exception items of all exception lists linked to rule + const exceptions = await Promise.all( + lists + .map(async (list) => { + const { id, namespace_type: namespaceType } = list; + const items = await client.findExceptionListItem({ + listId: id, + namespaceType, + page: 1, + perPage: 5000, + filter: undefined, + sortOrder: undefined, + sortField: undefined, + }); + return items != null ? 
items.data : []; + }) + .flat() + ); + return exceptions.flat(); + } catch { + return []; + } + } + } +}; + +export const sortExceptionItems = (exceptions: ExceptionListItemSchema[]): SortExceptionsReturn => { + return exceptions.reduce( + (acc, exception) => { + const { entries } = exception; + const { exceptionsWithValueLists, exceptionsWithoutValueLists } = acc; + + if (hasLargeValueList(entries)) { + return { + exceptionsWithValueLists: [...exceptionsWithValueLists, { ...exception }], + exceptionsWithoutValueLists, + }; + } else { + return { + exceptionsWithValueLists, + exceptionsWithoutValueLists: [...exceptionsWithoutValueLists, { ...exception }], + }; + } + }, + { exceptionsWithValueLists: [], exceptionsWithoutValueLists: [] } + ); +}; + export const generateId = ( docIndex: string, docId: string, diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/types.ts b/x-pack/plugins/security_solution/server/lib/detection_engine/types.ts index 6e284908e335..90484a46dc6d 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/types.ts +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/types.ts @@ -28,11 +28,11 @@ import { Version, MetaOrUndefined, RuleId, - ListAndOrUndefined, } from '../../../common/detection_engine/schemas/common/schemas'; import { CallAPIOptions } from '../../../../../../src/core/server'; import { Filter } from '../../../../../../src/plugins/data/server'; import { RuleType } from '../../../common/detection_engine/types'; +import { ListArrayOrUndefined } from '../../../common/detection_engine/schemas/types'; export type PartialFilter = Partial; @@ -62,7 +62,7 @@ export interface RuleTypeParams { type: RuleType; references: References; version: Version; - exceptionsList: ListAndOrUndefined; + exceptionsList: ListArrayOrUndefined; } // eslint-disable-next-line @typescript-eslint/no-explicit-any diff --git a/x-pack/plugins/security_solution/server/plugin.ts b/x-pack/plugins/security_solution/server/plugin.ts index 9fe7307e8cb6..879c132ddec5 100644 --- a/x-pack/plugins/security_solution/server/plugin.ts +++ b/x-pack/plugins/security_solution/server/plugin.ts @@ -219,7 +219,9 @@ export class Plugin implements IPlugin { before(() => esArchiver.load('empty_kibana')); after(() => esArchiver.unload('empty_kibana')); - it('should return buckets when the results index exists with matching documents', async () => { + it('should return buckets when there are matching ml result documents', async () => { const { body } = await supertest .post(LOG_ANALYSIS_GET_LOG_ENTRY_RATE_PATH) .set(COMMON_HEADERS) @@ -68,7 +68,7 @@ export default ({ getService }: FtrProviderContext) => { ).to.be(true); }); - it('should return no buckets when the results index exists without matching documents', async () => { + it('should return no buckets when there are no matching ml result documents', async () => { const { body } = await supertest .post(LOG_ANALYSIS_GET_LOG_ENTRY_RATE_PATH) .set(COMMON_HEADERS) @@ -78,7 +78,7 @@ export default ({ getService }: FtrProviderContext) => { sourceId: 'default', timeRange: { startTime: TIME_BEFORE_START - 10 * 15 * 60 * 1000, - endTime: TIME_BEFORE_START, + endTime: TIME_BEFORE_START - 1, }, bucketDuration: 15 * 60 * 1000, }, @@ -94,25 +94,6 @@ export default ({ getService }: FtrProviderContext) => { expect(logEntryRateBuckets.data.bucketDuration).to.be(15 * 60 * 1000); expect(logEntryRateBuckets.data.histogramBuckets).to.be.empty(); }); - - it('should return a NotFound error when the results index does not exist', 
async () => { - await supertest - .post(LOG_ANALYSIS_GET_LOG_ENTRY_RATE_PATH) - .set(COMMON_HEADERS) - .send( - getLogEntryRateRequestPayloadRT.encode({ - data: { - sourceId: 'does-not-exist', - timeRange: { - startTime: TIME_BEFORE_START, - endTime: TIME_AFTER_END, - }, - bucketDuration: 15 * 60 * 1000, - }, - }) - ) - .expect(404); - }); }); }); }); diff --git a/x-pack/test/functional/apps/ml/data_frame_analytics/cloning.ts b/x-pack/test/functional/apps/ml/data_frame_analytics/cloning.ts index 357ea3621352..525e25d0158b 100644 --- a/x-pack/test/functional/apps/ml/data_frame_analytics/cloning.ts +++ b/x-pack/test/functional/apps/ml/data_frame_analytics/cloning.ts @@ -156,25 +156,45 @@ export default function ({ getService }: FtrProviderContext) { await ml.testResources.deleteIndexPatternByTitle(testData.job.dest!.index as string); }); - it('should open the flyout with a proper header', async () => { - expect(await ml.dataFrameAnalyticsCreation.getHeaderText()).to.be( - `Clone job from ${testData.job.id}` + it('should open the wizard with a proper header', async () => { + expect(await ml.dataFrameAnalyticsCreation.getHeaderText()).to.match( + /Clone analytics job/ ); }); - it('should have correct init form values', async () => { - await ml.dataFrameAnalyticsCreation.assertInitialCloneJobForm( + it('should have correct init form values for config step', async () => { + await ml.dataFrameAnalyticsCreation.assertInitialCloneJobConfigStep( testData.job as DataFrameAnalyticsConfig ); }); - it('should have disabled Create button on open', async () => { - expect(await ml.dataFrameAnalyticsCreation.isCreateButtonDisabled()).to.be(true); + it('should continue to the additional options step', async () => { + await ml.dataFrameAnalyticsCreation.continueToAdditionalOptionsStep(); }); - it('should enable Create button on a valid form input', async () => { + it('should have correct init form values for additional options step', async () => { + await ml.dataFrameAnalyticsCreation.assertInitialCloneJobAdditionalOptionsStep( + testData.job as DataFrameAnalyticsConfig + ); + }); + + it('should continue to the details step', async () => { + await ml.dataFrameAnalyticsCreation.continueToDetailsStep(); + }); + + it('should have correct init form values for details step', async () => { + await ml.dataFrameAnalyticsCreation.assertInitialCloneJobDetailsStep( + testData.job as DataFrameAnalyticsConfig + ); await ml.dataFrameAnalyticsCreation.setJobId(cloneJobId); await ml.dataFrameAnalyticsCreation.setDestIndex(cloneDestIndex); + }); + + it('should continue to the create step', async () => { + await ml.dataFrameAnalyticsCreation.continueToCreateStep(); + }); + + it('should have enabled Create button on a valid form input', async () => { expect(await ml.dataFrameAnalyticsCreation.isCreateButtonDisabled()).to.be(false); }); @@ -182,11 +202,12 @@ export default function ({ getService }: FtrProviderContext) { await ml.dataFrameAnalyticsCreation.createAnalyticsJob(cloneJobId); }); - it('finishes analytics processing', async () => { + it('should finish analytics processing', async () => { await ml.dataFrameAnalytics.waitForAnalyticsCompletion(cloneJobId); }); - it('displays the created job in the analytics table', async () => { + it('should display the created job in the analytics table', async () => { + await ml.dataFrameAnalyticsCreation.navigateToJobManagementPage(); await ml.dataFrameAnalyticsTable.refreshAnalyticsTable(); await ml.dataFrameAnalyticsTable.filterWithSearchString(cloneJobId); const rows = await 
ml.dataFrameAnalyticsTable.parseAnalyticsTable(); diff --git a/x-pack/test/functional/services/ml/data_frame_analytics_creation.ts b/x-pack/test/functional/services/ml/data_frame_analytics_creation.ts index 081eb8775fa5..f67ea583e25c 100644 --- a/x-pack/test/functional/services/ml/data_frame_analytics_creation.ts +++ b/x-pack/test/functional/services/ml/data_frame_analytics_creation.ts @@ -124,37 +124,15 @@ export function MachineLearningDataFrameAnalyticsCreationProvider( await this.assertJobDescriptionValue(jobDescription); }, - async assertSourceIndexInputExists() { - await testSubjects.existOrFail('mlAnalyticsCreateJobFlyoutSourceIndexSelect > comboBoxInput'); - }, - - async assertSourceIndexSelection(expectedSelection: string[]) { - const actualSelection = await comboBox.getComboBoxSelectedOptions( - 'mlAnalyticsCreateJobFlyoutSourceIndexSelect > comboBoxInput' - ); - expect(actualSelection).to.eql( - expectedSelection, - `Source index should be '${expectedSelection}' (got '${actualSelection}')` - ); - }, - - async assertExcludedFieldsSelection(expectedSelection: string[]) { - const actualSelection = await comboBox.getComboBoxSelectedOptions( - 'mlAnalyticsCreateJobFlyoutExcludesSelect > comboBoxInput' - ); - expect(actualSelection).to.eql( - expectedSelection, - `Excluded fields should be '${expectedSelection}' (got '${actualSelection}')` - ); - }, - - async selectSourceIndex(sourceIndex: string) { - await comboBox.set( - 'mlAnalyticsCreateJobFlyoutSourceIndexSelect > comboBoxInput', - sourceIndex - ); - await this.assertSourceIndexSelection([sourceIndex]); - }, + // async assertExcludedFieldsSelection(expectedSelection: string[]) { + // const actualSelection = await comboBox.getComboBoxSelectedOptions( + // 'mlAnalyticsCreateJobWizardExcludesSelect' + // ); + // expect(actualSelection).to.eql( + // expectedSelection, + // `Excluded fields should be '${expectedSelection}' (got '${actualSelection}')` + // ); + // }, async assertDestIndexInputExists() { await testSubjects.existOrFail('mlAnalyticsCreateJobFlyoutDestinationIndexInput'); @@ -384,24 +362,29 @@ export function MachineLearningDataFrameAnalyticsCreationProvider( }, async getHeaderText() { - return await testSubjects.getVisibleText('mlDataFrameAnalyticsFlyoutHeaderTitle'); + return await testSubjects.getVisibleText('mlDataFrameAnalyticsWizardHeaderTitle'); }, - async assertInitialCloneJobForm(job: DataFrameAnalyticsConfig) { + async assertInitialCloneJobConfigStep(job: DataFrameAnalyticsConfig) { const jobType = Object.keys(job.analysis)[0]; await this.assertJobTypeSelection(jobType); - await this.assertJobIdValue(''); // id should be empty - await this.assertJobDescriptionValue(String(job.description)); - await this.assertSourceIndexSelection(job.source.index as string[]); - await this.assertDestIndexValue(''); // destination index should be empty if (isClassificationAnalysis(job.analysis) || isRegressionAnalysis(job.analysis)) { await this.assertDependentVariableSelection([job.analysis[jobType].dependent_variable]); await this.assertTrainingPercentValue(String(job.analysis[jobType].training_percent)); } - await this.assertExcludedFieldsSelection(job.analyzed_fields.excludes); + // await this.assertExcludedFieldsSelection(job.analyzed_fields.excludes); + }, + + async assertInitialCloneJobAdditionalOptionsStep(job: DataFrameAnalyticsConfig) { await this.assertModelMemoryValue(job.model_memory_limit); }, + async assertInitialCloneJobDetailsStep(job: DataFrameAnalyticsConfig) { + await this.assertJobIdValue(''); // id should be 
empty + await this.assertJobDescriptionValue(String(job.description)); + await this.assertDestIndexValue(''); // destination index should be empty + }, + async assertCreationCalloutMessagesExist() { await testSubjects.existOrFail('analyticsWizardCreationCallout_0'); await testSubjects.existOrFail('analyticsWizardCreationCallout_1'); diff --git a/x-pack/test/functional/services/ml/data_frame_analytics_table.ts b/x-pack/test/functional/services/ml/data_frame_analytics_table.ts index 60507f5ab333..f452c9cce7a1 100644 --- a/x-pack/test/functional/services/ml/data_frame_analytics_table.ts +++ b/x-pack/test/functional/services/ml/data_frame_analytics_table.ts @@ -126,7 +126,7 @@ export function MachineLearningDataFrameAnalyticsTableProvider({ getService }: F public async cloneJob(analyticsId: string) { await this.openRowActions(analyticsId); await testSubjects.click(`mlAnalyticsJobCloneButton`); - await testSubjects.existOrFail('mlAnalyticsCreateJobFlyout'); + await testSubjects.existOrFail('mlAnalyticsCreationContainer'); } })(); } diff --git a/x-pack/tsconfig.json b/x-pack/tsconfig.json index 306294c57b3c..e978702a3563 100644 --- a/x-pack/tsconfig.json +++ b/x-pack/tsconfig.json @@ -14,6 +14,7 @@ "test/**/*", "plugins/security_solution/cypress/**/*", "plugins/apm/e2e/cypress/**/*", + "plugins/apm/scripts/**/*", "**/typespec_tests.ts" ], "compilerOptions": { diff --git a/yarn.lock b/yarn.lock index b600ccb75c9f..bb13ee8105e0 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2111,6 +2111,15 @@ debug "^3.1.0" lodash.once "^4.1.1" +"@dabh/diagnostics@^2.0.2": + version "2.0.2" + resolved "https://registry.yarnpkg.com/@dabh/diagnostics/-/diagnostics-2.0.2.tgz#290d08f7b381b8f94607dc8f471a12c675f9db31" + integrity sha512-+A1YivoVDNNVCdfozHSR8v/jyuuLTMXwjWuxPFlFlUapXoGc+Gj9mDlTDDfrwl7rXCl2tNZ0kE8sIBO6YOn96Q== + dependencies: + colorspace "1.1.x" + enabled "2.0.x" + kuler "^2.0.0" + "@elastic/apm-rum-core@^5.3.0": version "5.3.0" resolved "https://registry.yarnpkg.com/@elastic/apm-rum-core/-/apm-rum-core-5.3.0.tgz#3ae5e84eba5b5287b92458a49755f6e39e7bba5b" @@ -8478,16 +8487,16 @@ backo2@1.0.2: resolved "https://registry.yarnpkg.com/backo2/-/backo2-1.0.2.tgz#31ab1ac8b129363463e35b3ebb69f4dfcfba7947" integrity sha1-MasayLEpNjRj41s+u2n038+6eUc= -backport@5.4.1: - version "5.4.1" - resolved "https://registry.yarnpkg.com/backport/-/backport-5.4.1.tgz#b066e8bbece91bc813187c13b7bea69ef5355471" - integrity sha512-vFR5Juss2pveS2OyyoE5n14j7ZDqeZXakzv4KngTEUTsb+5r/AVj2OG8LfJ14RJBMKBYSf1ojSKgDiWtUi0r+w== +backport@5.4.6: + version "5.4.6" + resolved "https://registry.yarnpkg.com/backport/-/backport-5.4.6.tgz#8d8d8cb7c0df4079a40c6f4892f393daa92c1ef8" + integrity sha512-O3fFmQXKZN5sP6R6GwXeobsEgoFzvnuTGj8/TTTjxt1xA07pfhTY67M16rr0eiDDtuSxAqWMX9Zo+5Q3DuxfpQ== dependencies: axios "^0.19.2" dedent "^0.7.0" del "^5.1.0" find-up "^4.1.0" - inquirer "^7.1.0" + inquirer "^7.2.0" lodash.flatmap "^4.5.0" lodash.isempty "^4.4.0" lodash.isstring "^4.0.1" @@ -8496,7 +8505,7 @@ backport@5.4.1: ora "^4.0.4" safe-json-stringify "^1.2.0" strip-json-comments "^3.1.0" - winston "^3.2.1" + winston "^3.3.3" yargs "^15.3.1" bail@^1.0.0: @@ -12992,6 +13001,11 @@ enabled@1.0.x: dependencies: env-variable "0.0.x" +enabled@2.0.x: + version "2.0.0" + resolved "https://registry.yarnpkg.com/enabled/-/enabled-2.0.0.tgz#f9dd92ec2d6f4bbc0d5d1e64e21d61cd4665e7c2" + integrity sha512-AKrN98kuwOzMIdAizXGI86UFBoo26CL21UM763y1h/GMSJ4/OHU9k2YlsmBpyScFo/wbLzWQJBMCW4+IO3/+OQ== + encodeurl@^1.0.2, encodeurl@~1.0.1, encodeurl@~1.0.2: version "1.0.2" resolved 
"https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" @@ -14499,6 +14513,11 @@ fecha@^2.3.3: resolved "https://registry.yarnpkg.com/fecha/-/fecha-2.3.3.tgz#948e74157df1a32fd1b12c3a3c3cdcb6ec9d96cd" integrity sha512-lUGBnIamTAwk4znq5BcqsDaxSmZ9nDVJaij6NvRt/Tg4R69gERA+otPKbS86ROw9nxVMw2/mp1fnaiWqbs6Sdg== +fecha@^4.2.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/fecha/-/fecha-4.2.0.tgz#3ffb6395453e3f3efff850404f0a59b6747f5f41" + integrity sha512-aN3pcx/DSmtyoovUudctc8+6Hl4T+hI9GBBHLjA76jdZl7+b1sgh5g4k+u/GL3dTy1/pnYzKp69FpJ0OicE3Wg== + fetch-mock@^7.3.9: version "7.3.9" resolved "https://registry.yarnpkg.com/fetch-mock/-/fetch-mock-7.3.9.tgz#a80fd2a1728f72e0634ef7a9734bc61200096487" @@ -14909,6 +14928,11 @@ fmin@0.0.2: tape "^4.5.1" uglify-js "^2.6.2" +fn.name@1.x.x: + version "1.1.0" + resolved "https://registry.yarnpkg.com/fn.name/-/fn.name-1.1.0.tgz#26cad8017967aea8731bc42961d04a3d5988accc" + integrity sha512-GRnmB5gPyJpAhTQdSZTSp9uaPSvl09KoYcMQtsB9rQoOmzs9dH6ffeccH+Z+cv6P68Hu5bC6JjRh4Ah/mHSNRw== + focus-lock@^0.5.2: version "0.5.4" resolved "https://registry.yarnpkg.com/focus-lock/-/focus-lock-0.5.4.tgz#537644d61b9e90fd97075aa680b8add1de24e819" @@ -17816,10 +17840,10 @@ inquirer@^7.0.0: strip-ansi "^5.1.0" through "^2.3.6" -inquirer@^7.1.0: - version "7.1.0" - resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-7.1.0.tgz#1298a01859883e17c7264b82870ae1034f92dd29" - integrity sha512-5fJMWEmikSYu0nv/flMc475MhGbB7TSPd/2IpFV4I4rMklboCH2rQjYY5kKiYGHqUF9gvaambupcJFFG9dvReg== +inquirer@^7.2.0: + version "7.2.0" + resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-7.2.0.tgz#63ce99d823090de7eb420e4bb05e6f3449aa389a" + integrity sha512-E0c4rPwr9ByePfNlTIB8z51kK1s2n6jrHuJeEHENl/sbq2G/S1auvibgEwNR4uSyiU+PiYHqSwsgGiXjG8p5ZQ== dependencies: ansi-escapes "^4.2.1" chalk "^3.0.0" @@ -20116,6 +20140,11 @@ kuler@1.0.x: dependencies: colornames "^1.1.1" +kuler@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/kuler/-/kuler-2.0.0.tgz#e2c570a3800388fb44407e851531c1d670b061b3" + integrity sha512-Xq9nH7KlWZmXAtodXDDRE7vs6DU1gTU8zYDHDiWLSip45Egwq3plLHzPn27NgvzL2r1LMPC1vdqh98sQxtqj4A== + last-run@^1.1.0: version "1.1.1" resolved "https://registry.yarnpkg.com/last-run/-/last-run-1.1.1.tgz#45b96942c17b1c79c772198259ba943bebf8ca5b" @@ -20954,6 +20983,17 @@ logform@^2.1.1: ms "^2.1.1" triple-beam "^1.3.0" +logform@^2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/logform/-/logform-2.2.0.tgz#40f036d19161fc76b68ab50fdc7fe495544492f2" + integrity sha512-N0qPlqfypFx7UHNn4B3lzS/b0uLqt2hmuoa+PpuXNYgozdJYAyauF5Ky0BWVjrxDlMWiT3qN4zPq3vVAfZy7Yg== + dependencies: + colors "^1.2.1" + fast-safe-stringify "^2.0.4" + fecha "^4.2.0" + ms "^2.1.1" + triple-beam "^1.3.0" + loglevel@^1.6.4: version "1.6.4" resolved "https://registry.yarnpkg.com/loglevel/-/loglevel-1.6.4.tgz#f408f4f006db8354d0577dcf6d33485b3cb90d56" @@ -23186,6 +23226,13 @@ one-time@0.0.4: resolved "https://registry.yarnpkg.com/one-time/-/one-time-0.0.4.tgz#f8cdf77884826fe4dff93e3a9cc37b1e4480742e" integrity sha1-+M33eISCb+Tf+T46nMN7HkSAdC4= +one-time@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/one-time/-/one-time-1.0.0.tgz#e06bc174aed214ed58edede573b433bbf827cb45" + integrity sha512-5DXOiRKwuSEcQ/l0kGCF6Q3jcADFv5tSmRaJck/OqkVFcOzutB134KRSfF0xDrL39MNnqxbHBbUUcjZIhTgb2g== + dependencies: + fn.name "1.x.x" + onetime@^1.0.0: version "1.1.0" resolved 
"https://registry.npmjs.org/onetime/-/onetime-1.1.0.tgz#a1f7838f8314c516f05ecefcbc4ccfe04b4ed789" @@ -26172,7 +26219,7 @@ read-pkg@^5.1.1, read-pkg@^5.2.0: parse-json "^5.0.0" type-fest "^0.6.0" -"readable-stream@1 || 2", readable-stream@~2.3.3: +"readable-stream@1 || 2", readable-stream@^2.3.7, readable-stream@~2.3.3: version "2.3.7" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57" integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw== @@ -32875,6 +32922,14 @@ winston-transport@^4.3.0: readable-stream "^2.3.6" triple-beam "^1.2.0" +winston-transport@^4.4.0: + version "4.4.0" + resolved "https://registry.yarnpkg.com/winston-transport/-/winston-transport-4.4.0.tgz#17af518daa690d5b2ecccaa7acf7b20ca7925e59" + integrity sha512-Lc7/p3GtqtqPBYYtS6KCN3c77/2QCev51DvcJKbkFPQNoj1sinkGwLGFDxkXY9J6p9+EPnYs+D90uwbnaiURTw== + dependencies: + readable-stream "^2.3.7" + triple-beam "^1.2.0" + winston@3.2.1, winston@^3.0.0, winston@^3.2.1: version "3.2.1" resolved "https://registry.yarnpkg.com/winston/-/winston-3.2.1.tgz#63061377976c73584028be2490a1846055f77f07" @@ -32890,6 +32945,21 @@ winston@3.2.1, winston@^3.0.0, winston@^3.2.1: triple-beam "^1.3.0" winston-transport "^4.3.0" +winston@^3.3.3: + version "3.3.3" + resolved "https://registry.yarnpkg.com/winston/-/winston-3.3.3.tgz#ae6172042cafb29786afa3d09c8ff833ab7c9170" + integrity sha512-oEXTISQnC8VlSAKf1KYSSd7J6IWuRPQqDdo8eoRNaYKLvwSb5+79Z3Yi1lrl6KDpU6/VWaxpakDAtb1oQ4n9aw== + dependencies: + "@dabh/diagnostics" "^2.0.2" + async "^3.1.0" + is-stream "^2.0.0" + logform "^2.2.0" + one-time "^1.0.0" + readable-stream "^3.4.0" + stack-trace "0.0.x" + triple-beam "^1.3.0" + winston-transport "^4.4.0" + with@^5.0.0: version "5.1.1" resolved "https://registry.yarnpkg.com/with/-/with-5.1.1.tgz#fa4daa92daf32c4ea94ed453c81f04686b575dfe"