From 5f844bfb6a39e17010d8a331ea5b0338693b5f36 Mon Sep 17 00:00:00 2001 From: Dmitry Date: Tue, 24 Nov 2020 16:59:18 +0100 Subject: [PATCH 01/18] update geckodriver to 0.28 (#84085) --- package.json | 2 +- yarn.lock | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/package.json b/package.json index 39149c801da4..af80102641db 100644 --- a/package.json +++ b/package.json @@ -654,7 +654,7 @@ "file-loader": "^4.2.0", "file-saver": "^1.3.8", "formsy-react": "^1.1.5", - "geckodriver": "^1.20.0", + "geckodriver": "^1.21.0", "glob-watcher": "5.0.3", "graphql-code-generator": "^0.18.2", "graphql-codegen-add": "^0.18.2", diff --git a/yarn.lock b/yarn.lock index d20c7f78c979..8d47d3e84378 100644 --- a/yarn.lock +++ b/yarn.lock @@ -14255,10 +14255,10 @@ gaze@^1.0.0, gaze@^1.1.0: dependencies: globule "^1.0.0" -geckodriver@^1.20.0: - version "1.20.0" - resolved "https://registry.yarnpkg.com/geckodriver/-/geckodriver-1.20.0.tgz#cd16edb177b88e31affcb54b18a238cae88950a7" - integrity sha512-5nVF4ixR+ZGhVsc4udnVihA9RmSlO6guPV1d2HqxYsgAOUNh0HfzxbzG7E49w4ilXq/CSu87x9yWvrsOstrADQ== +geckodriver@^1.21.0: + version "1.21.0" + resolved "https://registry.yarnpkg.com/geckodriver/-/geckodriver-1.21.0.tgz#1f04780ebfb451ffd08fa8fddc25cc26e37ac4a2" + integrity sha512-NamdJwGIWpPiafKQIvGman95BBi/SBqHddRXAnIEpFNFCFToTW0sEA0nUckMKCBNn1DVIcLfULfyFq/sTn9bkA== dependencies: adm-zip "0.4.16" bluebird "3.7.2" From a12bb044382ada8f557590851943a1568f24c58f Mon Sep 17 00:00:00 2001 From: Scotty Bollinger Date: Tue, 24 Nov 2020 10:16:33 -0600 Subject: [PATCH 02/18] [Workplace Search] Initial rendering of Org Sources (#84164) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Fix broken routes Didn’t have a way to test these when created * Get context from global state No need to do this in 2 places now. There was a race condition where the default logic value for `isOrganization` was set to `false` We don’t need a useEffect call here because the value is synchronous and has no side effects. Calling the method directly fixes the race condition. * Add the ‘path’ to the logic files for easier debugging * Add SourceSubNav component * Flip routes to match new convention It was decided by Product that instead of keying off of `/org` to determine context, that we would now flip it where we key of provate with `/p`. This means that /sources is now organization where before it was personal * Convert routers to use children instead of props This aligns with App Search and allows for easier telemtry and breadcrumbs * Add breadcrumbs and basic telemetry * Add in and refactor subnavigation As a part of this commit, the approach for rendering subnavs was refactored to align with App Search. There was a bug where some components weren’t rendering properly because the SourceLogic and GroupsLogic files were never unmounting. The reason for this is the subnav components use their respective logic files to get the IDs needed for rendering the subnav links. That is, SourceSubNav would call SourceLogic to get the ID to render the links and would stay rendered for the duration of the user’s time in the app. The result is that users would leave the source details page and navigate to add a new source and the logic file would never reset to a loading state and the UI would break. The fix was to borrow from the pattern App Search uses and pass the subnavs as props. Because App Search only uses a single engines subnav, they only needed one prop. We use multiple props for each subnav. 
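(Editorial sketch, illustration only: the "subnavs as props" shape described above, using the optional sourcesSubNav/groupsSubNav props that the nav.tsx hunk below introduces. The simplified markup and the ExampleNav name are assumptions for illustration, not the exact JSX from this patch.)

    // Illustrative sketch only, not the patch's actual component.
    // The nav receives its subnavs as optional props, so each subnav's logic file
    // mounts and unmounts with the route that renders it instead of living inside the nav.
    import React from 'react';

    interface Props {
      sourcesSubNav?: React.ReactNode;
      groupsSubNav?: React.ReactNode;
    }

    export const ExampleNav: React.FC<Props> = ({ sourcesSubNav, groupsSubNav }) => (
      <nav>
        <div>
          Sources
          {sourcesSubNav /* the caller omits this on the root /sources routes */}
        </div>
        <div>
          Groups
          {groupsSubNav}
        </div>
      </nav>
    );
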
Also, the subnav should not be rendered on the root routes (/sources, /p/sources, and /groups) so conditionals were added to only render the subnavs when not on those root routes. * Add FlashMessages * Fix some failed tests Missed this in first commit * Update SourceIcon to use EuiIcon Before this change, the legacy styles were not ported over. This gives a uniform size for both wrapped and unwrapped icons. The icons are a bit smaller on the add source page but Eui has lowered it’s largest size ‘xxl’ and we would need to write manual overrides. IMO the change is not significant enough to override. * Fix broken icons * Replace legacy div with component The eui.css file in ent-search is no longer up to date with current EUI and this was broken. The best fix was to use the component that renders as expected * Add base styles for Sources More in a future PR but this makes the majority of things look correct. * Cleanup Fix some type errors and rename constants * Couple more failing tests We have multiple `Layouts` now with the new subnavs * Fix prepare routes Like the first commit, missed these when porting over routes with no UI. * Clean up the desgin of the source connect screen The columns were way off in Kibana * Remove ORG_PATH const No longer needed since ‘/org’ is gone --- .../components/layout/nav.tsx | 13 +- .../shared/assets/source_icons/index.ts | 5 + .../shared/source_icon/source_icon.scss | 21 +++ .../shared/source_icon/source_icon.test.tsx | 6 +- .../shared/source_icon/source_icon.tsx | 13 +- .../workplace_search/constants.ts | 12 ++ .../workplace_search/index.test.tsx | 4 +- .../applications/workplace_search/index.tsx | 44 ++++-- .../workplace_search/routes.test.tsx | 6 +- .../applications/workplace_search/routes.ts | 24 ++-- .../add_source/configured_sources_list.tsx | 5 +- .../add_source/connect_instance.tsx | 4 +- .../components/source_sub_nav.tsx | 59 ++++++++ .../views/content_sources/index.ts | 1 + .../views/content_sources/source_logic.ts | 3 +- .../views/content_sources/source_router.tsx | 62 ++++---- .../views/content_sources/sources.scss | 23 +++ .../views/content_sources/sources_logic.ts | 1 + .../views/content_sources/sources_router.tsx | 133 ++++++++++-------- .../groups/components/group_manager_modal.tsx | 4 +- .../views/overview/onboarding_steps.test.tsx | 4 +- .../views/overview/onboarding_steps.tsx | 4 +- .../views/overview/organization_stats.tsx | 4 +- .../routes/workplace_search/sources.test.ts | 16 +-- .../server/routes/workplace_search/sources.ts | 18 +-- 25 files changed, 332 insertions(+), 157 deletions(-) create mode 100644 x-pack/plugins/enterprise_search/public/applications/workplace_search/components/shared/source_icon/source_icon.scss create mode 100644 x-pack/plugins/enterprise_search/public/applications/workplace_search/views/content_sources/components/source_sub_nav.tsx create mode 100644 x-pack/plugins/enterprise_search/public/applications/workplace_search/views/content_sources/sources.scss diff --git a/x-pack/plugins/enterprise_search/public/applications/workplace_search/components/layout/nav.tsx b/x-pack/plugins/enterprise_search/public/applications/workplace_search/components/layout/nav.tsx index 6fa6698e6b6b..de6c75d60189 100644 --- a/x-pack/plugins/enterprise_search/public/applications/workplace_search/components/layout/nav.tsx +++ b/x-pack/plugins/enterprise_search/public/applications/workplace_search/components/layout/nav.tsx @@ -11,11 +11,9 @@ import { WORKPLACE_SEARCH_PLUGIN } from '../../../../../common/constants'; import { 
getWorkplaceSearchUrl } from '../../../shared/enterprise_search_url'; import { SideNav, SideNavLink } from '../../../shared/layout'; -import { GroupSubNav } from '../../views/groups/components/group_sub_nav'; import { NAV } from '../../constants'; import { - ORG_SOURCES_PATH, SOURCES_PATH, SECURITY_PATH, ROLE_MAPPINGS_PATH, @@ -23,17 +21,22 @@ import { ORG_SETTINGS_PATH, } from '../../routes'; -export const WorkplaceSearchNav: React.FC = () => { +interface Props { + sourcesSubNav?: React.ReactNode; + groupsSubNav?: React.ReactNode; +} + +export const WorkplaceSearchNav: React.FC = ({ sourcesSubNav, groupsSubNav }) => { // TODO: icons return ( {NAV.OVERVIEW} - + {NAV.SOURCES} - }> + {NAV.GROUPS} diff --git a/x-pack/plugins/enterprise_search/public/applications/workplace_search/components/shared/assets/source_icons/index.ts b/x-pack/plugins/enterprise_search/public/applications/workplace_search/components/shared/assets/source_icons/index.ts index 5f93694da09b..2ac3f518e4e1 100644 --- a/x-pack/plugins/enterprise_search/public/applications/workplace_search/components/shared/assets/source_icons/index.ts +++ b/x-pack/plugins/enterprise_search/public/applications/workplace_search/components/shared/assets/source_icons/index.ts @@ -30,22 +30,27 @@ import zendesk from './zendesk.svg'; export const images = { box, confluence, + confluenceCloud: confluence, + confluenceServer: confluence, crawler, custom, drive, dropbox, github, + githubEnterpriseServer: github, gmail, googleDrive, google, jira, jiraServer, + jiraCloud: jira, loadingSmall, office365, oneDrive, outlook, people, salesforce, + salesforceSandbox: salesforce, serviceNow, sharePoint, slack, diff --git a/x-pack/plugins/enterprise_search/public/applications/workplace_search/components/shared/source_icon/source_icon.scss b/x-pack/plugins/enterprise_search/public/applications/workplace_search/components/shared/source_icon/source_icon.scss new file mode 100644 index 000000000000..b04d5b8bc218 --- /dev/null +++ b/x-pack/plugins/enterprise_search/public/applications/workplace_search/components/shared/source_icon/source_icon.scss @@ -0,0 +1,21 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +.wrapped-icon { + width: 30px; + height: 30px; + overflow: hidden; + margin-right: 4px; + position: relative; + display: flex; + justify-content: center; + align-items: center; + + img { + max-width: 100%; + max-height: 100%; + } +} diff --git a/x-pack/plugins/enterprise_search/public/applications/workplace_search/components/shared/source_icon/source_icon.test.tsx b/x-pack/plugins/enterprise_search/public/applications/workplace_search/components/shared/source_icon/source_icon.test.tsx index c17b89c93a28..4007f7a69f77 100644 --- a/x-pack/plugins/enterprise_search/public/applications/workplace_search/components/shared/source_icon/source_icon.test.tsx +++ b/x-pack/plugins/enterprise_search/public/applications/workplace_search/components/shared/source_icon/source_icon.test.tsx @@ -7,19 +7,21 @@ import React from 'react'; import { shallow } from 'enzyme'; +import { EuiIcon } from '@elastic/eui'; + import { SourceIcon } from './'; describe('SourceIcon', () => { it('renders unwrapped icon', () => { const wrapper = shallow(); - expect(wrapper.find('img')).toHaveLength(1); + expect(wrapper.find(EuiIcon)).toHaveLength(1); expect(wrapper.find('.user-group-source')).toHaveLength(0); }); it('renders wrapped icon', () => { const wrapper = shallow(); - expect(wrapper.find('.user-group-source')).toHaveLength(1); + expect(wrapper.find('.wrapped-icon')).toHaveLength(1); }); }); diff --git a/x-pack/plugins/enterprise_search/public/applications/workplace_search/components/shared/source_icon/source_icon.tsx b/x-pack/plugins/enterprise_search/public/applications/workplace_search/components/shared/source_icon/source_icon.tsx index dec9e25fe244..1af5420a164b 100644 --- a/x-pack/plugins/enterprise_search/public/applications/workplace_search/components/shared/source_icon/source_icon.tsx +++ b/x-pack/plugins/enterprise_search/public/applications/workplace_search/components/shared/source_icon/source_icon.tsx @@ -8,6 +8,10 @@ import React from 'react'; import { camelCase } from 'lodash'; +import { EuiIcon } from '@elastic/eui'; + +import './source_icon.scss'; + import { images } from '../assets/source_icons'; import { imagesFull } from '../assets/sources_full_bleed'; @@ -27,14 +31,15 @@ export const SourceIcon: React.FC = ({ fullBleed = false, }) => { const icon = ( - {name} ); return wrapped ? ( -
+
{icon}
) : ( diff --git a/x-pack/plugins/enterprise_search/public/applications/workplace_search/constants.ts b/x-pack/plugins/enterprise_search/public/applications/workplace_search/constants.ts index 1846115d7390..327ee7b30582 100644 --- a/x-pack/plugins/enterprise_search/public/applications/workplace_search/constants.ts +++ b/x-pack/plugins/enterprise_search/public/applications/workplace_search/constants.ts @@ -25,15 +25,27 @@ export const NAV = { 'xpack.enterpriseSearch.workplaceSearch.nav.groups.sourcePrioritization', { defaultMessage: 'Source Prioritization' } ), + CONTENT: i18n.translate('xpack.enterpriseSearch.workplaceSearch.nav.content', { + defaultMessage: 'Content', + }), ROLE_MAPPINGS: i18n.translate('xpack.enterpriseSearch.workplaceSearch.nav.roleMappings', { defaultMessage: 'Role Mappings', }), SECURITY: i18n.translate('xpack.enterpriseSearch.workplaceSearch.nav.security', { defaultMessage: 'Security', }), + SCHEMA: i18n.translate('xpack.enterpriseSearch.workplaceSearch.nav.schema', { + defaultMessage: 'Schema', + }), + DISPLAY_SETTINGS: i18n.translate('xpack.enterpriseSearch.workplaceSearch.nav.displaySettings', { + defaultMessage: 'Display Settings', + }), SETTINGS: i18n.translate('xpack.enterpriseSearch.workplaceSearch.nav.settings', { defaultMessage: 'Settings', }), + ADD_SOURCE: i18n.translate('xpack.enterpriseSearch.workplaceSearch.nav.addSource', { + defaultMessage: 'Add Source', + }), PERSONAL_DASHBOARD: i18n.translate( 'xpack.enterpriseSearch.workplaceSearch.nav.personalDashboard', { diff --git a/x-pack/plugins/enterprise_search/public/applications/workplace_search/index.test.tsx b/x-pack/plugins/enterprise_search/public/applications/workplace_search/index.test.tsx index 5f1e2dd18d3b..20b15bcfc45c 100644 --- a/x-pack/plugins/enterprise_search/public/applications/workplace_search/index.test.tsx +++ b/x-pack/plugins/enterprise_search/public/applications/workplace_search/index.test.tsx @@ -57,7 +57,7 @@ describe('WorkplaceSearchConfigured', () => { it('renders layout and header actions', () => { const wrapper = shallow(); - expect(wrapper.find(Layout).prop('readOnlyMode')).toBeFalsy(); + expect(wrapper.find(Layout).first().prop('readOnlyMode')).toBeFalsy(); expect(wrapper.find(Overview)).toHaveLength(1); expect(mockKibanaValues.renderHeaderActions).toHaveBeenCalledWith(WorkplaceSearchHeaderActions); }); @@ -90,6 +90,6 @@ describe('WorkplaceSearchConfigured', () => { const wrapper = shallow(); - expect(wrapper.find(Layout).prop('readOnlyMode')).toEqual(true); + expect(wrapper.find(Layout).first().prop('readOnlyMode')).toEqual(true); }); }); diff --git a/x-pack/plugins/enterprise_search/public/applications/workplace_search/index.tsx b/x-pack/plugins/enterprise_search/public/applications/workplace_search/index.tsx index 776cae24dfdf..562a2ffb3288 100644 --- a/x-pack/plugins/enterprise_search/public/applications/workplace_search/index.tsx +++ b/x-pack/plugins/enterprise_search/public/applications/workplace_search/index.tsx @@ -16,13 +16,17 @@ import { AppLogic } from './app_logic'; import { Layout } from '../shared/layout'; import { WorkplaceSearchNav, WorkplaceSearchHeaderActions } from './components/layout'; -import { GROUPS_PATH, SETUP_GUIDE_PATH } from './routes'; +import { GROUPS_PATH, SETUP_GUIDE_PATH, SOURCES_PATH, PERSONAL_SOURCES_PATH } from './routes'; import { SetupGuide } from './views/setup_guide'; import { ErrorState } from './views/error_state'; import { NotFound } from '../shared/not_found'; import { Overview } from './views/overview'; import { GroupsRouter } from 
'./views/groups'; +import { SourcesRouter } from './views/content_sources'; + +import { GroupSubNav } from './views/groups/components/group_sub_nav'; +import { SourceSubNav } from './views/content_sources/components/source_sub_nav'; export const WorkplaceSearch: React.FC = (props) => { const { config } = useValues(KibanaLogic); @@ -37,6 +41,10 @@ export const WorkplaceSearchConfigured: React.FC = (props) => { const { pathname } = useLocation(); + // We don't want so show the subnavs on the container root pages. + const showSourcesSubnav = pathname !== SOURCES_PATH && pathname !== PERSONAL_SOURCES_PATH; + const showGroupsSubnav = pathname !== GROUPS_PATH; + /** * Personal dashboard urls begin with /p/ * EX: http://localhost:5601/app/enterprise_search/workplace_search/p/sources @@ -45,6 +53,7 @@ export const WorkplaceSearchConfigured: React.FC = (props) => { // TODO: Once auth is figured out, we need to have a check for the equivilent of `isAdmin`. const isOrganization = !pathname.match(personalSourceUrlRegex); + setContext(isOrganization); useEffect(() => { if (!hasInitialized) { @@ -53,10 +62,6 @@ export const WorkplaceSearchConfigured: React.FC = (props) => { } }, [hasInitialized]); - useEffect(() => { - setContext(isOrganization); - }, [isOrganization]); - return ( @@ -65,19 +70,32 @@ export const WorkplaceSearchConfigured: React.FC = (props) => { {errorConnecting ? : } + + } />} + restrictWidth + readOnlyMode={readOnlyMode} + > + + + + + } />} + restrictWidth + readOnlyMode={readOnlyMode} + > + + + } restrictWidth readOnlyMode={readOnlyMode}> {errorConnecting ? ( ) : ( - - - - - - - - + + + )} diff --git a/x-pack/plugins/enterprise_search/public/applications/workplace_search/routes.test.tsx b/x-pack/plugins/enterprise_search/public/applications/workplace_search/routes.test.tsx index d03c0abb441b..3fddcf3b77fe 100644 --- a/x-pack/plugins/enterprise_search/public/applications/workplace_search/routes.test.tsx +++ b/x-pack/plugins/enterprise_search/public/applications/workplace_search/routes.test.tsx @@ -12,7 +12,7 @@ import { EuiLink } from '@elastic/eui'; import { getContentSourcePath, SOURCES_PATH, - ORG_SOURCES_PATH, + PERSONAL_SOURCES_PATH, SOURCE_DETAILS_PATH, } from './routes'; @@ -26,13 +26,13 @@ describe('getContentSourcePath', () => { const wrapper = shallow(); const path = wrapper.find(EuiLink).prop('href'); - expect(path).toEqual(`${ORG_SOURCES_PATH}/123`); + expect(path).toEqual(`${SOURCES_PATH}/123`); }); it('should format user route', () => { const wrapper = shallow(); const path = wrapper.find(EuiLink).prop('href'); - expect(path).toEqual(`${SOURCES_PATH}/123`); + expect(path).toEqual(`${PERSONAL_SOURCES_PATH}/123`); }); }); diff --git a/x-pack/plugins/enterprise_search/public/applications/workplace_search/routes.ts b/x-pack/plugins/enterprise_search/public/applications/workplace_search/routes.ts index e41a043911dc..3ec22ede888a 100644 --- a/x-pack/plugins/enterprise_search/public/applications/workplace_search/routes.ts +++ b/x-pack/plugins/enterprise_search/public/applications/workplace_search/routes.ts @@ -44,21 +44,21 @@ export const CUSTOM_API_DOCS_URL = `${DOCS_PREFIX}/workplace-search-custom-sourc export const CUSTOM_API_DOCUMENT_PERMISSIONS_DOCS_URL = `${CUSTOM_SOURCE_DOCS_URL}#custom-api-source-document-level-access-control`; export const ENT_SEARCH_LICENSE_MANAGEMENT = `${ENT_SEARCH_DOCS_PREFIX}/license-management.html`; -export const ORG_PATH = '/org'; +export const PERSONAL_PATH = '/p'; -export const ROLE_MAPPINGS_PATH = `${ORG_PATH}/role-mappings`; +export const 
ROLE_MAPPINGS_PATH = '/role-mappings'; export const ROLE_MAPPING_PATH = `${ROLE_MAPPINGS_PATH}/:roleId`; export const ROLE_MAPPING_NEW_PATH = `${ROLE_MAPPINGS_PATH}/new`; -export const USERS_PATH = `${ORG_PATH}/users`; -export const SECURITY_PATH = `${ORG_PATH}/security`; +export const USERS_PATH = '/users'; +export const SECURITY_PATH = '/security'; export const GROUPS_PATH = '/groups'; export const GROUP_PATH = `${GROUPS_PATH}/:groupId`; export const GROUP_SOURCE_PRIORITIZATION_PATH = `${GROUPS_PATH}/:groupId/source_prioritization`; export const SOURCES_PATH = '/sources'; -export const ORG_SOURCES_PATH = `${ORG_PATH}${SOURCES_PATH}`; +export const PERSONAL_SOURCES_PATH = `${PERSONAL_PATH}${SOURCES_PATH}`; export const SOURCE_ADDED_PATH = `${SOURCES_PATH}/added`; export const ADD_SOURCE_PATH = `${SOURCES_PATH}/add`; @@ -81,7 +81,7 @@ export const ADD_SLACK_PATH = `${SOURCES_PATH}/add/slack`; export const ADD_ZENDESK_PATH = `${SOURCES_PATH}/add/zendesk`; export const ADD_CUSTOM_PATH = `${SOURCES_PATH}/add/custom`; -export const PERSONAL_SETTINGS_PATH = '/settings'; +export const PERSONAL_SETTINGS_PATH = `${PERSONAL_PATH}/settings`; export const SOURCE_DETAILS_PATH = `${SOURCES_PATH}/:sourceId`; export const SOURCE_CONTENT_PATH = `${SOURCES_PATH}/:sourceId/content`; @@ -93,7 +93,7 @@ export const REINDEX_JOB_PATH = `${SOURCES_PATH}/:sourceId/schema-errors/:active export const DISPLAY_SETTINGS_SEARCH_RESULT_PATH = `${SOURCE_DISPLAY_SETTINGS_PATH}/`; export const DISPLAY_SETTINGS_RESULT_DETAIL_PATH = `${SOURCE_DISPLAY_SETTINGS_PATH}/result-detail`; -export const ORG_SETTINGS_PATH = `${ORG_PATH}/settings`; +export const ORG_SETTINGS_PATH = '/settings'; export const ORG_SETTINGS_CUSTOMIZE_PATH = `${ORG_SETTINGS_PATH}/customize`; export const ORG_SETTINGS_CONNECTORS_PATH = `${ORG_SETTINGS_PATH}/connectors`; export const ORG_SETTINGS_OAUTH_APPLICATION_PATH = `${ORG_SETTINGS_PATH}/oauth`; @@ -120,9 +120,9 @@ export const getContentSourcePath = ( path: string, sourceId: string, isOrganization: boolean -): string => generatePath(isOrganization ? ORG_PATH + path : path, { sourceId }); -export const getGroupPath = (groupId: string) => generatePath(GROUP_PATH, { groupId }); -export const getGroupSourcePrioritizationPath = (groupId: string) => +): string => generatePath(isOrganization ? path : `${PERSONAL_PATH}${path}`, { sourceId }); +export const getGroupPath = (groupId: string): string => generatePath(GROUP_PATH, { groupId }); +export const getGroupSourcePrioritizationPath = (groupId: string): string => `${GROUPS_PATH}/${groupId}/source_prioritization`; -export const getSourcesPath = (path: string, isOrganization: boolean) => - isOrganization ? `${ORG_PATH}${path}` : path; +export const getSourcesPath = (path: string, isOrganization: boolean): string => + isOrganization ? 
path : `${PERSONAL_PATH}${path}`; diff --git a/x-pack/plugins/enterprise_search/public/applications/workplace_search/views/content_sources/components/add_source/configured_sources_list.tsx b/x-pack/plugins/enterprise_search/public/applications/workplace_search/views/content_sources/components/add_source/configured_sources_list.tsx index a95d5ca75b0b..fbd053f9b837 100644 --- a/x-pack/plugins/enterprise_search/public/applications/workplace_search/views/content_sources/components/add_source/configured_sources_list.tsx +++ b/x-pack/plugins/enterprise_search/public/applications/workplace_search/views/content_sources/components/add_source/configured_sources_list.tsx @@ -13,6 +13,7 @@ import { EuiFlexGrid, EuiFlexGroup, EuiFlexItem, + EuiPanel, EuiSpacer, EuiText, EuiTitle, @@ -57,7 +58,7 @@ export const ConfiguredSourcesList: React.FC = ({ {sources.map(({ name, serviceType, addPath, connected, accountContextOnly }, i) => ( -
+ = ({ )} -
+
))} diff --git a/x-pack/plugins/enterprise_search/public/applications/workplace_search/views/content_sources/components/add_source/connect_instance.tsx b/x-pack/plugins/enterprise_search/public/applications/workplace_search/views/content_sources/components/add_source/connect_instance.tsx index ad183181b4ec..f9123ab4e1cc 100644 --- a/x-pack/plugins/enterprise_search/public/applications/workplace_search/views/content_sources/components/add_source/connect_instance.tsx +++ b/x-pack/plugins/enterprise_search/public/applications/workplace_search/views/content_sources/components/add_source/connect_instance.tsx @@ -240,13 +240,13 @@ export const ConnectInstance: React.FC = ({ gutterSize="xl" responsive={false} > - + {header} {featureBadgeGroup()} {descriptionBlock} {formFields} - + diff --git a/x-pack/plugins/enterprise_search/public/applications/workplace_search/views/content_sources/components/source_sub_nav.tsx b/x-pack/plugins/enterprise_search/public/applications/workplace_search/views/content_sources/components/source_sub_nav.tsx new file mode 100644 index 000000000000..cc68a62b9555 --- /dev/null +++ b/x-pack/plugins/enterprise_search/public/applications/workplace_search/views/content_sources/components/source_sub_nav.tsx @@ -0,0 +1,59 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import React from 'react'; +import { useValues } from 'kea'; + +import { AppLogic } from '../../../app_logic'; +import { NAV, CUSTOM_SERVICE_TYPE } from '../../../constants'; + +import { SourceLogic } from '../source_logic'; + +import { SideNavLink } from '../../../../shared/layout'; + +import { + getContentSourcePath, + SOURCE_DETAILS_PATH, + SOURCE_CONTENT_PATH, + SOURCE_SCHEMAS_PATH, + SOURCE_DISPLAY_SETTINGS_PATH, + SOURCE_SETTINGS_PATH, +} from '../../../routes'; + +export const SourceSubNav: React.FC = () => { + const { isOrganization } = useValues(AppLogic); + const { + contentSource: { id, serviceType }, + } = useValues(SourceLogic); + + if (!id) return null; + + const isCustom = serviceType === CUSTOM_SERVICE_TYPE; + + return ( + <> + + {NAV.OVERVIEW} + + + {NAV.CONTENT} + + {isCustom && ( + <> + + {NAV.SCHEMA} + + + {NAV.DISPLAY_SETTINGS} + + + )} + + {NAV.SETTINGS} + + + ); +}; diff --git a/x-pack/plugins/enterprise_search/public/applications/workplace_search/views/content_sources/index.ts b/x-pack/plugins/enterprise_search/public/applications/workplace_search/views/content_sources/index.ts index 0ef2099968f1..f447751e9659 100644 --- a/x-pack/plugins/enterprise_search/public/applications/workplace_search/views/content_sources/index.ts +++ b/x-pack/plugins/enterprise_search/public/applications/workplace_search/views/content_sources/index.ts @@ -5,3 +5,4 @@ */ export { Overview } from './components/overview'; +export { SourcesRouter } from './sources_router'; diff --git a/x-pack/plugins/enterprise_search/public/applications/workplace_search/views/content_sources/source_logic.ts b/x-pack/plugins/enterprise_search/public/applications/workplace_search/views/content_sources/source_logic.ts index 0a11da02dc78..51b5735f0104 100644 --- a/x-pack/plugins/enterprise_search/public/applications/workplace_search/views/content_sources/source_logic.ts +++ b/x-pack/plugins/enterprise_search/public/applications/workplace_search/views/content_sources/source_logic.ts @@ -146,6 +146,7 @@ interface PreContentSourceResponse 
{ } export const SourceLogic = kea>({ + path: ['enterprise_search', 'workplace_search', 'source_logic'], actions: { onInitializeSource: (contentSource: ContentSourceFullData) => contentSource, onUpdateSourceName: (name: string) => name, @@ -601,7 +602,7 @@ export const SourceLogic = kea>({ try { const response = await HttpLogic.values.http.post(route, { - body: JSON.stringify({ params }), + body: JSON.stringify({ ...params }), }); actions.setCustomSourceData(response); successCallback(); diff --git a/x-pack/plugins/enterprise_search/public/applications/workplace_search/views/content_sources/source_router.tsx b/x-pack/plugins/enterprise_search/public/applications/workplace_search/views/content_sources/source_router.tsx index b8b8e6e1040a..7161e613247c 100644 --- a/x-pack/plugins/enterprise_search/public/applications/workplace_search/views/content_sources/source_router.tsx +++ b/x-pack/plugins/enterprise_search/public/applications/workplace_search/views/content_sources/source_router.tsx @@ -13,6 +13,11 @@ import { Route, Switch, useHistory, useParams } from 'react-router-dom'; import { EuiButton, EuiCallOut, EuiSpacer } from '@elastic/eui'; +import { SetWorkplaceSearchChrome as SetPageChrome } from '../../../shared/kibana_chrome'; +import { SendWorkplaceSearchTelemetry as SendTelemetry } from '../../../shared/telemetry'; + +import { NAV } from '../../constants'; + import { ENT_SEARCH_LICENSE_MANAGEMENT, REINDEX_JOB_PATH, @@ -99,39 +104,42 @@ export const SourceRouter: React.FC = () => { {/* TODO: Figure out with design how to make this look better */} {pageHeader} - - + + + + + + + + + + {isCustomSource && ( - + + + + + )} {isCustomSource && ( - + + + + + )} {isCustomSource && ( - + + + + + )} - + + + + + ); diff --git a/x-pack/plugins/enterprise_search/public/applications/workplace_search/views/content_sources/sources.scss b/x-pack/plugins/enterprise_search/public/applications/workplace_search/views/content_sources/sources.scss new file mode 100644 index 000000000000..fb0cecc18148 --- /dev/null +++ b/x-pack/plugins/enterprise_search/public/applications/workplace_search/views/content_sources/sources.scss @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +.source-grid-configured { + + .source-card-configured { + padding: 8px; + + &__icon { + width: 2em; + height: 2em; + } + + &__not-connected-tooltip { + position: relative; + top: 3px; + left: 4px; + } + } +} diff --git a/x-pack/plugins/enterprise_search/public/applications/workplace_search/views/content_sources/sources_logic.ts b/x-pack/plugins/enterprise_search/public/applications/workplace_search/views/content_sources/sources_logic.ts index 600b5871fc49..1757f2a6414f 100644 --- a/x-pack/plugins/enterprise_search/public/applications/workplace_search/views/content_sources/sources_logic.ts +++ b/x-pack/plugins/enterprise_search/public/applications/workplace_search/views/content_sources/sources_logic.ts @@ -78,6 +78,7 @@ interface ISourcesServerResponse { } export const SourcesLogic = kea>({ + path: ['enterprise_search', 'workplace_search', 'sources_logic'], actions: { setServerSourceStatuses: (statuses: ContentSourceStatus[]) => statuses, onInitializeSources: (serverResponse: ISourcesServerResponse) => serverResponse, diff --git a/x-pack/plugins/enterprise_search/public/applications/workplace_search/views/content_sources/sources_router.tsx b/x-pack/plugins/enterprise_search/public/applications/workplace_search/views/content_sources/sources_router.tsx index e4f15286145f..9f96a13e272d 100644 --- a/x-pack/plugins/enterprise_search/public/applications/workplace_search/views/content_sources/sources_router.tsx +++ b/x-pack/plugins/enterprise_search/public/applications/workplace_search/views/content_sources/sources_router.tsx @@ -10,18 +10,23 @@ import { Location } from 'history'; import { useActions, useValues } from 'kea'; import { Redirect, Route, Switch, useLocation } from 'react-router-dom'; +import { SetWorkplaceSearchChrome as SetPageChrome } from '../../../shared/kibana_chrome'; +import { SendWorkplaceSearchTelemetry as SendTelemetry } from '../../../shared/telemetry'; + import { LicensingLogic } from '../../../../applications/shared/licensing'; +import { NAV } from '../../constants'; import { ADD_SOURCE_PATH, SOURCE_ADDED_PATH, SOURCE_DETAILS_PATH, - ORG_PATH, - ORG_SOURCES_PATH, + PERSONAL_SOURCES_PATH, SOURCES_PATH, getSourcesPath, } from '../../routes'; +import { FlashMessages } from '../../../shared/flash_messages'; + import { AppLogic } from '../../app_logic'; import { staticSourceData } from './source_data'; import { SourcesLogic } from './sources_logic'; @@ -32,12 +37,15 @@ import { OrganizationSources } from './organization_sources'; import { PrivateSources } from './private_sources'; import { SourceRouter } from './source_router'; +import './sources.scss'; + export const SourcesRouter: React.FC = () => { const { pathname } = useLocation() as Location; const { hasPlatinumLicense } = useValues(LicensingLogic); const { resetSourcesState } = useActions(SourcesLogic); const { account: { canCreatePersonalSources }, + isOrganization, } = useValues(AppLogic); /** @@ -48,61 +56,76 @@ export const SourcesRouter: React.FC = () => { resetSourcesState(); }, [pathname]); - const isOrgRoute = pathname.includes(ORG_PATH); - return ( - - - - {staticSourceData.map(({ addPath, accountContextOnly }, i) => ( - - !hasPlatinumLicense && accountContextOnly ? ( - + <> + + + + + + + + + + + + + {staticSourceData.map(({ addPath, accountContextOnly, name }, i) => ( + + + {!hasPlatinumLicense && accountContextOnly ? 
( + ) : ( - ) - } - /> - ))} - {staticSourceData.map(({ addPath }, i) => ( - } - /> - ))} - {staticSourceData.map(({ addPath }, i) => ( - } - /> - ))} - {staticSourceData.map(({ addPath, configuration: { needsConfiguration } }, i) => { - if (needsConfiguration) - return ( - } - /> - ); - })} - {canCreatePersonalSources ? ( - - ) : ( - - )} - : - - - + )} + + ))} + {staticSourceData.map(({ addPath, name }, i) => ( + + + + + ))} + {staticSourceData.map(({ addPath, name }, i) => ( + + + + + ))} + {staticSourceData.map(({ addPath, name, configuration: { needsConfiguration } }, i) => { + if (needsConfiguration) + return ( + + + + + ); + })} + {canCreatePersonalSources ? ( + + + + + + ) : ( + + )} + + + + + + + + + + + + + ); }; diff --git a/x-pack/plugins/enterprise_search/public/applications/workplace_search/views/groups/components/group_manager_modal.tsx b/x-pack/plugins/enterprise_search/public/applications/workplace_search/views/groups/components/group_manager_modal.tsx index c0f8bf57989c..cbfb22915c4e 100644 --- a/x-pack/plugins/enterprise_search/public/applications/workplace_search/views/groups/components/group_manager_modal.tsx +++ b/x-pack/plugins/enterprise_search/public/applications/workplace_search/views/groups/components/group_manager_modal.tsx @@ -29,7 +29,7 @@ import { import { EuiButtonTo } from '../../../../shared/react_router_helpers'; import { Group } from '../../../types'; -import { ORG_SOURCES_PATH } from '../../../routes'; +import { SOURCES_PATH } from '../../../routes'; import noSharedSourcesIcon from '../../../assets/share_circle.svg'; @@ -96,7 +96,7 @@ export const GroupManagerModal: React.FC = ({ const handleSelectAll = () => selectAll(allSelected ? [] : allItems); const sourcesButton = ( - + {ADD_SOURCE_BUTTON_TEXT} ); diff --git a/x-pack/plugins/enterprise_search/public/applications/workplace_search/views/overview/onboarding_steps.test.tsx b/x-pack/plugins/enterprise_search/public/applications/workplace_search/views/overview/onboarding_steps.test.tsx index 268e4f8da445..64dc5149decd 100644 --- a/x-pack/plugins/enterprise_search/public/applications/workplace_search/views/overview/onboarding_steps.test.tsx +++ b/x-pack/plugins/enterprise_search/public/applications/workplace_search/views/overview/onboarding_steps.test.tsx @@ -11,7 +11,7 @@ import { setMockValues } from './__mocks__'; import React from 'react'; import { shallow } from 'enzyme'; -import { ORG_SOURCES_PATH, USERS_PATH } from '../../routes'; +import { SOURCES_PATH, USERS_PATH } from '../../routes'; import { OnboardingSteps, OrgNameOnboarding } from './onboarding_steps'; import { OnboardingCard } from './onboarding_card'; @@ -32,7 +32,7 @@ describe('OnboardingSteps', () => { const wrapper = shallow(); expect(wrapper.find(OnboardingCard)).toHaveLength(1); - expect(wrapper.find(OnboardingCard).prop('actionPath')).toBe(ORG_SOURCES_PATH); + expect(wrapper.find(OnboardingCard).prop('actionPath')).toBe(SOURCES_PATH); expect(wrapper.find(OnboardingCard).prop('description')).toBe( 'Add shared sources for your organization to start searching.' 
); diff --git a/x-pack/plugins/enterprise_search/public/applications/workplace_search/views/overview/onboarding_steps.tsx b/x-pack/plugins/enterprise_search/public/applications/workplace_search/views/overview/onboarding_steps.tsx index ed5136a6f7a4..4957324aa6bd 100644 --- a/x-pack/plugins/enterprise_search/public/applications/workplace_search/views/overview/onboarding_steps.tsx +++ b/x-pack/plugins/enterprise_search/public/applications/workplace_search/views/overview/onboarding_steps.tsx @@ -24,7 +24,7 @@ import { import sharedSourcesIcon from '../../components/shared/assets/source_icons/share_circle.svg'; import { TelemetryLogic } from '../../../shared/telemetry'; import { getWorkplaceSearchUrl } from '../../../shared/enterprise_search_url'; -import { ORG_SOURCES_PATH, USERS_PATH, ORG_SETTINGS_PATH } from '../../routes'; +import { SOURCES_PATH, USERS_PATH, ORG_SETTINGS_PATH } from '../../routes'; import { ContentSection } from '../../components/shared/content_section'; @@ -75,7 +75,7 @@ export const OnboardingSteps: React.FC = () => { const accountsPath = !isFederatedAuth && (canCreateInvitations || isCurated) ? USERS_PATH : undefined; - const sourcesPath = canCreateContentSources || isCurated ? ORG_SOURCES_PATH : undefined; + const sourcesPath = canCreateContentSources || isCurated ? SOURCES_PATH : undefined; const SOURCES_CARD_DESCRIPTION = i18n.translate( 'xpack.enterpriseSearch.workplaceSearch.sourcesOnboardingCard.description', diff --git a/x-pack/plugins/enterprise_search/public/applications/workplace_search/views/overview/organization_stats.tsx b/x-pack/plugins/enterprise_search/public/applications/workplace_search/views/overview/organization_stats.tsx index 6614ac58b074..06c620ad384e 100644 --- a/x-pack/plugins/enterprise_search/public/applications/workplace_search/views/overview/organization_stats.tsx +++ b/x-pack/plugins/enterprise_search/public/applications/workplace_search/views/overview/organization_stats.tsx @@ -12,7 +12,7 @@ import { FormattedMessage } from '@kbn/i18n/react'; import { i18n } from '@kbn/i18n'; import { ContentSection } from '../../components/shared/content_section'; -import { ORG_SOURCES_PATH, USERS_PATH } from '../../routes'; +import { SOURCES_PATH, USERS_PATH } from '../../routes'; import { AppLogic } from '../../app_logic'; import { OverviewLogic } from './overview_logic'; @@ -43,7 +43,7 @@ export const OrganizationStats: React.FC = () => { { defaultMessage: 'Shared sources' } )} count={sourcesCount} - actionPath={ORG_SOURCES_PATH} + actionPath={SOURCES_PATH} /> {!isFederatedAuth && ( <> diff --git a/x-pack/plugins/enterprise_search/server/routes/workplace_search/sources.test.ts b/x-pack/plugins/enterprise_search/server/routes/workplace_search/sources.test.ts index 9cf491b79fd2..22e2deaace1d 100644 --- a/x-pack/plugins/enterprise_search/server/routes/workplace_search/sources.test.ts +++ b/x-pack/plugins/enterprise_search/server/routes/workplace_search/sources.test.ts @@ -328,10 +328,8 @@ describe('sources routes', () => { const mockRequest = { params: { id: '123' }, body: { - query: { - content_source: { - name: 'foo', - }, + content_source: { + name: 'foo', }, }, }; @@ -406,7 +404,7 @@ describe('sources routes', () => { mockRouter.callRoute(mockRequest); expect(mockRequestHandler.createRequest).toHaveBeenCalledWith({ - path: '/ws/pre_content_sources/zendesk', + path: '/ws/sources/zendesk/prepare', }); }); }); @@ -732,10 +730,8 @@ describe('sources routes', () => { const mockRequest = { params: { id: '123' }, body: { - query: { - content_source: { - 
name: 'foo', - }, + content_source: { + name: 'foo', }, }, }; @@ -810,7 +806,7 @@ describe('sources routes', () => { mockRouter.callRoute(mockRequest); expect(mockRequestHandler.createRequest).toHaveBeenCalledWith({ - path: '/ws/org/pre_content_sources/zendesk', + path: '/ws/org/sources/zendesk/prepare', }); }); }); diff --git a/x-pack/plugins/enterprise_search/server/routes/workplace_search/sources.ts b/x-pack/plugins/enterprise_search/server/routes/workplace_search/sources.ts index bdd048438dae..24473388c03b 100644 --- a/x-pack/plugins/enterprise_search/server/routes/workplace_search/sources.ts +++ b/x-pack/plugins/enterprise_search/server/routes/workplace_search/sources.ts @@ -200,10 +200,8 @@ export function registerAccountSourceSettingsRoute({ path: '/api/workplace_search/account/sources/{id}/settings', validate: { body: schema.object({ - query: schema.object({ - content_source: schema.object({ - name: schema.string(), - }), + content_source: schema.object({ + name: schema.string(), }), }), params: schema.object({ @@ -256,7 +254,7 @@ export function registerAccountPrepareSourcesRoute({ }, async (context, request, response) => { return enterpriseSearchRequestHandler.createRequest({ - path: `/ws/pre_content_sources/${request.params.service_type}`, + path: `/ws/sources/${request.params.service_type}/prepare`, })(context, request, response); } ); @@ -372,7 +370,7 @@ export function registerOrgCreateSourceRoute({ login: schema.maybe(schema.string()), password: schema.maybe(schema.string()), organizations: schema.maybe(schema.arrayOf(schema.string())), - indexPermissions: schema.boolean(), + indexPermissions: schema.maybe(schema.boolean()), }), }, }, @@ -462,10 +460,8 @@ export function registerOrgSourceSettingsRoute({ path: '/api/workplace_search/org/sources/{id}/settings', validate: { body: schema.object({ - query: schema.object({ - content_source: schema.object({ - name: schema.string(), - }), + content_source: schema.object({ + name: schema.string(), }), }), params: schema.object({ @@ -518,7 +514,7 @@ export function registerOrgPrepareSourcesRoute({ }, async (context, request, response) => { return enterpriseSearchRequestHandler.createRequest({ - path: `/ws/org/pre_content_sources/${request.params.service_type}`, + path: `/ws/org/sources/${request.params.service_type}/prepare`, })(context, request, response); } ); From 6ef6c0fa4deeace16f47aaf08195b2c10278150b Mon Sep 17 00:00:00 2001 From: Alexey Antonov Date: Tue, 24 Nov 2020 19:19:06 +0300 Subject: [PATCH 03/18] TSVB should use "histogram:maxBars" and "histogram:barTarget" settings for auto instead of a default 100 buckets (#83628) * TSVB needs a "tsvb:max_buckets" target setting for auto instead of a default 120 buckets Closes: #54012 * remove calculate_auto * max bars -> Level of detail * rename allowLevelofDetail * fix PR comment * Update constants.ts * Update src/plugins/vis_type_timeseries/public/application/components/index_pattern.js Co-authored-by: Wylie Conlon * create LEVEL_OF_DETAIL_MIN_BUCKETS constant * calcAutoIntervalLessThan -> search.aggs.calcAutoIntervalLessThan Co-authored-by: Kibana Machine <42973632+kibanamachine@users.noreply.github.com> Co-authored-by: Wylie Conlon --- ...ibana-plugin-plugins-data-server.search.md | 1 + .../data/common/search/aggs/buckets/index.ts | 1 + src/plugins/data/server/index.ts | 2 + src/plugins/data/server/server.api.md | 32 ++--- .../vis_type_timeseries/common/constants.ts | 2 +- .../vis_type_timeseries/common/vis_schema.ts | 2 + .../application/components/index_pattern.js | 115 
++++++++++++++++-- .../components/lib/get_interval.js | 3 +- .../components/panel_config/timeseries.js | 1 + .../components/vis_editor_visualization.js | 2 +- .../components/vis_types/timeseries/config.js | 2 +- .../annotations/get_request_params.js | 8 +- .../vis_data/get_interval_and_timefield.js | 14 ++- .../server/lib/vis_data/get_table_data.js | 8 +- .../lib/vis_data/helpers/calculate_auto.js | 90 -------------- .../lib/vis_data/helpers/get_bucket_size.js | 15 +-- .../vis_data/helpers/get_bucket_size.test.js | 18 ++- ...imerange.test.js => get_timerange.test.ts} | 8 +- .../{get_timerange.js => get_timerange.ts} | 15 +-- .../annotations/date_histogram.js | 10 +- .../request_processors/annotations/query.js | 12 +- .../series/date_histogram.js | 23 +++- .../series/date_histogram.test.js | 70 ++++++++--- .../series/metric_buckets.js | 13 +- .../series/metric_buckets.test.js | 84 +++++++------ .../series/positive_rate.js | 13 +- .../series/positive_rate.test.js | 9 +- .../series/sibling_buckets.js | 6 +- .../series/sibling_buckets.test.js | 10 +- .../table/date_histogram.js | 16 ++- .../table/metric_buckets.js | 12 +- .../request_processors/table/positive_rate.js | 12 +- .../table/sibling_buckets.js | 12 +- .../series/build_request_body.test.ts | 3 +- .../lib/vis_data/series/get_request_params.js | 9 +- 35 files changed, 424 insertions(+), 229 deletions(-) delete mode 100644 src/plugins/vis_type_timeseries/server/lib/vis_data/helpers/calculate_auto.js rename src/plugins/vis_type_timeseries/server/lib/vis_data/helpers/{get_timerange.test.js => get_timerange.test.ts} (92%) rename src/plugins/vis_type_timeseries/server/lib/vis_data/helpers/{get_timerange.js => get_timerange.ts} (75%) diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.search.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.search.md index e2a71a7badd4..77abcacd7704 100644 --- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.search.md +++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.search.md @@ -52,6 +52,7 @@ search: { siblingPipelineType: string; termsAggFilter: string[]; toAbsoluteDates: typeof toAbsoluteDates; + calcAutoIntervalLessThan: typeof calcAutoIntervalLessThan; }; getRequestInspectorStats: typeof getRequestInspectorStats; getResponseInspectorStats: typeof getResponseInspectorStats; diff --git a/src/plugins/data/common/search/aggs/buckets/index.ts b/src/plugins/data/common/search/aggs/buckets/index.ts index b16242e51987..04a748bfb196 100644 --- a/src/plugins/data/common/search/aggs/buckets/index.ts +++ b/src/plugins/data/common/search/aggs/buckets/index.ts @@ -35,3 +35,4 @@ export * from './lib/ip_range'; export * from './migrate_include_exclude_format'; export * from './significant_terms'; export * from './terms'; +export * from './lib/time_buckets/calc_auto_interval'; diff --git a/src/plugins/data/server/index.ts b/src/plugins/data/server/index.ts index 9d85caa624e7..b3fe412152c9 100644 --- a/src/plugins/data/server/index.ts +++ b/src/plugins/data/server/index.ts @@ -196,6 +196,7 @@ import { includeTotalLoaded, toKibanaSearchResponse, getTotalLoaded, + calcAutoIntervalLessThan, } from '../common'; export { @@ -282,6 +283,7 @@ export const search = { siblingPipelineType, termsAggFilter, toAbsoluteDates, + calcAutoIntervalLessThan, }, getRequestInspectorStats, getResponseInspectorStats, diff --git a/src/plugins/data/server/server.api.md b/src/plugins/data/server/server.api.md index 
6583651e074c..6870ad5e2402 100644 --- a/src/plugins/data/server/server.api.md +++ b/src/plugins/data/server/server.api.md @@ -1084,6 +1084,7 @@ export const search: { siblingPipelineType: string; termsAggFilter: string[]; toAbsoluteDates: typeof toAbsoluteDates; + calcAutoIntervalLessThan: typeof calcAutoIntervalLessThan; }; getRequestInspectorStats: typeof getRequestInspectorStats; getResponseInspectorStats: typeof getResponseInspectorStats; @@ -1246,21 +1247,22 @@ export function usageProvider(core: CoreSetup_2): SearchUsage; // src/plugins/data/server/index.ts:111:26 - (ae-forgotten-export) The symbol "TruncateFormat" needs to be exported by the entry point index.d.ts // src/plugins/data/server/index.ts:137:27 - (ae-forgotten-export) The symbol "isFilterable" needs to be exported by the entry point index.d.ts // src/plugins/data/server/index.ts:137:27 - (ae-forgotten-export) The symbol "isNestedField" needs to be exported by the entry point index.d.ts -// src/plugins/data/server/index.ts:253:20 - (ae-forgotten-export) The symbol "getRequestInspectorStats" needs to be exported by the entry point index.d.ts -// src/plugins/data/server/index.ts:253:20 - (ae-forgotten-export) The symbol "getResponseInspectorStats" needs to be exported by the entry point index.d.ts -// src/plugins/data/server/index.ts:253:20 - (ae-forgotten-export) The symbol "tabifyAggResponse" needs to be exported by the entry point index.d.ts -// src/plugins/data/server/index.ts:253:20 - (ae-forgotten-export) The symbol "tabifyGetColumns" needs to be exported by the entry point index.d.ts -// src/plugins/data/server/index.ts:268:5 - (ae-forgotten-export) The symbol "getTotalLoaded" needs to be exported by the entry point index.d.ts -// src/plugins/data/server/index.ts:269:5 - (ae-forgotten-export) The symbol "toSnakeCase" needs to be exported by the entry point index.d.ts -// src/plugins/data/server/index.ts:273:1 - (ae-forgotten-export) The symbol "CidrMask" needs to be exported by the entry point index.d.ts -// src/plugins/data/server/index.ts:274:1 - (ae-forgotten-export) The symbol "dateHistogramInterval" needs to be exported by the entry point index.d.ts -// src/plugins/data/server/index.ts:283:1 - (ae-forgotten-export) The symbol "InvalidEsCalendarIntervalError" needs to be exported by the entry point index.d.ts -// src/plugins/data/server/index.ts:284:1 - (ae-forgotten-export) The symbol "InvalidEsIntervalFormatError" needs to be exported by the entry point index.d.ts -// src/plugins/data/server/index.ts:285:1 - (ae-forgotten-export) The symbol "Ipv4Address" needs to be exported by the entry point index.d.ts -// src/plugins/data/server/index.ts:289:1 - (ae-forgotten-export) The symbol "isValidEsInterval" needs to be exported by the entry point index.d.ts -// src/plugins/data/server/index.ts:290:1 - (ae-forgotten-export) The symbol "isValidInterval" needs to be exported by the entry point index.d.ts -// src/plugins/data/server/index.ts:294:1 - (ae-forgotten-export) The symbol "propFilter" needs to be exported by the entry point index.d.ts -// src/plugins/data/server/index.ts:297:1 - (ae-forgotten-export) The symbol "toAbsoluteDates" needs to be exported by the entry point index.d.ts +// src/plugins/data/server/index.ts:254:20 - (ae-forgotten-export) The symbol "getRequestInspectorStats" needs to be exported by the entry point index.d.ts +// src/plugins/data/server/index.ts:254:20 - (ae-forgotten-export) The symbol "getResponseInspectorStats" needs to be exported by the entry point index.d.ts +// 
src/plugins/data/server/index.ts:254:20 - (ae-forgotten-export) The symbol "tabifyAggResponse" needs to be exported by the entry point index.d.ts +// src/plugins/data/server/index.ts:254:20 - (ae-forgotten-export) The symbol "tabifyGetColumns" needs to be exported by the entry point index.d.ts +// src/plugins/data/server/index.ts:269:5 - (ae-forgotten-export) The symbol "getTotalLoaded" needs to be exported by the entry point index.d.ts +// src/plugins/data/server/index.ts:270:5 - (ae-forgotten-export) The symbol "toSnakeCase" needs to be exported by the entry point index.d.ts +// src/plugins/data/server/index.ts:274:1 - (ae-forgotten-export) The symbol "CidrMask" needs to be exported by the entry point index.d.ts +// src/plugins/data/server/index.ts:275:1 - (ae-forgotten-export) The symbol "dateHistogramInterval" needs to be exported by the entry point index.d.ts +// src/plugins/data/server/index.ts:284:1 - (ae-forgotten-export) The symbol "InvalidEsCalendarIntervalError" needs to be exported by the entry point index.d.ts +// src/plugins/data/server/index.ts:285:1 - (ae-forgotten-export) The symbol "InvalidEsIntervalFormatError" needs to be exported by the entry point index.d.ts +// src/plugins/data/server/index.ts:286:1 - (ae-forgotten-export) The symbol "Ipv4Address" needs to be exported by the entry point index.d.ts +// src/plugins/data/server/index.ts:290:1 - (ae-forgotten-export) The symbol "isValidEsInterval" needs to be exported by the entry point index.d.ts +// src/plugins/data/server/index.ts:291:1 - (ae-forgotten-export) The symbol "isValidInterval" needs to be exported by the entry point index.d.ts +// src/plugins/data/server/index.ts:295:1 - (ae-forgotten-export) The symbol "propFilter" needs to be exported by the entry point index.d.ts +// src/plugins/data/server/index.ts:298:1 - (ae-forgotten-export) The symbol "toAbsoluteDates" needs to be exported by the entry point index.d.ts +// src/plugins/data/server/index.ts:299:1 - (ae-forgotten-export) The symbol "calcAutoIntervalLessThan" needs to be exported by the entry point index.d.ts // src/plugins/data/server/index_patterns/index_patterns_service.ts:58:14 - (ae-forgotten-export) The symbol "IndexPatternsService" needs to be exported by the entry point index.d.ts // src/plugins/data/server/plugin.ts:88:66 - (ae-forgotten-export) The symbol "DataEnhancements" needs to be exported by the entry point index.d.ts // src/plugins/data/server/search/types.ts:104:5 - (ae-forgotten-export) The symbol "ISearchStartSearchSource" needs to be exported by the entry point index.d.ts diff --git a/src/plugins/vis_type_timeseries/common/constants.ts b/src/plugins/vis_type_timeseries/common/constants.ts index 4f24bc273e26..bfcb5e8e15b9 100644 --- a/src/plugins/vis_type_timeseries/common/constants.ts +++ b/src/plugins/vis_type_timeseries/common/constants.ts @@ -19,7 +19,7 @@ export const MAX_BUCKETS_SETTING = 'metrics:max_buckets'; export const INDEXES_SEPARATOR = ','; - +export const AUTO_INTERVAL = 'auto'; export const ROUTES = { VIS_DATA: '/api/metrics/vis/data', }; diff --git a/src/plugins/vis_type_timeseries/common/vis_schema.ts b/src/plugins/vis_type_timeseries/common/vis_schema.ts index 7f17a9c44298..a90fa752ad7d 100644 --- a/src/plugins/vis_type_timeseries/common/vis_schema.ts +++ b/src/plugins/vis_type_timeseries/common/vis_schema.ts @@ -175,6 +175,7 @@ export const seriesItems = schema.object({ separate_axis: numberIntegerOptional, seperate_axis: numberIntegerOptional, series_index_pattern: stringOptionalNullable, + series_max_bars: 
numberIntegerOptional, series_time_field: stringOptionalNullable, series_interval: stringOptionalNullable, series_drop_last_bucket: numberIntegerOptional, @@ -229,6 +230,7 @@ export const panel = schema.object({ ignore_global_filters: numberOptional, ignore_global_filter: numberOptional, index_pattern: stringRequired, + max_bars: numberIntegerOptional, interval: stringRequired, isModelInvalid: schema.maybe(schema.boolean()), legend_position: stringOptionalNullable, diff --git a/src/plugins/vis_type_timeseries/public/application/components/index_pattern.js b/src/plugins/vis_type_timeseries/public/application/components/index_pattern.js index 85f31285df69..e976519dfe63 100644 --- a/src/plugins/vis_type_timeseries/public/application/components/index_pattern.js +++ b/src/plugins/vis_type_timeseries/public/application/components/index_pattern.js @@ -19,7 +19,7 @@ import { get } from 'lodash'; import PropTypes from 'prop-types'; -import React, { useContext } from 'react'; +import React, { useContext, useCallback } from 'react'; import { htmlIdGenerator, EuiFieldText, @@ -27,7 +27,10 @@ import { EuiFlexItem, EuiFormRow, EuiComboBox, + EuiRange, + EuiIconTip, EuiText, + EuiFormLabel, } from '@elastic/eui'; import { FieldSelect } from './aggs/field_select'; import { createSelectHandler } from './lib/create_select_handler'; @@ -35,19 +38,20 @@ import { createTextHandler } from './lib/create_text_handler'; import { YesNo } from './yes_no'; import { KBN_FIELD_TYPES } from '../../../../../plugins/data/public'; import { FormValidationContext } from '../contexts/form_validation_context'; -import { - isGteInterval, - validateReInterval, - isAutoInterval, - AUTO_INTERVAL, -} from './lib/get_interval'; +import { isGteInterval, validateReInterval, isAutoInterval } from './lib/get_interval'; import { i18n } from '@kbn/i18n'; +import { FormattedMessage } from '@kbn/i18n/react'; import { TIME_RANGE_DATA_MODES, TIME_RANGE_MODE_KEY } from '../../../common/timerange_data_modes'; import { PANEL_TYPES } from '../../../common/panel_types'; import { isTimerangeModeEnabled } from '../lib/check_ui_restrictions'; import { VisDataContext } from '../contexts/vis_data_context'; +import { getUISettings } from '../../services'; +import { AUTO_INTERVAL } from '../../../common/constants'; +import { UI_SETTINGS } from '../../../../data/common'; const RESTRICT_FIELDS = [KBN_FIELD_TYPES.DATE]; +const LEVEL_OF_DETAIL_STEPS = 10; +const LEVEL_OF_DETAIL_MIN_BUCKETS = 1; const validateIntervalValue = (intervalValue) => { const isAutoOrGteInterval = isGteInterval(intervalValue) || isAutoInterval(intervalValue); @@ -65,15 +69,36 @@ const htmlId = htmlIdGenerator(); const isEntireTimeRangeActive = (model, isTimeSeries) => !isTimeSeries && model[TIME_RANGE_MODE_KEY] === TIME_RANGE_DATA_MODES.ENTIRE_TIME_RANGE; -export const IndexPattern = ({ fields, prefix, onChange, disabled, model: _model }) => { +export const IndexPattern = ({ + fields, + prefix, + onChange, + disabled, + model: _model, + allowLevelofDetail, +}) => { + const config = getUISettings(); + const handleSelectChange = createSelectHandler(onChange); const handleTextChange = createTextHandler(onChange); + const timeFieldName = `${prefix}time_field`; const indexPatternName = `${prefix}index_pattern`; const intervalName = `${prefix}interval`; + const maxBarsName = `${prefix}max_bars`; const dropBucketName = `${prefix}drop_last_bucket`; const updateControlValidity = useContext(FormValidationContext); const uiRestrictions = get(useContext(VisDataContext), 'uiRestrictions'); + 
const maxBarsUiSettings = config.get(UI_SETTINGS.HISTOGRAM_MAX_BARS); + + const handleMaxBarsChange = useCallback( + ({ target }) => { + onChange({ + [maxBarsName]: Math.max(LEVEL_OF_DETAIL_MIN_BUCKETS, target.value), + }); + }, + [onChange, maxBarsName] + ); const timeRangeOptions = [ { @@ -97,10 +122,12 @@ export const IndexPattern = ({ fields, prefix, onChange, disabled, model: _model [indexPatternName]: '*', [intervalName]: AUTO_INTERVAL, [dropBucketName]: 1, + [maxBarsName]: config.get(UI_SETTINGS.HISTOGRAM_BAR_TARGET), [TIME_RANGE_MODE_KEY]: timeRangeOptions[0].value, }; const model = { ...defaults, ..._model }; + const isDefaultIndexPatternUsed = model.default_index_pattern && !model[indexPatternName]; const intervalValidation = validateIntervalValue(model[intervalName]); const selectedTimeRangeOption = timeRangeOptions.find( @@ -229,6 +256,77 @@ export const IndexPattern = ({ fields, prefix, onChange, disabled, model: _model + {allowLevelofDetail && ( + + + + {' '} + + } + type="questionInCircle" + /> + + } + > + + + + + + + + + + + + + + + + + + + )}
); }; @@ -245,4 +343,5 @@ IndexPattern.propTypes = { prefix: PropTypes.string, disabled: PropTypes.bool, className: PropTypes.string, + allowLevelofDetail: PropTypes.bool, }; diff --git a/src/plugins/vis_type_timeseries/public/application/components/lib/get_interval.js b/src/plugins/vis_type_timeseries/public/application/components/lib/get_interval.js index c1d484765f4c..f54d52620e67 100644 --- a/src/plugins/vis_type_timeseries/public/application/components/lib/get_interval.js +++ b/src/plugins/vis_type_timeseries/public/application/components/lib/get_interval.js @@ -22,8 +22,7 @@ import { get } from 'lodash'; import { search } from '../../../../../../plugins/data/public'; const { parseEsInterval } = search.aggs; import { GTE_INTERVAL_RE } from '../../../../common/interval_regexp'; - -export const AUTO_INTERVAL = 'auto'; +import { AUTO_INTERVAL } from '../../../../common/constants'; export const unitLookup = { s: i18n.translate('visTypeTimeseries.getInterval.secondsLabel', { defaultMessage: 'seconds' }), diff --git a/src/plugins/vis_type_timeseries/public/application/components/panel_config/timeseries.js b/src/plugins/vis_type_timeseries/public/application/components/panel_config/timeseries.js index 03da52b10f08..180411dd13a3 100644 --- a/src/plugins/vis_type_timeseries/public/application/components/panel_config/timeseries.js +++ b/src/plugins/vis_type_timeseries/public/application/components/panel_config/timeseries.js @@ -193,6 +193,7 @@ class TimeseriesPanelConfigUi extends Component { fields={this.props.fields} model={this.props.model} onChange={this.props.onChange} + allowLevelofDetail={true} /> diff --git a/src/plugins/vis_type_timeseries/public/application/components/vis_editor_visualization.js b/src/plugins/vis_type_timeseries/public/application/components/vis_editor_visualization.js index 9742d817f7c0..7893d5ba6d15 100644 --- a/src/plugins/vis_type_timeseries/public/application/components/vis_editor_visualization.js +++ b/src/plugins/vis_type_timeseries/public/application/components/vis_editor_visualization.js @@ -26,8 +26,8 @@ import { convertIntervalIntoUnit, isAutoInterval, isGteInterval, - AUTO_INTERVAL, } from './lib/get_interval'; +import { AUTO_INTERVAL } from '../../../common/constants'; import { PANEL_TYPES } from '../../../common/panel_types'; const MIN_CHART_HEIGHT = 300; diff --git a/src/plugins/vis_type_timeseries/public/application/components/vis_types/timeseries/config.js b/src/plugins/vis_type_timeseries/public/application/components/vis_types/timeseries/config.js index 59277257c0c9..25561cfe1dc0 100644 --- a/src/plugins/vis_type_timeseries/public/application/components/vis_types/timeseries/config.js +++ b/src/plugins/vis_type_timeseries/public/application/components/vis_types/timeseries/config.js @@ -554,7 +554,7 @@ export const TimeseriesConfig = injectI18n(function (props) { {...props} prefix="series_" disabled={!model.override_index_pattern} - with-interval={true} + allowLevelofDetail={true} /> diff --git a/src/plugins/vis_type_timeseries/server/lib/vis_data/annotations/get_request_params.js b/src/plugins/vis_type_timeseries/server/lib/vis_data/annotations/get_request_params.js index d11e9316c959..1b2334c7dea9 100644 --- a/src/plugins/vis_type_timeseries/server/lib/vis_data/annotations/get_request_params.js +++ b/src/plugins/vis_type_timeseries/server/lib/vis_data/annotations/get_request_params.js @@ -19,6 +19,7 @@ import { buildAnnotationRequest } from './build_request_body'; import { getEsShardTimeout } from '../helpers/get_es_shard_timeout'; import { 
getIndexPatternObject } from '../helpers/get_index_pattern'; +import { UI_SETTINGS } from '../../../../../data/common'; export async function getAnnotationRequestParams( req, @@ -27,6 +28,7 @@ export async function getAnnotationRequestParams( esQueryConfig, capabilities ) { + const uiSettings = req.getUiSettingsService(); const esShardTimeout = await getEsShardTimeout(req); const indexPattern = annotation.index_pattern; const { indexPatternObject, indexPatternString } = await getIndexPatternObject(req, indexPattern); @@ -36,7 +38,11 @@ export async function getAnnotationRequestParams( annotation, esQueryConfig, indexPatternObject, - capabilities + capabilities, + { + maxBarsUiSettings: await uiSettings.get(UI_SETTINGS.HISTOGRAM_MAX_BARS), + barTargetUiSettings: await uiSettings.get(UI_SETTINGS.HISTOGRAM_BAR_TARGET), + } ); return { diff --git a/src/plugins/vis_type_timeseries/server/lib/vis_data/get_interval_and_timefield.js b/src/plugins/vis_type_timeseries/server/lib/vis_data/get_interval_and_timefield.js index 82a2ef66cb1c..9714b551ea82 100644 --- a/src/plugins/vis_type_timeseries/server/lib/vis_data/get_interval_and_timefield.js +++ b/src/plugins/vis_type_timeseries/server/lib/vis_data/get_interval_and_timefield.js @@ -17,6 +17,8 @@ * under the License. */ +import { AUTO_INTERVAL } from '../../../common/constants'; + const DEFAULT_TIME_FIELD = '@timestamp'; export function getIntervalAndTimefield(panel, series = {}, indexPatternObject) { @@ -26,10 +28,18 @@ export function getIntervalAndTimefield(panel, series = {}, indexPatternObject) (series.override_index_pattern && series.series_time_field) || panel.time_field || getDefaultTimeField(); - const interval = (series.override_index_pattern && series.series_interval) || panel.interval; + + let interval = panel.interval; + let maxBars = panel.max_bars; + + if (series.override_index_pattern) { + interval = series.series_interval; + maxBars = series.series_max_bars; + } return { timeField, - interval, + interval: interval || AUTO_INTERVAL, + maxBars, }; } diff --git a/src/plugins/vis_type_timeseries/server/lib/vis_data/get_table_data.js b/src/plugins/vis_type_timeseries/server/lib/vis_data/get_table_data.js index 3791eb229db5..eaaa5a9605b4 100644 --- a/src/plugins/vis_type_timeseries/server/lib/vis_data/get_table_data.js +++ b/src/plugins/vis_type_timeseries/server/lib/vis_data/get_table_data.js @@ -22,6 +22,7 @@ import { get } from 'lodash'; import { processBucket } from './table/process_bucket'; import { getEsQueryConfig } from './helpers/get_es_query_uisettings'; import { getIndexPatternObject } from './helpers/get_index_pattern'; +import { UI_SETTINGS } from '../../../../data/common'; export async function getTableData(req, panel) { const panelIndexPattern = panel.index_pattern; @@ -39,7 +40,12 @@ export async function getTableData(req, panel) { }; try { - const body = buildRequestBody(req, panel, esQueryConfig, indexPatternObject, capabilities); + const uiSettings = req.getUiSettingsService(); + const body = buildRequestBody(req, panel, esQueryConfig, indexPatternObject, capabilities, { + maxBarsUiSettings: await uiSettings.get(UI_SETTINGS.HISTOGRAM_MAX_BARS), + barTargetUiSettings: await uiSettings.get(UI_SETTINGS.HISTOGRAM_BAR_TARGET), + }); + const [resp] = await searchStrategy.search(req, [ { body, diff --git a/src/plugins/vis_type_timeseries/server/lib/vis_data/helpers/calculate_auto.js b/src/plugins/vis_type_timeseries/server/lib/vis_data/helpers/calculate_auto.js deleted file mode 100644 index 0c3555adff1a..000000000000 --- 
a/src/plugins/vis_type_timeseries/server/lib/vis_data/helpers/calculate_auto.js +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import moment from 'moment'; -const d = moment.duration; - -const roundingRules = [ - [d(500, 'ms'), d(100, 'ms')], - [d(5, 'second'), d(1, 'second')], - [d(7.5, 'second'), d(5, 'second')], - [d(15, 'second'), d(10, 'second')], - [d(45, 'second'), d(30, 'second')], - [d(3, 'minute'), d(1, 'minute')], - [d(9, 'minute'), d(5, 'minute')], - [d(20, 'minute'), d(10, 'minute')], - [d(45, 'minute'), d(30, 'minute')], - [d(2, 'hour'), d(1, 'hour')], - [d(6, 'hour'), d(3, 'hour')], - [d(24, 'hour'), d(12, 'hour')], - [d(1, 'week'), d(1, 'd')], - [d(3, 'week'), d(1, 'week')], - [d(1, 'year'), d(1, 'month')], - [Infinity, d(1, 'year')], -]; - -const revRoundingRules = roundingRules.slice(0).reverse(); - -function find(rules, check, last) { - function pick(buckets, duration) { - const target = duration / buckets; - let lastResp = null; - - for (let i = 0; i < rules.length; i++) { - const rule = rules[i]; - const resp = check(rule[0], rule[1], target); - - if (resp == null) { - if (!last) continue; - if (lastResp) return lastResp; - break; - } - - if (!last) return resp; - lastResp = resp; - } - - // fallback to just a number of milliseconds, ensure ms is >= 1 - const ms = Math.max(Math.floor(target), 1); - return moment.duration(ms, 'ms'); - } - - return (buckets, duration) => { - const interval = pick(buckets, duration); - if (interval) return moment.duration(interval._data); - }; -} - -export const calculateAuto = { - near: find( - revRoundingRules, - function near(bound, interval, target) { - if (bound > target) return interval; - }, - true - ), - - lessThan: find(revRoundingRules, function lessThan(_bound, interval, target) { - if (interval < target) return interval; - }), - - atLeast: find(revRoundingRules, function atLeast(_bound, interval, target) { - if (interval <= target) return interval; - }), -}; diff --git a/src/plugins/vis_type_timeseries/server/lib/vis_data/helpers/get_bucket_size.js b/src/plugins/vis_type_timeseries/server/lib/vis_data/helpers/get_bucket_size.js index c021ba3cebc6..4384da58fb56 100644 --- a/src/plugins/vis_type_timeseries/server/lib/vis_data/helpers/get_bucket_size.js +++ b/src/plugins/vis_type_timeseries/server/lib/vis_data/helpers/get_bucket_size.js @@ -17,15 +17,15 @@ * under the License. 
*/ -import { calculateAuto } from './calculate_auto'; import { getUnitValue, parseInterval, convertIntervalToUnit, ASCENDING_UNIT_ORDER, } from './unit_to_seconds'; -import { getTimerangeDuration } from './get_timerange'; +import { getTimerange } from './get_timerange'; import { INTERVAL_STRING_RE, GTE_INTERVAL_RE } from '../../../../common/interval_regexp'; +import { search } from '../../../../../data/server'; const calculateBucketData = (timeInterval, capabilities) => { let intervalString = capabilities @@ -65,14 +65,15 @@ const calculateBucketData = (timeInterval, capabilities) => { }; }; -const calculateBucketSizeForAutoInterval = (req) => { - const duration = getTimerangeDuration(req); +const calculateBucketSizeForAutoInterval = (req, maxBars) => { + const { from, to } = getTimerange(req); + const timerange = to.valueOf() - from.valueOf(); - return calculateAuto.near(100, duration).asSeconds(); + return search.aggs.calcAutoIntervalLessThan(maxBars, timerange).asSeconds(); }; -export const getBucketSize = (req, interval, capabilities) => { - const bucketSize = calculateBucketSizeForAutoInterval(req); +export const getBucketSize = (req, interval, capabilities, maxBars) => { + const bucketSize = calculateBucketSizeForAutoInterval(req, maxBars); let intervalString = `${bucketSize}s`; const gteAutoMatch = Boolean(interval) && interval.match(GTE_INTERVAL_RE); diff --git a/src/plugins/vis_type_timeseries/server/lib/vis_data/helpers/get_bucket_size.test.js b/src/plugins/vis_type_timeseries/server/lib/vis_data/helpers/get_bucket_size.test.js index 99bef2de6b72..8810ccd406be 100644 --- a/src/plugins/vis_type_timeseries/server/lib/vis_data/helpers/get_bucket_size.test.js +++ b/src/plugins/vis_type_timeseries/server/lib/vis_data/helpers/get_bucket_size.test.js @@ -30,37 +30,43 @@ describe('getBucketSize', () => { }; test('returns auto calculated buckets', () => { - const result = getBucketSize(req, 'auto'); + const result = getBucketSize(req, 'auto', undefined, 100); + expect(result).toHaveProperty('bucketSize', 30); expect(result).toHaveProperty('intervalString', '30s'); }); test('returns overridden buckets (1s)', () => { - const result = getBucketSize(req, '1s'); + const result = getBucketSize(req, '1s', undefined, 100); + expect(result).toHaveProperty('bucketSize', 1); expect(result).toHaveProperty('intervalString', '1s'); }); test('returns overridden buckets (10m)', () => { - const result = getBucketSize(req, '10m'); + const result = getBucketSize(req, '10m', undefined, 100); + expect(result).toHaveProperty('bucketSize', 600); expect(result).toHaveProperty('intervalString', '10m'); }); test('returns overridden buckets (1d)', () => { - const result = getBucketSize(req, '1d'); + const result = getBucketSize(req, '1d', undefined, 100); + expect(result).toHaveProperty('bucketSize', 86400); expect(result).toHaveProperty('intervalString', '1d'); }); test('returns overridden buckets (>=2d)', () => { - const result = getBucketSize(req, '>=2d'); + const result = getBucketSize(req, '>=2d', undefined, 100); + expect(result).toHaveProperty('bucketSize', 86400 * 2); expect(result).toHaveProperty('intervalString', '2d'); }); test('returns overridden buckets (>=10s)', () => { - const result = getBucketSize(req, '>=10s'); + const result = getBucketSize(req, '>=10s', undefined, 100); + expect(result).toHaveProperty('bucketSize', 30); expect(result).toHaveProperty('intervalString', '30s'); }); diff --git a/src/plugins/vis_type_timeseries/server/lib/vis_data/helpers/get_timerange.test.js 
b/src/plugins/vis_type_timeseries/server/lib/vis_data/helpers/get_timerange.test.ts similarity index 92% rename from src/plugins/vis_type_timeseries/server/lib/vis_data/helpers/get_timerange.test.js rename to src/plugins/vis_type_timeseries/server/lib/vis_data/helpers/get_timerange.test.ts index 1a1b12c65199..183ce50dd4a0 100644 --- a/src/plugins/vis_type_timeseries/server/lib/vis_data/helpers/get_timerange.test.js +++ b/src/plugins/vis_type_timeseries/server/lib/vis_data/helpers/get_timerange.test.ts @@ -17,20 +17,22 @@ * under the License. */ -import { getTimerange } from './get_timerange'; import moment from 'moment'; +import { getTimerange } from './get_timerange'; +import { ReqFacade, VisPayload } from '../../..'; describe('getTimerange(req)', () => { test('should return a moment object for to and from', () => { - const req = { + const req = ({ payload: { timerange: { min: '2017-01-01T00:00:00Z', max: '2017-01-01T01:00:00Z', }, }, - }; + } as unknown) as ReqFacade; const { from, to } = getTimerange(req); + expect(moment.isMoment(from)).toEqual(true); expect(moment.isMoment(to)).toEqual(true); expect(moment.utc('2017-01-01T00:00:00Z').isSame(from)).toEqual(true); diff --git a/src/plugins/vis_type_timeseries/server/lib/vis_data/helpers/get_timerange.js b/src/plugins/vis_type_timeseries/server/lib/vis_data/helpers/get_timerange.ts similarity index 75% rename from src/plugins/vis_type_timeseries/server/lib/vis_data/helpers/get_timerange.js rename to src/plugins/vis_type_timeseries/server/lib/vis_data/helpers/get_timerange.ts index 682befe9ab05..54f3110b4580 100644 --- a/src/plugins/vis_type_timeseries/server/lib/vis_data/helpers/get_timerange.js +++ b/src/plugins/vis_type_timeseries/server/lib/vis_data/helpers/get_timerange.ts @@ -17,19 +17,14 @@ * under the License. 
*/ -import moment from 'moment'; +import { utc } from 'moment'; +import { ReqFacade, VisPayload } from '../../..'; -export const getTimerange = (req) => { +export const getTimerange = (req: ReqFacade) => { const { min, max } = req.payload.timerange; return { - from: moment.utc(min), - to: moment.utc(max), + from: utc(min), + to: utc(max), }; }; - -export const getTimerangeDuration = (req) => { - const { from, to } = getTimerange(req); - - return moment.duration(to.valueOf() - from.valueOf(), 'ms'); -}; diff --git a/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/annotations/date_histogram.js b/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/annotations/date_histogram.js index 4b611e46f158..617a75f6bd59 100644 --- a/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/annotations/date_histogram.js +++ b/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/annotations/date_histogram.js @@ -29,11 +29,17 @@ export function dateHistogram( annotation, esQueryConfig, indexPatternObject, - capabilities + capabilities, + { barTargetUiSettings } ) { return (next) => (doc) => { const timeField = annotation.time_field; - const { bucketSize, intervalString } = getBucketSize(req, 'auto', capabilities); + const { bucketSize, intervalString } = getBucketSize( + req, + 'auto', + capabilities, + barTargetUiSettings + ); const { from, to } = getTimerange(req); const timezone = capabilities.searchTimezone; diff --git a/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/annotations/query.js b/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/annotations/query.js index 127687bf11fe..cf02f601ea5f 100644 --- a/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/annotations/query.js +++ b/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/annotations/query.js @@ -21,10 +21,18 @@ import { getBucketSize } from '../../helpers/get_bucket_size'; import { getTimerange } from '../../helpers/get_timerange'; import { esQuery } from '../../../../../../data/server'; -export function query(req, panel, annotation, esQueryConfig, indexPattern, capabilities) { +export function query( + req, + panel, + annotation, + esQueryConfig, + indexPattern, + capabilities, + { barTargetUiSettings } +) { return (next) => (doc) => { const timeField = annotation.time_field; - const { bucketSize } = getBucketSize(req, 'auto', capabilities); + const { bucketSize } = getBucketSize(req, 'auto', capabilities, barTargetUiSettings); const { from, to } = getTimerange(req); doc.size = 0; diff --git a/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/series/date_histogram.js b/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/series/date_histogram.js index f1e58b8e4af2..98c683bda1fd 100644 --- a/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/series/date_histogram.js +++ b/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/series/date_histogram.js @@ -25,10 +25,27 @@ import { isLastValueTimerangeMode } from '../../helpers/get_timerange_mode'; import { search } from '../../../../../../../plugins/data/server'; const { dateHistogramInterval } = search.aggs; -export function dateHistogram(req, panel, series, esQueryConfig, indexPatternObject, capabilities) { +export function dateHistogram( + req, + panel, + series, + esQueryConfig, + indexPatternObject, + capabilities, + { maxBarsUiSettings, barTargetUiSettings } 
+) { return (next) => (doc) => { - const { timeField, interval } = getIntervalAndTimefield(panel, series, indexPatternObject); - const { bucketSize, intervalString } = getBucketSize(req, interval, capabilities); + const { timeField, interval, maxBars } = getIntervalAndTimefield( + panel, + series, + indexPatternObject + ); + const { bucketSize, intervalString } = getBucketSize( + req, + interval, + capabilities, + maxBars ? Math.min(maxBarsUiSettings, maxBars) : barTargetUiSettings + ); const getDateHistogramForLastBucketMode = () => { const { from, to } = offsetTime(req, series.offset_time); diff --git a/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/series/date_histogram.test.js b/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/series/date_histogram.test.js index 45cad1195fc7..aa95a79a6279 100644 --- a/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/series/date_histogram.test.js +++ b/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/series/date_histogram.test.js @@ -27,6 +27,7 @@ describe('dateHistogram(req, panel, series)', () => { let capabilities; let config; let indexPatternObject; + let uiSettings; beforeEach(() => { req = { @@ -50,19 +51,29 @@ describe('dateHistogram(req, panel, series)', () => { }; indexPatternObject = {}; capabilities = new DefaultSearchCapabilities(req); + uiSettings = { maxBarsUiSettings: 100, barTargetUiSettings: 50 }; }); test('calls next when finished', () => { const next = jest.fn(); - dateHistogram(req, panel, series, config, indexPatternObject, capabilities)(next)({}); + dateHistogram(req, panel, series, config, indexPatternObject, capabilities, uiSettings)(next)( + {} + ); expect(next.mock.calls.length).toEqual(1); }); test('returns valid date histogram', () => { const next = (doc) => doc; - const doc = dateHistogram(req, panel, series, config, indexPatternObject, capabilities)(next)( - {} - ); + const doc = dateHistogram( + req, + panel, + series, + config, + indexPatternObject, + capabilities, + uiSettings + )(next)({}); + expect(doc).toEqual({ aggs: { test: { @@ -94,9 +105,16 @@ describe('dateHistogram(req, panel, series)', () => { test('returns valid date histogram (offset by 1h)', () => { series.offset_time = '1h'; const next = (doc) => doc; - const doc = dateHistogram(req, panel, series, config, indexPatternObject, capabilities)(next)( - {} - ); + const doc = dateHistogram( + req, + panel, + series, + config, + indexPatternObject, + capabilities, + uiSettings + )(next)({}); + expect(doc).toEqual({ aggs: { test: { @@ -131,9 +149,16 @@ describe('dateHistogram(req, panel, series)', () => { series.series_time_field = 'timestamp'; series.series_interval = '20s'; const next = (doc) => doc; - const doc = dateHistogram(req, panel, series, config, indexPatternObject, capabilities)(next)( - {} - ); + const doc = dateHistogram( + req, + panel, + series, + config, + indexPatternObject, + capabilities, + uiSettings + )(next)({}); + expect(doc).toEqual({ aggs: { test: { @@ -168,9 +193,15 @@ describe('dateHistogram(req, panel, series)', () => { panel.type = 'timeseries'; const next = (doc) => doc; - const doc = dateHistogram(req, panel, series, config, indexPatternObject, capabilities)(next)( - {} - ); + const doc = dateHistogram( + req, + panel, + series, + config, + indexPatternObject, + capabilities, + uiSettings + )(next)({}); expect(doc.aggs.test.aggs.timeseries.auto_date_histogram).toBeUndefined(); expect(doc.aggs.test.aggs.timeseries.date_histogram).toBeDefined(); 
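// Illustrative sketch (not part of this patch): the fourth argument handed to
// getBucketSize above is resolved from the panel/series `max_bars` value together with
// the UI_SETTINGS.HISTOGRAM_MAX_BARS / UI_SETTINGS.HISTOGRAM_BAR_TARGET advanced settings.
// A hypothetical standalone helper expressing the same rule:
const resolveBucketCap = (maxBars, maxBarsUiSettings, barTargetUiSettings) =>
  maxBars ? Math.min(maxBarsUiSettings, maxBars) : barTargetUiSettings;

// resolveBucketCap(undefined, 100, 50); // -> 50, no per-panel value, fall back to the bar target
// resolveBucketCap(25, 100, 50);        // -> 25, a per-panel value below the cap wins
// resolveBucketCap(1000, 100, 50);      // -> 100, clamped by HISTOGRAM_MAX_BARS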
@@ -180,9 +211,16 @@ describe('dateHistogram(req, panel, series)', () => { panel.time_range_mode = 'entire_time_range'; const next = (doc) => doc; - const doc = dateHistogram(req, panel, series, config, indexPatternObject, capabilities)(next)( - {} - ); + const doc = dateHistogram( + req, + panel, + series, + config, + indexPatternObject, + capabilities, + uiSettings + )(next)({}); + expect(doc).toEqual({ aggs: { test: { diff --git a/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/series/metric_buckets.js b/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/series/metric_buckets.js index 800145dac546..023ee054a5e1 100644 --- a/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/series/metric_buckets.js +++ b/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/series/metric_buckets.js @@ -21,10 +21,19 @@ import { getBucketSize } from '../../helpers/get_bucket_size'; import { bucketTransform } from '../../helpers/bucket_transform'; import { getIntervalAndTimefield } from '../../get_interval_and_timefield'; -export function metricBuckets(req, panel, series, esQueryConfig, indexPatternObject, capabilities) { +export function metricBuckets( + req, + panel, + series, + esQueryConfig, + indexPatternObject, + capabilities, + { barTargetUiSettings } +) { return (next) => (doc) => { const { interval } = getIntervalAndTimefield(panel, series, indexPatternObject); - const { intervalString } = getBucketSize(req, interval, capabilities); + const { intervalString } = getBucketSize(req, interval, capabilities, barTargetUiSettings); + series.metrics .filter((row) => !/_bucket$/.test(row.type) && !/^series/.test(row.type)) .forEach((metric) => { diff --git a/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/series/metric_buckets.test.js b/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/series/metric_buckets.test.js index 1ac4329b60f8..2154d2257815 100644 --- a/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/series/metric_buckets.test.js +++ b/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/series/metric_buckets.test.js @@ -20,56 +20,64 @@ import { metricBuckets } from './metric_buckets'; describe('metricBuckets(req, panel, series)', () => { - let panel; - let series; - let req; + let metricBucketsProcessor; + beforeEach(() => { - panel = { - time_field: 'timestamp', - }; - series = { - id: 'test', - split_mode: 'terms', - terms_size: 10, - terms_field: 'host', - metrics: [ - { - id: 'metric-1', - type: 'max', - field: 'io', - }, - { - id: 'metric-2', - type: 'derivative', - field: 'metric-1', - unit: '1s', - }, - { - id: 'metric-3', - type: 'avg_bucket', - field: 'metric-2', - }, - ], - }; - req = { - payload: { - timerange: { - min: '2017-01-01T00:00:00Z', - max: '2017-01-01T01:00:00Z', + metricBucketsProcessor = metricBuckets( + { + payload: { + timerange: { + min: '2017-01-01T00:00:00Z', + max: '2017-01-01T01:00:00Z', + }, }, }, - }; + { + time_field: 'timestamp', + }, + { + id: 'test', + split_mode: 'terms', + terms_size: 10, + terms_field: 'host', + metrics: [ + { + id: 'metric-1', + type: 'max', + field: 'io', + }, + { + id: 'metric-2', + type: 'derivative', + field: 'metric-1', + unit: '1s', + }, + { + id: 'metric-3', + type: 'avg_bucket', + field: 'metric-2', + }, + ], + }, + {}, + {}, + undefined, + { + barTargetUiSettings: 50, + } + ); }); test('calls next when finished', () => { const next = jest.fn(); - metricBuckets(req, 
panel, series)(next)({}); + metricBucketsProcessor(next)({}); expect(next.mock.calls.length).toEqual(1); }); test('returns metric aggs', () => { const next = (doc) => doc; - const doc = metricBuckets(req, panel, series)(next)({}); + const doc = metricBucketsProcessor(next)({}); + expect(doc).toEqual({ aggs: { test: { diff --git a/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/series/positive_rate.js b/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/series/positive_rate.js index 4a79ec229587..c16e0fd3aaf1 100644 --- a/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/series/positive_rate.js +++ b/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/series/positive_rate.js @@ -57,10 +57,19 @@ export const createPositiveRate = (doc, intervalString, aggRoot) => (metric) => overwrite(doc, `${aggRoot}.timeseries.aggs.${metric.id}`, positiveOnlyBucket); }; -export function positiveRate(req, panel, series, esQueryConfig, indexPatternObject, capabilities) { +export function positiveRate( + req, + panel, + series, + esQueryConfig, + indexPatternObject, + capabilities, + { barTargetUiSettings } +) { return (next) => (doc) => { const { interval } = getIntervalAndTimefield(panel, series, indexPatternObject); - const { intervalString } = getBucketSize(req, interval, capabilities); + const { intervalString } = getBucketSize(req, interval, capabilities, barTargetUiSettings); + if (series.metrics.some(filter)) { series.metrics .filter(filter) diff --git a/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/series/positive_rate.test.js b/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/series/positive_rate.test.js index 7c0f43adf02f..d891fc01bb26 100644 --- a/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/series/positive_rate.test.js +++ b/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/series/positive_rate.test.js @@ -22,6 +22,8 @@ describe('positiveRate(req, panel, series)', () => { let panel; let series; let req; + let uiSettings; + beforeEach(() => { panel = { time_field: 'timestamp', @@ -48,17 +50,20 @@ describe('positiveRate(req, panel, series)', () => { }, }, }; + uiSettings = { + barTargetUiSettings: 50, + }; }); test('calls next when finished', () => { const next = jest.fn(); - positiveRate(req, panel, series)(next)({}); + positiveRate(req, panel, series, {}, {}, undefined, uiSettings)(next)({}); expect(next.mock.calls.length).toEqual(1); }); test('returns positive rate aggs', () => { const next = (doc) => doc; - const doc = positiveRate(req, panel, series)(next)({}); + const doc = positiveRate(req, panel, series, {}, {}, undefined, uiSettings)(next)({}); expect(doc).toEqual({ aggs: { test: { diff --git a/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/series/sibling_buckets.js b/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/series/sibling_buckets.js index f2b58822e68b..f69473b613d1 100644 --- a/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/series/sibling_buckets.js +++ b/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/series/sibling_buckets.js @@ -28,11 +28,13 @@ export function siblingBuckets( series, esQueryConfig, indexPatternObject, - capabilities + capabilities, + { barTargetUiSettings } ) { return (next) => (doc) => { const { interval } = getIntervalAndTimefield(panel, series, indexPatternObject); - const { bucketSize } = 
getBucketSize(req, interval, capabilities); + const { bucketSize } = getBucketSize(req, interval, capabilities, barTargetUiSettings); + series.metrics .filter((row) => /_bucket$/.test(row.type)) .forEach((metric) => { diff --git a/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/series/sibling_buckets.test.js b/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/series/sibling_buckets.test.js index 8f84023ce0c7..48714e83341e 100644 --- a/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/series/sibling_buckets.test.js +++ b/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/series/sibling_buckets.test.js @@ -23,6 +23,8 @@ describe('siblingBuckets(req, panel, series)', () => { let panel; let series; let req; + let uiSettings; + beforeEach(() => { panel = { time_field: 'timestamp', @@ -53,17 +55,21 @@ describe('siblingBuckets(req, panel, series)', () => { }, }, }; + uiSettings = { + barTargetUiSettings: 50, + }; }); test('calls next when finished', () => { const next = jest.fn(); - siblingBuckets(req, panel, series)(next)({}); + siblingBuckets(req, panel, series, {}, {}, undefined, uiSettings)(next)({}); expect(next.mock.calls.length).toEqual(1); }); test('returns sibling aggs', () => { const next = (doc) => doc; - const doc = siblingBuckets(req, panel, series)(next)({}); + const doc = siblingBuckets(req, panel, series, {}, {}, undefined, uiSettings)(next)({}); + expect(doc).toEqual({ aggs: { test: { diff --git a/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/table/date_histogram.js b/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/table/date_histogram.js index 947e48ed2cab..ba65e583cc09 100644 --- a/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/table/date_histogram.js +++ b/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/table/date_histogram.js @@ -26,7 +26,14 @@ import { calculateAggRoot } from './calculate_agg_root'; import { search } from '../../../../../../../plugins/data/server'; const { dateHistogramInterval } = search.aggs; -export function dateHistogram(req, panel, esQueryConfig, indexPatternObject, capabilities) { +export function dateHistogram( + req, + panel, + esQueryConfig, + indexPatternObject, + capabilities, + { barTargetUiSettings } +) { return (next) => (doc) => { const { timeField, interval } = getIntervalAndTimefield(panel, {}, indexPatternObject); const meta = { @@ -34,7 +41,12 @@ export function dateHistogram(req, panel, esQueryConfig, indexPatternObject, cap }; const getDateHistogramForLastBucketMode = () => { - const { bucketSize, intervalString } = getBucketSize(req, interval, capabilities); + const { bucketSize, intervalString } = getBucketSize( + req, + interval, + capabilities, + barTargetUiSettings + ); const { from, to } = getTimerange(req); const timezone = capabilities.searchTimezone; diff --git a/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/table/metric_buckets.js b/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/table/metric_buckets.js index ba2c09e93e7e..fe6a8b537d64 100644 --- a/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/table/metric_buckets.js +++ b/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/table/metric_buckets.js @@ -23,10 +23,18 @@ import { bucketTransform } from '../../helpers/bucket_transform'; import { getIntervalAndTimefield } from '../../get_interval_and_timefield'; 
import { calculateAggRoot } from './calculate_agg_root'; -export function metricBuckets(req, panel, esQueryConfig, indexPatternObject) { +export function metricBuckets( + req, + panel, + esQueryConfig, + indexPatternObject, + capabilities, + { barTargetUiSettings } +) { return (next) => (doc) => { const { interval } = getIntervalAndTimefield(panel, {}, indexPatternObject); - const { intervalString } = getBucketSize(req, interval); + const { intervalString } = getBucketSize(req, interval, capabilities, barTargetUiSettings); + panel.series.forEach((column) => { const aggRoot = calculateAggRoot(doc, column); column.metrics diff --git a/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/table/positive_rate.js b/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/table/positive_rate.js index b219f84deef8..6cf165d124e2 100644 --- a/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/table/positive_rate.js +++ b/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/table/positive_rate.js @@ -22,10 +22,18 @@ import { getIntervalAndTimefield } from '../../get_interval_and_timefield'; import { calculateAggRoot } from './calculate_agg_root'; import { createPositiveRate, filter } from '../series/positive_rate'; -export function positiveRate(req, panel, esQueryConfig, indexPatternObject) { +export function positiveRate( + req, + panel, + esQueryConfig, + indexPatternObject, + capabilities, + { barTargetUiSettings } +) { return (next) => (doc) => { const { interval } = getIntervalAndTimefield(panel, {}, indexPatternObject); - const { intervalString } = getBucketSize(req, interval); + const { intervalString } = getBucketSize(req, interval, capabilities, barTargetUiSettings); + panel.series.forEach((column) => { const aggRoot = calculateAggRoot(doc, column); column.metrics.filter(filter).forEach(createPositiveRate(doc, intervalString, aggRoot)); diff --git a/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/table/sibling_buckets.js b/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/table/sibling_buckets.js index 1b14ffe34a94..ba08b18256de 100644 --- a/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/table/sibling_buckets.js +++ b/src/plugins/vis_type_timeseries/server/lib/vis_data/request_processors/table/sibling_buckets.js @@ -23,10 +23,18 @@ import { bucketTransform } from '../../helpers/bucket_transform'; import { getIntervalAndTimefield } from '../../get_interval_and_timefield'; import { calculateAggRoot } from './calculate_agg_root'; -export function siblingBuckets(req, panel, esQueryConfig, indexPatternObject) { +export function siblingBuckets( + req, + panel, + esQueryConfig, + indexPatternObject, + capabilities, + { barTargetUiSettings } +) { return (next) => (doc) => { const { interval } = getIntervalAndTimefield(panel, {}, indexPatternObject); - const { bucketSize } = getBucketSize(req, interval); + const { bucketSize } = getBucketSize(req, interval, capabilities, barTargetUiSettings); + panel.series.forEach((column) => { const aggRoot = calculateAggRoot(doc, column); column.metrics diff --git a/src/plugins/vis_type_timeseries/server/lib/vis_data/series/build_request_body.test.ts b/src/plugins/vis_type_timeseries/server/lib/vis_data/series/build_request_body.test.ts index 0c75e6ef1c5b..6b2ef320d54b 100644 --- a/src/plugins/vis_type_timeseries/server/lib/vis_data/series/build_request_body.test.ts +++ 
b/src/plugins/vis_type_timeseries/server/lib/vis_data/series/build_request_body.test.ts @@ -97,7 +97,8 @@ describe('buildRequestBody(req)', () => { series, config, indexPatternObject, - capabilities + capabilities, + { barTargetUiSettings: 50 } ); expect(doc).toEqual({ diff --git a/src/plugins/vis_type_timeseries/server/lib/vis_data/series/get_request_params.js b/src/plugins/vis_type_timeseries/server/lib/vis_data/series/get_request_params.js index 4c653ea49e7c..3804b1407b08 100644 --- a/src/plugins/vis_type_timeseries/server/lib/vis_data/series/get_request_params.js +++ b/src/plugins/vis_type_timeseries/server/lib/vis_data/series/get_request_params.js @@ -19,18 +19,25 @@ import { buildRequestBody } from './build_request_body'; import { getEsShardTimeout } from '../helpers/get_es_shard_timeout'; import { getIndexPatternObject } from '../helpers/get_index_pattern'; +import { UI_SETTINGS } from '../../../../../data/common'; export async function getSeriesRequestParams(req, panel, series, esQueryConfig, capabilities) { + const uiSettings = req.getUiSettingsService(); const indexPattern = (series.override_index_pattern && series.series_index_pattern) || panel.index_pattern; const { indexPatternObject, indexPatternString } = await getIndexPatternObject(req, indexPattern); + const request = buildRequestBody( req, panel, series, esQueryConfig, indexPatternObject, - capabilities + capabilities, + { + maxBarsUiSettings: await uiSettings.get(UI_SETTINGS.HISTOGRAM_MAX_BARS), + barTargetUiSettings: await uiSettings.get(UI_SETTINGS.HISTOGRAM_BAR_TARGET), + } ); const esShardTimeout = await getEsShardTimeout(req); From e892b03173a83ebb001f228c2215b160012b9667 Mon Sep 17 00:00:00 2001 From: Tyler Smalley Date: Tue, 24 Nov 2020 08:24:26 -0800 Subject: [PATCH 04/18] [build] Provide ARM build of RE2 (#84163) Signed-off-by: Tyler Smalley --- .../build/tasks/patch_native_modules_task.ts | 23 +++++++++++++++---- 1 file changed, 19 insertions(+), 4 deletions(-) diff --git a/src/dev/build/tasks/patch_native_modules_task.ts b/src/dev/build/tasks/patch_native_modules_task.ts index c3011fa80988..b6eda2dbfd56 100644 --- a/src/dev/build/tasks/patch_native_modules_task.ts +++ b/src/dev/build/tasks/patch_native_modules_task.ts @@ -46,15 +46,30 @@ const packages: Package[] = [ destinationPath: 'node_modules/re2/build/Release/re2.node', extractMethod: 'gunzip', archives: { - darwin: { + 'darwin-x64': { url: 'https://github.com/uhop/node-re2/releases/download/1.15.4/darwin-x64-72.gz', sha256: '983106049bb86e21b7f823144b2b83e3f1408217401879b3cde0312c803512c9', }, - linux: { + 'linux-x64': { url: 'https://github.com/uhop/node-re2/releases/download/1.15.4/linux-x64-72.gz', sha256: '8b6692037f7b0df24dabc9c9b039038d1c3a3110f62121616b406c482169710a', }, - win32: { + + // ARM build is currently done manually as Github Actions used in upstream project + // do not natively support an ARM target. 
+ + // From a AWS Graviton instance: + // * checkout the node-re2 project, + // * install Node using the same minor used by Kibana + // * npm install, which will also create a build + // * gzip -c build/Release/re2.node > linux-arm64-72.gz + // * upload to kibana-ci-proxy-cache bucket + 'linux-arm64': { + url: + 'https://storage.googleapis.com/kibana-ci-proxy-cache/node-re2/uhop/node-re2/releases/download/1.15.4/linux-arm64-72.gz', + sha256: '5942353ec9cf46a39199818d474f7af137cfbb1bc5727047fe22f31f36602a7e', + }, + 'win32-x64': { url: 'https://github.com/uhop/node-re2/releases/download/1.15.4/win32-x64-72.gz', sha256: '0a6991e693577160c3e9a3f196bd2518368c52d920af331a1a183313e0175604', }, @@ -84,7 +99,7 @@ async function patchModule( `Can't patch ${pkg.name}'s native module, we were expecting version ${pkg.version} and found ${installedVersion}` ); } - const platformName = platform.getName(); + const platformName = platform.getNodeArch(); const archive = pkg.archives[platformName]; const archiveName = path.basename(archive.url); const downloadPath = config.resolveFromRepo(DOWNLOAD_DIRECTORY, pkg.name, archiveName); From 24f262b9ca74f8e3e219ea417c2cd3889696f08c Mon Sep 17 00:00:00 2001 From: James Gowdy Date: Tue, 24 Nov 2020 16:29:32 +0000 Subject: [PATCH 05/18] [ML] Space permision checks for job deletion (#83871) * [ML] Space permision checks for job deletion * updating spaces dependency * updating endpoint comments * adding delete job capabilities check * small change based on review * improving permissions checks * renaming function and endpoint Co-authored-by: Kibana Machine <42973632+kibanamachine@users.noreply.github.com> --- .../plugins/ml/common/types/capabilities.ts | 2 +- .../plugins/ml/common/types/saved_objects.ts | 9 ++ x-pack/plugins/ml/server/lib/spaces_utils.ts | 24 +++- .../models/data_recognizer/data_recognizer.ts | 4 +- x-pack/plugins/ml/server/plugin.ts | 5 +- x-pack/plugins/ml/server/routes/apidoc.json | 1 + .../plugins/ml/server/routes/saved_objects.ts | 56 ++++++++- .../ml/server/routes/schemas/saved_objects.ts | 4 + .../ml/server/saved_objects/authorization.ts | 3 +- .../plugins/ml/server/saved_objects/checks.ts | 108 +++++++++++++++++- .../ml/server/saved_objects/service.ts | 17 ++- x-pack/plugins/ml/server/types.ts | 5 + 12 files changed, 223 insertions(+), 15 deletions(-) diff --git a/x-pack/plugins/ml/common/types/capabilities.ts b/x-pack/plugins/ml/common/types/capabilities.ts index d708cd56b78d..91020eee2660 100644 --- a/x-pack/plugins/ml/common/types/capabilities.ts +++ b/x-pack/plugins/ml/common/types/capabilities.ts @@ -123,7 +123,7 @@ export function getPluginPrivileges() { catalogue: [], savedObject: { all: [], - read: ['ml-job'], + read: [ML_SAVED_OBJECT_TYPE], }, api: apmUserMlCapabilitiesKeys.map((k) => `ml:${k}`), ui: apmUserMlCapabilitiesKeys, diff --git a/x-pack/plugins/ml/common/types/saved_objects.ts b/x-pack/plugins/ml/common/types/saved_objects.ts index 9f4d402ec175..d6c9ad758e8c 100644 --- a/x-pack/plugins/ml/common/types/saved_objects.ts +++ b/x-pack/plugins/ml/common/types/saved_objects.ts @@ -27,3 +27,12 @@ export interface InitializeSavedObjectResponse { success: boolean; error?: any; } + +export interface DeleteJobCheckResponse { + [jobId: string]: DeleteJobPermission; +} + +export interface DeleteJobPermission { + canDelete: boolean; + canUntag: boolean; +} diff --git a/x-pack/plugins/ml/server/lib/spaces_utils.ts b/x-pack/plugins/ml/server/lib/spaces_utils.ts index b96fe6f2d1eb..ecff3b8124cf 100644 --- 
a/x-pack/plugins/ml/server/lib/spaces_utils.ts +++ b/x-pack/plugins/ml/server/lib/spaces_utils.ts @@ -7,6 +7,7 @@ import { Legacy } from 'kibana'; import { KibanaRequest } from '../../../../../src/core/server'; import { SpacesPluginStart } from '../../../spaces/server'; +import { PLUGIN_ID } from '../../common/constants/app'; export type RequestFacade = KibanaRequest | Legacy.Request; @@ -22,19 +23,34 @@ export function spacesUtilsProvider( const space = await (await getSpacesPlugin()).spacesService.getActiveSpace( request instanceof KibanaRequest ? request : KibanaRequest.from(request) ); - return space.disabledFeatures.includes('ml') === false; + return space.disabledFeatures.includes(PLUGIN_ID) === false; } - async function getAllSpaces(): Promise { + async function getAllSpaces() { if (getSpacesPlugin === undefined) { return null; } const client = (await getSpacesPlugin()).spacesService.createSpacesClient( request instanceof KibanaRequest ? request : KibanaRequest.from(request) ); - const spaces = await client.getAll(); + return await client.getAll(); + } + + async function getAllSpaceIds(): Promise { + const spaces = await getAllSpaces(); + if (spaces === null) { + return null; + } return spaces.map((s) => s.id); } - return { isMlEnabledInSpace, getAllSpaces }; + async function getMlSpaceIds(): Promise { + const spaces = await getAllSpaces(); + if (spaces === null) { + return null; + } + return spaces.filter((s) => s.disabledFeatures.includes(PLUGIN_ID) === false).map((s) => s.id); + } + + return { isMlEnabledInSpace, getAllSpaces, getAllSpaceIds, getMlSpaceIds }; } diff --git a/x-pack/plugins/ml/server/models/data_recognizer/data_recognizer.ts b/x-pack/plugins/ml/server/models/data_recognizer/data_recognizer.ts index f875788d50c5..aeaf13ebf954 100644 --- a/x-pack/plugins/ml/server/models/data_recognizer/data_recognizer.ts +++ b/x-pack/plugins/ml/server/models/data_recognizer/data_recognizer.ts @@ -1095,7 +1095,9 @@ export class DataRecognizer { job.config.analysis_limits.model_memory_limit = modelMemoryLimit; } } catch (error) { - mlLog.warn(`Data recognizer could not estimate model memory limit ${error.body}`); + mlLog.warn( + `Data recognizer could not estimate model memory limit ${JSON.stringify(error.body)}` + ); } } diff --git a/x-pack/plugins/ml/server/plugin.ts b/x-pack/plugins/ml/server/plugin.ts index 5e103dbc1806..e48983c1c536 100644 --- a/x-pack/plugins/ml/server/plugin.ts +++ b/x-pack/plugins/ml/server/plugin.ts @@ -178,7 +178,10 @@ export class MlServerPlugin notificationRoutes(routeInit); resultsServiceRoutes(routeInit); jobValidationRoutes(routeInit, this.version); - savedObjectsRoutes(routeInit); + savedObjectsRoutes(routeInit, { + getSpaces, + resolveMlCapabilities, + }); systemRoutes(routeInit, { getSpaces, cloud: plugins.cloud, diff --git a/x-pack/plugins/ml/server/routes/apidoc.json b/x-pack/plugins/ml/server/routes/apidoc.json index c157ae9e8200..5672824f3d04 100644 --- a/x-pack/plugins/ml/server/routes/apidoc.json +++ b/x-pack/plugins/ml/server/routes/apidoc.json @@ -150,6 +150,7 @@ "AssignJobsToSpaces", "RemoveJobsFromSpaces", "JobsSpaces", + "DeleteJobCheck", "TrainedModels", "GetTrainedModel", diff --git a/x-pack/plugins/ml/server/routes/saved_objects.ts b/x-pack/plugins/ml/server/routes/saved_objects.ts index 1c9c975b6626..3ba69b0d6b50 100644 --- a/x-pack/plugins/ml/server/routes/saved_objects.ts +++ b/x-pack/plugins/ml/server/routes/saved_objects.ts @@ -5,14 +5,18 @@ */ import { wrapError } from '../client/error_wrapper'; -import { RouteInitialization } 
from '../types'; +import { RouteInitialization, SavedObjectsRouteDeps } from '../types'; import { checksFactory, repairFactory } from '../saved_objects'; -import { jobsAndSpaces, repairJobObjects } from './schemas/saved_objects'; +import { jobsAndSpaces, repairJobObjects, jobTypeSchema } from './schemas/saved_objects'; +import { jobIdsSchema } from './schemas/job_service_schema'; /** * Routes for job saved object management */ -export function savedObjectsRoutes({ router, routeGuard }: RouteInitialization) { +export function savedObjectsRoutes( + { router, routeGuard }: RouteInitialization, + { getSpaces, resolveMlCapabilities }: SavedObjectsRouteDeps +) { /** * @apiGroup JobSavedObjects * @@ -220,4 +224,50 @@ export function savedObjectsRoutes({ router, routeGuard }: RouteInitialization) } }) ); + + /** + * @apiGroup JobSavedObjects + * + * @api {get} /api/ml/saved_objects/delete_job_check Check whether user can delete a job + * @apiName DeleteJobCheck + * @apiDescription Check the user's ability to delete jobs. Returns whether they are able + * to fully delete the job and whether they are able to remove it from + * the current space. + * + * @apiSchema (body) jobIdsSchema (params) jobTypeSchema + * + */ + router.post( + { + path: '/api/ml/saved_objects/can_delete_job/{jobType}', + validate: { + params: jobTypeSchema, + body: jobIdsSchema, + }, + options: { + tags: ['access:ml:canGetJobs', 'access:ml:canGetDataFrameAnalytics'], + }, + }, + routeGuard.fullLicenseAPIGuard(async ({ request, response, jobSavedObjectService, client }) => { + try { + const { jobType } = request.params; + const { jobIds }: { jobIds: string[] } = request.body; + + const { canDeleteJobs } = checksFactory(client, jobSavedObjectService); + const body = await canDeleteJobs( + request, + jobType, + jobIds, + getSpaces !== undefined, + resolveMlCapabilities + ); + + return response.ok({ + body, + }); + } catch (e) { + return response.customError(wrapError(e)); + } + }) + ); } diff --git a/x-pack/plugins/ml/server/routes/schemas/saved_objects.ts b/x-pack/plugins/ml/server/routes/schemas/saved_objects.ts index d7385f6468f4..6b8c64714a82 100644 --- a/x-pack/plugins/ml/server/routes/schemas/saved_objects.ts +++ b/x-pack/plugins/ml/server/routes/schemas/saved_objects.ts @@ -13,3 +13,7 @@ export const jobsAndSpaces = schema.object({ }); export const repairJobObjects = schema.object({ simulate: schema.maybe(schema.boolean()) }); + +export const jobTypeSchema = schema.object({ + jobType: schema.string(), +}); diff --git a/x-pack/plugins/ml/server/saved_objects/authorization.ts b/x-pack/plugins/ml/server/saved_objects/authorization.ts index 815ff29ae010..958ee2091f11 100644 --- a/x-pack/plugins/ml/server/saved_objects/authorization.ts +++ b/x-pack/plugins/ml/server/saved_objects/authorization.ts @@ -6,6 +6,7 @@ import { KibanaRequest } from 'kibana/server'; import type { SecurityPluginSetup } from '../../../security/server'; +import { ML_SAVED_OBJECT_TYPE } from '../../common/types/saved_objects'; export function authorizationProvider(authorization: SecurityPluginSetup['authz']) { async function authorizationCheck(request: KibanaRequest) { @@ -18,7 +19,7 @@ export function authorizationProvider(authorization: SecurityPluginSetup['authz' request ); const createMLJobAuthorizationAction = authorization.actions.savedObject.get( - 'ml-job', + ML_SAVED_OBJECT_TYPE, 'create' ); const canCreateGlobally = ( diff --git a/x-pack/plugins/ml/server/saved_objects/checks.ts b/x-pack/plugins/ml/server/saved_objects/checks.ts index 
51269599105d..f682999cd596 100644 --- a/x-pack/plugins/ml/server/saved_objects/checks.ts +++ b/x-pack/plugins/ml/server/saved_objects/checks.ts @@ -4,13 +4,15 @@ * you may not use this file except in compliance with the Elastic License. */ -import { IScopedClusterClient } from 'kibana/server'; +import Boom from '@hapi/boom'; +import { IScopedClusterClient, KibanaRequest } from 'kibana/server'; import type { JobSavedObjectService } from './service'; -import { JobType } from '../../common/types/saved_objects'; +import { JobType, DeleteJobCheckResponse } from '../../common/types/saved_objects'; import { Job } from '../../common/types/anomaly_detection_jobs'; import { Datafeed } from '../../common/types/anomaly_detection_jobs'; import { DataFrameAnalyticsConfig } from '../../common/types/data_frame_analytics'; +import { ResolveMlCapabilities } from '../../common/types/capabilities'; interface JobSavedObjectStatus { jobId: string; @@ -154,5 +156,105 @@ export function checksFactory( }; } - return { checkStatus }; + async function canDeleteJobs( + request: KibanaRequest, + jobType: JobType, + jobIds: string[], + spacesEnabled: boolean, + resolveMlCapabilities: ResolveMlCapabilities + ) { + if (jobType !== 'anomaly-detector' && jobType !== 'data-frame-analytics') { + throw Boom.badRequest('Job type must be "anomaly-detector" or "data-frame-analytics"'); + } + + const mlCapabilities = await resolveMlCapabilities(request); + if (mlCapabilities === null) { + throw Boom.internal('mlCapabilities is not defined'); + } + + if ( + (jobType === 'anomaly-detector' && mlCapabilities.canDeleteJob === false) || + (jobType === 'data-frame-analytics' && mlCapabilities.canDeleteDataFrameAnalytics === false) + ) { + // user does not have access to delete jobs. + return jobIds.reduce((results, jobId) => { + results[jobId] = { + canDelete: false, + canUntag: false, + }; + return results; + }, {} as DeleteJobCheckResponse); + } + + if (spacesEnabled === false) { + // spaces are disabled, delete only no untagging + return jobIds.reduce((results, jobId) => { + results[jobId] = { + canDelete: true, + canUntag: false, + }; + return results; + }, {} as DeleteJobCheckResponse); + } + const canCreateGlobalJobs = await jobSavedObjectService.canCreateGlobalJobs(request); + + const jobObjects = await Promise.all( + jobIds.map((id) => jobSavedObjectService.getJobObject(jobType, id)) + ); + + return jobIds.reduce((results, jobId) => { + const jobObject = jobObjects.find((j) => j?.attributes.job_id === jobId); + if (jobObject === undefined || jobObject.namespaces === undefined) { + // job saved object not found + results[jobId] = { + canDelete: false, + canUntag: false, + }; + return results; + } + + const { namespaces } = jobObject; + const isGlobalJob = namespaces.includes('*'); + + // job is in * space, user can see all spaces - delete and no option to untag + if (canCreateGlobalJobs && isGlobalJob) { + results[jobId] = { + canDelete: true, + canUntag: false, + }; + return results; + } + + // job is in * space, user cannot see all spaces - no untagging, no deleting + if (isGlobalJob) { + results[jobId] = { + canDelete: false, + canUntag: false, + }; + return results; + } + + // jobs with are in individual spaces can only be untagged + // from current space if the job is in more than 1 space + const canUntag = namespaces.length > 1; + + // job is in individual spaces, user cannot see all of them - untag only, no delete + if (namespaces.includes('?')) { + results[jobId] = { + canDelete: false, + canUntag, + }; + return results; 
+ } + + // job is individual spaces, user can see all of them - delete and option to untag + results[jobId] = { + canDelete: true, + canUntag, + }; + return results; + }, {} as DeleteJobCheckResponse); + } + + return { checkStatus, canDeleteJobs }; } diff --git a/x-pack/plugins/ml/server/saved_objects/service.ts b/x-pack/plugins/ml/server/saved_objects/service.ts index ecaf0869d196..bfc5b165fe55 100644 --- a/x-pack/plugins/ml/server/saved_objects/service.ts +++ b/x-pack/plugins/ml/server/saved_objects/service.ts @@ -5,7 +5,12 @@ */ import RE2 from 're2'; -import { KibanaRequest, SavedObjectsClientContract, SavedObjectsFindOptions } from 'kibana/server'; +import { + KibanaRequest, + SavedObjectsClientContract, + SavedObjectsFindOptions, + SavedObjectsFindResult, +} from 'kibana/server'; import type { SecurityPluginSetup } from '../../../security/server'; import { JobType, ML_SAVED_OBJECT_TYPE } from '../../common/types/saved_objects'; import { MLJobNotFound } from '../lib/ml_client'; @@ -133,6 +138,15 @@ export function jobSavedObjectServiceFactory( return await _getJobObjects(jobType, undefined, undefined, currentSpaceOnly); } + async function getJobObject( + jobType: JobType, + jobId: string, + currentSpaceOnly: boolean = true + ): Promise | undefined> { + const [jobObject] = await _getJobObjects(jobType, jobId, undefined, currentSpaceOnly); + return jobObject; + } + async function getAllJobObjectsForAllSpaces(jobType?: JobType) { await isMlReady(); const filterObject: JobObjectFilter = {}; @@ -307,6 +321,7 @@ export function jobSavedObjectServiceFactory( return { getAllJobObjects, + getJobObject, createAnomalyDetectionJob, createDataFrameAnalyticsJob, deleteAnomalyDetectionJob, diff --git a/x-pack/plugins/ml/server/types.ts b/x-pack/plugins/ml/server/types.ts index df40f5a26b0f..780a4284312e 100644 --- a/x-pack/plugins/ml/server/types.ts +++ b/x-pack/plugins/ml/server/types.ts @@ -31,6 +31,11 @@ export interface SystemRouteDeps { resolveMlCapabilities: ResolveMlCapabilities; } +export interface SavedObjectsRouteDeps { + getSpaces?: () => Promise; + resolveMlCapabilities: ResolveMlCapabilities; +} + export interface PluginsSetup { cloud: CloudSetup; features: FeaturesPluginSetup; From 5e183dd46d9e901827aa8c5246e72bd5e4092067 Mon Sep 17 00:00:00 2001 From: Jonathan Buttner <56361221+jonathan-buttner@users.noreply.github.com> Date: Tue, 24 Nov 2020 11:57:23 -0500 Subject: [PATCH 06/18] [Security Solution][Resolver] Allow a configurable entity_id field (#81679) * Trying to flesh out new tree route * Working on the descendants query * Almost working descendants * Possible solution for aggs * Working aggregations extraction * Working on the ancestry array for descendants * Making changes to the unique id for ancestr * Implementing ancestry funcitonality * Deleting the multiple edges * Fleshing out the descendants loop for levels * Writing tests for ancestors and descendants * Fixing type errors and writing more tests * Renaming validation variable and deprecating old tree routes * Renaming tree integration test file * Adding some integration tests * Fixing ancestry to handle multiple nodes in the request and writing more tests * Adding more tests * Renaming new tree to handler file * Renaming new tree directory * Adding more unit tests * Using doc value fields and working on types * Adding comments and more tests * Fixing timestamp test issue * Adding more comments * Fixing timestamp test issue take 2 * Adding id, parent, and name fields to the top level response * Fixing generator start and 
end time generation * Adding more comments * Revert "Fixing generator start and end time generation" This reverts commit 9e9abf68a6612f25ef9c9c85645f2e1bf72c9359. * Adding test for time Co-authored-by: Kibana Machine <42973632+kibanamachine@users.noreply.github.com> --- .../common/endpoint/generate_data.test.ts | 26 +- .../common/endpoint/generate_data.ts | 38 +- .../common/endpoint/schema/resolver.ts | 42 +- .../common/endpoint/types/index.ts | 50 + .../server/endpoint/routes/resolver.ts | 31 +- .../server/endpoint/routes/resolver/tree.ts | 7 +- .../endpoint/routes/resolver/tree/handler.ts | 28 + .../resolver/tree/queries/descendants.ts | 206 +++++ .../routes/resolver/tree/queries/lifecycle.ts | 101 ++ .../routes/resolver/tree/queries/stats.ts | 139 +++ .../routes/resolver/tree/utils/fetch.test.ts | 707 ++++++++++++++ .../routes/resolver/tree/utils/fetch.ts | 334 +++++++ .../routes/resolver/tree/utils/index.ts | 62 ++ .../apis/resolver/common.ts | 361 +++++++- .../apis/resolver/index.ts | 1 + .../apis/resolver/tree.ts | 867 ++++++++++++------ .../apis/resolver/tree_entity_id.ts | 375 ++++++++ 17 files changed, 3082 insertions(+), 293 deletions(-) create mode 100644 x-pack/plugins/security_solution/server/endpoint/routes/resolver/tree/handler.ts create mode 100644 x-pack/plugins/security_solution/server/endpoint/routes/resolver/tree/queries/descendants.ts create mode 100644 x-pack/plugins/security_solution/server/endpoint/routes/resolver/tree/queries/lifecycle.ts create mode 100644 x-pack/plugins/security_solution/server/endpoint/routes/resolver/tree/queries/stats.ts create mode 100644 x-pack/plugins/security_solution/server/endpoint/routes/resolver/tree/utils/fetch.test.ts create mode 100644 x-pack/plugins/security_solution/server/endpoint/routes/resolver/tree/utils/fetch.ts create mode 100644 x-pack/plugins/security_solution/server/endpoint/routes/resolver/tree/utils/index.ts create mode 100644 x-pack/test/security_solution_endpoint_api_int/apis/resolver/tree_entity_id.ts diff --git a/x-pack/plugins/security_solution/common/endpoint/generate_data.test.ts b/x-pack/plugins/security_solution/common/endpoint/generate_data.test.ts index 66119e098238..ec82f4795158 100644 --- a/x-pack/plugins/security_solution/common/endpoint/generate_data.test.ts +++ b/x-pack/plugins/security_solution/common/endpoint/generate_data.test.ts @@ -27,7 +27,6 @@ interface Node { } describe('data generator data streams', () => { - // these tests cast the result of the generate methods so that we can specifically compare the `data_stream` fields it('creates a generator with default data streams', () => { const generator = new EndpointDocGenerator('seed'); expect(generator.generateHostMetadata().data_stream).toEqual({ @@ -268,6 +267,31 @@ describe('data generator', () => { } }; + it('sets the start and end times correctly', () => { + const startOfEpoch = new Date(0); + let startTime = new Date(timestampSafeVersion(tree.allEvents[0]) ?? startOfEpoch); + expect(startTime).not.toEqual(startOfEpoch); + let endTime = new Date(timestampSafeVersion(tree.allEvents[0]) ?? startOfEpoch); + expect(startTime).not.toEqual(startOfEpoch); + + for (const event of tree.allEvents) { + const currentEventTime = new Date(timestampSafeVersion(event) ?? 
startOfEpoch); + expect(currentEventTime).not.toEqual(startOfEpoch); + expect(tree.startTime.getTime()).toBeLessThanOrEqual(currentEventTime.getTime()); + expect(tree.endTime.getTime()).toBeGreaterThanOrEqual(currentEventTime.getTime()); + if (currentEventTime < startTime) { + startTime = currentEventTime; + } + + if (currentEventTime > endTime) { + endTime = currentEventTime; + } + } + expect(startTime).toEqual(tree.startTime); + expect(endTime).toEqual(tree.endTime); + expect(endTime.getTime() - startTime.getTime()).toBeGreaterThanOrEqual(0); + }); + it('creates related events in ascending order', () => { // the order should not change since it should already be in ascending order const relatedEventsAsc = _.cloneDeep(tree.origin.relatedEvents).sort( diff --git a/x-pack/plugins/security_solution/common/endpoint/generate_data.ts b/x-pack/plugins/security_solution/common/endpoint/generate_data.ts index a4bdc4fc59a7..3c508bed5b2f 100644 --- a/x-pack/plugins/security_solution/common/endpoint/generate_data.ts +++ b/x-pack/plugins/security_solution/common/endpoint/generate_data.ts @@ -317,6 +317,8 @@ export interface Tree { * All events from children, ancestry, origin, and the alert in a single array */ allEvents: Event[]; + startTime: Date; + endTime: Date; } export interface TreeOptions { @@ -718,6 +720,35 @@ export class EndpointDocGenerator { }; } + private static getStartEndTimes(events: Event[]): { startTime: Date; endTime: Date } { + let startTime: number; + let endTime: number; + if (events.length > 0) { + startTime = timestampSafeVersion(events[0]) ?? new Date().getTime(); + endTime = startTime; + } else { + startTime = new Date().getTime(); + endTime = startTime; + } + + for (const event of events) { + const eventTimestamp = timestampSafeVersion(event); + if (eventTimestamp !== undefined) { + if (eventTimestamp < startTime) { + startTime = eventTimestamp; + } + + if (eventTimestamp > endTime) { + endTime = eventTimestamp; + } + } + } + return { + startTime: new Date(startTime), + endTime: new Date(endTime), + }; + } + /** * This generates a full resolver tree and keeps the entire tree in memory. This is useful for tests that want * to compare results from elasticsearch with the actual events created by this generator. Because all the events @@ -815,12 +846,17 @@ export class EndpointDocGenerator { const childrenByParent = groupNodesByParent(childrenNodes); const levels = createLevels(childrenByParent, [], childrenByParent.get(origin.id)); + const allEvents = [...ancestry, ...children]; + const { startTime, endTime } = EndpointDocGenerator.getStartEndTimes(allEvents); + return { children: childrenNodes, ancestry: ancestryNodes, - allEvents: [...ancestry, ...children], + allEvents, origin, childrenLevels: levels, + startTime, + endTime, }; } diff --git a/x-pack/plugins/security_solution/common/endpoint/schema/resolver.ts b/x-pack/plugins/security_solution/common/endpoint/schema/resolver.ts index 1dd5668b3177..6777b1dabbd5 100644 --- a/x-pack/plugins/security_solution/common/endpoint/schema/resolver.ts +++ b/x-pack/plugins/security_solution/common/endpoint/schema/resolver.ts @@ -7,9 +7,9 @@ import { schema } from '@kbn/config-schema'; /** - * Used to validate GET requests for a complete resolver tree. + * Used to validate GET requests for a complete resolver tree centered around an entity_id. 
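+ * This schema now backs the deprecated `/api/endpoint/resolver/{id}` route; the new
+ * `/api/endpoint/resolver/tree` route is validated by `validateTree` below.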
*/ -export const validateTree = { +export const validateTreeEntityID = { params: schema.object({ id: schema.string({ minLength: 1 }) }), query: schema.object({ children: schema.number({ defaultValue: 200, min: 0, max: 10000 }), @@ -23,6 +23,44 @@ export const validateTree = { }), }; +/** + * Used to validate GET requests for a complete resolver tree. + */ +export const validateTree = { + body: schema.object({ + /** + * If the ancestry field is specified this field will be ignored + * + * If the ancestry field is specified we have a much more performant way of retrieving levels so let's not limit + * the number of levels that come back in that scenario. We could still limit it, but what we'd likely have to do + * is get all the levels back like we normally do with the ancestry array, bucket them together by level, and then + * remove the levels that exceeded the requested number which seems kind of wasteful. + */ + descendantLevels: schema.number({ defaultValue: 20, min: 0, max: 1000 }), + descendants: schema.number({ defaultValue: 1000, min: 0, max: 10000 }), + // if the ancestry array isn't specified allowing 200 might be too high + ancestors: schema.number({ defaultValue: 200, min: 0, max: 10000 }), + timerange: schema.object({ + from: schema.string(), + to: schema.string(), + }), + schema: schema.object({ + // the ancestry field is optional + ancestry: schema.maybe(schema.string({ minLength: 1 })), + id: schema.string({ minLength: 1 }), + name: schema.maybe(schema.string({ minLength: 1 })), + parent: schema.string({ minLength: 1 }), + }), + // only allowing strings and numbers for node IDs because Elasticsearch only allows those types for collapsing: + // https://www.elastic.co/guide/en/elasticsearch/reference/current/collapse-search-results.html + // We use collapsing in our Elasticsearch queries for the tree api + nodes: schema.arrayOf(schema.oneOf([schema.string({ minLength: 1 }), schema.number()]), { + minSize: 1, + }), + indexPatterns: schema.arrayOf(schema.string(), { minSize: 1 }), + }), +}; + /** * Used to validate POST requests for `/resolver/events` api. */ diff --git a/x-pack/plugins/security_solution/common/endpoint/types/index.ts b/x-pack/plugins/security_solution/common/endpoint/types/index.ts index e7d060b463ab..cd5c60e2698c 100644 --- a/x-pack/plugins/security_solution/common/endpoint/types/index.ts +++ b/x-pack/plugins/security_solution/common/endpoint/types/index.ts @@ -78,6 +78,56 @@ export interface EventStats { byCategory: Record; } +/** + * Represents the object structure of a returned document when using doc value fields to filter the fields + * returned in a document from an Elasticsearch query. + * + * Here is an example: + * + * { + * "_index": ".ds-logs-endpoint.events.process-default-000001", + * "_id": "bc7brnUBxO0aE7QcCVHo", + * "_score": null, + * "fields": { <----------- The FieldsObject represents this portion + * "@timestamp": [ + * "2020-11-09T21:13:25.246Z" + * ], + * "process.name": "explorer.exe", + * "process.parent.entity_id": [ + * "0i17c2m22c" + * ], + * "process.Ext.ancestry": [ <------------ Notice that the keys are flattened + * "0i17c2m22c", + * "2z9j8dlx72", + * "oj61pr6g62", + * "x0leonbrc9" + * ], + * "process.entity_id": [ + * "6k8waczi22" + * ] + * }, + * "sort": [ + * 0, + * 1604956405246 + * ] + * } + */ +export interface FieldsObject { + [key: string]: ECSField; +} + +/** + * A node in a resolver graph. 
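+ *
+ * An illustrative node (the IDs and counts are made up; `data` holds the flattened doc value fields shown above):
+ *
+ * {
+ *   id: '6k8waczi22',
+ *   parent: '0i17c2m22c',
+ *   name: 'explorer.exe',
+ *   data: { 'process.entity_id': ['6k8waczi22'], 'process.parent.entity_id': ['0i17c2m22c'] },
+ *   stats: { total: 3, byCategory: { file: 2, network: 1 } },
+ * }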
+ */ +export interface ResolverNode { + data: FieldsObject; + id: string | number; + // the very root node might not have the parent field defined + parent?: string | number; + name?: string; + stats: EventStats; +} + /** * Statistical information for a node in a resolver tree. */ diff --git a/x-pack/plugins/security_solution/server/endpoint/routes/resolver.ts b/x-pack/plugins/security_solution/server/endpoint/routes/resolver.ts index b5d657fe55a1..42a69d7b1e96 100644 --- a/x-pack/plugins/security_solution/server/endpoint/routes/resolver.ts +++ b/x-pack/plugins/security_solution/server/endpoint/routes/resolver.ts @@ -7,16 +7,18 @@ import { IRouter } from 'kibana/server'; import { EndpointAppContext } from '../types'; import { - validateTree, + validateTreeEntityID, validateEvents, validateChildren, validateAncestry, validateAlerts, validateEntities, + validateTree, } from '../../../common/endpoint/schema/resolver'; import { handleChildren } from './resolver/children'; import { handleAncestry } from './resolver/ancestry'; -import { handleTree } from './resolver/tree'; +import { handleTree as handleTreeEntityID } from './resolver/tree'; +import { handleTree } from './resolver/tree/handler'; import { handleAlerts } from './resolver/alerts'; import { handleEntities } from './resolver/entity'; import { handleEvents } from './resolver/events'; @@ -24,6 +26,15 @@ import { handleEvents } from './resolver/events'; export function registerResolverRoutes(router: IRouter, endpointAppContext: EndpointAppContext) { const log = endpointAppContext.logFactory.get('resolver'); + router.post( + { + path: '/api/endpoint/resolver/tree', + validate: validateTree, + options: { authRequired: true }, + }, + handleTree(log) + ); + router.post( { path: '/api/endpoint/resolver/events', @@ -33,6 +44,9 @@ export function registerResolverRoutes(router: IRouter, endpointAppContext: Endp handleEvents(log) ); + /** + * @deprecated will be removed because it is not used + */ router.post( { path: '/api/endpoint/resolver/{id}/alerts', @@ -42,6 +56,9 @@ export function registerResolverRoutes(router: IRouter, endpointAppContext: Endp handleAlerts(log, endpointAppContext) ); + /** + * @deprecated use the /resolver/tree api instead + */ router.get( { path: '/api/endpoint/resolver/{id}/children', @@ -51,6 +68,9 @@ export function registerResolverRoutes(router: IRouter, endpointAppContext: Endp handleChildren(log, endpointAppContext) ); + /** + * @deprecated use the /resolver/tree api instead + */ router.get( { path: '/api/endpoint/resolver/{id}/ancestry', @@ -60,13 +80,16 @@ export function registerResolverRoutes(router: IRouter, endpointAppContext: Endp handleAncestry(log, endpointAppContext) ); + /** + * @deprecated use the /resolver/tree api instead + */ router.get( { path: '/api/endpoint/resolver/{id}', - validate: validateTree, + validate: validateTreeEntityID, options: { authRequired: true }, }, - handleTree(log, endpointAppContext) + handleTreeEntityID(log, endpointAppContext) ); /** diff --git a/x-pack/plugins/security_solution/server/endpoint/routes/resolver/tree.ts b/x-pack/plugins/security_solution/server/endpoint/routes/resolver/tree.ts index 02cddc3ddcf6..08cb9b56bf64 100644 --- a/x-pack/plugins/security_solution/server/endpoint/routes/resolver/tree.ts +++ b/x-pack/plugins/security_solution/server/endpoint/routes/resolver/tree.ts @@ -7,14 +7,17 @@ import { RequestHandler, Logger } from 'kibana/server'; import { TypeOf } from '@kbn/config-schema'; import { eventsIndexPattern, alertsIndexPattern } from 
'../../../../common/endpoint/constants'; -import { validateTree } from '../../../../common/endpoint/schema/resolver'; +import { validateTreeEntityID } from '../../../../common/endpoint/schema/resolver'; import { Fetcher } from './utils/fetch'; import { EndpointAppContext } from '../../types'; export function handleTree( log: Logger, endpointAppContext: EndpointAppContext -): RequestHandler, TypeOf> { +): RequestHandler< + TypeOf, + TypeOf +> { return async (context, req, res) => { try { const client = context.core.elasticsearch.legacy.client; diff --git a/x-pack/plugins/security_solution/server/endpoint/routes/resolver/tree/handler.ts b/x-pack/plugins/security_solution/server/endpoint/routes/resolver/tree/handler.ts new file mode 100644 index 000000000000..8c62cf876298 --- /dev/null +++ b/x-pack/plugins/security_solution/server/endpoint/routes/resolver/tree/handler.ts @@ -0,0 +1,28 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { RequestHandler, Logger } from 'kibana/server'; +import { TypeOf } from '@kbn/config-schema'; +import { validateTree } from '../../../../../common/endpoint/schema/resolver'; +import { Fetcher } from './utils/fetch'; + +export function handleTree( + log: Logger +): RequestHandler> { + return async (context, req, res) => { + try { + const client = context.core.elasticsearch.client; + const fetcher = new Fetcher(client); + const body = await fetcher.tree(req.body); + return res.ok({ + body, + }); + } catch (err) { + log.warn(err); + return res.internalError({ body: 'Error retrieving tree.' }); + } + }; +} diff --git a/x-pack/plugins/security_solution/server/endpoint/routes/resolver/tree/queries/descendants.ts b/x-pack/plugins/security_solution/server/endpoint/routes/resolver/tree/queries/descendants.ts new file mode 100644 index 000000000000..405429cc2419 --- /dev/null +++ b/x-pack/plugins/security_solution/server/endpoint/routes/resolver/tree/queries/descendants.ts @@ -0,0 +1,206 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +import { SearchResponse } from 'elasticsearch'; +import { ApiResponse } from '@elastic/elasticsearch'; +import { IScopedClusterClient } from 'src/core/server'; +import { FieldsObject } from '../../../../../../common/endpoint/types'; +import { JsonObject, JsonValue } from '../../../../../../../../../src/plugins/kibana_utils/common'; +import { NodeID, Schema, Timerange, docValueFields } from '../utils/index'; + +interface DescendantsParams { + schema: Schema; + indexPatterns: string | string[]; + timerange: Timerange; +} + +/** + * Builds a query for retrieving descendants of a node. 
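+ *
+ * A hypothetical usage sketch (the schema fields, index pattern, time range, IDs, `esClient`, and limit are illustrative):
+ *
+ *   const query = new DescendantsQuery({
+ *     schema: {
+ *       id: 'process.entity_id',
+ *       parent: 'process.parent.entity_id',
+ *       ancestry: 'process.Ext.ancestry',
+ *     },
+ *     indexPatterns: ['logs-endpoint.events.*'],
+ *     timerange: { from: '2020-11-01T00:00:00.000Z', to: '2020-11-30T00:00:00.000Z' },
+ *   });
+ *   // up to 1000 descendant documents of node '6k8waczi22'
+ *   const descendants = await query.search(esClient, ['6k8waczi22'], 1000);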
+ */ +export class DescendantsQuery { + private readonly schema: Schema; + private readonly indexPatterns: string | string[]; + private readonly timerange: Timerange; + private readonly docValueFields: JsonValue[]; + constructor({ schema, indexPatterns, timerange }: DescendantsParams) { + this.docValueFields = docValueFields(schema); + this.schema = schema; + this.indexPatterns = indexPatterns; + this.timerange = timerange; + } + + private query(nodes: NodeID[], size: number): JsonObject { + return { + _source: false, + docvalue_fields: this.docValueFields, + size, + collapse: { + field: this.schema.id, + }, + sort: [{ '@timestamp': 'asc' }], + query: { + bool: { + filter: [ + { + range: { + '@timestamp': { + gte: this.timerange.from, + lte: this.timerange.to, + format: 'strict_date_optional_time', + }, + }, + }, + { + terms: { [this.schema.parent]: nodes }, + }, + { + exists: { + field: this.schema.id, + }, + }, + { + exists: { + field: this.schema.parent, + }, + }, + { + term: { 'event.category': 'process' }, + }, + { + term: { 'event.kind': 'event' }, + }, + ], + }, + }, + }; + } + + private queryWithAncestryArray(nodes: NodeID[], ancestryField: string, size: number): JsonObject { + return { + _source: false, + docvalue_fields: this.docValueFields, + size, + collapse: { + field: this.schema.id, + }, + sort: [ + { + _script: { + type: 'number', + script: { + /** + * This script is used to sort the returned documents in a breadth first order so that we return all of + * a single level of nodes before returning the next level of nodes. This is needed because using the + * ancestry array could result in the search going deep before going wide depending on when the nodes + * spawned their children. If a node spawns a child before it's sibling is spawned then the child would + * be found before the sibling because by default the sort was on timestamp ascending. + */ + source: ` + Map ancestryToIndex = [:]; + List sourceAncestryArray = params._source.${ancestryField}; + int length = sourceAncestryArray.length; + for (int i = 0; i < length; i++) { + ancestryToIndex[sourceAncestryArray[i]] = i; + } + for (String id : params.ids) { + def index = ancestryToIndex[id]; + if (index != null) { + return index; + } + } + return -1; + `, + params: { + ids: nodes, + }, + }, + }, + }, + { '@timestamp': 'asc' }, + ], + query: { + bool: { + filter: [ + { + range: { + '@timestamp': { + gte: this.timerange.from, + lte: this.timerange.to, + format: 'strict_date_optional_time', + }, + }, + }, + { + terms: { + [ancestryField]: nodes, + }, + }, + { + exists: { + field: this.schema.id, + }, + }, + { + exists: { + field: this.schema.parent, + }, + }, + { + exists: { + field: ancestryField, + }, + }, + { + term: { 'event.category': 'process' }, + }, + { + term: { 'event.kind': 'event' }, + }, + ], + }, + }, + }; + } + + /** + * Searches for descendant nodes matching the specified IDs. 
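+ * When the schema defines an ancestry field, a single search can return descendants from multiple levels
+ * (sorted breadth-first by the script above); otherwise the query matches on the parent field and only
+ * returns the direct children of the requested nodes.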
+ * + * @param client for making requests to Elasticsearch + * @param nodes the unique IDs to search for in Elasticsearch + * @param limit the upper limit of documents to returned + */ + async search( + client: IScopedClusterClient, + nodes: NodeID[], + limit: number + ): Promise { + if (nodes.length <= 0) { + return []; + } + + let response: ApiResponse>; + if (this.schema.ancestry) { + response = await client.asCurrentUser.search({ + body: this.queryWithAncestryArray(nodes, this.schema.ancestry, limit), + index: this.indexPatterns, + }); + } else { + response = await client.asCurrentUser.search({ + body: this.query(nodes, limit), + index: this.indexPatterns, + }); + } + + /** + * The returned values will look like: + * [ + * { 'schema_id_value': , 'schema_parent_value': } + * ] + * + * So the schema fields are flattened ('process.parent.entity_id') + */ + return response.body.hits.hits.map((hit) => hit.fields); + } +} diff --git a/x-pack/plugins/security_solution/server/endpoint/routes/resolver/tree/queries/lifecycle.ts b/x-pack/plugins/security_solution/server/endpoint/routes/resolver/tree/queries/lifecycle.ts new file mode 100644 index 000000000000..606a4538ba88 --- /dev/null +++ b/x-pack/plugins/security_solution/server/endpoint/routes/resolver/tree/queries/lifecycle.ts @@ -0,0 +1,101 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +import { SearchResponse } from 'elasticsearch'; +import { ApiResponse } from '@elastic/elasticsearch'; +import { IScopedClusterClient } from 'src/core/server'; +import { FieldsObject } from '../../../../../../common/endpoint/types'; +import { JsonObject, JsonValue } from '../../../../../../../../../src/plugins/kibana_utils/common'; +import { NodeID, Schema, Timerange, docValueFields } from '../utils/index'; + +interface LifecycleParams { + schema: Schema; + indexPatterns: string | string[]; + timerange: Timerange; +} + +/** + * Builds a query for retrieving descendants of a node. + */ +export class LifecycleQuery { + private readonly schema: Schema; + private readonly indexPatterns: string | string[]; + private readonly timerange: Timerange; + private readonly docValueFields: JsonValue[]; + constructor({ schema, indexPatterns, timerange }: LifecycleParams) { + this.docValueFields = docValueFields(schema); + this.schema = schema; + this.indexPatterns = indexPatterns; + this.timerange = timerange; + } + + private query(nodes: NodeID[]): JsonObject { + return { + _source: false, + docvalue_fields: this.docValueFields, + size: nodes.length, + collapse: { + field: this.schema.id, + }, + sort: [{ '@timestamp': 'asc' }], + query: { + bool: { + filter: [ + { + range: { + '@timestamp': { + gte: this.timerange.from, + lte: this.timerange.to, + format: 'strict_date_optional_time', + }, + }, + }, + { + terms: { [this.schema.id]: nodes }, + }, + { + exists: { + field: this.schema.id, + }, + }, + { + term: { 'event.category': 'process' }, + }, + { + term: { 'event.kind': 'event' }, + }, + ], + }, + }, + }; + } + + /** + * Searches for lifecycle events matching the specified node IDs. 
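+ * Because the query collapses on the schema id field and sorts by timestamp ascending, at most one document
+ * (the earliest lifecycle event found for each node) is returned per requested ID.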
+ * + * @param client for making requests to Elasticsearch + * @param nodes the unique IDs to search for in Elasticsearch + */ + async search(client: IScopedClusterClient, nodes: NodeID[]): Promise { + if (nodes.length <= 0) { + return []; + } + + const response: ApiResponse> = await client.asCurrentUser.search({ + body: this.query(nodes), + index: this.indexPatterns, + }); + + /** + * The returned values will look like: + * [ + * { 'schema_id_value': , 'schema_parent_value': } + * ] + * + * So the schema fields are flattened ('process.parent.entity_id') + */ + return response.body.hits.hits.map((hit) => hit.fields); + } +} diff --git a/x-pack/plugins/security_solution/server/endpoint/routes/resolver/tree/queries/stats.ts b/x-pack/plugins/security_solution/server/endpoint/routes/resolver/tree/queries/stats.ts new file mode 100644 index 000000000000..33dcdce8987f --- /dev/null +++ b/x-pack/plugins/security_solution/server/endpoint/routes/resolver/tree/queries/stats.ts @@ -0,0 +1,139 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +import { SearchResponse } from 'elasticsearch'; +import { ApiResponse } from '@elastic/elasticsearch'; +import { IScopedClusterClient } from 'src/core/server'; +import { JsonObject } from '../../../../../../../../../src/plugins/kibana_utils/common'; +import { EventStats } from '../../../../../../common/endpoint/types'; +import { NodeID, Schema, Timerange } from '../utils/index'; + +interface AggBucket { + key: string; + doc_count: number; +} + +interface CategoriesAgg extends AggBucket { + /** + * The reason categories is optional here is because if no data was returned in the query the categories aggregation + * will not be defined on the response (because it's a sub aggregation). + */ + categories?: { + buckets?: AggBucket[]; + }; +} + +interface StatsParams { + schema: Schema; + indexPatterns: string | string[]; + timerange: Timerange; +} + +/** + * Builds a query for retrieving descendants of a node. 
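+ *
+ * In practice it is used for per-node event statistics: it aggregates the related (non-process) events for a
+ * set of node IDs, bucketing the counts by event.category; these counts populate each node's `stats` field in
+ * the tree response.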
+ */ +export class StatsQuery { + private readonly schema: Schema; + private readonly indexPatterns: string | string[]; + private readonly timerange: Timerange; + constructor({ schema, indexPatterns, timerange }: StatsParams) { + this.schema = schema; + this.indexPatterns = indexPatterns; + this.timerange = timerange; + } + + private query(nodes: NodeID[]): JsonObject { + return { + size: 0, + query: { + bool: { + filter: [ + { + range: { + '@timestamp': { + gte: this.timerange.from, + lte: this.timerange.to, + format: 'strict_date_optional_time', + }, + }, + }, + { + terms: { [this.schema.id]: nodes }, + }, + { + term: { 'event.kind': 'event' }, + }, + { + bool: { + must_not: { + term: { + 'event.category': 'process', + }, + }, + }, + }, + ], + }, + }, + aggs: { + ids: { + terms: { field: this.schema.id, size: nodes.length }, + aggs: { + categories: { + terms: { field: 'event.category', size: 1000 }, + }, + }, + }, + }, + }; + } + + private static getEventStats(catAgg: CategoriesAgg): EventStats { + const total = catAgg.doc_count; + if (!catAgg.categories?.buckets) { + return { + total, + byCategory: {}, + }; + } + + const byCategory: Record = catAgg.categories.buckets.reduce( + (cummulative: Record, bucket: AggBucket) => ({ + ...cummulative, + [bucket.key]: bucket.doc_count, + }), + {} + ); + return { + total, + byCategory, + }; + } + + /** + * Returns the related event statistics for a set of nodes. + * @param client used to make requests to Elasticsearch + * @param nodes an array of unique IDs representing nodes in a resolver graph + */ + async search(client: IScopedClusterClient, nodes: NodeID[]): Promise> { + if (nodes.length <= 0) { + return {}; + } + + // leaving unknown here because we don't actually need the hits part of the body + const response: ApiResponse> = await client.asCurrentUser.search({ + body: this.query(nodes), + index: this.indexPatterns, + }); + + return response.body.aggregations?.ids?.buckets.reduce( + (cummulative: Record, bucket: CategoriesAgg) => ({ + ...cummulative, + [bucket.key]: StatsQuery.getEventStats(bucket), + }), + {} + ); + } +} diff --git a/x-pack/plugins/security_solution/server/endpoint/routes/resolver/tree/utils/fetch.test.ts b/x-pack/plugins/security_solution/server/endpoint/routes/resolver/tree/utils/fetch.test.ts new file mode 100644 index 000000000000..8105f1125d01 --- /dev/null +++ b/x-pack/plugins/security_solution/server/endpoint/routes/resolver/tree/utils/fetch.test.ts @@ -0,0 +1,707 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { + Fetcher, + getAncestryAsArray, + getIDField, + getLeafNodes, + getNameField, + getParentField, + TreeOptions, +} from './fetch'; +import { LifecycleQuery } from '../queries/lifecycle'; +import { DescendantsQuery } from '../queries/descendants'; +import { StatsQuery } from '../queries/stats'; +import { IScopedClusterClient } from 'src/core/server'; +import { elasticsearchServiceMock } from 'src/core/server/mocks'; +import { FieldsObject, ResolverNode } from '../../../../../../common/endpoint/types'; +import { Schema } from './index'; + +jest.mock('../queries/descendants'); +jest.mock('../queries/lifecycle'); +jest.mock('../queries/stats'); + +function formatResponse(results: FieldsObject[], schema: Schema): ResolverNode[] { + return results.map((node) => { + return { + id: getIDField(node, schema) ?? 
'', + parent: getParentField(node, schema), + name: getNameField(node, schema), + data: node, + stats: { + total: 0, + byCategory: {}, + }, + }; + }); +} + +describe('fetcher test', () => { + const schemaIDParent = { + id: 'id', + parent: 'parent', + }; + + const schemaIDParentAncestry = { + id: 'id', + parent: 'parent', + ancestry: 'ancestry', + }; + + const schemaIDParentName = { + id: 'id', + parent: 'parent', + name: 'name', + }; + + let client: jest.Mocked; + beforeAll(() => { + StatsQuery.prototype.search = jest.fn().mockImplementation(async () => { + return {}; + }); + }); + beforeEach(() => { + client = elasticsearchServiceMock.createScopedClusterClient(); + }); + + describe('descendants', () => { + it('correctly exists loop when the search returns no results', async () => { + DescendantsQuery.prototype.search = jest.fn().mockImplementationOnce(async () => { + return []; + }); + const options: TreeOptions = { + descendantLevels: 1, + descendants: 5, + ancestors: 0, + timerange: { + from: '', + to: '', + }, + schema: { + id: '', + parent: '', + }, + indexPatterns: [''], + nodes: ['a'], + }; + const fetcher = new Fetcher(client); + expect(await fetcher.tree(options)).toEqual([]); + }); + + it('exists the loop when the options specify no descendants', async () => { + const options: TreeOptions = { + descendantLevels: 0, + descendants: 0, + ancestors: 0, + timerange: { + from: '', + to: '', + }, + schema: { + id: '', + parent: '', + }, + indexPatterns: [''], + nodes: ['a'], + }; + + const fetcher = new Fetcher(client); + expect(await fetcher.tree(options)).toEqual([]); + }); + + it('returns the correct results without the ancestry defined', async () => { + /** + . + └── 0 + ├── 1 + │ └── 2 + └── 3 + ├── 4 + └── 5 + */ + const level1 = [ + { + id: '1', + parent: '0', + }, + { + id: '3', + parent: '0', + }, + ]; + const level2 = [ + { + id: '2', + parent: '1', + }, + + { + id: '4', + parent: '3', + }, + { + id: '5', + parent: '3', + }, + ]; + DescendantsQuery.prototype.search = jest + .fn() + .mockImplementationOnce(async () => { + return level1; + }) + .mockImplementationOnce(async () => { + return level2; + }); + const options: TreeOptions = { + descendantLevels: 2, + descendants: 5, + ancestors: 0, + timerange: { + from: '', + to: '', + }, + schema: schemaIDParent, + indexPatterns: [''], + nodes: ['0'], + }; + + const fetcher = new Fetcher(client); + expect(await fetcher.tree(options)).toEqual( + formatResponse([...level1, ...level2], schemaIDParent) + ); + }); + }); + + describe('ancestors', () => { + it('correctly exits loop when the search returns no results', async () => { + LifecycleQuery.prototype.search = jest.fn().mockImplementationOnce(async () => { + return []; + }); + const options: TreeOptions = { + descendantLevels: 0, + descendants: 0, + ancestors: 5, + timerange: { + from: '', + to: '', + }, + schema: { + id: '', + parent: '', + }, + indexPatterns: [''], + nodes: ['a'], + }; + const fetcher = new Fetcher(client); + expect(await fetcher.tree(options)).toEqual([]); + }); + + it('correctly exits loop when the options specify no ancestors', async () => { + LifecycleQuery.prototype.search = jest.fn().mockImplementationOnce(async () => { + throw new Error('should not have called this'); + }); + const options: TreeOptions = { + descendantLevels: 0, + descendants: 0, + ancestors: 0, + timerange: { + from: '', + to: '', + }, + schema: { + id: '', + parent: '', + }, + indexPatterns: [''], + nodes: ['a'], + }; + const fetcher = new Fetcher(client); + expect(await 
fetcher.tree(options)).toEqual([]); + }); + + it('correctly returns the ancestors when the number of levels has been reached', async () => { + LifecycleQuery.prototype.search = jest + .fn() + .mockImplementationOnce(async () => { + return [ + { + id: '3', + parent: '2', + }, + ]; + }) + .mockImplementationOnce(async () => { + return [ + { + id: '2', + parent: '1', + }, + ]; + }); + const options: TreeOptions = { + descendantLevels: 0, + descendants: 0, + ancestors: 2, + timerange: { + from: '', + to: '', + }, + schema: schemaIDParent, + indexPatterns: [''], + nodes: ['3'], + }; + const fetcher = new Fetcher(client); + expect(await fetcher.tree(options)).toEqual( + formatResponse( + [ + { id: '3', parent: '2' }, + { id: '2', parent: '1' }, + ], + schemaIDParent + ) + ); + }); + + it('correctly adds name field to response', async () => { + LifecycleQuery.prototype.search = jest + .fn() + .mockImplementationOnce(async () => { + return [ + { + id: '3', + parent: '2', + }, + ]; + }) + .mockImplementationOnce(async () => { + return [ + { + id: '2', + parent: '1', + }, + ]; + }); + const options: TreeOptions = { + descendantLevels: 0, + descendants: 0, + ancestors: 2, + timerange: { + from: '', + to: '', + }, + schema: schemaIDParentName, + indexPatterns: [''], + nodes: ['3'], + }; + const fetcher = new Fetcher(client); + expect(await fetcher.tree(options)).toEqual( + formatResponse( + [ + { id: '3', parent: '2' }, + { id: '2', parent: '1' }, + ], + schemaIDParentName + ) + ); + }); + + it('correctly returns the ancestors with ancestry arrays', async () => { + const node3 = { + ancestry: ['2', '1'], + id: '3', + parent: '2', + }; + + const node1 = { + ancestry: ['0'], + id: '1', + parent: '0', + }; + + const node2 = { + ancestry: ['1', '0'], + id: '2', + parent: '1', + }; + LifecycleQuery.prototype.search = jest + .fn() + .mockImplementationOnce(async () => { + return [node3]; + }) + .mockImplementationOnce(async () => { + return [node1, node2]; + }); + const options: TreeOptions = { + descendantLevels: 0, + descendants: 0, + ancestors: 3, + timerange: { + from: '', + to: '', + }, + schema: schemaIDParentAncestry, + indexPatterns: [''], + nodes: ['3'], + }; + const fetcher = new Fetcher(client); + expect(await fetcher.tree(options)).toEqual( + formatResponse([node3, node1, node2], schemaIDParentAncestry) + ); + }); + }); + + describe('retrieving leaf nodes', () => { + it('correctly identifies the leaf nodes in a response without the ancestry field', () => { + /** + . + └── 0 + ├── 1 + ├── 2 + └── 3 + */ + const results = [ + { + id: '1', + parent: '0', + }, + { + id: '2', + parent: '0', + }, + { + id: '3', + parent: '0', + }, + ]; + const leaves = getLeafNodes(results, ['0'], { id: 'id', parent: 'parent' }); + expect(leaves).toStrictEqual(['1', '2', '3']); + }); + + it('correctly ignores nodes without the proper fields', () => { + /** + . 
+ └── 0 + ├── 1 + ├── 2 + */ + const results = [ + { + id: '1', + parent: '0', + }, + { + id: '2', + parent: '0', + }, + { + idNotReal: '3', + parentNotReal: '0', + }, + ]; + const leaves = getLeafNodes(results, ['0'], { id: 'id', parent: 'parent' }); + expect(leaves).toStrictEqual(['1', '2']); + }); + + it('returns an empty response when the proper fields are not defined', () => { + const results = [ + { + id: '1', + parentNotReal: '0', + }, + { + id: '2', + parentNotReal: '0', + }, + { + idNotReal: '3', + parent: '0', + }, + ]; + const leaves = getLeafNodes(results, ['0'], { id: 'id', parent: 'parent' }); + expect(leaves).toStrictEqual([]); + }); + + describe('with the ancestry field defined', () => { + it('correctly identifies the leaf nodes in a response with the ancestry field', () => { + /** + . + ├── 1 + │ └── 2 + └── 3 + */ + const results = [ + { + id: '1', + parent: '0', + ancestry: ['0', 'a'], + }, + { + id: '2', + parent: '1', + ancestry: ['1', '0'], + }, + { + id: '3', + parent: '0', + ancestry: ['0', 'a'], + }, + ]; + const leaves = getLeafNodes(results, ['0'], { + id: 'id', + parent: 'parent', + ancestry: 'ancestry', + }); + expect(leaves).toStrictEqual(['2']); + }); + + it('falls back to using parent field if it cannot find the ancestry field', () => { + /** + . + ├── 1 + │ └── 2 + └── 3 + */ + const results = [ + { + id: '1', + parent: '0', + ancestryNotValid: ['0', 'a'], + }, + { + id: '2', + parent: '1', + }, + { + id: '3', + parent: '0', + }, + ]; + const leaves = getLeafNodes(results, ['0'], { + id: 'id', + parent: 'parent', + ancestry: 'ancestry', + }); + expect(leaves).toStrictEqual(['1', '3']); + }); + + it('correctly identifies the leaf nodes with a tree with multiple leaves', () => { + /** + . + └── 0 + ├── 1 + │ └── 2 + └── 3 + ├── 4 + └── 5 + */ + const results = [ + { + id: '1', + parent: '0', + ancestry: ['0', 'a'], + }, + { + id: '2', + parent: '1', + ancestry: ['1', '0'], + }, + { + id: '3', + parent: '0', + ancestry: ['0', 'a'], + }, + { + id: '4', + parent: '3', + ancestry: ['3', '0'], + }, + { + id: '5', + parent: '3', + ancestry: ['3', '0'], + }, + ]; + const leaves = getLeafNodes(results, ['0'], { + id: 'id', + parent: 'parent', + ancestry: 'ancestry', + }); + expect(leaves).toStrictEqual(['2', '4', '5']); + }); + + it('correctly identifies the leaf nodes with multiple queried nodes', () => { + /** + . + ├── 0 + │ ├── 1 + │ │ └── 2 + │ └── 3 + │ ├── 4 + │ └── 5 + └── a + └── b + ├── c + └── d + */ + const results = [ + { + id: '1', + parent: '0', + ancestry: ['0'], + }, + { + id: '2', + parent: '1', + ancestry: ['1', '0'], + }, + { + id: '3', + parent: '0', + ancestry: ['0'], + }, + { + id: '4', + parent: '3', + ancestry: ['3', '0'], + }, + { + id: '5', + parent: '3', + ancestry: ['3', '0'], + }, + { + id: 'b', + parent: 'a', + ancestry: ['a'], + }, + { + id: 'c', + parent: 'b', + ancestry: ['b', 'a'], + }, + { + id: 'd', + parent: 'b', + ancestry: ['b', 'a'], + }, + ]; + const leaves = getLeafNodes(results, ['0', 'a'], { + id: 'id', + parent: 'parent', + ancestry: 'ancestry', + }); + expect(leaves).toStrictEqual(['2', '4', '5', 'c', 'd']); + }); + + it('correctly identifies the leaf nodes with an unbalanced tree', () => { + /** + . 
+ ├── 0 + │ ├── 1 + │ │ └── 2 + │ └── 3 + │ ├── 4 + │ └── 5 + └── a + └── b + */ + const results = [ + { + id: '1', + parent: '0', + ancestry: ['0'], + }, + { + id: '2', + parent: '1', + ancestry: ['1', '0'], + }, + { + id: '3', + parent: '0', + ancestry: ['0'], + }, + { + id: '4', + parent: '3', + ancestry: ['3', '0'], + }, + { + id: '5', + parent: '3', + ancestry: ['3', '0'], + }, + { + id: 'b', + parent: 'a', + ancestry: ['a'], + }, + ]; + const leaves = getLeafNodes(results, ['0', 'a'], { + id: 'id', + parent: 'parent', + ancestry: 'ancestry', + }); + // the reason b is not identified here is because the ancestry array + // size is 2, which means that if b had a descendant, then it would have been found + // using our query which found 2, 4, 5. So either we hit the size limit or there are no + // children of b + expect(leaves).toStrictEqual(['2', '4', '5']); + }); + }); + }); + + describe('getIDField', () => { + it('returns undefined if the field does not exist', () => { + expect(getIDField({}, { id: 'a', parent: 'b' })).toBeUndefined(); + }); + + it('returns the first value if the field is an array', () => { + expect(getIDField({ 'a.b': ['1', '2'] }, { id: 'a.b', parent: 'b' })).toStrictEqual('1'); + }); + }); + + describe('getParentField', () => { + it('returns undefined if the field does not exist', () => { + expect(getParentField({}, { id: 'a', parent: 'b' })).toBeUndefined(); + }); + + it('returns the first value if the field is an array', () => { + expect(getParentField({ 'a.b': ['1', '2'] }, { id: 'z', parent: 'a.b' })).toStrictEqual('1'); + }); + }); + + describe('getAncestryAsArray', () => { + it('returns an empty array if the field does not exist', () => { + expect(getAncestryAsArray({}, { id: 'a', parent: 'b', ancestry: 'z' })).toStrictEqual([]); + }); + + it('returns the full array if the field exists', () => { + expect( + getAncestryAsArray({ 'a.b': ['1', '2'] }, { id: 'z', parent: 'f', ancestry: 'a.b' }) + ).toStrictEqual(['1', '2']); + }); + + it('returns a built array using the parent field if ancestry field is empty', () => { + expect( + getAncestryAsArray( + { 'aParent.bParent': ['1', '2'], ancestry: [] }, + { id: 'z', parent: 'aParent.bParent', ancestry: 'ancestry' } + ) + ).toStrictEqual(['1']); + }); + + it('returns a built array using the parent field if ancestry field does not exist', () => { + expect( + getAncestryAsArray( + { 'aParent.bParent': '1' }, + { id: 'z', parent: 'aParent.bParent', ancestry: 'ancestry' } + ) + ).toStrictEqual(['1']); + }); + }); +}); diff --git a/x-pack/plugins/security_solution/server/endpoint/routes/resolver/tree/utils/fetch.ts b/x-pack/plugins/security_solution/server/endpoint/routes/resolver/tree/utils/fetch.ts new file mode 100644 index 000000000000..eaecad6c4797 --- /dev/null +++ b/x-pack/plugins/security_solution/server/endpoint/routes/resolver/tree/utils/fetch.ts @@ -0,0 +1,334 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +import { IScopedClusterClient } from 'kibana/server'; +import { + firstNonNullValue, + values, +} from '../../../../../../common/endpoint/models/ecs_safety_helpers'; +import { ECSField, ResolverNode, FieldsObject } from '../../../../../../common/endpoint/types'; +import { DescendantsQuery } from '../queries/descendants'; +import { Schema, NodeID } from './index'; +import { LifecycleQuery } from '../queries/lifecycle'; +import { StatsQuery } from '../queries/stats'; + +/** + * The query parameters passed in from the request. These define the limits for the ES requests for retrieving the + * resolver tree. + */ +export interface TreeOptions { + descendantLevels: number; + descendants: number; + ancestors: number; + timerange: { + from: string; + to: string; + }; + schema: Schema; + nodes: NodeID[]; + indexPatterns: string[]; +} + +/** + * Handles retrieving nodes of a resolver tree. + */ +export class Fetcher { + constructor(private readonly client: IScopedClusterClient) {} + + /** + * This method retrieves the ancestors and descendants of a resolver tree. + * + * @param options the options for retrieving the structure of the tree. + */ + public async tree(options: TreeOptions): Promise { + const treeParts = await Promise.all([ + this.retrieveAncestors(options), + this.retrieveDescendants(options), + ]); + + const tree = treeParts.reduce((results, partArray) => { + results.push(...partArray); + return results; + }, []); + + return this.formatResponse(tree, options); + } + + private async formatResponse( + treeNodes: FieldsObject[], + options: TreeOptions + ): Promise { + const statsIDs: NodeID[] = []; + for (const node of treeNodes) { + const id = getIDField(node, options.schema); + if (id) { + statsIDs.push(id); + } + } + + const query = new StatsQuery({ + indexPatterns: options.indexPatterns, + schema: options.schema, + timerange: options.timerange, + }); + + const eventStats = await query.search(this.client, statsIDs); + const statsNodes: ResolverNode[] = []; + for (const node of treeNodes) { + const id = getIDField(node, options.schema); + const parent = getParentField(node, options.schema); + const name = getNameField(node, options.schema); + + // at this point id should never be undefined, it should be enforced by the Elasticsearch query + // but let's check anyway + if (id !== undefined) { + statsNodes.push({ + id, + parent, + name, + data: node, + stats: eventStats[id] ?? 
{ total: 0, byCategory: {} }, + }); + } + } + return statsNodes; + } + + private static getNextAncestorsToFind( + results: FieldsObject[], + schema: Schema, + levelsLeft: number + ): NodeID[] { + const nodesByID = results.reduce((accMap: Map, result: FieldsObject) => { + const id = getIDField(result, schema); + if (id) { + accMap.set(id, result); + } + return accMap; + }, new Map()); + + const nodes: NodeID[] = []; + // Find all the nodes that don't have their parent in the result set, we will use these + // nodes to find the additional ancestry + for (const result of results) { + const parent = getParentField(result, schema); + if (parent) { + const parentNode = nodesByID.get(parent); + if (!parentNode) { + // it's ok if the nodes array is larger than the levelsLeft because the query + // will have the size set to the levelsLeft which will restrict the number of results + nodes.push(...getAncestryAsArray(result, schema).slice(0, levelsLeft)); + } + } + } + return nodes; + } + + private async retrieveAncestors(options: TreeOptions): Promise { + const ancestors: FieldsObject[] = []; + const query = new LifecycleQuery({ + schema: options.schema, + indexPatterns: options.indexPatterns, + timerange: options.timerange, + }); + + let nodes = options.nodes; + let numLevelsLeft = options.ancestors; + + while (numLevelsLeft > 0) { + const results: FieldsObject[] = await query.search(this.client, nodes); + if (results.length <= 0) { + return ancestors; + } + + /** + * This array (this.ancestry.ancestors) is the accumulated ancestors of the node of interest. This array is different + * from the ancestry array of a specific document. The order of this array is going to be weird, it will look like this + * [most distant ancestor...closer ancestor, next recursive call most distant ancestor...closer ancestor] + * + * Here is an example of why this happens + * Consider the following tree: + * A -> B -> C -> D -> E -> Origin + * Where A was spawn before B, which was before C, etc + * + * Let's assume the ancestry array limit is 2 so Origin's array would be: [E, D] + * E's ancestry array would be: [D, C] etc + * + * If a request comes in to retrieve all the ancestors in this tree, the accumulate results will be: + * [D, E, B, C, A] + * + * The first iteration would retrieve D and E in that order because they are sorted in ascending order by timestamp. 
+ * The next iteration would get the ancestors of D (since that's the most distant ancestor from Origin) which are + * [B, C] + * The next iteration would get the ancestors of B which is A + * Hence: [D, E, B, C, A] + */ + ancestors.push(...results); + numLevelsLeft -= results.length; + nodes = Fetcher.getNextAncestorsToFind(results, options.schema, numLevelsLeft); + } + return ancestors; + } + + private async retrieveDescendants(options: TreeOptions): Promise { + const descendants: FieldsObject[] = []; + const query = new DescendantsQuery({ + schema: options.schema, + indexPatterns: options.indexPatterns, + timerange: options.timerange, + }); + + let nodes: NodeID[] = options.nodes; + let numNodesLeftToRequest: number = options.descendants; + let levelsLeftToRequest: number = options.descendantLevels; + // if the ancestry was specified then ignore the levels + while ( + numNodesLeftToRequest > 0 && + (options.schema.ancestry !== undefined || levelsLeftToRequest > 0) + ) { + const results: FieldsObject[] = await query.search(this.client, nodes, numNodesLeftToRequest); + if (results.length <= 0) { + return descendants; + } + + nodes = getLeafNodes(results, nodes, options.schema); + + numNodesLeftToRequest -= results.length; + levelsLeftToRequest -= 1; + descendants.push(...results); + } + + return descendants; + } +} + +/** + * This functions finds the leaf nodes for a given response from an Elasticsearch query. + * + * Exporting so it can be tested. + * + * @param results the doc values portion of the documents returned from an Elasticsearch query + * @param nodes an array of unique IDs that were used to find the returned documents + * @param schema the field definitions for how nodes are represented in the resolver graph + */ +export function getLeafNodes( + results: FieldsObject[], + nodes: Array, + schema: Schema +): NodeID[] { + let largestAncestryArray = 0; + const nodesToQueryNext: Map> = new Map(); + const queriedNodes = new Set(nodes); + const isDistantGrandchild = (event: FieldsObject) => { + const ancestry = getAncestryAsArray(event, schema); + return ancestry.length > 0 && queriedNodes.has(ancestry[ancestry.length - 1]); + }; + + for (const result of results) { + const ancestry = getAncestryAsArray(result, schema); + // This is to handle the following unlikely but possible scenario: + // if an alert was generated by the kernel process (parent process of all other processes) then + // the direct children of that process would only have an ancestry array of [parent_kernel], a single value in the array. + // The children of those children would have two values in their array [direct parent, parent_kernel] + // we need to determine which nodes are the most distant grandchildren of the queriedNodes because those should + // be used for the next query if more nodes should be retrieved. To generally determine the most distant grandchildren + // we can use the last entry in the ancestry array because of its ordering. The problem with that is in the scenario above + // the direct children of parent_kernel will also meet that criteria even though they are not actually the most + // distant grandchildren. To get around that issue we'll bucket all the nodes by the size of their ancestry array + // and then only return the nodes in the largest bucket because those should be the most distant grandchildren + // from the queried nodes that were passed in. 
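+ // For example (illustrative IDs): if the queried node is '0' and the results contain ancestry arrays
+ // ['0'] and ['1', '0'], both pass the grandchild check, but only the nodes with the two-element arrays
+ // are kept as candidates for the next query because they are the most distant descendants retrieved so far.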
+ if (ancestry.length > largestAncestryArray) { + largestAncestryArray = ancestry.length; + } + + // a grandchild must have an array of > 0 and have it's last parent be in the set of previously queried nodes + // this is one of the furthest descendants from the queried nodes + if (isDistantGrandchild(result)) { + let levelOfNodes = nodesToQueryNext.get(ancestry.length); + if (!levelOfNodes) { + levelOfNodes = new Set(); + nodesToQueryNext.set(ancestry.length, levelOfNodes); + } + const nodeID = getIDField(result, schema); + if (nodeID) { + levelOfNodes.add(nodeID); + } + } + } + const nextNodes = nodesToQueryNext.get(largestAncestryArray); + + return nextNodes !== undefined ? Array.from(nextNodes) : []; +} + +/** + * Retrieves the unique ID field from a document. + * + * Exposed for testing. + * @param obj the doc value fields retrieved from a document returned by Elasticsearch + * @param schema the schema used for identifying connections between documents + */ +export function getIDField(obj: FieldsObject, schema: Schema): NodeID | undefined { + const id: ECSField = obj[schema.id]; + return firstNonNullValue(id); +} + +/** + * Retrieves the name field from a document. + * + * Exposed for testing. + * @param obj the doc value fields retrieved from a document returned by Elasticsearch + * @param schema the schema used for identifying connections between documents + */ +export function getNameField(obj: FieldsObject, schema: Schema): string | undefined { + if (!schema.name) { + return undefined; + } + + const name: ECSField = obj[schema.name]; + return String(firstNonNullValue(name)); +} + +/** + * Retrieves the unique parent ID field from a document. + * + * Exposed for testing. + * @param obj the doc value fields retrieved from a document returned by Elasticsearch + * @param schema the schema used for identifying connections between documents + */ +export function getParentField(obj: FieldsObject, schema: Schema): NodeID | undefined { + const parent: ECSField = obj[schema.parent]; + return firstNonNullValue(parent); +} + +function getAncestryField(obj: FieldsObject, schema: Schema): NodeID[] | undefined { + if (!schema.ancestry) { + return undefined; + } + + const ancestry: ECSField = obj[schema.ancestry]; + if (!ancestry) { + return undefined; + } + + return values(ancestry); +} + +/** + * Retrieves the ancestry array field if it exists. If it doesn't exist or if it is empty it reverts to + * creating an array using the parent field. If the parent field doesn't exist, it returns + * an empty array. + * + * Exposed for testing. + * @param obj the doc value fields retrieved from a document returned by Elasticsearch + * @param schema the schema used for identifying connections between documents + */ +export function getAncestryAsArray(obj: FieldsObject, schema: Schema): NodeID[] { + const ancestry = getAncestryField(obj, schema); + if (!ancestry || ancestry.length <= 0) { + const parentField = getParentField(obj, schema); + return parentField !== undefined ? [parentField] : []; + } + return ancestry; +} diff --git a/x-pack/plugins/security_solution/server/endpoint/routes/resolver/tree/utils/index.ts b/x-pack/plugins/security_solution/server/endpoint/routes/resolver/tree/utils/index.ts new file mode 100644 index 000000000000..21a49e268310 --- /dev/null +++ b/x-pack/plugins/security_solution/server/endpoint/routes/resolver/tree/utils/index.ts @@ -0,0 +1,62 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +/** + * Represents a time range filter + */ +export interface Timerange { + from: string; + to: string; +} + +/** + * An array of unique IDs to identify nodes within the resolver tree. + */ +export type NodeID = string | number; + +/** + * The fields to use to identify nodes within a resolver tree. + */ +export interface Schema { + /** + * the ancestry field should be set to a field that contains an order array representing + * the ancestors of a node. + */ + ancestry?: string; + /** + * id represents the field to use as the unique ID for a node. + */ + id: string; + /** + * field to use for the name of the node + */ + name?: string; + /** + * parent represents the field that is the edge between two nodes. + */ + parent: string; +} + +/** + * Returns the doc value fields filter to use in queries to limit the number of fields returned in the + * query response. + * + * See for more info: https://www.elastic.co/guide/en/elasticsearch/reference/current/search-fields.html#docvalue-fields + * + * @param schema is the node schema information describing how relationships are formed between nodes + * in the resolver graph. + */ +export function docValueFields(schema: Schema): Array<{ field: string }> { + const filter = [{ field: '@timestamp' }, { field: schema.id }, { field: schema.parent }]; + if (schema.ancestry) { + filter.push({ field: schema.ancestry }); + } + + if (schema.name) { + filter.push({ field: schema.name }); + } + return filter; +} diff --git a/x-pack/test/security_solution_endpoint_api_int/apis/resolver/common.ts b/x-pack/test/security_solution_endpoint_api_int/apis/resolver/common.ts index 2c59863099ae..b4e98d7d4b95 100644 --- a/x-pack/test/security_solution_endpoint_api_int/apis/resolver/common.ts +++ b/x-pack/test/security_solution_endpoint_api_int/apis/resolver/common.ts @@ -5,16 +5,24 @@ */ import _ from 'lodash'; import expect from '@kbn/expect'; +import { firstNonNullValue } from '../../../../plugins/security_solution/common/endpoint/models/ecs_safety_helpers'; +import { + NodeID, + Schema, +} from '../../../../plugins/security_solution/server/endpoint/routes/resolver/tree/utils'; import { SafeResolverChildNode, SafeResolverLifecycleNode, SafeResolverEvent, ResolverNodeStats, + ResolverNode, } from '../../../../plugins/security_solution/common/endpoint/types'; import { parentEntityIDSafeVersion, entityIDSafeVersion, eventIDSafeVersion, + timestampSafeVersion, + timestampAsDateSafeVersion, } from '../../../../plugins/security_solution/common/endpoint/models/event'; import { Event, @@ -24,6 +32,344 @@ import { categoryMapping, } from '../../../../plugins/security_solution/common/endpoint/generate_data'; +const createLevels = ({ + descendantsByParent, + levels, + currentNodes, + schema, +}: { + descendantsByParent: Map>; + levels: Array>; + currentNodes: Map | undefined; + schema: Schema; +}): Array> => { + if (!currentNodes || currentNodes.size === 0) { + return levels; + } + levels.push(currentNodes); + const nextLevel: Map = new Map(); + for (const node of currentNodes.values()) { + const id = getID(node, schema); + const children = descendantsByParent.get(id); + if (children) { + for (const child of children.values()) { + const childID = getID(child, schema); + nextLevel.set(childID, child); + } + } + } + return createLevels({ descendantsByParent, levels, currentNodes: nextLevel, schema }); +}; + +interface TreeExpectation { + origin: NodeID; + 
nodeExpectations: NodeExpectations; +} + +interface NodeExpectations { + ancestors?: number; + descendants?: number; + descendantLevels?: number; +} + +interface APITree { + // entries closer to the beginning of the array are more direct parents of the origin aka + // ancestors[0] = the origin's parent, ancestors[1] = the origin's grandparent + ancestors: ResolverNode[]; + // if no ancestors were retrieved then the origin will be undefined + origin: ResolverNode | undefined; + descendantLevels: Array>; + nodeExpectations: NodeExpectations; +} + +/** + * Represents a utility structure for making it easier to perform expect calls on the response + * from the /tree api. This can represent multiple trees, since the tree api can return multiple trees. + */ +export interface APIResponse { + nodesByID: Map; + trees: Map; + allNodes: ResolverNode[]; +} + +/** + * Gets the ID field from a resolver node. Throws an error if the ID doesn't exist. + * + * @param node a resolver node + * @param schema the schema that was used to retrieve this resolver node + */ +export const getID = (node: ResolverNode | undefined, schema: Schema): NodeID => { + const id = firstNonNullValue(node?.data[schema.id]); + if (!id) { + throw new Error(`Unable to find id ${schema.id} in node: ${JSON.stringify(node)}`); + } + return id; +}; + +const getParentInternal = (node: ResolverNode | undefined, schema: Schema): NodeID | undefined => { + if (node) { + return firstNonNullValue(node?.data[schema.parent]); + } + return undefined; +}; + +/** + * Gets the parent ID field from a resolver node. Throws an error if the ID doesn't exist. + * + * @param node a resolver node + * @param schema the schema that was used to retrieve this resolver node + */ +export const getParent = (node: ResolverNode | undefined, schema: Schema): NodeID => { + const parent = getParentInternal(node, schema); + if (!parent) { + throw new Error(`Unable to find parent ${schema.parent} in node: ${JSON.stringify(node)}`); + } + return parent; +}; + +/** + * Reformats the tree's response to make it easier to perform testing on the results. + * + * @param treeExpectations the node IDs used to retrieve the trees and the expected number of ancestors/descendants in the + * resulting trees + * @param nodes the response from the tree api + * @param schema the schema used when calling the tree api + */ +const createTreeFromResponse = ( + treeExpectations: TreeExpectation[], + nodes: ResolverNode[], + schema: Schema +) => { + const nodesByID = new Map(); + const nodesByParent = new Map>(); + + for (const node of nodes) { + const id = getID(node, schema); + const parent = getParentInternal(node, schema); + + nodesByID.set(id, node); + + if (parent) { + let groupedChildren = nodesByParent.get(parent); + if (!groupedChildren) { + groupedChildren = new Map(); + nodesByParent.set(parent, groupedChildren); + } + + groupedChildren.set(id, node); + } + } + + const trees: Map = new Map(); + + for (const expectation of treeExpectations) { + const descendantLevels = createLevels({ + descendantsByParent: nodesByParent, + levels: [], + currentNodes: nodesByParent.get(expectation.origin), + schema, + }); + + const ancestors: ResolverNode[] = []; + const originNode = nodesByID.get(expectation.origin); + if (originNode) { + let currentID: NodeID | undefined = getParentInternal(originNode, schema); + // construct an array with all the ancestors from the response. We'll use this to verify that + // all the expected ancestors were returned in the response. 
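+        // The walk follows parent IDs upward and stops at the first ancestor that was not included in the
+        // response (i.e. the most distant ancestor the API returned for this origin).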
+ while (currentID !== undefined) { + const parentNode = nodesByID.get(currentID); + if (parentNode) { + ancestors.push(parentNode); + } + currentID = getParentInternal(parentNode, schema); + } + } + + trees.set(expectation.origin, { + ancestors, + origin: originNode, + descendantLevels, + nodeExpectations: expectation.nodeExpectations, + }); + } + + return { + nodesByID, + trees, + allNodes: nodes, + }; +}; + +const verifyAncestry = ({ + responseTrees, + schema, + genTree, +}: { + responseTrees: APIResponse; + schema: Schema; + genTree: Tree; +}) => { + const allGenNodes = new Map([ + ...genTree.ancestry, + ...genTree.children, + [genTree.origin.id, genTree.origin], + ]); + + for (const tree of responseTrees.trees.values()) { + if (tree.nodeExpectations.ancestors !== undefined) { + expect(tree.ancestors.length).to.be(tree.nodeExpectations.ancestors); + } + + if (tree.origin !== undefined) { + // make sure the origin node from the request exists in the generated data and has the same fields + const originID = getID(tree.origin, schema); + const originParentID = getParent(tree.origin, schema); + expect(tree.origin.id).to.be(originID); + expect(tree.origin.parent).to.be(originParentID); + expect(allGenNodes.get(String(originID))?.id).to.be(String(originID)); + expect(allGenNodes.get(String(originParentID))?.id).to.be(String(originParentID)); + expect(originID).to.be(entityIDSafeVersion(allGenNodes.get(String(originID))!.lifecycle[0])); + expect(originParentID).to.be( + parentEntityIDSafeVersion(allGenNodes.get(String(originID))!.lifecycle[0]) + ); + // make sure the lifecycle events are sorted by timestamp in ascending order because the + // event that will be returned that we need to compare to should be the earliest event + // found + const originLifecycleSorted = [...allGenNodes.get(String(originID))!.lifecycle].sort( + (a: Event, b: Event) => { + const aTime: number | undefined = timestampSafeVersion(a); + const bTime = timestampSafeVersion(b); + if (aTime !== undefined && bTime !== undefined) { + return aTime - bTime; + } else { + return 0; + } + } + ); + + const ts = timestampAsDateSafeVersion(tree.origin?.data); + expect(ts).to.not.be(undefined); + expect(ts).to.eql(timestampAsDateSafeVersion(originLifecycleSorted[0])); + } + + // check the constructed ancestors array to see if we're missing any nodes in the ancestry + for (let i = 0; i < tree.ancestors.length; i++) { + const id = getID(tree.ancestors[i], schema); + const parent = getParentInternal(tree.ancestors[i], schema); + // only compare to the parent if this is not the last entry in the array + if (i < tree.ancestors.length - 1) { + // the current node's parent ID should match the parent's ID field + expect(parent).to.be(getID(tree.ancestors[i + 1], schema)); + expect(parent).to.not.be(undefined); + expect(tree.ancestors[i].parent).to.not.be(undefined); + expect(tree.ancestors[i].parent).to.be(parent); + } + // the current node's ID must exist in the generated tree + expect(allGenNodes.get(String(id))?.id).to.be(id); + expect(tree.ancestors[i].id).to.be(id); + } + } +}; + +const verifyChildren = ({ + responseTrees, + schema, + genTree, +}: { + responseTrees: APIResponse; + schema: Schema; + genTree: Tree; +}) => { + const allGenNodes = new Map([ + ...genTree.ancestry, + ...genTree.children, + [genTree.origin.id, genTree.origin], + ]); + for (const tree of responseTrees.trees.values()) { + if (tree.nodeExpectations.descendantLevels !== undefined) { + 
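// tree.descendantLevels holds one Map per generation below the origin, so its length is the number of levels returned +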
expect(tree.nodeExpectations.descendantLevels).to.be(tree.descendantLevels.length); + } + let totalDescendants = 0; + + for (const level of tree.descendantLevels) { + for (const node of level.values()) { + totalDescendants += 1; + const id = getID(node, schema); + const parent = getParent(node, schema); + const genNode = allGenNodes.get(String(id)); + expect(id).to.be(node.id); + expect(parent).to.be(node.parent); + expect(node.parent).to.not.be(undefined); + // make sure the id field is the same in the returned node as the generated one + expect(id).to.be(entityIDSafeVersion(genNode!.lifecycle[0])); + // make sure the parent field is the same in the returned node as the generated one + expect(parent).to.be(parentEntityIDSafeVersion(genNode!.lifecycle[0])); + } + } + if (tree.nodeExpectations.descendants !== undefined) { + expect(tree.nodeExpectations.descendants).to.be(totalDescendants); + } + } +}; + +const verifyStats = ({ + responseTrees, + relatedEventsCategories, +}: { + responseTrees: APIResponse; + relatedEventsCategories: RelatedEventInfo[]; +}) => { + for (const node of responseTrees.allNodes) { + let totalExpEvents = 0; + for (const cat of relatedEventsCategories) { + const ecsCategories = categoryMapping[cat.category]; + if (Array.isArray(ecsCategories)) { + // if there are multiple ecs categories used to define a related event, the count for all of them should be the same + // and they should equal what is defined in the categories used to generate the related events + for (const ecsCat of ecsCategories) { + expect(node.stats.byCategory[ecsCat]).to.be(cat.count); + } + } else { + expect(node.stats.byCategory[ecsCategories]).to.be(cat.count); + } + + totalExpEvents += cat.count; + } + expect(node.stats.total).to.be(totalExpEvents); + } +}; + +/** + * Verify the ancestry of multiple trees. + * + * @param expectations array of expectations based on the origin that built a particular tree + * @param response the nodes returned from the api + * @param schema the schema fields passed to the tree api + * @param genTree the generated tree that was inserted in Elasticsearch that we are querying + * @param relatedEventsCategories an optional array to instruct the verification to check the stats + * on each node returned + */ +export const verifyTree = ({ + expectations, + response, + schema, + genTree, + relatedEventsCategories, +}: { + expectations: TreeExpectation[]; + response: ResolverNode[]; + schema: Schema; + genTree: Tree; + relatedEventsCategories?: RelatedEventInfo[]; +}) => { + const responseTrees = createTreeFromResponse(expectations, response, schema); + verifyAncestry({ responseTrees, schema, genTree }); + verifyChildren({ responseTrees, schema, genTree }); + if (relatedEventsCategories !== undefined) { + verifyStats({ responseTrees, relatedEventsCategories }); + } +}; + /** * Creates the ancestry array based on an array of events. The order of the ancestry array will match the order * of the events passed in. @@ -44,6 +390,7 @@ export const createAncestryArray = (events: Event[]) => { /** * Check that the given lifecycle is in the resolver tree's corresponding map * + * @deprecated use verifyTree * @param node a lifecycle node containing the start and end events for a node * @param nodeMap a map of entity_ids to nodes to look for the passed in `node` */ @@ -59,12 +406,13 @@ const expectLifecycleNodeInMap = ( /** * Verify that all the ancestor nodes are valid and optionally have parents. 
* + * @deprecated use verifyTree * @param ancestors an array of ancestors * @param tree the generated resolver tree as the source of truth * @param verifyLastParent a boolean indicating whether to check the last ancestor. If the ancestors array intentionally * does not contain all the ancestors, the last one will not have the parent */ -export const verifyAncestry = ( +export const checkAncestryFromEntityTreeAPI = ( ancestors: SafeResolverLifecycleNode[], tree: Tree, verifyLastParent: boolean @@ -114,6 +462,7 @@ export const verifyAncestry = ( /** * Retrieves the most distant ancestor in the given array. * + * @deprecated use verifyTree * @param ancestors an array of ancestor nodes */ export const retrieveDistantAncestor = (ancestors: SafeResolverLifecycleNode[]) => { @@ -137,12 +486,13 @@ export const retrieveDistantAncestor = (ancestors: SafeResolverLifecycleNode[]) /** * Verify that the children nodes are correct * + * @deprecated use verifyTree * @param children the children nodes * @param tree the generated resolver tree as the source of truth * @param numberOfParents an optional number to compare that are a certain number of parents in the children array * @param childrenPerParent an optional number to compare that there are a certain number of children for each parent */ -export const verifyChildren = ( +export const verifyChildrenFromEntityTreeAPI = ( children: SafeResolverChildNode[], tree: Tree, numberOfParents?: number, @@ -200,10 +550,11 @@ export const compareArrays = ( /** * Verifies that the stats received from ES for a node reflect the categories of events that the generator created. * + * @deprecated use verifyTree * @param relatedEvents the related events received for a particular node * @param categories the related event info used when generating the resolver tree */ -export const verifyStats = ( +export const verifyEntityTreeStats = ( stats: ResolverNodeStats | undefined, categories: RelatedEventInfo[], relatedAlerts: number @@ -225,12 +576,12 @@ export const verifyStats = ( totalExpEvents += cat.count; } expect(stats?.events.total).to.be(totalExpEvents); - expect(stats?.totalAlerts); }; /** * A helper function for verifying the stats information an array of nodes. 
* + * @deprecated use verifyTree * @param nodes an array of lifecycle nodes that should have a stats field defined * @param categories the related event info used when generating the resolver tree */ @@ -240,6 +591,6 @@ export const verifyLifecycleStats = ( relatedAlerts: number ) => { for (const node of nodes) { - verifyStats(node.stats, categories, relatedAlerts); + verifyEntityTreeStats(node.stats, categories, relatedAlerts); } }; diff --git a/x-pack/test/security_solution_endpoint_api_int/apis/resolver/index.ts b/x-pack/test/security_solution_endpoint_api_int/apis/resolver/index.ts index ecfc1ef5bb7f..0ba5460f09d9 100644 --- a/x-pack/test/security_solution_endpoint_api_int/apis/resolver/index.ts +++ b/x-pack/test/security_solution_endpoint_api_int/apis/resolver/index.ts @@ -12,6 +12,7 @@ export default function (providerContext: FtrProviderContext) { loadTestFile(require.resolve('./entity_id')); loadTestFile(require.resolve('./entity')); loadTestFile(require.resolve('./children')); + loadTestFile(require.resolve('./tree_entity_id')); loadTestFile(require.resolve('./tree')); loadTestFile(require.resolve('./alerts')); loadTestFile(require.resolve('./events')); diff --git a/x-pack/test/security_solution_endpoint_api_int/apis/resolver/tree.ts b/x-pack/test/security_solution_endpoint_api_int/apis/resolver/tree.ts index 7a95bf7bab88..646a666629ac 100644 --- a/x-pack/test/security_solution_endpoint_api_int/apis/resolver/tree.ts +++ b/x-pack/test/security_solution_endpoint_api_int/apis/resolver/tree.ts @@ -4,31 +4,23 @@ * you may not use this file except in compliance with the Elastic License. */ import expect from '@kbn/expect'; +import { getNameField } from '../../../../plugins/security_solution/server/endpoint/routes/resolver/tree/utils/fetch'; +import { Schema } from '../../../../plugins/security_solution/server/endpoint/routes/resolver/tree/utils'; +import { ResolverNode } from '../../../../plugins/security_solution/common/endpoint/types'; import { - SafeResolverAncestry, - SafeResolverChildren, - SafeResolverTree, - SafeLegacyEndpointEvent, -} from '../../../../plugins/security_solution/common/endpoint/types'; -import { parentEntityIDSafeVersion } from '../../../../plugins/security_solution/common/endpoint/models/event'; + parentEntityIDSafeVersion, + timestampSafeVersion, +} from '../../../../plugins/security_solution/common/endpoint/models/event'; import { FtrProviderContext } from '../../ftr_provider_context'; import { Tree, RelatedEventCategory, } from '../../../../plugins/security_solution/common/endpoint/generate_data'; import { Options, GeneratedTrees } from '../../services/resolver'; -import { - compareArrays, - verifyAncestry, - retrieveDistantAncestor, - verifyChildren, - verifyLifecycleStats, - verifyStats, -} from './common'; +import { verifyTree } from './common'; export default function ({ getService }: FtrProviderContext) { const supertest = getService('supertest'); - const esArchiver = getService('esArchiver'); const resolver = getService('resolverGenerator'); const relatedEventsToGen = [ @@ -52,322 +44,641 @@ export default function ({ getService }: FtrProviderContext) { ancestryArraySize: 2, }; + const schemaWithAncestry: Schema = { + ancestry: 'process.Ext.ancestry', + id: 'process.entity_id', + parent: 'process.parent.entity_id', + }; + + const schemaWithoutAncestry: Schema = { + id: 'process.entity_id', + parent: 'process.parent.entity_id', + }; + + const schemaWithName: Schema = { + id: 'process.entity_id', + parent: 'process.parent.entity_id', + name: 
'process.name', + }; + describe('Resolver tree', () => { before(async () => { - await esArchiver.load('endpoint/resolver/api_feature'); resolverTrees = await resolver.createTrees(treeOptions); // we only requested a single alert so there's only 1 tree tree = resolverTrees.trees[0]; }); after(async () => { await resolver.deleteData(resolverTrees); - // this unload is for an endgame-* index so it does not use data streams - await esArchiver.unload('endpoint/resolver/api_feature'); }); - describe('ancestry events route', () => { - describe('legacy events', () => { - const endpointID = '5a0c957f-b8e7-4538-965e-57e8bb86ad3a'; - const entityID = '94042'; - - it('should return details for the root node', async () => { - const { body }: { body: SafeResolverAncestry } = await supertest - .get( - `/api/endpoint/resolver/${entityID}/ancestry?legacyEndpointID=${endpointID}&ancestors=5` - ) - .expect(200); - expect(body.ancestors[0].lifecycle.length).to.eql(2); - expect(body.ancestors.length).to.eql(2); - expect(body.nextAncestor).to.eql(null); - }); - - it('should have a populated next parameter', async () => { - const { body }: { body: SafeResolverAncestry } = await supertest - .get( - `/api/endpoint/resolver/${entityID}/ancestry?legacyEndpointID=${endpointID}&ancestors=0` - ) - .expect(200); - expect(body.nextAncestor).to.eql('94041'); + describe('ancestry events', () => { + it('should return the correct ancestor nodes for the tree', async () => { + const { body }: { body: ResolverNode[] } = await supertest + .post('/api/endpoint/resolver/tree') + .set('kbn-xsrf', 'xxx') + .send({ + descendants: 0, + descendantLevels: 0, + ancestors: 9, + schema: schemaWithAncestry, + nodes: [tree.origin.id], + timerange: { + from: tree.startTime.toISOString(), + to: tree.endTime.toISOString(), + }, + indexPatterns: ['logs-*'], + }) + .expect(200); + verifyTree({ + expectations: [{ origin: tree.origin.id, nodeExpectations: { ancestors: 5 } }], + response: body, + schema: schemaWithAncestry, + genTree: tree, }); + }); - it('should handle an ancestors param request', async () => { - let { body }: { body: SafeResolverAncestry } = await supertest - .get( - `/api/endpoint/resolver/${entityID}/ancestry?legacyEndpointID=${endpointID}&ancestors=0` - ) - .expect(200); - const next = body.nextAncestor; + it('should handle an invalid id', async () => { + const { body }: { body: ResolverNode[] } = await supertest + .post('/api/endpoint/resolver/tree') + .set('kbn-xsrf', 'xxx') + .send({ + descendants: 0, + descendantLevels: 0, + ancestors: 9, + schema: schemaWithAncestry, + nodes: ['bogus id'], + timerange: { + from: tree.startTime.toISOString(), + to: tree.endTime.toISOString(), + }, + indexPatterns: ['logs-*'], + }) + .expect(200); + expect(body).to.be.empty(); + }); - ({ body } = await supertest - .get( - `/api/endpoint/resolver/${next}/ancestry?legacyEndpointID=${endpointID}&ancestors=1` - ) - .expect(200)); - expect(body.ancestors[0].lifecycle.length).to.eql(1); - expect(body.nextAncestor).to.eql(null); + it('should return a subset of the ancestors', async () => { + const { body }: { body: ResolverNode[] } = await supertest + .post('/api/endpoint/resolver/tree') + .set('kbn-xsrf', 'xxx') + .send({ + descendants: 0, + descendantLevels: 0, + // 3 ancestors means 1 origin and 2 ancestors of the origin + ancestors: 3, + schema: schemaWithAncestry, + nodes: [tree.origin.id], + timerange: { + from: tree.startTime.toISOString(), + to: tree.endTime.toISOString(), + }, + indexPatterns: ['logs-*'], + }) + .expect(200); + 
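// requesting 3 ancestors returns the origin itself plus its 2 closest ancestors, so only 2 entries are expected in the ancestors array +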
verifyTree({ + expectations: [{ origin: tree.origin.id, nodeExpectations: { ancestors: 2 } }], + response: body, + schema: schemaWithAncestry, + genTree: tree, }); }); - describe('endpoint events', () => { - it('should return the origin node at the front of the array', async () => { - const { body }: { body: SafeResolverAncestry } = await supertest - .get(`/api/endpoint/resolver/${tree.origin.id}/ancestry?ancestors=9`) - .expect(200); - expect(body.ancestors[0].entityID).to.eql(tree.origin.id); + it('should return ancestors without the ancestry array', async () => { + const { body }: { body: ResolverNode[] } = await supertest + .post('/api/endpoint/resolver/tree') + .set('kbn-xsrf', 'xxx') + .send({ + descendants: 0, + descendantLevels: 0, + ancestors: 50, + schema: schemaWithoutAncestry, + nodes: [tree.origin.id], + timerange: { + from: tree.startTime.toISOString(), + to: tree.endTime.toISOString(), + }, + indexPatterns: ['logs-*'], + }) + .expect(200); + verifyTree({ + expectations: [{ origin: tree.origin.id, nodeExpectations: { ancestors: 5 } }], + response: body, + schema: schemaWithoutAncestry, + genTree: tree, }); + }); - it('should return details for the root node', async () => { - const { body }: { body: SafeResolverAncestry } = await supertest - .get(`/api/endpoint/resolver/${tree.origin.id}/ancestry?ancestors=9`) - .expect(200); - // the tree we generated had 5 ancestors + 1 origin node - expect(body.ancestors.length).to.eql(6); - expect(body.ancestors[0].entityID).to.eql(tree.origin.id); - verifyAncestry(body.ancestors, tree, true); - expect(body.nextAncestor).to.eql(null); + it('should respect the time range specified and only return the origin node', async () => { + const from = new Date( + timestampSafeVersion(tree.origin.lifecycle[0]) ?? 
new Date() + ).toISOString(); + const { body }: { body: ResolverNode[] } = await supertest + .post('/api/endpoint/resolver/tree') + .set('kbn-xsrf', 'xxx') + .send({ + descendants: 0, + descendantLevels: 0, + ancestors: 50, + schema: schemaWithoutAncestry, + nodes: [tree.origin.id], + timerange: { + from, + to: from, + }, + indexPatterns: ['logs-*'], + }) + .expect(200); + verifyTree({ + expectations: [{ origin: tree.origin.id, nodeExpectations: { ancestors: 0 } }], + response: body, + schema: schemaWithoutAncestry, + genTree: tree, }); + }); - it('should handle an invalid id', async () => { - const { body }: { body: SafeResolverAncestry } = await supertest - .get(`/api/endpoint/resolver/alskdjflasj/ancestry`) - .expect(200); - expect(body.ancestors).to.be.empty(); - expect(body.nextAncestor).to.eql(null); + it('should support returning multiple ancestor trees when multiple nodes are requested', async () => { + // There should be 2 levels of descendants under the origin, grab the bottom one, and the first node's id + const bottomMostDescendant = Array.from(tree.childrenLevels[1].values())[0].id; + const { body }: { body: ResolverNode[] } = await supertest + .post('/api/endpoint/resolver/tree') + .set('kbn-xsrf', 'xxx') + .send({ + descendants: 0, + descendantLevels: 0, + ancestors: 50, + schema: schemaWithoutAncestry, + nodes: [tree.origin.id, bottomMostDescendant], + timerange: { + from: tree.startTime.toISOString(), + to: tree.endTime.toISOString(), + }, + indexPatterns: ['logs-*'], + }) + .expect(200); + verifyTree({ + expectations: [ + // there are 5 ancestors above the origin + { origin: tree.origin.id, nodeExpectations: { ancestors: 5 } }, + // there are 2 levels below the origin so the bottom node's ancestry should be + // all the ancestors (5) + one level + the origin = 7 + { origin: bottomMostDescendant, nodeExpectations: { ancestors: 7 } }, + ], + response: body, + schema: schemaWithoutAncestry, + genTree: tree, }); + }); - it('should have a populated next parameter', async () => { - const { body }: { body: SafeResolverAncestry } = await supertest - .get(`/api/endpoint/resolver/${tree.origin.id}/ancestry?ancestors=2`) - .expect(200); - // it should have 2 ancestors + 1 origin - expect(body.ancestors.length).to.eql(3); - verifyAncestry(body.ancestors, tree, false); - const distantGrandparent = retrieveDistantAncestor(body.ancestors); - expect(body.nextAncestor).to.eql( - parentEntityIDSafeVersion(distantGrandparent.lifecycle[0]) - ); + it('should return a single ancestry when two nodes a the same level and from same parent are requested', async () => { + // there are 2 levels after the origin, let's get the first level, there will be three + // children so get the left and right most ones + const level0Nodes = Array.from(tree.childrenLevels[0].values()); + const leftNode = level0Nodes[0].id; + const rightNode = level0Nodes[2].id; + const { body }: { body: ResolverNode[] } = await supertest + .post('/api/endpoint/resolver/tree') + .set('kbn-xsrf', 'xxx') + .send({ + descendants: 0, + descendantLevels: 0, + ancestors: 50, + schema: schemaWithoutAncestry, + nodes: [leftNode, rightNode], + timerange: { + from: tree.startTime.toISOString(), + to: tree.endTime.toISOString(), + }, + indexPatterns: ['logs-*'], + }) + .expect(200); + verifyTree({ + expectations: [ + // We should be 1 level below the origin so the node's ancestry should be + // all the ancestors (5) + the origin = 6 + { origin: leftNode, nodeExpectations: { ancestors: 6 } }, + // these nodes should be at the same level so 
the ancestors should be the same number + { origin: rightNode, nodeExpectations: { ancestors: 6 } }, + ], + response: body, + schema: schemaWithoutAncestry, + genTree: tree, }); + }); - it('should handle multiple ancestor requests', async () => { - let { body }: { body: SafeResolverAncestry } = await supertest - .get(`/api/endpoint/resolver/${tree.origin.id}/ancestry?ancestors=3`) - .expect(200); - expect(body.ancestors.length).to.eql(4); - const next = body.nextAncestor; - - ({ body } = await supertest - .get(`/api/endpoint/resolver/${next}/ancestry?ancestors=1`) - .expect(200)); - expect(body.ancestors.length).to.eql(2); - verifyAncestry(body.ancestors, tree, true); - // the highest node in the generated tree will not have a parent ID which causes the server to return - // without setting the pagination so nextAncestor will be null - expect(body.nextAncestor).to.eql(null); - }); + it('should not return any nodes when the search index does not have any data', async () => { + const { body }: { body: ResolverNode[] } = await supertest + .post('/api/endpoint/resolver/tree') + .set('kbn-xsrf', 'xxx') + .send({ + descendants: 0, + descendantLevels: 0, + ancestors: 50, + schema: schemaWithoutAncestry, + nodes: [tree.origin.id], + timerange: { + from: tree.startTime.toISOString(), + to: tree.endTime.toISOString(), + }, + indexPatterns: ['metrics-*'], + }) + .expect(200); + expect(body).to.be.empty(); }); }); - describe('children route', () => { - describe('legacy events', () => { - const endpointID = '5a0c957f-b8e7-4538-965e-57e8bb86ad3a'; - const entityID = '94041'; - - it('returns child process lifecycle events', async () => { - const { body }: { body: SafeResolverChildren } = await supertest - .get(`/api/endpoint/resolver/${entityID}/children?legacyEndpointID=${endpointID}`) - .expect(200); - expect(body.childNodes.length).to.eql(1); - expect(body.childNodes[0].lifecycle.length).to.eql(2); - expect( - // for some reason the ts server doesn't think `endgame` exists even though we're using ResolverEvent - // here, so to avoid it complaining we'll just force it - (body.childNodes[0].lifecycle[0] as SafeLegacyEndpointEvent).endgame.unique_pid - ).to.eql(94042); + describe('descendant events', () => { + it('returns all descendants for the origin without using the ancestry field', async () => { + const { body }: { body: ResolverNode[] } = await supertest + .post('/api/endpoint/resolver/tree') + .set('kbn-xsrf', 'xxx') + .send({ + descendants: 100, + descendantLevels: 2, + ancestors: 0, + schema: schemaWithoutAncestry, + nodes: [tree.origin.id], + timerange: { + from: tree.startTime.toISOString(), + to: tree.endTime.toISOString(), + }, + indexPatterns: ['logs-*'], + }) + .expect(200); + verifyTree({ + expectations: [ + // there are 2 levels in the descendant part of the tree and 3 nodes for each + // descendant = 3 children for the origin + 3 children for each of the origin's children = 12 + { origin: tree.origin.id, nodeExpectations: { descendants: 12, descendantLevels: 2 } }, + ], + response: body, + schema: schemaWithoutAncestry, + genTree: tree, }); + }); - it('returns multiple levels of child process lifecycle events', async () => { - const { body }: { body: SafeResolverChildren } = await supertest - .get(`/api/endpoint/resolver/93802/children?legacyEndpointID=${endpointID}&children=10`) - .expect(200); - expect(body.childNodes.length).to.eql(10); - expect(body.nextChild).to.be(null); - expect(body.childNodes[0].lifecycle.length).to.eql(1); - expect( - // for some reason the ts server doesn't 
think `endgame` exists even though we're using ResolverEvent - // here, so to avoid it complaining we'll just force it - (body.childNodes[0].lifecycle[0] as SafeLegacyEndpointEvent).endgame.unique_pid - ).to.eql(93932); + it('returns all descendants for the origin using the ancestry field', async () => { + const { body }: { body: ResolverNode[] } = await supertest + .post('/api/endpoint/resolver/tree') + .set('kbn-xsrf', 'xxx') + .send({ + descendants: 100, + // should be ignored when using the ancestry array + descendantLevels: 0, + ancestors: 0, + schema: schemaWithAncestry, + nodes: [tree.origin.id], + timerange: { + from: tree.startTime.toISOString(), + to: tree.endTime.toISOString(), + }, + indexPatterns: ['logs-*'], + }) + .expect(200); + verifyTree({ + expectations: [ + // there are 2 levels in the descendant part of the tree and 3 nodes for each + // descendant = 3 children for the origin + 3 children for each of the origin's children = 12 + { origin: tree.origin.id, nodeExpectations: { descendants: 12, descendantLevels: 2 } }, + ], + response: body, + schema: schemaWithAncestry, + genTree: tree, }); + }); - it('returns no values when there is no more data', async () => { - let { body }: { body: SafeResolverChildren } = await supertest - .get( - // there should only be a single child for this node - `/api/endpoint/resolver/94041/children?legacyEndpointID=${endpointID}&children=1` - ) - .expect(200); - expect(body.nextChild).to.not.be(null); - - ({ body } = await supertest - .get( - `/api/endpoint/resolver/94041/children?legacyEndpointID=${endpointID}&afterChild=${body.nextChild}` - ) - .expect(200)); - expect(body.childNodes).be.empty(); - expect(body.nextChild).to.eql(null); - }); + it('should handle an invalid id', async () => { + const { body }: { body: ResolverNode[] } = await supertest + .post('/api/endpoint/resolver/tree') + .set('kbn-xsrf', 'xxx') + .send({ + descendants: 100, + descendantLevels: 100, + ancestors: 0, + schema: schemaWithAncestry, + nodes: ['bogus id'], + timerange: { + from: tree.startTime.toISOString(), + to: tree.endTime.toISOString(), + }, + indexPatterns: ['logs-*'], + }) + .expect(200); + expect(body).to.be.empty(); + }); - it('returns the first page of information when the cursor is invalid', async () => { - const { body }: { body: SafeResolverChildren } = await supertest - .get( - `/api/endpoint/resolver/${entityID}/children?legacyEndpointID=${endpointID}&afterChild=blah` - ) - .expect(200); - expect(body.childNodes.length).to.eql(1); - expect(body.nextChild).to.be(null); + it('returns a single generation of children', async () => { + // this gets a node should have 3 children which were created in succession so that the timestamps + // are ordered correctly to be retrieved in a single call + const childID = Array.from(tree.childrenLevels[0].values())[0].id; + const { body }: { body: ResolverNode[] } = await supertest + .post('/api/endpoint/resolver/tree') + .set('kbn-xsrf', 'xxx') + .send({ + descendants: 100, + descendantLevels: 1, + ancestors: 0, + schema: schemaWithoutAncestry, + nodes: [childID], + timerange: { + from: tree.startTime.toISOString(), + to: tree.endTime.toISOString(), + }, + indexPatterns: ['logs-*'], + }) + .expect(200); + verifyTree({ + expectations: [ + // a single generation should be three nodes + { origin: childID, nodeExpectations: { descendants: 3, descendantLevels: 1 } }, + ], + response: body, + schema: schemaWithoutAncestry, + genTree: tree, }); + }); - it('errors on invalid pagination values', async () => { - await 
supertest.get(`/api/endpoint/resolver/${entityID}/children?children=0`).expect(400); - await supertest - .get(`/api/endpoint/resolver/${entityID}/children?children=20000`) - .expect(400); - await supertest - .get(`/api/endpoint/resolver/${entityID}/children?children=-1`) - .expect(400); + it('should support returning multiple descendant trees when multiple nodes are requested', async () => { + // there are 2 levels after the origin, let's get the first level, there will be three + // children so get the left and right most ones + const level0Nodes = Array.from(tree.childrenLevels[0].values()); + const leftNodeID = level0Nodes[0].id; + const rightNodeID = level0Nodes[2].id; + const { body }: { body: ResolverNode[] } = await supertest + .post('/api/endpoint/resolver/tree') + .set('kbn-xsrf', 'xxx') + .send({ + descendants: 6, + descendantLevels: 0, + ancestors: 0, + schema: schemaWithAncestry, + nodes: [leftNodeID, rightNodeID], + timerange: { + from: tree.startTime.toISOString(), + to: tree.endTime.toISOString(), + }, + indexPatterns: ['logs-*'], + }) + .expect(200); + verifyTree({ + expectations: [ + { origin: leftNodeID, nodeExpectations: { descendantLevels: 1, descendants: 3 } }, + { origin: rightNodeID, nodeExpectations: { descendantLevels: 1, descendants: 3 } }, + ], + response: body, + schema: schemaWithAncestry, + genTree: tree, }); + }); - it('returns empty events without a matching entity id', async () => { - const { body }: { body: SafeResolverChildren } = await supertest - .get(`/api/endpoint/resolver/5555/children`) - .expect(200); - expect(body.nextChild).to.eql(null); - expect(body.childNodes).to.be.empty(); + it('should support returning multiple descendant trees when multiple nodes are requested at different levels', async () => { + const originParent = parentEntityIDSafeVersion(tree.origin.lifecycle[0]) ?? ''; + expect(originParent).to.not.be(''); + const originGrandparent = + parentEntityIDSafeVersion(tree.ancestry.get(originParent)!.lifecycle[0]) ?? ''; + expect(originGrandparent).to.not.be(''); + const { body }: { body: ResolverNode[] } = await supertest + .post('/api/endpoint/resolver/tree') + .set('kbn-xsrf', 'xxx') + .send({ + descendants: 2, + descendantLevels: 0, + ancestors: 0, + schema: schemaWithAncestry, + nodes: [tree.origin.id, originGrandparent], + timerange: { + from: tree.startTime.toISOString(), + to: tree.endTime.toISOString(), + }, + indexPatterns: ['logs-*'], + }) + .expect(200); + verifyTree({ + expectations: [ + { origin: tree.origin.id, nodeExpectations: { descendantLevels: 1, descendants: 1 } }, + // the origin's grandparent should only have the origin's parent as a descendant + { + origin: originGrandparent, + nodeExpectations: { descendantLevels: 1, descendants: 1 }, + }, + ], + response: body, + schema: schemaWithAncestry, + genTree: tree, }); + }); - it('returns empty events with an invalid endpoint id', async () => { - const { body }: { body: SafeResolverChildren } = await supertest - .get(`/api/endpoint/resolver/${entityID}/children?legacyEndpointID=foo`) - .expect(200); - expect(body.nextChild).to.eql(null); - expect(body.childNodes).to.be.empty(); + it('should support returning multiple descendant trees when multiple nodes are requested at different levels without ancestry field', async () => { + const originParent = parentEntityIDSafeVersion(tree.origin.lifecycle[0]) ?? ''; + expect(originParent).to.not.be(''); + const originGrandparent = + parentEntityIDSafeVersion(tree.ancestry.get(originParent)!.lifecycle[0]) ?? 
''; + expect(originGrandparent).to.not.be(''); + const { body }: { body: ResolverNode[] } = await supertest + .post('/api/endpoint/resolver/tree') + .set('kbn-xsrf', 'xxx') + .send({ + descendants: 6, + descendantLevels: 1, + ancestors: 0, + schema: schemaWithoutAncestry, + nodes: [tree.origin.id, originGrandparent], + timerange: { + from: tree.startTime.toISOString(), + to: tree.endTime.toISOString(), + }, + indexPatterns: ['logs-*'], + }) + .expect(200); + verifyTree({ + expectations: [ + { origin: tree.origin.id, nodeExpectations: { descendantLevels: 1, descendants: 3 } }, + // the origin's grandparent should only have the origin's parent as a descendant + { + origin: originGrandparent, + nodeExpectations: { descendantLevels: 1, descendants: 1 }, + }, + ], + response: body, + schema: schemaWithoutAncestry, + genTree: tree, }); }); - describe('endpoint events', () => { - it('returns all children for the origin', async () => { - const { body }: { body: SafeResolverChildren } = await supertest - .get(`/api/endpoint/resolver/${tree.origin.id}/children?children=100`) - .expect(200); - // there are 2 levels in the children part of the tree and 3 nodes for each = - // 3 children for the origin + 3 children for each of the origin's children = 12 - expect(body.childNodes.length).to.eql(12); - // there will be 4 parents, the origin of the tree, and it's 3 children - verifyChildren(body.childNodes, tree, 4, 3); - expect(body.nextChild).to.eql(null); + it('should respect the time range specified and only return one descendant', async () => { + const level0Node = Array.from(tree.childrenLevels[0].values())[0]; + const end = new Date( + timestampSafeVersion(level0Node.lifecycle[0]) ?? new Date() + ).toISOString(); + const { body }: { body: ResolverNode[] } = await supertest + .post('/api/endpoint/resolver/tree') + .set('kbn-xsrf', 'xxx') + .send({ + descendants: 100, + descendantLevels: 5, + ancestors: 0, + schema: schemaWithoutAncestry, + nodes: [tree.origin.id], + timerange: { + from: tree.startTime.toISOString(), + to: end, + }, + indexPatterns: ['logs-*'], + }) + .expect(200); + verifyTree({ + expectations: [ + { origin: tree.origin.id, nodeExpectations: { descendantLevels: 1, descendants: 1 } }, + ], + response: body, + schema: schemaWithoutAncestry, + genTree: tree, }); + }); + }); - it('returns a single generation of children', async () => { - // this gets a node should have 3 children which were created in succession so that the timestamps - // are ordered correctly to be retrieved in a single call - const distantChildEntityID = Array.from(tree.childrenLevels[0].values())[0].id; - const { body }: { body: SafeResolverChildren } = await supertest - .get(`/api/endpoint/resolver/${distantChildEntityID}/children?children=3`) - .expect(200); - expect(body.childNodes.length).to.eql(3); - verifyChildren(body.childNodes, tree, 1, 3); - expect(body.nextChild).to.not.eql(null); + describe('ancestry and descendants', () => { + it('returns all descendants and ancestors without the ancestry field and they should have the name field', async () => { + const { body }: { body: ResolverNode[] } = await supertest + .post('/api/endpoint/resolver/tree') + .set('kbn-xsrf', 'xxx') + .send({ + descendants: 100, + descendantLevels: 10, + ancestors: 50, + schema: schemaWithName, + nodes: [tree.origin.id], + timerange: { + from: tree.startTime.toISOString(), + to: tree.endTime.toISOString(), + }, + indexPatterns: ['logs-*'], + }) + .expect(200); + verifyTree({ + expectations: [ + // there are 2 levels in the descendant 
part of the tree and 3 nodes for each + // descendant = 3 children for the origin + 3 children for each of the origin's children = 12 + { + origin: tree.origin.id, + nodeExpectations: { descendants: 12, descendantLevels: 2, ancestors: 5 }, + }, + ], + response: body, + schema: schemaWithName, + genTree: tree, + relatedEventsCategories: relatedEventsToGen, }); - it('paginates the children', async () => { - // this gets a node should have 3 children which were created in succession so that the timestamps - // are ordered correctly to be retrieved in a single call - const distantChildEntityID = Array.from(tree.childrenLevels[0].values())[0].id; - let { body }: { body: SafeResolverChildren } = await supertest - .get(`/api/endpoint/resolver/${distantChildEntityID}/children?children=1`) - .expect(200); - expect(body.childNodes.length).to.eql(1); - verifyChildren(body.childNodes, tree, 1, 1); - expect(body.nextChild).to.not.be(null); - - ({ body } = await supertest - .get( - `/api/endpoint/resolver/${distantChildEntityID}/children?children=2&afterChild=${body.nextChild}` - ) - .expect(200)); - expect(body.childNodes.length).to.eql(2); - verifyChildren(body.childNodes, tree, 1, 2); - expect(body.nextChild).to.not.be(null); + for (const node of body) { + expect(node.name).to.be(getNameField(node.data, schemaWithName)); + expect(node.name).to.not.be(undefined); + } + }); - ({ body } = await supertest - .get( - `/api/endpoint/resolver/${distantChildEntityID}/children?children=2&afterChild=${body.nextChild}` - ) - .expect(200)); - expect(body.childNodes.length).to.eql(0); - expect(body.nextChild).to.be(null); + it('returns all descendants and ancestors without the ancestry field', async () => { + const { body }: { body: ResolverNode[] } = await supertest + .post('/api/endpoint/resolver/tree') + .set('kbn-xsrf', 'xxx') + .send({ + descendants: 100, + descendantLevels: 10, + ancestors: 50, + schema: schemaWithoutAncestry, + nodes: [tree.origin.id], + timerange: { + from: tree.startTime.toISOString(), + to: tree.endTime.toISOString(), + }, + indexPatterns: ['logs-*'], + }) + .expect(200); + verifyTree({ + expectations: [ + // there are 2 levels in the descendant part of the tree and 3 nodes for each + // descendant = 3 children for the origin + 3 children for each of the origin's children = 12 + { + origin: tree.origin.id, + nodeExpectations: { descendants: 12, descendantLevels: 2, ancestors: 5 }, + }, + ], + response: body, + schema: schemaWithoutAncestry, + genTree: tree, + relatedEventsCategories: relatedEventsToGen, }); - it('gets all children in two queries', async () => { - // should get all the children of the origin - let { body }: { body: SafeResolverChildren } = await supertest - .get(`/api/endpoint/resolver/${tree.origin.id}/children?children=3`) - .expect(200); - expect(body.childNodes.length).to.eql(3); - verifyChildren(body.childNodes, tree); - expect(body.nextChild).to.not.be(null); - const firstNodes = [...body.childNodes]; - - ({ body } = await supertest - .get( - `/api/endpoint/resolver/${tree.origin.id}/children?children=10&afterChild=${body.nextChild}` - ) - .expect(200)); - expect(body.childNodes.length).to.eql(9); - // put all the results together and we should have all the children - verifyChildren([...firstNodes, ...body.childNodes], tree, 4, 3); - expect(body.nextChild).to.be(null); - }); + for (const node of body) { + expect(node.name).to.be(getNameField(node.data, schemaWithoutAncestry)); + expect(node.name).to.be(undefined); + } }); - }); - - describe('tree api', () => { - 
describe('legacy events', () => { - const endpointID = '5a0c957f-b8e7-4538-965e-57e8bb86ad3a'; - it('returns ancestors, events, children, and current process lifecycle', async () => { - const { body }: { body: SafeResolverTree } = await supertest - .get(`/api/endpoint/resolver/93933?legacyEndpointID=${endpointID}`) - .expect(200); - expect(body.ancestry.nextAncestor).to.equal(null); - expect(body.children.nextChild).to.equal(null); - expect(body.children.childNodes.length).to.equal(0); - expect(body.lifecycle.length).to.equal(2); + it('returns all descendants and ancestors with the ancestry field', async () => { + const { body }: { body: ResolverNode[] } = await supertest + .post('/api/endpoint/resolver/tree') + .set('kbn-xsrf', 'xxx') + .send({ + descendants: 100, + descendantLevels: 10, + ancestors: 50, + schema: schemaWithAncestry, + nodes: [tree.origin.id], + timerange: { + from: tree.startTime.toISOString(), + to: tree.endTime.toISOString(), + }, + indexPatterns: ['logs-*'], + }) + .expect(200); + verifyTree({ + expectations: [ + // there are 2 levels in the descendant part of the tree and 3 nodes for each + // descendant = 3 children for the origin + 3 children for each of the origin's children = 12 + { + origin: tree.origin.id, + nodeExpectations: { descendants: 12, descendantLevels: 2, ancestors: 5 }, + }, + ], + response: body, + schema: schemaWithAncestry, + genTree: tree, + relatedEventsCategories: relatedEventsToGen, }); - }); - - describe('endpoint events', () => { - it('returns a tree', async () => { - const { body }: { body: SafeResolverTree } = await supertest - .get( - `/api/endpoint/resolver/${tree.origin.id}?children=100&ancestors=5&events=5&alerts=5` - ) - .expect(200); - - expect(body.children.nextChild).to.equal(null); - expect(body.children.childNodes.length).to.equal(12); - verifyChildren(body.children.childNodes, tree, 4, 3); - verifyLifecycleStats(body.children.childNodes, relatedEventsToGen, relatedAlerts); - expect(body.ancestry.nextAncestor).to.equal(null); - verifyAncestry(body.ancestry.ancestors, tree, true); - verifyLifecycleStats(body.ancestry.ancestors, relatedEventsToGen, relatedAlerts); - - expect(body.relatedAlerts.nextAlert).to.equal(null); - compareArrays(tree.origin.relatedAlerts, body.relatedAlerts.alerts, true); + for (const node of body) { + expect(node.name).to.be(getNameField(node.data, schemaWithAncestry)); + expect(node.name).to.be(undefined); + } + }); - compareArrays(tree.origin.lifecycle, body.lifecycle, true); - verifyStats(body.stats, relatedEventsToGen, relatedAlerts); + it('returns an empty response when limits are zero', async () => { + const { body }: { body: ResolverNode[] } = await supertest + .post('/api/endpoint/resolver/tree') + .set('kbn-xsrf', 'xxx') + .send({ + descendants: 0, + descendantLevels: 0, + ancestors: 0, + schema: schemaWithAncestry, + nodes: [tree.origin.id], + timerange: { + from: tree.startTime.toISOString(), + to: tree.endTime.toISOString(), + }, + indexPatterns: ['logs-*'], + }) + .expect(200); + expect(body).to.be.empty(); + verifyTree({ + expectations: [ + { + origin: tree.origin.id, + nodeExpectations: { descendants: 0, descendantLevels: 0, ancestors: 0 }, + }, + ], + response: body, + schema: schemaWithAncestry, + genTree: tree, }); }); }); diff --git a/x-pack/test/security_solution_endpoint_api_int/apis/resolver/tree_entity_id.ts b/x-pack/test/security_solution_endpoint_api_int/apis/resolver/tree_entity_id.ts new file mode 100644 index 000000000000..39cce77b8cc9 --- /dev/null +++ 
b/x-pack/test/security_solution_endpoint_api_int/apis/resolver/tree_entity_id.ts @@ -0,0 +1,375 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +import expect from '@kbn/expect'; +import { + SafeResolverAncestry, + SafeResolverChildren, + SafeResolverTree, + SafeLegacyEndpointEvent, +} from '../../../../plugins/security_solution/common/endpoint/types'; +import { parentEntityIDSafeVersion } from '../../../../plugins/security_solution/common/endpoint/models/event'; +import { FtrProviderContext } from '../../ftr_provider_context'; +import { + Tree, + RelatedEventCategory, +} from '../../../../plugins/security_solution/common/endpoint/generate_data'; +import { Options, GeneratedTrees } from '../../services/resolver'; +import { + compareArrays, + checkAncestryFromEntityTreeAPI, + retrieveDistantAncestor, + verifyChildrenFromEntityTreeAPI, + verifyLifecycleStats, + verifyEntityTreeStats, +} from './common'; + +export default function ({ getService }: FtrProviderContext) { + const supertest = getService('supertest'); + const esArchiver = getService('esArchiver'); + const resolver = getService('resolverGenerator'); + + const relatedEventsToGen = [ + { category: RelatedEventCategory.Driver, count: 2 }, + { category: RelatedEventCategory.File, count: 1 }, + { category: RelatedEventCategory.Registry, count: 1 }, + ]; + const relatedAlerts = 4; + let resolverTrees: GeneratedTrees; + let tree: Tree; + const treeOptions: Options = { + ancestors: 5, + relatedEvents: relatedEventsToGen, + relatedAlerts, + children: 3, + generations: 2, + percentTerminated: 100, + percentWithRelated: 100, + numTrees: 1, + alwaysGenMaxChildrenPerNode: true, + ancestryArraySize: 2, + }; + + describe('Resolver entity tree api', () => { + before(async () => { + await esArchiver.load('endpoint/resolver/api_feature'); + resolverTrees = await resolver.createTrees(treeOptions); + // we only requested a single alert so there's only 1 tree + tree = resolverTrees.trees[0]; + }); + after(async () => { + await resolver.deleteData(resolverTrees); + // this unload is for an endgame-* index so it does not use data streams + await esArchiver.unload('endpoint/resolver/api_feature'); + }); + + describe('ancestry events route', () => { + describe('legacy events', () => { + const endpointID = '5a0c957f-b8e7-4538-965e-57e8bb86ad3a'; + const entityID = '94042'; + + it('should return details for the root node', async () => { + const { body }: { body: SafeResolverAncestry } = await supertest + .get( + `/api/endpoint/resolver/${entityID}/ancestry?legacyEndpointID=${endpointID}&ancestors=5` + ) + .expect(200); + expect(body.ancestors[0].lifecycle.length).to.eql(2); + expect(body.ancestors.length).to.eql(2); + expect(body.nextAncestor).to.eql(null); + }); + + it('should have a populated next parameter', async () => { + const { body }: { body: SafeResolverAncestry } = await supertest + .get( + `/api/endpoint/resolver/${entityID}/ancestry?legacyEndpointID=${endpointID}&ancestors=0` + ) + .expect(200); + expect(body.nextAncestor).to.eql('94041'); + }); + + it('should handle an ancestors param request', async () => { + let { body }: { body: SafeResolverAncestry } = await supertest + .get( + `/api/endpoint/resolver/${entityID}/ancestry?legacyEndpointID=${endpointID}&ancestors=0` + ) + .expect(200); + const next = body.nextAncestor; + + ({ body } = await 
supertest + .get( + `/api/endpoint/resolver/${next}/ancestry?legacyEndpointID=${endpointID}&ancestors=1` + ) + .expect(200)); + expect(body.ancestors[0].lifecycle.length).to.eql(1); + expect(body.nextAncestor).to.eql(null); + }); + }); + + describe('endpoint events', () => { + it('should return the origin node at the front of the array', async () => { + const { body }: { body: SafeResolverAncestry } = await supertest + .get(`/api/endpoint/resolver/${tree.origin.id}/ancestry?ancestors=9`) + .expect(200); + expect(body.ancestors[0].entityID).to.eql(tree.origin.id); + }); + + it('should return details for the root node', async () => { + const { body }: { body: SafeResolverAncestry } = await supertest + .get(`/api/endpoint/resolver/${tree.origin.id}/ancestry?ancestors=9`) + .expect(200); + // the tree we generated had 5 ancestors + 1 origin node + expect(body.ancestors.length).to.eql(6); + expect(body.ancestors[0].entityID).to.eql(tree.origin.id); + checkAncestryFromEntityTreeAPI(body.ancestors, tree, true); + expect(body.nextAncestor).to.eql(null); + }); + + it('should handle an invalid id', async () => { + const { body }: { body: SafeResolverAncestry } = await supertest + .get(`/api/endpoint/resolver/alskdjflasj/ancestry`) + .expect(200); + expect(body.ancestors).to.be.empty(); + expect(body.nextAncestor).to.eql(null); + }); + + it('should have a populated next parameter', async () => { + const { body }: { body: SafeResolverAncestry } = await supertest + .get(`/api/endpoint/resolver/${tree.origin.id}/ancestry?ancestors=2`) + .expect(200); + // it should have 2 ancestors + 1 origin + expect(body.ancestors.length).to.eql(3); + checkAncestryFromEntityTreeAPI(body.ancestors, tree, false); + const distantGrandparent = retrieveDistantAncestor(body.ancestors); + expect(body.nextAncestor).to.eql( + parentEntityIDSafeVersion(distantGrandparent.lifecycle[0]) + ); + }); + + it('should handle multiple ancestor requests', async () => { + let { body }: { body: SafeResolverAncestry } = await supertest + .get(`/api/endpoint/resolver/${tree.origin.id}/ancestry?ancestors=3`) + .expect(200); + expect(body.ancestors.length).to.eql(4); + const next = body.nextAncestor; + + ({ body } = await supertest + .get(`/api/endpoint/resolver/${next}/ancestry?ancestors=1`) + .expect(200)); + expect(body.ancestors.length).to.eql(2); + checkAncestryFromEntityTreeAPI(body.ancestors, tree, true); + // the highest node in the generated tree will not have a parent ID which causes the server to return + // without setting the pagination so nextAncestor will be null + expect(body.nextAncestor).to.eql(null); + }); + }); + }); + + describe('children route', () => { + describe('legacy events', () => { + const endpointID = '5a0c957f-b8e7-4538-965e-57e8bb86ad3a'; + const entityID = '94041'; + + it('returns child process lifecycle events', async () => { + const { body }: { body: SafeResolverChildren } = await supertest + .get(`/api/endpoint/resolver/${entityID}/children?legacyEndpointID=${endpointID}`) + .expect(200); + expect(body.childNodes.length).to.eql(1); + expect(body.childNodes[0].lifecycle.length).to.eql(2); + expect( + // for some reason the ts server doesn't think `endgame` exists even though we're using ResolverEvent + // here, so to avoid it complaining we'll just force it + (body.childNodes[0].lifecycle[0] as SafeLegacyEndpointEvent).endgame.unique_pid + ).to.eql(94042); + }); + + it('returns multiple levels of child process lifecycle events', async () => { + const { body }: { body: SafeResolverChildren } = await supertest 
+ .get(`/api/endpoint/resolver/93802/children?legacyEndpointID=${endpointID}&children=10`) + .expect(200); + expect(body.childNodes.length).to.eql(10); + expect(body.nextChild).to.be(null); + expect(body.childNodes[0].lifecycle.length).to.eql(1); + expect( + // for some reason the ts server doesn't think `endgame` exists even though we're using ResolverEvent + // here, so to avoid it complaining we'll just force it + (body.childNodes[0].lifecycle[0] as SafeLegacyEndpointEvent).endgame.unique_pid + ).to.eql(93932); + }); + + it('returns no values when there is no more data', async () => { + let { body }: { body: SafeResolverChildren } = await supertest + .get( + // there should only be a single child for this node + `/api/endpoint/resolver/94041/children?legacyEndpointID=${endpointID}&children=1` + ) + .expect(200); + expect(body.nextChild).to.not.be(null); + + ({ body } = await supertest + .get( + `/api/endpoint/resolver/94041/children?legacyEndpointID=${endpointID}&afterChild=${body.nextChild}` + ) + .expect(200)); + expect(body.childNodes).be.empty(); + expect(body.nextChild).to.eql(null); + }); + + it('returns the first page of information when the cursor is invalid', async () => { + const { body }: { body: SafeResolverChildren } = await supertest + .get( + `/api/endpoint/resolver/${entityID}/children?legacyEndpointID=${endpointID}&afterChild=blah` + ) + .expect(200); + expect(body.childNodes.length).to.eql(1); + expect(body.nextChild).to.be(null); + }); + + it('errors on invalid pagination values', async () => { + await supertest.get(`/api/endpoint/resolver/${entityID}/children?children=0`).expect(400); + await supertest + .get(`/api/endpoint/resolver/${entityID}/children?children=20000`) + .expect(400); + await supertest + .get(`/api/endpoint/resolver/${entityID}/children?children=-1`) + .expect(400); + }); + + it('returns empty events without a matching entity id', async () => { + const { body }: { body: SafeResolverChildren } = await supertest + .get(`/api/endpoint/resolver/5555/children`) + .expect(200); + expect(body.nextChild).to.eql(null); + expect(body.childNodes).to.be.empty(); + }); + + it('returns empty events with an invalid endpoint id', async () => { + const { body }: { body: SafeResolverChildren } = await supertest + .get(`/api/endpoint/resolver/${entityID}/children?legacyEndpointID=foo`) + .expect(200); + expect(body.nextChild).to.eql(null); + expect(body.childNodes).to.be.empty(); + }); + }); + + describe('endpoint events', () => { + it('returns all children for the origin', async () => { + const { body }: { body: SafeResolverChildren } = await supertest + .get(`/api/endpoint/resolver/${tree.origin.id}/children?children=100`) + .expect(200); + // there are 2 levels in the children part of the tree and 3 nodes for each = + // 3 children for the origin + 3 children for each of the origin's children = 12 + expect(body.childNodes.length).to.eql(12); + // there will be 4 parents, the origin of the tree, and it's 3 children + verifyChildrenFromEntityTreeAPI(body.childNodes, tree, 4, 3); + expect(body.nextChild).to.eql(null); + }); + + it('returns a single generation of children', async () => { + // this gets a node should have 3 children which were created in succession so that the timestamps + // are ordered correctly to be retrieved in a single call + const distantChildEntityID = Array.from(tree.childrenLevels[0].values())[0].id; + const { body }: { body: SafeResolverChildren } = await supertest + .get(`/api/endpoint/resolver/${distantChildEntityID}/children?children=3`) + 
.expect(200); + expect(body.childNodes.length).to.eql(3); + verifyChildrenFromEntityTreeAPI(body.childNodes, tree, 1, 3); + expect(body.nextChild).to.not.eql(null); + }); + + it('paginates the children', async () => { + // this gets a node should have 3 children which were created in succession so that the timestamps + // are ordered correctly to be retrieved in a single call + const distantChildEntityID = Array.from(tree.childrenLevels[0].values())[0].id; + let { body }: { body: SafeResolverChildren } = await supertest + .get(`/api/endpoint/resolver/${distantChildEntityID}/children?children=1`) + .expect(200); + expect(body.childNodes.length).to.eql(1); + verifyChildrenFromEntityTreeAPI(body.childNodes, tree, 1, 1); + expect(body.nextChild).to.not.be(null); + + ({ body } = await supertest + .get( + `/api/endpoint/resolver/${distantChildEntityID}/children?children=2&afterChild=${body.nextChild}` + ) + .expect(200)); + expect(body.childNodes.length).to.eql(2); + verifyChildrenFromEntityTreeAPI(body.childNodes, tree, 1, 2); + expect(body.nextChild).to.not.be(null); + + ({ body } = await supertest + .get( + `/api/endpoint/resolver/${distantChildEntityID}/children?children=2&afterChild=${body.nextChild}` + ) + .expect(200)); + expect(body.childNodes.length).to.eql(0); + expect(body.nextChild).to.be(null); + }); + + it('gets all children in two queries', async () => { + // should get all the children of the origin + let { body }: { body: SafeResolverChildren } = await supertest + .get(`/api/endpoint/resolver/${tree.origin.id}/children?children=3`) + .expect(200); + expect(body.childNodes.length).to.eql(3); + verifyChildrenFromEntityTreeAPI(body.childNodes, tree); + expect(body.nextChild).to.not.be(null); + const firstNodes = [...body.childNodes]; + + ({ body } = await supertest + .get( + `/api/endpoint/resolver/${tree.origin.id}/children?children=10&afterChild=${body.nextChild}` + ) + .expect(200)); + expect(body.childNodes.length).to.eql(9); + // put all the results together and we should have all the children + verifyChildrenFromEntityTreeAPI([...firstNodes, ...body.childNodes], tree, 4, 3); + expect(body.nextChild).to.be(null); + }); + }); + }); + + describe('tree api', () => { + describe('legacy events', () => { + const endpointID = '5a0c957f-b8e7-4538-965e-57e8bb86ad3a'; + + it('returns ancestors, events, children, and current process lifecycle', async () => { + const { body }: { body: SafeResolverTree } = await supertest + .get(`/api/endpoint/resolver/93933?legacyEndpointID=${endpointID}`) + .expect(200); + expect(body.ancestry.nextAncestor).to.equal(null); + expect(body.children.nextChild).to.equal(null); + expect(body.children.childNodes.length).to.equal(0); + expect(body.lifecycle.length).to.equal(2); + }); + }); + + describe('endpoint events', () => { + it('returns a tree', async () => { + const { body }: { body: SafeResolverTree } = await supertest + .get( + `/api/endpoint/resolver/${tree.origin.id}?children=100&ancestors=5&events=5&alerts=5` + ) + .expect(200); + + expect(body.children.nextChild).to.equal(null); + expect(body.children.childNodes.length).to.equal(12); + verifyChildrenFromEntityTreeAPI(body.children.childNodes, tree, 4, 3); + verifyLifecycleStats(body.children.childNodes, relatedEventsToGen, relatedAlerts); + + expect(body.ancestry.nextAncestor).to.equal(null); + checkAncestryFromEntityTreeAPI(body.ancestry.ancestors, tree, true); + verifyLifecycleStats(body.ancestry.ancestors, relatedEventsToGen, relatedAlerts); + + 
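// related alerts and the origin's lifecycle events should come back in full, with no pagination cursor left +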
expect(body.relatedAlerts.nextAlert).to.equal(null); + compareArrays(tree.origin.relatedAlerts, body.relatedAlerts.alerts, true); + + compareArrays(tree.origin.lifecycle, body.lifecycle, true); + verifyEntityTreeStats(body.stats, relatedEventsToGen, relatedAlerts); + }); + }); + }); + }); +} From 5bc4d75256afc543aec79d93f023fc75158a56a0 Mon Sep 17 00:00:00 2001 From: Jonathan Budzenski Date: Tue, 24 Nov 2020 11:26:46 -0600 Subject: [PATCH 07/18] [deb/rpm] Move systemd service to /usr/lib/systemd/system (#83571) Co-authored-by: Kibana Machine <42973632+kibanamachine@users.noreply.github.com> --- .../systemd/{etc => usr/lib}/systemd/system/kibana.service | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename src/dev/build/tasks/os_packages/service_templates/systemd/{etc => usr/lib}/systemd/system/kibana.service (100%) diff --git a/src/dev/build/tasks/os_packages/service_templates/systemd/etc/systemd/system/kibana.service b/src/dev/build/tasks/os_packages/service_templates/systemd/usr/lib/systemd/system/kibana.service similarity index 100% rename from src/dev/build/tasks/os_packages/service_templates/systemd/etc/systemd/system/kibana.service rename to src/dev/build/tasks/os_packages/service_templates/systemd/usr/lib/systemd/system/kibana.service From 38a09b99c4314d2a2aa50a9e2780caad94e5e91c Mon Sep 17 00:00:00 2001 From: Joe Reuter Date: Tue, 24 Nov 2020 18:42:02 +0100 Subject: [PATCH 08/18] Expression: Add render mode and use it for canvas interactivity (#83559) --- ...c.expressionrenderhandler._constructor_.md | 4 +- ...ressions-public.expressionrenderhandler.md | 2 +- ...ressions-public.iexpressionloaderparams.md | 1 + ...blic.iexpressionloaderparams.rendermode.md | 11 ++ ...interpreterrenderhandlers.getrendermode.md | 11 ++ ...sions-public.iinterpreterrenderhandlers.md | 1 + ...interpreterrenderhandlers.getrendermode.md | 11 ++ ...sions-server.iinterpreterrenderhandlers.md | 1 + .../common/expression_renderers/types.ts | 13 ++ src/plugins/expressions/public/loader.test.ts | 28 ++- src/plugins/expressions/public/loader.ts | 1 + src/plugins/expressions/public/public.api.md | 8 +- src/plugins/expressions/public/render.ts | 8 +- src/plugins/expressions/public/types/index.ts | 2 + src/plugins/expressions/server/server.api.md | 4 + .../functions/external/saved_lens.ts | 1 + .../renderers/__stories__/render.tsx | 1 + .../canvas/public/lib/create_handlers.ts | 3 + .../embeddable/embeddable.test.tsx | 39 ++++ .../embeddable/embeddable.tsx | 3 + .../embeddable/expression_wrapper.tsx | 4 + .../public/pie_visualization/expression.tsx | 1 + .../render_function.test.tsx | 9 + .../pie_visualization/render_function.tsx | 17 +- .../xy_visualization/expression.test.tsx | 84 +++++++++ .../public/xy_visualization/expression.tsx | 172 +++++++++--------- 26 files changed, 344 insertions(+), 96 deletions(-) create mode 100644 docs/development/plugins/expressions/public/kibana-plugin-plugins-expressions-public.iexpressionloaderparams.rendermode.md create mode 100644 docs/development/plugins/expressions/public/kibana-plugin-plugins-expressions-public.iinterpreterrenderhandlers.getrendermode.md create mode 100644 docs/development/plugins/expressions/server/kibana-plugin-plugins-expressions-server.iinterpreterrenderhandlers.getrendermode.md diff --git a/docs/development/plugins/expressions/public/kibana-plugin-plugins-expressions-public.expressionrenderhandler._constructor_.md b/docs/development/plugins/expressions/public/kibana-plugin-plugins-expressions-public.expressionrenderhandler._constructor_.md index 
fb6ba7ee2621..fcccd3f6b961 100644 --- a/docs/development/plugins/expressions/public/kibana-plugin-plugins-expressions-public.expressionrenderhandler._constructor_.md +++ b/docs/development/plugins/expressions/public/kibana-plugin-plugins-expressions-public.expressionrenderhandler._constructor_.md @@ -9,7 +9,7 @@ Constructs a new instance of the `ExpressionRenderHandler` class Signature: ```typescript -constructor(element: HTMLElement, { onRenderError }?: Partial); +constructor(element: HTMLElement, { onRenderError, renderMode }?: Partial); ``` ## Parameters @@ -17,5 +17,5 @@ constructor(element: HTMLElement, { onRenderError }?: PartialHTMLElement | | -| { onRenderError } | Partial<ExpressionRenderHandlerParams> | | +| { onRenderError, renderMode } | Partial<ExpressionRenderHandlerParams> | | diff --git a/docs/development/plugins/expressions/public/kibana-plugin-plugins-expressions-public.expressionrenderhandler.md b/docs/development/plugins/expressions/public/kibana-plugin-plugins-expressions-public.expressionrenderhandler.md index 7f7d5792ba68..12c663273bd8 100644 --- a/docs/development/plugins/expressions/public/kibana-plugin-plugins-expressions-public.expressionrenderhandler.md +++ b/docs/development/plugins/expressions/public/kibana-plugin-plugins-expressions-public.expressionrenderhandler.md @@ -14,7 +14,7 @@ export declare class ExpressionRenderHandler | Constructor | Modifiers | Description | | --- | --- | --- | -| [(constructor)(element, { onRenderError })](./kibana-plugin-plugins-expressions-public.expressionrenderhandler._constructor_.md) | | Constructs a new instance of the ExpressionRenderHandler class | +| [(constructor)(element, { onRenderError, renderMode })](./kibana-plugin-plugins-expressions-public.expressionrenderhandler._constructor_.md) | | Constructs a new instance of the ExpressionRenderHandler class | ## Properties diff --git a/docs/development/plugins/expressions/public/kibana-plugin-plugins-expressions-public.iexpressionloaderparams.md b/docs/development/plugins/expressions/public/kibana-plugin-plugins-expressions-public.iexpressionloaderparams.md index 2dfc67d2af5f..54eecad0deb5 100644 --- a/docs/development/plugins/expressions/public/kibana-plugin-plugins-expressions-public.iexpressionloaderparams.md +++ b/docs/development/plugins/expressions/public/kibana-plugin-plugins-expressions-public.iexpressionloaderparams.md @@ -21,6 +21,7 @@ export interface IExpressionLoaderParams | [disableCaching](./kibana-plugin-plugins-expressions-public.iexpressionloaderparams.disablecaching.md) | boolean | | | [inspectorAdapters](./kibana-plugin-plugins-expressions-public.iexpressionloaderparams.inspectoradapters.md) | Adapters | | | [onRenderError](./kibana-plugin-plugins-expressions-public.iexpressionloaderparams.onrendererror.md) | RenderErrorHandlerFnType | | +| [renderMode](./kibana-plugin-plugins-expressions-public.iexpressionloaderparams.rendermode.md) | RenderMode | | | [searchContext](./kibana-plugin-plugins-expressions-public.iexpressionloaderparams.searchcontext.md) | SerializableState | | | [searchSessionId](./kibana-plugin-plugins-expressions-public.iexpressionloaderparams.searchsessionid.md) | string | | | [uiState](./kibana-plugin-plugins-expressions-public.iexpressionloaderparams.uistate.md) | unknown | | diff --git a/docs/development/plugins/expressions/public/kibana-plugin-plugins-expressions-public.iexpressionloaderparams.rendermode.md b/docs/development/plugins/expressions/public/kibana-plugin-plugins-expressions-public.iexpressionloaderparams.rendermode.md 
new file mode 100644 index 000000000000..2986b81fc67c --- /dev/null +++ b/docs/development/plugins/expressions/public/kibana-plugin-plugins-expressions-public.iexpressionloaderparams.rendermode.md @@ -0,0 +1,11 @@ + + +[Home](./index.md) > [kibana-plugin-plugins-expressions-public](./kibana-plugin-plugins-expressions-public.md) > [IExpressionLoaderParams](./kibana-plugin-plugins-expressions-public.iexpressionloaderparams.md) > [renderMode](./kibana-plugin-plugins-expressions-public.iexpressionloaderparams.rendermode.md) + +## IExpressionLoaderParams.renderMode property + +Signature: + +```typescript +renderMode?: RenderMode; +``` diff --git a/docs/development/plugins/expressions/public/kibana-plugin-plugins-expressions-public.iinterpreterrenderhandlers.getrendermode.md b/docs/development/plugins/expressions/public/kibana-plugin-plugins-expressions-public.iinterpreterrenderhandlers.getrendermode.md new file mode 100644 index 000000000000..8cddec1a5359 --- /dev/null +++ b/docs/development/plugins/expressions/public/kibana-plugin-plugins-expressions-public.iinterpreterrenderhandlers.getrendermode.md @@ -0,0 +1,11 @@ + + +[Home](./index.md) > [kibana-plugin-plugins-expressions-public](./kibana-plugin-plugins-expressions-public.md) > [IInterpreterRenderHandlers](./kibana-plugin-plugins-expressions-public.iinterpreterrenderhandlers.md) > [getRenderMode](./kibana-plugin-plugins-expressions-public.iinterpreterrenderhandlers.getrendermode.md) + +## IInterpreterRenderHandlers.getRenderMode property + +Signature: + +```typescript +getRenderMode: () => RenderMode; +``` diff --git a/docs/development/plugins/expressions/public/kibana-plugin-plugins-expressions-public.iinterpreterrenderhandlers.md b/docs/development/plugins/expressions/public/kibana-plugin-plugins-expressions-public.iinterpreterrenderhandlers.md index ab0273be7140..a65e02545163 100644 --- a/docs/development/plugins/expressions/public/kibana-plugin-plugins-expressions-public.iinterpreterrenderhandlers.md +++ b/docs/development/plugins/expressions/public/kibana-plugin-plugins-expressions-public.iinterpreterrenderhandlers.md @@ -16,6 +16,7 @@ export interface IInterpreterRenderHandlers | --- | --- | --- | | [done](./kibana-plugin-plugins-expressions-public.iinterpreterrenderhandlers.done.md) | () => void | Done increments the number of rendering successes | | [event](./kibana-plugin-plugins-expressions-public.iinterpreterrenderhandlers.event.md) | (event: any) => void | | +| [getRenderMode](./kibana-plugin-plugins-expressions-public.iinterpreterrenderhandlers.getrendermode.md) | () => RenderMode | | | [onDestroy](./kibana-plugin-plugins-expressions-public.iinterpreterrenderhandlers.ondestroy.md) | (fn: () => void) => void | | | [reload](./kibana-plugin-plugins-expressions-public.iinterpreterrenderhandlers.reload.md) | () => void | | | [uiState](./kibana-plugin-plugins-expressions-public.iinterpreterrenderhandlers.uistate.md) | PersistedState | | diff --git a/docs/development/plugins/expressions/server/kibana-plugin-plugins-expressions-server.iinterpreterrenderhandlers.getrendermode.md b/docs/development/plugins/expressions/server/kibana-plugin-plugins-expressions-server.iinterpreterrenderhandlers.getrendermode.md new file mode 100644 index 000000000000..16db25ab244f --- /dev/null +++ b/docs/development/plugins/expressions/server/kibana-plugin-plugins-expressions-server.iinterpreterrenderhandlers.getrendermode.md @@ -0,0 +1,11 @@ + + +[Home](./index.md) > 
[kibana-plugin-plugins-expressions-server](./kibana-plugin-plugins-expressions-server.md) > [IInterpreterRenderHandlers](./kibana-plugin-plugins-expressions-server.iinterpreterrenderhandlers.md) > [getRenderMode](./kibana-plugin-plugins-expressions-server.iinterpreterrenderhandlers.getrendermode.md) + +## IInterpreterRenderHandlers.getRenderMode property + +Signature: + +```typescript +getRenderMode: () => RenderMode; +``` diff --git a/docs/development/plugins/expressions/server/kibana-plugin-plugins-expressions-server.iinterpreterrenderhandlers.md b/docs/development/plugins/expressions/server/kibana-plugin-plugins-expressions-server.iinterpreterrenderhandlers.md index ccf6271f712b..b1496386944f 100644 --- a/docs/development/plugins/expressions/server/kibana-plugin-plugins-expressions-server.iinterpreterrenderhandlers.md +++ b/docs/development/plugins/expressions/server/kibana-plugin-plugins-expressions-server.iinterpreterrenderhandlers.md @@ -16,6 +16,7 @@ export interface IInterpreterRenderHandlers | --- | --- | --- | | [done](./kibana-plugin-plugins-expressions-server.iinterpreterrenderhandlers.done.md) | () => void | Done increments the number of rendering successes | | [event](./kibana-plugin-plugins-expressions-server.iinterpreterrenderhandlers.event.md) | (event: any) => void | | +| [getRenderMode](./kibana-plugin-plugins-expressions-server.iinterpreterrenderhandlers.getrendermode.md) | () => RenderMode | | | [onDestroy](./kibana-plugin-plugins-expressions-server.iinterpreterrenderhandlers.ondestroy.md) | (fn: () => void) => void | | | [reload](./kibana-plugin-plugins-expressions-server.iinterpreterrenderhandlers.reload.md) | () => void | | | [uiState](./kibana-plugin-plugins-expressions-server.iinterpreterrenderhandlers.uistate.md) | PersistedState | | diff --git a/src/plugins/expressions/common/expression_renderers/types.ts b/src/plugins/expressions/common/expression_renderers/types.ts index 0ea3d72e7560..dd3124c7d17e 100644 --- a/src/plugins/expressions/common/expression_renderers/types.ts +++ b/src/plugins/expressions/common/expression_renderers/types.ts @@ -61,6 +61,18 @@ export interface ExpressionRenderDefinition { export type AnyExpressionRenderDefinition = ExpressionRenderDefinition; +/** + * Mode of the expression render environment. + * This value can be set from a consumer embedding an expression renderer and is accessible + * from within the active render function as part of the handlers. + * The following modes are supported: + * * display (default): The chart is rendered in a container with the main purpose of viewing the chart (e.g. 
in a container like dashboard or canvas) + * * preview: The chart is rendered in very restricted space (below 100px width and height) and should only show a rough outline + * * edit: The chart is rendered within an editor and configuration elements within the chart should be displayed + * * noInteractivity: The chart is rendered in a non-interactive environment and should not provide any affordances for interaction like brushing + */ +export type RenderMode = 'noInteractivity' | 'edit' | 'preview' | 'display'; + export interface IInterpreterRenderHandlers { /** * Done increments the number of rendering successes @@ -70,5 +82,6 @@ export interface IInterpreterRenderHandlers { reload: () => void; update: (params: any) => void; event: (event: any) => void; + getRenderMode: () => RenderMode; uiState?: PersistedState; } diff --git a/src/plugins/expressions/public/loader.test.ts b/src/plugins/expressions/public/loader.test.ts index bf8b44276956..598b614a326a 100644 --- a/src/plugins/expressions/public/loader.test.ts +++ b/src/plugins/expressions/public/loader.test.ts @@ -20,17 +20,24 @@ import { first, skip, toArray } from 'rxjs/operators'; import { loader, ExpressionLoader } from './loader'; import { Observable } from 'rxjs'; -import { parseExpression, IInterpreterRenderHandlers } from '../common'; +import { + parseExpression, + IInterpreterRenderHandlers, + RenderMode, + AnyExpressionFunctionDefinition, +} from '../common'; // eslint-disable-next-line -const { __getLastExecution } = require('./services'); +const { __getLastExecution, __getLastRenderMode } = require('./services'); const element: HTMLElement = null as any; jest.mock('./services', () => { + let renderMode: RenderMode | undefined; const renderers: Record = { test: { render: (el: HTMLElement, value: unknown, handlers: IInterpreterRenderHandlers) => { + renderMode = handlers.getRenderMode(); handlers.done(); }, }, @@ -39,9 +46,18 @@ jest.mock('./services', () => { // eslint-disable-next-line const service = new (require('../common/service/expressions_services').ExpressionsService as any)(); + const testFn: AnyExpressionFunctionDefinition = { + fn: () => ({ type: 'render', as: 'test' }), + name: 'testrender', + args: {}, + help: '', + }; + service.registerFunction(testFn); + const moduleMock = { __execution: undefined, __getLastExecution: () => moduleMock.__execution, + __getLastRenderMode: () => renderMode, getRenderersRegistry: () => ({ get: (id: string) => renderers[id], }), @@ -130,6 +146,14 @@ describe('ExpressionLoader', () => { expect(response).toBe(2); }); + it('passes mode to the renderer', async () => { + const expressionLoader = new ExpressionLoader(element, 'testrender', { + renderMode: 'edit', + }); + await expressionLoader.render$.pipe(first()).toPromise(); + expect(__getLastRenderMode()).toEqual('edit'); + }); + it('cancels the previous request when the expression is updated', () => { const expressionLoader = new ExpressionLoader(element, 'var foo', {}); const execution = __getLastExecution(); diff --git a/src/plugins/expressions/public/loader.ts b/src/plugins/expressions/public/loader.ts index 91c482621de3..983a344c0e1a 100644 --- a/src/plugins/expressions/public/loader.ts +++ b/src/plugins/expressions/public/loader.ts @@ -63,6 +63,7 @@ export class ExpressionLoader { this.renderHandler = new ExpressionRenderHandler(element, { onRenderError: params && params.onRenderError, + renderMode: params?.renderMode, }); this.render$ = this.renderHandler.render$; this.update$ = this.renderHandler.update$; diff --git 
a/src/plugins/expressions/public/public.api.md b/src/plugins/expressions/public/public.api.md index 17f8e6255f6b..2a73cd6e208d 100644 --- a/src/plugins/expressions/public/public.api.md +++ b/src/plugins/expressions/public/public.api.md @@ -530,7 +530,7 @@ export interface ExpressionRenderError extends Error { // @public (undocumented) export class ExpressionRenderHandler { // Warning: (ae-forgotten-export) The symbol "ExpressionRenderHandlerParams" needs to be exported by the entry point index.d.ts - constructor(element: HTMLElement, { onRenderError }?: Partial); + constructor(element: HTMLElement, { onRenderError, renderMode }?: Partial); // (undocumented) destroy: () => void; // (undocumented) @@ -891,6 +891,10 @@ export interface IExpressionLoaderParams { // // (undocumented) onRenderError?: RenderErrorHandlerFnType; + // Warning: (ae-forgotten-export) The symbol "RenderMode" needs to be exported by the entry point index.d.ts + // + // (undocumented) + renderMode?: RenderMode; // (undocumented) searchContext?: SerializableState_2; // (undocumented) @@ -909,6 +913,8 @@ export interface IInterpreterRenderHandlers { // (undocumented) event: (event: any) => void; // (undocumented) + getRenderMode: () => RenderMode; + // (undocumented) onDestroy: (fn: () => void) => void; // (undocumented) reload: () => void; diff --git a/src/plugins/expressions/public/render.ts b/src/plugins/expressions/public/render.ts index 924f8d4830f7..4390033b5be6 100644 --- a/src/plugins/expressions/public/render.ts +++ b/src/plugins/expressions/public/render.ts @@ -22,7 +22,7 @@ import { Observable } from 'rxjs'; import { filter } from 'rxjs/operators'; import { ExpressionRenderError, RenderErrorHandlerFnType, IExpressionLoaderParams } from './types'; import { renderErrorHandler as defaultRenderErrorHandler } from './render_error_handler'; -import { IInterpreterRenderHandlers, ExpressionAstExpression } from '../common'; +import { IInterpreterRenderHandlers, ExpressionAstExpression, RenderMode } from '../common'; import { getRenderersRegistry } from './services'; @@ -30,6 +30,7 @@ export type IExpressionRendererExtraHandlers = Record; export interface ExpressionRenderHandlerParams { onRenderError: RenderErrorHandlerFnType; + renderMode: RenderMode; } export interface ExpressionRendererEvent { @@ -58,7 +59,7 @@ export class ExpressionRenderHandler { constructor( element: HTMLElement, - { onRenderError }: Partial = {} + { onRenderError, renderMode }: Partial = {} ) { this.element = element; @@ -92,6 +93,9 @@ export class ExpressionRenderHandler { event: (data) => { this.eventsSubject.next(data); }, + getRenderMode: () => { + return renderMode || 'display'; + }, }; } diff --git a/src/plugins/expressions/public/types/index.ts b/src/plugins/expressions/public/types/index.ts index 4af36fea169a..5bae98569947 100644 --- a/src/plugins/expressions/public/types/index.ts +++ b/src/plugins/expressions/public/types/index.ts @@ -23,6 +23,7 @@ import { ExpressionValue, ExpressionsService, SerializableState, + RenderMode, } from '../../common'; /** @@ -54,6 +55,7 @@ export interface IExpressionLoaderParams { inspectorAdapters?: Adapters; onRenderError?: RenderErrorHandlerFnType; searchSessionId?: string; + renderMode?: RenderMode; } export interface ExpressionRenderError extends Error { diff --git a/src/plugins/expressions/server/server.api.md b/src/plugins/expressions/server/server.api.md index e5b499206ebd..33ff759faa3b 100644 --- a/src/plugins/expressions/server/server.api.md +++ b/src/plugins/expressions/server/server.api.md @@ 
-729,6 +729,10 @@ export interface IInterpreterRenderHandlers { done: () => void; // (undocumented) event: (event: any) => void; + // Warning: (ae-forgotten-export) The symbol "RenderMode" needs to be exported by the entry point index.d.ts + // + // (undocumented) + getRenderMode: () => RenderMode; // (undocumented) onDestroy: (fn: () => void) => void; // (undocumented) diff --git a/x-pack/plugins/canvas/canvas_plugin_src/functions/external/saved_lens.ts b/x-pack/plugins/canvas/canvas_plugin_src/functions/external/saved_lens.ts index 765ff5072822..380d07972ca4 100644 --- a/x-pack/plugins/canvas/canvas_plugin_src/functions/external/saved_lens.ts +++ b/x-pack/plugins/canvas/canvas_plugin_src/functions/external/saved_lens.ts @@ -83,6 +83,7 @@ export function savedLens(): ExpressionFunctionDefinition< title: args.title === null ? undefined : args.title, disableTriggers: true, palette: args.palette, + renderMode: 'noInteractivity', }, embeddableType: EmbeddableTypes.lens, generatedAt: Date.now(), diff --git a/x-pack/plugins/canvas/canvas_plugin_src/renderers/__stories__/render.tsx b/x-pack/plugins/canvas/canvas_plugin_src/renderers/__stories__/render.tsx index 647c63c2c104..54702f265483 100644 --- a/x-pack/plugins/canvas/canvas_plugin_src/renderers/__stories__/render.tsx +++ b/x-pack/plugins/canvas/canvas_plugin_src/renderers/__stories__/render.tsx @@ -11,6 +11,7 @@ export const defaultHandlers: RendererHandlers = { destroy: () => action('destroy'), getElementId: () => 'element-id', getFilter: () => 'filter', + getRenderMode: () => 'display', onComplete: (fn) => undefined, onEmbeddableDestroyed: action('onEmbeddableDestroyed'), onEmbeddableInputChange: action('onEmbeddableInputChange'), diff --git a/x-pack/plugins/canvas/public/lib/create_handlers.ts b/x-pack/plugins/canvas/public/lib/create_handlers.ts index ae0956ee2128..9bc4bd5e78fd 100644 --- a/x-pack/plugins/canvas/public/lib/create_handlers.ts +++ b/x-pack/plugins/canvas/public/lib/create_handlers.ts @@ -23,6 +23,9 @@ export const createHandlers = (): RendererHandlers => ({ getFilter() { return ''; }, + getRenderMode() { + return 'display'; + }, onComplete(fn: () => void) { this.done = fn; }, diff --git a/x-pack/plugins/lens/public/editor_frame_service/embeddable/embeddable.test.tsx b/x-pack/plugins/lens/public/editor_frame_service/embeddable/embeddable.test.tsx index 9f9d7fef9c7b..3a3258a79c59 100644 --- a/x-pack/plugins/lens/public/editor_frame_service/embeddable/embeddable.test.tsx +++ b/x-pack/plugins/lens/public/editor_frame_service/embeddable/embeddable.test.tsx @@ -262,6 +262,45 @@ describe('embeddable', () => { expect(expressionRenderer.mock.calls[0][0].searchSessionId).toBe(input.searchSessionId); }); + it('should pass render mode to expression', async () => { + const timeRange: TimeRange = { from: 'now-15d', to: 'now' }; + const query: Query = { language: 'kquery', query: '' }; + const filters: Filter[] = [{ meta: { alias: 'test', negate: false, disabled: false } }]; + + const input = { + savedObjectId: '123', + timeRange, + query, + filters, + renderMode: 'noInteractivity', + } as LensEmbeddableInput; + + const embeddable = new Embeddable( + { + timefilter: dataPluginMock.createSetupContract().query.timefilter.timefilter, + attributeService, + expressionRenderer, + basePath, + indexPatternService: {} as IndexPatternsContract, + editable: true, + getTrigger, + documentToExpression: () => + Promise.resolve({ + type: 'expression', + chain: [ + { type: 'function', function: 'my', arguments: {} }, + { type: 'function', function: 
'expression', arguments: {} }, + ], + }), + }, + input + ); + await embeddable.initializeSavedVis(input); + embeddable.render(mountpoint); + + expect(expressionRenderer.mock.calls[0][0].renderMode).toEqual('noInteractivity'); + }); + it('should merge external context with query and filters of the saved object', async () => { const timeRange: TimeRange = { from: 'now-15d', to: 'now' }; const query: Query = { language: 'kquery', query: 'external filter' }; diff --git a/x-pack/plugins/lens/public/editor_frame_service/embeddable/embeddable.tsx b/x-pack/plugins/lens/public/editor_frame_service/embeddable/embeddable.tsx index 8139631daa97..76276f8b4c82 100644 --- a/x-pack/plugins/lens/public/editor_frame_service/embeddable/embeddable.tsx +++ b/x-pack/plugins/lens/public/editor_frame_service/embeddable/embeddable.tsx @@ -20,6 +20,7 @@ import { PaletteOutput } from 'src/plugins/charts/public'; import { Subscription } from 'rxjs'; import { toExpression, Ast } from '@kbn/interpreter/common'; +import { RenderMode } from 'src/plugins/expressions'; import { ExpressionRendererEvent, ReactExpressionRendererType, @@ -53,6 +54,7 @@ export type LensByValueInput = { export type LensByReferenceInput = SavedObjectEmbeddableInput & EmbeddableInput; export type LensEmbeddableInput = (LensByValueInput | LensByReferenceInput) & { palette?: PaletteOutput; + renderMode?: RenderMode; }; export interface LensEmbeddableOutput extends EmbeddableOutput { @@ -192,6 +194,7 @@ export class Embeddable variables={input.palette ? { theme: { palette: input.palette } } : {}} searchSessionId={this.input.searchSessionId} handleEvent={this.handleEvent} + renderMode={input.renderMode} />, domNode ); diff --git a/x-pack/plugins/lens/public/editor_frame_service/embeddable/expression_wrapper.tsx b/x-pack/plugins/lens/public/editor_frame_service/embeddable/expression_wrapper.tsx index 4a3ba971381f..d18372246b0e 100644 --- a/x-pack/plugins/lens/public/editor_frame_service/embeddable/expression_wrapper.tsx +++ b/x-pack/plugins/lens/public/editor_frame_service/embeddable/expression_wrapper.tsx @@ -13,6 +13,7 @@ import { ReactExpressionRendererType, } from 'src/plugins/expressions/public'; import { ExecutionContextSearch } from 'src/plugins/data/public'; +import { RenderMode } from 'src/plugins/expressions'; import { getOriginalRequestErrorMessage } from '../error_helper'; export interface ExpressionWrapperProps { @@ -22,6 +23,7 @@ export interface ExpressionWrapperProps { searchContext: ExecutionContextSearch; searchSessionId?: string; handleEvent: (event: ExpressionRendererEvent) => void; + renderMode?: RenderMode; } export function ExpressionWrapper({ @@ -31,6 +33,7 @@ export function ExpressionWrapper({ variables, handleEvent, searchSessionId, + renderMode, }: ExpressionWrapperProps) { return ( @@ -57,6 +60,7 @@ export function ExpressionWrapper({ expression={expression} searchContext={searchContext} searchSessionId={searchSessionId} + renderMode={renderMode} renderError={(errorMessage, error) => (
diff --git a/x-pack/plugins/lens/public/pie_visualization/expression.tsx b/x-pack/plugins/lens/public/pie_visualization/expression.tsx index 3b5226eaa8e1..5f18ef7c7f63 100644 --- a/x-pack/plugins/lens/public/pie_visualization/expression.tsx +++ b/x-pack/plugins/lens/public/pie_visualization/expression.tsx @@ -139,6 +139,7 @@ export const getPieRenderer = (dependencies: { chartsThemeService={dependencies.chartsThemeService} paletteService={dependencies.paletteService} onClickValue={onClickValue} + renderMode={handlers.getRenderMode()} /> , domNode, diff --git a/x-pack/plugins/lens/public/pie_visualization/render_function.test.tsx b/x-pack/plugins/lens/public/pie_visualization/render_function.test.tsx index c44179ccd8df..458b1a75c4c1 100644 --- a/x-pack/plugins/lens/public/pie_visualization/render_function.test.tsx +++ b/x-pack/plugins/lens/public/pie_visualization/render_function.test.tsx @@ -70,6 +70,7 @@ describe('PieVisualization component', () => { onClickValue: jest.fn(), chartsThemeService, paletteService: chartPluginMock.createPaletteRegistry(), + renderMode: 'display' as const, }; } @@ -266,6 +267,14 @@ describe('PieVisualization component', () => { `); }); + test('does not set click listener on noInteractivity render mode', () => { + const defaultArgs = getDefaultArgs(); + const component = shallow( + + ); + expect(component.find(Settings).first().prop('onElementClick')).toBeUndefined(); + }); + test('it shows emptyPlaceholder for undefined grouped data', () => { const defaultData = getDefaultArgs().data; const emptyData: LensMultiTable = { diff --git a/x-pack/plugins/lens/public/pie_visualization/render_function.tsx b/x-pack/plugins/lens/public/pie_visualization/render_function.tsx index 39743a355fd7..20d558fefc3d 100644 --- a/x-pack/plugins/lens/public/pie_visualization/render_function.tsx +++ b/x-pack/plugins/lens/public/pie_visualization/render_function.tsx @@ -20,7 +20,9 @@ import { RecursivePartial, Position, Settings, + ElementClickListener, } from '@elastic/charts'; +import { RenderMode } from 'src/plugins/expressions'; import { FormatFactory, LensFilterEvent } from '../types'; import { VisualizationContainer } from '../visualization_container'; import { CHART_NAMES, DEFAULT_PERCENT_DECIMALS } from './constants'; @@ -44,6 +46,7 @@ export function PieComponent( chartsThemeService: ChartsPluginSetup['theme']; paletteService: PaletteRegistry; onClickValue: (data: LensFilterEvent['data']) => void; + renderMode: RenderMode; } ) { const [firstTable] = Object.values(props.data.tables); @@ -228,6 +231,12 @@ export function PieComponent( ); } + + const onElementClickHandler: ElementClickListener = (args) => { + const context = getFilterContext(args[0][0] as LayerValue[], groups, firstTable); + + onClickValue(desanitizeFilterContext(context)); + }; return ( { - const context = getFilterContext(args[0][0] as LayerValue[], groups, firstTable); - - onClickValue(desanitizeFilterContext(context)); - }} + onElementClick={ + props.renderMode !== 'noInteractivity' ? 
onElementClickHandler : undefined + } theme={{ ...chartTheme, background: { diff --git a/x-pack/plugins/lens/public/xy_visualization/expression.test.tsx b/x-pack/plugins/lens/public/xy_visualization/expression.test.tsx index a4b5d741c80f..0e2b47410c3f 100644 --- a/x-pack/plugins/lens/public/xy_visualization/expression.test.tsx +++ b/x-pack/plugins/lens/public/xy_visualization/expression.test.tsx @@ -427,6 +427,7 @@ describe('xy_expression', () => { args={args} formatFactory={getFormatSpy} timeZone="UTC" + renderMode="display" chartsThemeService={chartsThemeService} paletteService={paletteService} minInterval={50} @@ -451,6 +452,7 @@ describe('xy_expression', () => { args={{ ...args, layers: [{ ...args.layers[0], seriesType: 'line' }] }} formatFactory={getFormatSpy} timeZone="UTC" + renderMode="display" chartsThemeService={chartsThemeService} paletteService={paletteService} minInterval={50} @@ -504,6 +506,7 @@ describe('xy_expression', () => { }} formatFactory={getFormatSpy} timeZone="UTC" + renderMode="display" chartsThemeService={chartsThemeService} paletteService={paletteService} minInterval={undefined} @@ -541,6 +544,7 @@ describe('xy_expression', () => { args={multiLayerArgs} formatFactory={getFormatSpy} timeZone="UTC" + renderMode="display" chartsThemeService={chartsThemeService} paletteService={paletteService} minInterval={50} @@ -578,6 +582,7 @@ describe('xy_expression', () => { }} formatFactory={getFormatSpy} timeZone="UTC" + renderMode="display" chartsThemeService={chartsThemeService} paletteService={paletteService} minInterval={50} @@ -596,6 +601,7 @@ describe('xy_expression', () => { args={{ ...args, layers: [{ ...args.layers[0], seriesType: 'bar' }] }} formatFactory={getFormatSpy} timeZone="UTC" + renderMode="display" chartsThemeService={chartsThemeService} paletteService={paletteService} minInterval={50} @@ -617,6 +623,7 @@ describe('xy_expression', () => { args={{ ...args, layers: [{ ...args.layers[0], seriesType: 'area' }] }} formatFactory={getFormatSpy} timeZone="UTC" + renderMode="display" chartsThemeService={chartsThemeService} paletteService={paletteService} minInterval={50} @@ -638,6 +645,7 @@ describe('xy_expression', () => { args={{ ...args, layers: [{ ...args.layers[0], seriesType: 'bar_horizontal' }] }} formatFactory={getFormatSpy} timeZone="UTC" + renderMode="display" chartsThemeService={chartsThemeService} paletteService={paletteService} minInterval={50} @@ -664,6 +672,7 @@ describe('xy_expression', () => { args={args} formatFactory={getFormatSpy} timeZone="UTC" + renderMode="display" chartsThemeService={chartsThemeService} paletteService={paletteService} minInterval={50} @@ -688,6 +697,7 @@ describe('xy_expression', () => { }} formatFactory={getFormatSpy} timeZone="UTC" + renderMode="display" chartsThemeService={chartsThemeService} paletteService={paletteService} minInterval={50} @@ -773,6 +783,7 @@ describe('xy_expression', () => { }} formatFactory={getFormatSpy} timeZone="UTC" + renderMode="display" chartsThemeService={chartsThemeService} paletteService={paletteService} minInterval={50} @@ -791,6 +802,27 @@ describe('xy_expression', () => { }); }); + test('onBrushEnd is not set on noInteractivity mode', () => { + const { args, data } = sampleArgs(); + + const wrapper = mountWithIntl( + + ); + + expect(wrapper.find(Settings).first().prop('onBrushEnd')).toBeUndefined(); + }); + test('onElementClick returns correct context data', () => { const geometry: GeometryValue = { x: 5, y: 1, accessor: 'y1', mark: null, datum: {} }; const series = { @@ -825,6 +857,7 @@ 
describe('xy_expression', () => { }} formatFactory={getFormatSpy} timeZone="UTC" + renderMode="display" chartsThemeService={chartsThemeService} paletteService={paletteService} minInterval={50} @@ -855,6 +888,27 @@ describe('xy_expression', () => { }); }); + test('onElementClick is not triggering event on noInteractivity mode', () => { + const { args, data } = sampleArgs(); + + const wrapper = mountWithIntl( + + ); + + expect(wrapper.find(Settings).first().prop('onElementClick')).toBeUndefined(); + }); + test('it renders stacked bar', () => { const { data, args } = sampleArgs(); const component = shallow( @@ -863,6 +917,7 @@ describe('xy_expression', () => { args={{ ...args, layers: [{ ...args.layers[0], seriesType: 'bar_stacked' }] }} formatFactory={getFormatSpy} timeZone="UTC" + renderMode="display" chartsThemeService={chartsThemeService} paletteService={paletteService} minInterval={50} @@ -884,6 +939,7 @@ describe('xy_expression', () => { args={{ ...args, layers: [{ ...args.layers[0], seriesType: 'area_stacked' }] }} formatFactory={getFormatSpy} timeZone="UTC" + renderMode="display" chartsThemeService={chartsThemeService} paletteService={paletteService} minInterval={50} @@ -908,6 +964,7 @@ describe('xy_expression', () => { }} formatFactory={getFormatSpy} timeZone="UTC" + renderMode="display" chartsThemeService={chartsThemeService} paletteService={paletteService} minInterval={50} @@ -941,6 +998,7 @@ describe('xy_expression', () => { }} formatFactory={getFormatSpy} timeZone="UTC" + renderMode="display" chartsThemeService={chartsThemeService} paletteService={paletteService} minInterval={50} @@ -961,6 +1019,7 @@ describe('xy_expression', () => { args={args} formatFactory={getFormatSpy} timeZone="CEST" + renderMode="display" chartsThemeService={chartsThemeService} paletteService={paletteService} minInterval={50} @@ -987,6 +1046,7 @@ describe('xy_expression', () => { args={{ ...args, layers: [firstLayer] }} formatFactory={getFormatSpy} timeZone="UTC" + renderMode="display" chartsThemeService={chartsThemeService} paletteService={paletteService} minInterval={50} @@ -1007,6 +1067,7 @@ describe('xy_expression', () => { args={{ ...args, layers: [firstLayer] }} formatFactory={getFormatSpy} timeZone="UTC" + renderMode="display" chartsThemeService={chartsThemeService} paletteService={paletteService} minInterval={50} @@ -1030,6 +1091,7 @@ describe('xy_expression', () => { args={{ ...args, layers: [firstLayer, secondLayer] }} formatFactory={getFormatSpy} timeZone="UTC" + renderMode="display" chartsThemeService={chartsThemeService} paletteService={paletteService} minInterval={50} @@ -1058,6 +1120,7 @@ describe('xy_expression', () => { }} formatFactory={getFormatSpy} timeZone="UTC" + renderMode="display" chartsThemeService={chartsThemeService} paletteService={paletteService} minInterval={50} @@ -1080,6 +1143,7 @@ describe('xy_expression', () => { }} formatFactory={getFormatSpy} timeZone="UTC" + renderMode="display" chartsThemeService={chartsThemeService} paletteService={paletteService} minInterval={50} @@ -1481,6 +1545,7 @@ describe('xy_expression', () => { args={{ ...args, layers: [{ ...args.layers[0], xScaleType: 'ordinal' }] }} formatFactory={getFormatSpy} timeZone="UTC" + renderMode="display" chartsThemeService={chartsThemeService} paletteService={paletteService} minInterval={50} @@ -1501,6 +1566,7 @@ describe('xy_expression', () => { args={{ ...args, layers: [{ ...args.layers[0], yScaleType: 'sqrt' }] }} formatFactory={getFormatSpy} timeZone="UTC" + renderMode="display" 
chartsThemeService={chartsThemeService} paletteService={paletteService} minInterval={50} @@ -1521,6 +1587,7 @@ describe('xy_expression', () => { args={{ ...args }} formatFactory={getFormatSpy} timeZone="UTC" + renderMode="display" chartsThemeService={chartsThemeService} paletteService={paletteService} minInterval={50} @@ -1544,6 +1611,7 @@ describe('xy_expression', () => { paletteService={paletteService} minInterval={50} timeZone="UTC" + renderMode="display" onClickValue={onClickValue} onSelectRange={onSelectRange} /> @@ -1563,6 +1631,7 @@ describe('xy_expression', () => { args={{ ...args }} formatFactory={getFormatSpy} timeZone="UTC" + renderMode="display" chartsThemeService={chartsThemeService} paletteService={paletteService} minInterval={50} @@ -1598,6 +1667,7 @@ describe('xy_expression', () => { args={{ ...args }} formatFactory={getFormatSpy} timeZone="UTC" + renderMode="display" chartsThemeService={chartsThemeService} paletteService={paletteService} minInterval={50} @@ -1631,6 +1701,7 @@ describe('xy_expression', () => { args={{ ...args }} formatFactory={getFormatSpy} timeZone="UTC" + renderMode="display" chartsThemeService={chartsThemeService} paletteService={paletteService} minInterval={50} @@ -1664,6 +1735,7 @@ describe('xy_expression', () => { args={{ ...args }} formatFactory={getFormatSpy} timeZone="UTC" + renderMode="display" chartsThemeService={chartsThemeService} paletteService={paletteService} minInterval={50} @@ -1697,6 +1769,7 @@ describe('xy_expression', () => { args={{ ...args }} formatFactory={getFormatSpy} timeZone="UTC" + renderMode="display" chartsThemeService={chartsThemeService} paletteService={paletteService} minInterval={50} @@ -1797,6 +1870,7 @@ describe('xy_expression', () => { args={args} formatFactory={getFormatSpy} timeZone="UTC" + renderMode="display" chartsThemeService={chartsThemeService} paletteService={paletteService} minInterval={50} @@ -1871,6 +1945,7 @@ describe('xy_expression', () => { args={args} formatFactory={getFormatSpy} timeZone="UTC" + renderMode="display" chartsThemeService={chartsThemeService} paletteService={paletteService} minInterval={50} @@ -1943,6 +2018,7 @@ describe('xy_expression', () => { args={args} formatFactory={getFormatSpy} timeZone="UTC" + renderMode="display" chartsThemeService={chartsThemeService} paletteService={paletteService} minInterval={50} @@ -1967,6 +2043,7 @@ describe('xy_expression', () => { }} formatFactory={getFormatSpy} timeZone="UTC" + renderMode="display" chartsThemeService={chartsThemeService} paletteService={paletteService} minInterval={50} @@ -1990,6 +2067,7 @@ describe('xy_expression', () => { }} formatFactory={getFormatSpy} timeZone="UTC" + renderMode="display" chartsThemeService={chartsThemeService} paletteService={paletteService} minInterval={50} @@ -2013,6 +2091,7 @@ describe('xy_expression', () => { }} formatFactory={getFormatSpy} timeZone="UTC" + renderMode="display" chartsThemeService={chartsThemeService} paletteService={paletteService} minInterval={50} @@ -2048,6 +2127,7 @@ describe('xy_expression', () => { args={{ ...args, fittingFunction: 'Carry' }} formatFactory={getFormatSpy} timeZone="UTC" + renderMode="display" chartsThemeService={chartsThemeService} paletteService={paletteService} minInterval={50} @@ -2075,6 +2155,7 @@ describe('xy_expression', () => { args={{ ...args }} formatFactory={getFormatSpy} timeZone="UTC" + renderMode="display" chartsThemeService={chartsThemeService} paletteService={paletteService} minInterval={50} @@ -2097,6 +2178,7 @@ describe('xy_expression', () => { args={{ 
...args }} formatFactory={getFormatSpy} timeZone="UTC" + renderMode="display" chartsThemeService={chartsThemeService} paletteService={paletteService} minInterval={50} @@ -2124,6 +2206,7 @@ describe('xy_expression', () => { args={{ ...args }} formatFactory={getFormatSpy} timeZone="UTC" + renderMode="display" chartsThemeService={chartsThemeService} paletteService={paletteService} minInterval={50} @@ -2157,6 +2240,7 @@ describe('xy_expression', () => { args={{ ...args }} formatFactory={getFormatSpy} timeZone="UTC" + renderMode="display" chartsThemeService={chartsThemeService} paletteService={paletteService} minInterval={50} diff --git a/x-pack/plugins/lens/public/xy_visualization/expression.tsx b/x-pack/plugins/lens/public/xy_visualization/expression.tsx index 54ae3bb759d2..790416a6c920 100644 --- a/x-pack/plugins/lens/public/xy_visualization/expression.tsx +++ b/x-pack/plugins/lens/public/xy_visualization/expression.tsx @@ -21,6 +21,8 @@ import { StackMode, VerticalAlignment, HorizontalAlignment, + ElementClickListener, + BrushEndListener, } from '@elastic/charts'; import { I18nProvider } from '@kbn/i18n/react'; import { @@ -31,6 +33,7 @@ import { } from 'src/plugins/expressions/public'; import { IconType } from '@elastic/eui'; import { i18n } from '@kbn/i18n'; +import { RenderMode } from 'src/plugins/expressions'; import { LensMultiTable, FormatFactory, @@ -81,6 +84,7 @@ type XYChartRenderProps = XYChartProps & { minInterval: number | undefined; onClickValue: (data: LensFilterEvent['data']) => void; onSelectRange: (data: LensBrushEvent['data']) => void; + renderMode: RenderMode; }; export const xyChart: ExpressionFunctionDefinition< @@ -235,6 +239,7 @@ export const getXyChartRenderer = (dependencies: { minInterval={await calculateMinInterval(config, dependencies.getIntervalByColumn)} onClickValue={onClickValue} onSelectRange={onSelectRange} + renderMode={handlers.getRenderMode()} /> , domNode, @@ -303,6 +308,7 @@ export function XYChart({ minInterval, onClickValue, onSelectRange, + renderMode, }: XYChartRenderProps) { const { legend, layers, fittingFunction, gridlinesVisibilitySettings, valueLabels } = args; const chartTheme = chartsThemeService.useChartsTheme(); @@ -415,6 +421,87 @@ export function XYChart({ const colorAssignments = getColorAssignments(args.layers, data, formatFactory); + const clickHandler: ElementClickListener = ([[geometry, series]]) => { + // for xyChart series is always XYChartSeriesIdentifier and geometry is always type of GeometryValue + const xySeries = series as XYChartSeriesIdentifier; + const xyGeometry = geometry as GeometryValue; + + const layer = filteredLayers.find((l) => + xySeries.seriesKeys.some((key: string | number) => l.accessors.includes(key.toString())) + ); + if (!layer) { + return; + } + + const table = data.tables[layer.layerId]; + + const points = [ + { + row: table.rows.findIndex((row) => { + if (layer.xAccessor) { + if (layersAlreadyFormatted[layer.xAccessor]) { + // stringify the value to compare with the chart value + return xAxisFormatter.convert(row[layer.xAccessor]) === xyGeometry.x; + } + return row[layer.xAccessor] === xyGeometry.x; + } + }), + column: table.columns.findIndex((col) => col.id === layer.xAccessor), + value: xyGeometry.x, + }, + ]; + + if (xySeries.seriesKeys.length > 1) { + const pointValue = xySeries.seriesKeys[0]; + + points.push({ + row: table.rows.findIndex( + (row) => layer.splitAccessor && row[layer.splitAccessor] === pointValue + ), + column: table.columns.findIndex((col) => col.id === layer.splitAccessor), + value: 
pointValue, + }); + } + + const xAxisFieldName = table.columns.find((el) => el.id === layer.xAccessor)?.meta?.field; + const timeFieldName = xDomain && xAxisFieldName; + + const context: LensFilterEvent['data'] = { + data: points.map((point) => ({ + row: point.row, + column: point.column, + value: point.value, + table, + })), + timeFieldName, + }; + onClickValue(desanitizeFilterContext(context)); + }; + + const brushHandler: BrushEndListener = ({ x }) => { + if (!x) { + return; + } + const [min, max] = x; + if (!xAxisColumn || !isHistogramViz) { + return; + } + + const table = data.tables[filteredLayers[0].layerId]; + + const xAxisColumnIndex = table.columns.findIndex((el) => el.id === filteredLayers[0].xAccessor); + + const timeFieldName = isTimeViz ? table.columns[xAxisColumnIndex]?.meta?.field : undefined; + + const context: LensBrushEvent['data'] = { + range: [min, max], + table, + column: xAxisColumnIndex, + timeFieldName, + }; + onSelectRange(context); + }; + return ( { - if (!x) { - return; - } - const [min, max] = x; - if (!xAxisColumn || !isHistogramViz) { - return; - } - - const table = data.tables[filteredLayers[0].layerId]; - - const xAxisColumnIndex = table.columns.findIndex( - (el) => el.id === filteredLayers[0].xAccessor - ); - - const timeFieldName = isTimeViz - ? table.columns[xAxisColumnIndex]?.meta?.field - : undefined; - - const context: LensBrushEvent['data'] = { - range: [min, max], - table, - column: xAxisColumnIndex, - timeFieldName, - }; - onSelectRange(context); - }} - onElementClick={([[geometry, series]]) => { - // for xyChart series is always XYChartSeriesIdentifier and geometry is always type of GeometryValue - const xySeries = series as XYChartSeriesIdentifier; - const xyGeometry = geometry as GeometryValue; - - const layer = filteredLayers.find((l) => - xySeries.seriesKeys.some((key: string | number) => l.accessors.includes(key.toString())) - ); - if (!layer) { - return; - } - - const table = data.tables[layer.layerId]; - - const points = [ - { - row: table.rows.findIndex((row) => { - if (layer.xAccessor) { - if (layersAlreadyFormatted[layer.xAccessor]) { - // stringify the value to compare with the chart value - return xAxisFormatter.convert(row[layer.xAccessor]) === xyGeometry.x; - } - return row[layer.xAccessor] === xyGeometry.x; - } - }), - column: table.columns.findIndex((col) => col.id === layer.xAccessor), - value: xyGeometry.x, - }, - ]; - - if (xySeries.seriesKeys.length > 1) { - const pointValue = xySeries.seriesKeys[0]; - - points.push({ - row: table.rows.findIndex( - (row) => layer.splitAccessor && row[layer.splitAccessor] === pointValue - ), - column: table.columns.findIndex((col) => col.id === layer.splitAccessor), - value: pointValue, - }); - } - - const xAxisFieldName = table.columns.find((el) => el.id === layer.xAccessor)?.meta?.field; - const timeFieldName = xDomain && xAxisFieldName; - - const context: LensFilterEvent['data'] = { - data: points.map((point) => ({ - row: point.row, - column: point.column, - value: point.value, - table, - })), - timeFieldName, - }; - onClickValue(desanitizeFilterContext(context)); - }} + onBrushEnd={renderMode !== 'noInteractivity' ? brushHandler : undefined} + onElementClick={renderMode !== 'noInteractivity' ? 
clickHandler : undefined} /> Date: Tue, 24 Nov 2020 12:42:46 -0500 Subject: [PATCH 09/18] Remove expressions.legacy from README (#79681) Co-authored-by: Kibana Machine <42973632+kibanamachine@users.noreply.github.com> --- x-pack/plugins/canvas/README.md | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/x-pack/plugins/canvas/README.md b/x-pack/plugins/canvas/README.md index 7bd9a1994ba7..f77585b5b062 100644 --- a/x-pack/plugins/canvas/README.md +++ b/x-pack/plugins/canvas/README.md @@ -149,7 +149,7 @@ yarn start #### Adding a server-side function -> Server side functions may be deprecated in a later version of Kibana as they require using an API marked _legacy_ +> Server side functions may be deprecated in a later version of Kibana Now, let's add a function which runs on the server. @@ -206,9 +206,7 @@ And then in our setup method, register it with the Expressions plugin: ```typescript setup(core: CoreSetup, plugins: CanvasExamplePluginsSetup) { - // .register requires serverFunctions and types, so pass an empty array - // if you don't have any custom types to register - plugins.expressions.__LEGACY.register({ serverFunctions, types: [] }); + serverFunctions.forEach((f) => plugins.expressions.registerFunction(f)); } ``` From c2026dfa7aa90a031f34b884a6e4dbf25c96aeb4 Mon Sep 17 00:00:00 2001 From: Anton Dosov Date: Tue, 24 Nov 2020 18:54:49 +0100 Subject: [PATCH 10/18] Unskip "Copy dashboards to space" (#84115) --- .../dashboard/drilldowns/dashboard_to_dashboard_drilldown.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/test/functional/apps/dashboard/drilldowns/dashboard_to_dashboard_drilldown.ts b/x-pack/test/functional/apps/dashboard/drilldowns/dashboard_to_dashboard_drilldown.ts index 03765f5aa603..9326f7e240e3 100644 --- a/x-pack/test/functional/apps/dashboard/drilldowns/dashboard_to_dashboard_drilldown.ts +++ b/x-pack/test/functional/apps/dashboard/drilldowns/dashboard_to_dashboard_drilldown.ts @@ -166,7 +166,7 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) { await spaces.delete(destinationSpaceId); }); - it.skip('Dashboards linked by a drilldown are both copied to a space', async () => { + it('Dashboards linked by a drilldown are both copied to a space', async () => { await PageObjects.copySavedObjectsToSpace.openCopyToSpaceFlyoutForObject( dashboardDrilldownsManage.DASHBOARD_WITH_AREA_CHART_NAME ); From d47460d08d15c71ffcb61f2be642d44ec807d581 Mon Sep 17 00:00:00 2001 From: Mikhail Shustov Date: Tue, 24 Nov 2020 20:56:35 +0300 Subject: [PATCH 11/18] Attempt to fix incremental build error (#84152) * make fetch compatible with CollectorFetchMethod * use Alejandros suggestion --- src/plugins/usage_collection/server/collector/collector_set.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/plugins/usage_collection/server/collector/collector_set.ts b/src/plugins/usage_collection/server/collector/collector_set.ts index fe4f3536ffed..cda4ce36d4e2 100644 --- a/src/plugins/usage_collection/server/collector/collector_set.ts +++ b/src/plugins/usage_collection/server/collector/collector_set.ts @@ -29,7 +29,7 @@ import { import { Collector, CollectorOptions } from './collector'; import { UsageCollector, UsageCollectorOptions } from './usage_collector'; -type AnyCollector = Collector; +type AnyCollector = Collector; type AnyUsageCollector = UsageCollector; interface CollectorSetConfig { From 31a5b15250a59cd5cafcc320c88f5020bf184c62 Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?S=C3=B8ren=20Louv-Jansen?= Date: Tue, 24 Nov 2020 19:27:24 +0100 Subject: [PATCH 12/18] [APM] Use `asTransactionRate` consistently everywhere (#84213) --- .../apm/common/utils/formatters/formatters.ts | 11 ---- .../ServiceMap/Popover/ServiceStatsList.tsx | 8 +-- .../app/TraceOverview/TraceList.tsx | 13 ++--- .../TransactionDetails/Distribution/index.tsx | 53 +++++-------------- .../service_inventory/ServiceList/index.tsx | 21 +------- .../TransactionList/index.tsx | 10 +--- .../charts/transaction_charts/index.tsx | 15 +----- .../public/selectors/chart_selectors.test.ts | 2 +- .../apm/public/selectors/chart_selectors.ts | 4 +- .../translations/translations/ja-JP.json | 7 --- .../translations/translations/zh-CN.json | 7 --- 11 files changed, 29 insertions(+), 122 deletions(-) diff --git a/x-pack/plugins/apm/common/utils/formatters/formatters.ts b/x-pack/plugins/apm/common/utils/formatters/formatters.ts index 2314e915e316..50ce9db09661 100644 --- a/x-pack/plugins/apm/common/utils/formatters/formatters.ts +++ b/x-pack/plugins/apm/common/utils/formatters/formatters.ts @@ -4,7 +4,6 @@ * you may not use this file except in compliance with the Elastic License. */ import numeral from '@elastic/numeral'; -import { i18n } from '@kbn/i18n'; import { Maybe } from '../../../typings/common'; import { NOT_AVAILABLE_LABEL } from '../../i18n'; import { isFiniteNumber } from '../is_finite_number'; @@ -17,16 +16,6 @@ export function asInteger(value: number) { return numeral(value).format('0,0'); } -export function tpmUnit(type?: string) { - return type === 'request' - ? i18n.translate('xpack.apm.formatters.requestsPerMinLabel', { - defaultMessage: 'rpm', - }) - : i18n.translate('xpack.apm.formatters.transactionsPerMinLabel', { - defaultMessage: 'tpm', - }); -} - export function asPercent( numerator: Maybe, denominator: number | undefined, diff --git a/x-pack/plugins/apm/public/components/app/ServiceMap/Popover/ServiceStatsList.tsx b/x-pack/plugins/apm/public/components/app/ServiceMap/Popover/ServiceStatsList.tsx index 8463da0824bd..adbcf897669a 100644 --- a/x-pack/plugins/apm/public/components/app/ServiceMap/Popover/ServiceStatsList.tsx +++ b/x-pack/plugins/apm/public/components/app/ServiceMap/Popover/ServiceStatsList.tsx @@ -11,7 +11,7 @@ import styled from 'styled-components'; import { asDuration, asPercent, - tpmUnit, + asTransactionRate, } from '../../../../../common/utils/formatters'; import { ServiceNodeStats } from '../../../../../common/service_map'; @@ -55,11 +55,7 @@ export function ServiceStatsList({ defaultMessage: 'Req. per minute (avg.)', } ), - description: isNumber(transactionStats.avgRequestsPerMinute) - ? 
`${transactionStats.avgRequestsPerMinute.toFixed(2)} ${tpmUnit( - 'request' - )}` - : null, + description: asTransactionRate(transactionStats.avgRequestsPerMinute), }, { title: i18n.translate('xpack.apm.serviceMap.errorRatePopoverStat', { diff --git a/x-pack/plugins/apm/public/components/app/TraceOverview/TraceList.tsx b/x-pack/plugins/apm/public/components/app/TraceOverview/TraceList.tsx index 4704230d7c68..e68f8a9809bf 100644 --- a/x-pack/plugins/apm/public/components/app/TraceOverview/TraceList.tsx +++ b/x-pack/plugins/apm/public/components/app/TraceOverview/TraceList.tsx @@ -8,7 +8,10 @@ import { EuiIcon, EuiToolTip } from '@elastic/eui'; import { i18n } from '@kbn/i18n'; import React from 'react'; import styled from 'styled-components'; -import { asMillisecondDuration } from '../../../../common/utils/formatters'; +import { + asMillisecondDuration, + asTransactionRate, +} from '../../../../common/utils/formatters'; import { fontSizes, truncate } from '../../../style/variables'; import { EmptyMessage } from '../../shared/EmptyMessage'; import { ImpactBar } from '../../shared/ImpactBar'; @@ -78,13 +81,7 @@ const traceListColumns: Array> = [ }), sortable: true, dataType: 'number', - render: (value: number) => - `${value.toLocaleString()} ${i18n.translate( - 'xpack.apm.tracesTable.tracesPerMinuteUnitLabel', - { - defaultMessage: 'tpm', - } - )}`, + render: (value: number) => asTransactionRate(value), }, { field: 'impact', diff --git a/x-pack/plugins/apm/public/components/app/TransactionDetails/Distribution/index.tsx b/x-pack/plugins/apm/public/components/app/TransactionDetails/Distribution/index.tsx index e92a6c7db844..bbc99fb122fc 100644 --- a/x-pack/plugins/apm/public/components/app/TransactionDetails/Distribution/index.tsx +++ b/x-pack/plugins/apm/public/components/app/TransactionDetails/Distribution/index.tsx @@ -22,7 +22,7 @@ import { EuiIconTip, EuiTitle } from '@elastic/eui'; import { i18n } from '@kbn/i18n'; import d3 from 'd3'; import { isEmpty } from 'lodash'; -import React, { useCallback } from 'react'; +import React from 'react'; import { ValuesType } from 'utility-types'; import { APIReturnType } from '../../../../services/rest/createCallApmApi'; import { useTheme } from '../../../../../../observability/public'; @@ -70,46 +70,29 @@ export function getFormattedBuckets( ); } -const getFormatYShort = (transactionType: string | undefined) => ( - t: number -) => { +const formatYShort = (t: number) => { return i18n.translate( 'xpack.apm.transactionDetails.transactionsDurationDistributionChart.unitShortLabel', + { + defaultMessage: '{transCount} trans.', + values: { transCount: t }, + } + ); +}; + +const formatYLong = (t: number) => { + return i18n.translate( + 'xpack.apm.transactionDetails.transactionsDurationDistributionChart.transactionTypeUnitLongLabel', { defaultMessage: - '{transCount} {transType, select, request {req.} other {trans.}}', + '{transCount, plural, =0 {transactions} one {transaction} other {transactions}}', values: { transCount: t, - transType: transactionType, }, } ); }; -const getFormatYLong = (transactionType: string | undefined) => (t: number) => { - return transactionType === 'request' - ? 
i18n.translate( - 'xpack.apm.transactionDetails.transactionsDurationDistributionChart.requestTypeUnitLongLabel', - { - defaultMessage: - '{transCount, plural, =0 {request} one {request} other {requests}}', - values: { - transCount: t, - }, - } - ) - : i18n.translate( - 'xpack.apm.transactionDetails.transactionsDurationDistributionChart.transactionTypeUnitLongLabel', - { - defaultMessage: - '{transCount, plural, =0 {transaction} one {transaction} other {transactions}}', - values: { - transCount: t, - }, - } - ); -}; - interface Props { distribution?: TransactionDistributionAPIResponse; urlParams: IUrlParams; @@ -129,16 +112,6 @@ export function TransactionDistribution({ }: Props) { const theme = useTheme(); - /* eslint-disable-next-line react-hooks/exhaustive-deps */ - const formatYShort = useCallback(getFormatYShort(transactionType), [ - transactionType, - ]); - - /* eslint-disable-next-line react-hooks/exhaustive-deps */ - const formatYLong = useCallback(getFormatYLong(transactionType), [ - transactionType, - ]); - // no data in response if ( (!distribution || distribution.noHits) && diff --git a/x-pack/plugins/apm/public/components/app/service_inventory/ServiceList/index.tsx b/x-pack/plugins/apm/public/components/app/service_inventory/ServiceList/index.tsx index 547a0938bc24..a4c93f95dc53 100644 --- a/x-pack/plugins/apm/public/components/app/service_inventory/ServiceList/index.tsx +++ b/x-pack/plugins/apm/public/components/app/service_inventory/ServiceList/index.tsx @@ -14,8 +14,8 @@ import { APIReturnType } from '../../../../services/rest/createCallApmApi'; import { ServiceHealthStatus } from '../../../../../common/service_health_status'; import { asPercent, - asDecimal, asMillisecondDuration, + asTransactionRate, } from '../../../../../common/utils/formatters'; import { NOT_AVAILABLE_LABEL } from '../../../../../common/i18n'; import { fontSizes, px, truncate, unit } from '../../../../style/variables'; @@ -35,16 +35,6 @@ interface Props { } type ServiceListItem = ValuesType; -function formatNumber(value: number) { - if (value === 0) { - return '0'; - } else if (value <= 0.1) { - return '< 0.1'; - } else { - return asDecimal(value); - } -} - function formatString(value?: string | null) { return value || NOT_AVAILABLE_LABEL; } @@ -154,14 +144,7 @@ export const SERVICE_COLUMNS: Array> = [ ), align: 'left', diff --git a/x-pack/plugins/apm/public/components/app/transaction_overview/TransactionList/index.tsx b/x-pack/plugins/apm/public/components/app/transaction_overview/TransactionList/index.tsx index ece923631a2f..9774538b2a7a 100644 --- a/x-pack/plugins/apm/public/components/app/transaction_overview/TransactionList/index.tsx +++ b/x-pack/plugins/apm/public/components/app/transaction_overview/TransactionList/index.tsx @@ -10,8 +10,8 @@ import React, { useMemo } from 'react'; import styled from 'styled-components'; import { APIReturnType } from '../../../../services/rest/createCallApmApi'; import { - asDecimal, asMillisecondDuration, + asTransactionRate, } from '../../../../../common/utils/formatters'; import { fontFamilyCode, truncate } from '../../../../style/variables'; import { ImpactBar } from '../../../shared/ImpactBar'; @@ -103,13 +103,7 @@ export function TransactionList({ items, isLoading }: Props) { ), sortable: true, dataType: 'number', - render: (value: number) => - `${asDecimal(value)} ${i18n.translate( - 'xpack.apm.transactionsTable.transactionsPerMinuteUnitLabel', - { - defaultMessage: 'tpm', - } - )}`, + render: (value: number) => asTransactionRate(value), }, { field: 
'impact', diff --git a/x-pack/plugins/apm/public/components/shared/charts/transaction_charts/index.tsx b/x-pack/plugins/apm/public/components/shared/charts/transaction_charts/index.tsx index 61d834abda79..3af081c11c9b 100644 --- a/x-pack/plugins/apm/public/components/shared/charts/transaction_charts/index.tsx +++ b/x-pack/plugins/apm/public/components/shared/charts/transaction_charts/index.tsx @@ -14,20 +14,17 @@ import { } from '@elastic/eui'; import { i18n } from '@kbn/i18n'; import React from 'react'; -import { NOT_AVAILABLE_LABEL } from '../../../../../common/i18n'; import { TRANSACTION_PAGE_LOAD, TRANSACTION_REQUEST, TRANSACTION_ROUTE_CHANGE, } from '../../../../../common/transaction_types'; -import { asDecimal, tpmUnit } from '../../../../../common/utils/formatters'; -import { Coordinate } from '../../../../../typings/timeseries'; +import { asTransactionRate } from '../../../../../common/utils/formatters'; import { ChartsSyncContextProvider } from '../../../../context/charts_sync_context'; import { LicenseContext } from '../../../../context/LicenseContext'; import { IUrlParams } from '../../../../context/UrlParamsContext/types'; import { FETCH_STATUS } from '../../../../hooks/useFetcher'; import { ITransactionChartData } from '../../../../selectors/chart_selectors'; -import { isValidCoordinateValue } from '../../../../utils/isValidCoordinateValue'; import { TransactionBreakdownChart } from '../transaction_breakdown_chart'; import { TimeseriesChart } from '../timeseries_chart'; import { TransactionErrorRateChart } from '../transaction_error_rate_chart/'; @@ -46,14 +43,6 @@ export function TransactionCharts({ urlParams, fetchStatus, }: TransactionChartProps) { - const getTPMFormatter = (t: number) => { - return `${asDecimal(t)} ${tpmUnit(urlParams.transactionType)}`; - }; - - const getTPMTooltipFormatter = (y: Coordinate['y']) => { - return isValidCoordinateValue(y) ? 
getTPMFormatter(y) : NOT_AVAILABLE_LABEL; - }; - const { transactionType } = urlParams; const { responseTimeSeries, tpmSeries } = charts; @@ -104,7 +93,7 @@ export function TransactionCharts({ fetchStatus={fetchStatus} id="requestPerMinutes" timeseries={tpmSeries || []} - yLabelFormat={getTPMTooltipFormatter} + yLabelFormat={asTransactionRate} /> diff --git a/x-pack/plugins/apm/public/selectors/chart_selectors.test.ts b/x-pack/plugins/apm/public/selectors/chart_selectors.test.ts index 4269ec0e6c0f..a17faebc9aef 100644 --- a/x-pack/plugins/apm/public/selectors/chart_selectors.test.ts +++ b/x-pack/plugins/apm/public/selectors/chart_selectors.test.ts @@ -144,7 +144,7 @@ describe('chart selectors', () => { { color: errorColor, data: [{ x: 0, y: 0 }], - legendValue: '0.0 tpm', + legendValue: '0 tpm', title: 'HTTP 5xx', type: 'linemark', }, diff --git a/x-pack/plugins/apm/public/selectors/chart_selectors.ts b/x-pack/plugins/apm/public/selectors/chart_selectors.ts index 8330df07c21e..663fbc902810 100644 --- a/x-pack/plugins/apm/public/selectors/chart_selectors.ts +++ b/x-pack/plugins/apm/public/selectors/chart_selectors.ts @@ -20,7 +20,7 @@ import { import { IUrlParams } from '../context/UrlParamsContext/types'; import { getEmptySeries } from '../components/shared/charts/helper/get_empty_series'; import { httpStatusCodeToColor } from '../utils/httpStatusCodeToColor'; -import { asDecimal, asDuration, tpmUnit } from '../../common/utils/formatters'; +import { asDuration, asTransactionRate } from '../../common/utils/formatters'; export interface ITpmBucket { title: string; @@ -171,7 +171,7 @@ export function getTpmSeries( return { title: bucket.key, data: bucket.dataPoints, - legendValue: `${asDecimal(bucket.avg)} ${tpmUnit(transactionType || '')}`, + legendValue: asTransactionRate(bucket.avg), type: 'linemark', color: getColor(bucket.key), }; diff --git a/x-pack/plugins/translations/translations/ja-JP.json b/x-pack/plugins/translations/translations/ja-JP.json index 9a28e0e53bef..ed514eda000a 100644 --- a/x-pack/plugins/translations/translations/ja-JP.json +++ b/x-pack/plugins/translations/translations/ja-JP.json @@ -4874,9 +4874,7 @@ "xpack.apm.formatters.microsTimeUnitLabel": "マイクロ秒", "xpack.apm.formatters.millisTimeUnitLabel": "ミリ秒", "xpack.apm.formatters.minutesTimeUnitLabel": "最低", - "xpack.apm.formatters.requestsPerMinLabel": "1分あたりリクエスト数", "xpack.apm.formatters.secondsTimeUnitLabel": "秒", - "xpack.apm.formatters.transactionsPerMinLabel": "1分あたりトランザクション数", "xpack.apm.header.badge.readOnly.text": "読み込み専用", "xpack.apm.header.badge.readOnly.tooltip": "を保存できませんでした", "xpack.apm.helpMenu.upgradeAssistantLink": "アップグレードアシスタント", @@ -5052,7 +5050,6 @@ "xpack.apm.servicesTable.notFoundLabel": "サービスが見つかりません", "xpack.apm.servicesTable.transactionErrorRate": "エラー率%", "xpack.apm.servicesTable.transactionsPerMinuteColumnLabel": "1 分あたりのトランザクション", - "xpack.apm.servicesTable.transactionsPerMinuteUnitLabel": "1分あたりトランザクション数", "xpack.apm.servicesTable.UpgradeAssistantLink": "Kibana アップグレードアシスタントで詳細をご覧ください", "xpack.apm.settings.agentConfig": "エージェントの編集", "xpack.apm.settings.anomaly_detection.legacy_jobs.body": "以前の統合のレガシー機械学習ジョブが見つかりました。これは、APMアプリでは使用されていません。", @@ -5155,7 +5152,6 @@ "xpack.apm.tracesTable.notFoundLabel": "このクエリのトレースが見つかりません", "xpack.apm.tracesTable.originatingServiceColumnLabel": "発生元サービス", "xpack.apm.tracesTable.tracesPerMinuteColumnLabel": "1 分あたりのトレース", - "xpack.apm.tracesTable.tracesPerMinuteUnitLabel": "1分あたりトランザクション数", "xpack.apm.transactionActionMenu.actionsButtonLabel": "アクション", 
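A rough sketch of what the shared `asTransactionRate` helper adopted throughout this patch might look like. The real implementation lives in `x-pack/plugins/apm/common/utils/formatters` and is not part of this diff, so the code below is an assumption rather than the actual helper: the `< 0.1` guard mirrors the `formatNumber` function removed from the service list above, the zero case matches the `'0 tpm'` expectation in the updated chart-selector test, and the local `asDecimal` is a simplified stand-in for the codebase's own decimal formatter.

// Assumed shape only, not the actual formatter from common/utils/formatters.
const asDecimal = (value: number) => value.toFixed(1);

export function asTransactionRate(value?: number | null): string {
  // Guard against missing data; the real helper presumably returns NOT_AVAILABLE_LABEL here.
  if (value === null || value === undefined || isNaN(value)) {
    return 'N/A';
  }
  if (value === 0) {
    return '0 tpm';
  }
  // Very low but non-zero rates are shown as "< 0.1"; everything else gets one decimal place.
  const formatted = value <= 0.1 ? '< 0.1' : asDecimal(value);
  return `${formatted} tpm`;
}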
"xpack.apm.transactionActionMenu.container.subtitle": "このコンテナーのログとインデックスを表示し、さらに詳細を確認できます。", "xpack.apm.transactionActionMenu.container.title": "コンテナーの詳細", @@ -5206,9 +5202,7 @@ "xpack.apm.transactionDetails.traceNotFound": "選択されたトレースが見つかりません", "xpack.apm.transactionDetails.traceSampleTitle": "トレースのサンプル", "xpack.apm.transactionDetails.transactionLabel": "トランザクション", - "xpack.apm.transactionDetails.transactionsDurationDistributionChart.requestTypeUnitLongLabel": "{transCount, plural, =0 {# request} 1 {# 件のリクエスト} other {# 件のリクエスト}}", "xpack.apm.transactionDetails.transactionsDurationDistributionChart.transactionTypeUnitLongLabel": "{transCount, plural, =0 {# transaction} 1 {# 件のトランザクション} other {# 件のトランザクション}}", - "xpack.apm.transactionDetails.transactionsDurationDistributionChart.unitShortLabel": "{transCount} {transType, select, request {件のリクエスト} other {件のトランザクション}}", "xpack.apm.transactionDetails.transactionsDurationDistributionChartTitle": "トラザクション時間の分布", "xpack.apm.transactionDetails.transactionsDurationDistributionChartTooltip.samplingDescription": "各バケットはサンプルトランザクションを示します。利用可能なサンプルがない場合、恐らくエージェントの構成で設定されたサンプリング制限が原因です。", "xpack.apm.transactionDetails.transactionsDurationDistributionChartTooltip.samplingLabel": "サンプリング", @@ -5241,7 +5235,6 @@ "xpack.apm.transactionsTable.nameColumnLabel": "名前", "xpack.apm.transactionsTable.notFoundLabel": "トランザクションが見つかりませんでした。", "xpack.apm.transactionsTable.transactionsPerMinuteColumnLabel": "1 分あたりのトランザクション", - "xpack.apm.transactionsTable.transactionsPerMinuteUnitLabel": "1分あたりトランザクション数", "xpack.apm.tutorial.apmServer.title": "APM Server", "xpack.apm.tutorial.elasticCloud.textPre": "APM Server を有効にするには、[the Elastic Cloud console](https://cloud.elastic.co/deployments?q={cloudId}) に移動し、展開設定で APM を有効にします。有効になったら、このページを更新してください。", "xpack.apm.tutorial.elasticCloudInstructions.title": "APM エージェント", diff --git a/x-pack/plugins/translations/translations/zh-CN.json b/x-pack/plugins/translations/translations/zh-CN.json index 66a00c30bd3b..a500b63fbf86 100644 --- a/x-pack/plugins/translations/translations/zh-CN.json +++ b/x-pack/plugins/translations/translations/zh-CN.json @@ -4876,9 +4876,7 @@ "xpack.apm.formatters.microsTimeUnitLabel": "μs", "xpack.apm.formatters.millisTimeUnitLabel": "ms", "xpack.apm.formatters.minutesTimeUnitLabel": "分钟", - "xpack.apm.formatters.requestsPerMinLabel": "rpm", "xpack.apm.formatters.secondsTimeUnitLabel": "s", - "xpack.apm.formatters.transactionsPerMinLabel": "tpm", "xpack.apm.header.badge.readOnly.text": "只读", "xpack.apm.header.badge.readOnly.tooltip": "无法保存", "xpack.apm.helpMenu.upgradeAssistantLink": "升级助手", @@ -5056,7 +5054,6 @@ "xpack.apm.servicesTable.notFoundLabel": "未找到任何服务", "xpack.apm.servicesTable.transactionErrorRate": "错误率 %", "xpack.apm.servicesTable.transactionsPerMinuteColumnLabel": "每分钟事务数", - "xpack.apm.servicesTable.transactionsPerMinuteUnitLabel": "tpm", "xpack.apm.servicesTable.UpgradeAssistantLink": "通过访问 Kibana 升级助手来了解详情", "xpack.apm.settings.agentConfig": "代理配置", "xpack.apm.settings.anomaly_detection.legacy_jobs.body": "我们在以前的集成中发现 APM 应用中不再使用的旧版 Machine Learning 作业", @@ -5159,7 +5156,6 @@ "xpack.apm.tracesTable.notFoundLabel": "未找到与此查询的任何追溯信息", "xpack.apm.tracesTable.originatingServiceColumnLabel": "发起服务", "xpack.apm.tracesTable.tracesPerMinuteColumnLabel": "每分钟追溯次数", - "xpack.apm.tracesTable.tracesPerMinuteUnitLabel": "tpm", "xpack.apm.transactionActionMenu.actionsButtonLabel": "操作", "xpack.apm.transactionActionMenu.container.subtitle": "查看此容器的日志和指标以获取进一步详情。", 
"xpack.apm.transactionActionMenu.container.title": "容器详情", @@ -5210,9 +5206,7 @@ "xpack.apm.transactionDetails.traceNotFound": "找不到所选跟踪", "xpack.apm.transactionDetails.traceSampleTitle": "跟踪样例", "xpack.apm.transactionDetails.transactionLabel": "事务", - "xpack.apm.transactionDetails.transactionsDurationDistributionChart.requestTypeUnitLongLabel": "{transCount, plural, =0 {# 个请求} one {# 个请求} other {# 个请求}}", "xpack.apm.transactionDetails.transactionsDurationDistributionChart.transactionTypeUnitLongLabel": "{transCount, plural, =0 {# 个事务} one {# 个事务} other {# 个事务}}", - "xpack.apm.transactionDetails.transactionsDurationDistributionChart.unitShortLabel": "{transCount} 个{transType, select, request {请求} other {事务}}", "xpack.apm.transactionDetails.transactionsDurationDistributionChartTitle": "事务持续时间分布", "xpack.apm.transactionDetails.transactionsDurationDistributionChartTooltip.samplingDescription": "每个存储桶将显示一个样例事务。如果没有可用的样例,很可能是在代理配置设置了采样限制。", "xpack.apm.transactionDetails.transactionsDurationDistributionChartTooltip.samplingLabel": "采样", @@ -5245,7 +5239,6 @@ "xpack.apm.transactionsTable.nameColumnLabel": "名称", "xpack.apm.transactionsTable.notFoundLabel": "未找到任何事务。", "xpack.apm.transactionsTable.transactionsPerMinuteColumnLabel": "每分钟事务数", - "xpack.apm.transactionsTable.transactionsPerMinuteUnitLabel": "tpm", "xpack.apm.tutorial.apmServer.title": "APM Server", "xpack.apm.tutorial.elasticCloud.textPre": "要启用 APM Server,请前往 [Elastic Cloud 控制台](https://cloud.elastic.co/deployments?q={cloudId}) 并在部署设置中启用 APM。启用后,请刷新此页面。", "xpack.apm.tutorial.elasticCloudInstructions.title": "APM 代理", From 2634009a5b33ca1d6800f66fce0b6cb727f92ec0 Mon Sep 17 00:00:00 2001 From: Dmitry Date: Tue, 24 Nov 2020 20:32:35 +0100 Subject: [PATCH 13/18] [code coverage] collect for oss integration tests (#83907) * [code coverage] collect for oss integration tests * do not run snapshot test modified with coverage * skip failures * remove debug msg * update file names * Update packages/kbn-optimizer/src/integration_tests/basic_optimization.test.ts Co-authored-by: Spencer Co-authored-by: Kibana Machine <42973632+kibanamachine@users.noreply.github.com> Co-authored-by: Spencer --- .../src/integration_tests/basic_optimization.test.ts | 4 ++++ .../shell_scripts/fix_html_reports_parallel.sh | 4 ++-- test/scripts/jenkins_unit.sh | 12 ++++++++++++ 3 files changed, 18 insertions(+), 2 deletions(-) diff --git a/packages/kbn-optimizer/src/integration_tests/basic_optimization.test.ts b/packages/kbn-optimizer/src/integration_tests/basic_optimization.test.ts index 46660f0dd958..16baaddcb84b 100644 --- a/packages/kbn-optimizer/src/integration_tests/basic_optimization.test.ts +++ b/packages/kbn-optimizer/src/integration_tests/basic_optimization.test.ts @@ -233,6 +233,10 @@ it('uses cache on second run and exist cleanly', async () => { }); it('prepares assets for distribution', async () => { + if (process.env.CODE_COVERAGE) { + // test fails when testing coverage because source includes instrumentation, so skip it + return; + } const config = OptimizerConfig.create({ repoRoot: MOCK_REPO_DIR, pluginScanDirs: [Path.resolve(MOCK_REPO_DIR, 'plugins'), Path.resolve(MOCK_REPO_DIR, 'x-pack')], diff --git a/src/dev/code_coverage/shell_scripts/fix_html_reports_parallel.sh b/src/dev/code_coverage/shell_scripts/fix_html_reports_parallel.sh index 098737eb2f80..01003b6dc880 100644 --- a/src/dev/code_coverage/shell_scripts/fix_html_reports_parallel.sh +++ b/src/dev/code_coverage/shell_scripts/fix_html_reports_parallel.sh @@ -8,8 +8,8 @@ PWD=$(pwd) du -sh 
$COMBINED_EXRACT_DIR echo "### Jest: replacing path in json files" -for i in coverage-final xpack-coverage-final; do - sed -i "s|/dev/shm/workspace/kibana|${PWD}|g" $COMBINED_EXRACT_DIR/jest/${i}.json & +for i in oss oss-integration xpack; do + sed -i "s|/dev/shm/workspace/kibana|${PWD}|g" $COMBINED_EXRACT_DIR/jest/${i}-coverage-final.json & done wait diff --git a/test/scripts/jenkins_unit.sh b/test/scripts/jenkins_unit.sh index a9751003e842..1f6a3d440734 100755 --- a/test/scripts/jenkins_unit.sh +++ b/test/scripts/jenkins_unit.sh @@ -2,11 +2,23 @@ source test/scripts/jenkins_test_setup.sh +rename_coverage_file() { + test -f target/kibana-coverage/jest/coverage-final.json \ + && mv target/kibana-coverage/jest/coverage-final.json \ + target/kibana-coverage/jest/$1-coverage-final.json +} + if [[ -z "$CODE_COVERAGE" ]] ; then "$(FORCE_COLOR=0 yarn bin)/grunt" jenkins:unit --dev; else echo " -> Running jest tests with coverage" node scripts/jest --ci --verbose --coverage + rename_coverage_file "oss" + echo "" + echo "" + echo " -> Running jest integration tests with coverage" + node --max-old-space-size=8192 scripts/jest_integration --ci --verbose --coverage || true; + rename_coverage_file "oss-integration" echo "" echo "" echo " -> Running mocha tests with coverage" From e8a4b7e7dd773123618c2bd0d4dde2ace7f67121 Mon Sep 17 00:00:00 2001 From: Tyler Smalley Date: Tue, 24 Nov 2020 11:33:34 -0800 Subject: [PATCH 14/18] [@kbn/utils] Clean target before build (#84253) Signed-off-by: Tyler Smalley --- packages/kbn-utils/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/kbn-utils/package.json b/packages/kbn-utils/package.json index a07be96f0d4d..0859faa7ed0a 100644 --- a/packages/kbn-utils/package.json +++ b/packages/kbn-utils/package.json @@ -5,7 +5,7 @@ "license": "Apache-2.0", "private": true, "scripts": { - "build": "../../node_modules/.bin/tsc", + "build": "rm -rf target && ../../node_modules/.bin/tsc", "kbn:bootstrap": "yarn build", "kbn:watch": "yarn build --watch" }, From 3612e3f98d579c6f5075eb552151890640d9f154 Mon Sep 17 00:00:00 2001 From: Thomas Neirynck Date: Tue, 24 Nov 2020 14:36:57 -0500 Subject: [PATCH 15/18] [Maps] fix code-owners (#84265) --- .github/CODEOWNERS | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 834662044988..a536d1b54551 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -142,9 +142,8 @@ #CC# /src/plugins/maps_oss/ @elastic/kibana-gis #CC# /x-pack/plugins/file_upload @elastic/kibana-gis #CC# /x-pack/plugins/maps_legacy_licensing @elastic/kibana-gis -#CC# /src/plugins/home/server/tutorials @elastic/kibana-gis -#CC# /src/plugins/tile_map/ @elastic/kibana-gis -#CC# /src/plugins/region_map/ @elastic/kibana-gis +/src/plugins/tile_map/ @elastic/kibana-gis +/src/plugins/region_map/ @elastic/kibana-gis # Operations /src/dev/ @elastic/kibana-operations From cc35065f5ab027684444f3768f92d5a335e18cfd Mon Sep 17 00:00:00 2001 From: Thomas Watson Date: Tue, 24 Nov 2020 21:22:58 +0100 Subject: [PATCH 16/18] Update example docs with correct version of Boom (#84271) --- .../developer/plugin/migrating-legacy-plugins-examples.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/developer/plugin/migrating-legacy-plugins-examples.asciidoc b/docs/developer/plugin/migrating-legacy-plugins-examples.asciidoc index abf51bb3378b..469f7a4f3adb 100644 --- a/docs/developer/plugin/migrating-legacy-plugins-examples.asciidoc +++ 
b/docs/developer/plugin/migrating-legacy-plugins-examples.asciidoc @@ -242,7 +242,7 @@ migration is complete: ---- import { schema } from '@kbn/config-schema'; import { CoreSetup } from 'kibana/server'; -import Boom from 'boom'; +import Boom from '@hapi/boom'; export class DemoPlugin { public setup(core: CoreSetup) { From 13808e019e6ebcf62b392824885b743371120bb1 Mon Sep 17 00:00:00 2001 From: Brandon Kobel Date: Tue, 24 Nov 2020 12:28:15 -0800 Subject: [PATCH 17/18] Deprecate `kibana.index` setting (#83988) * Deprecating `kibana.index` setting * Using ela.st service so this can be changed to the blog in the future * Adding unit tests * Revising deprecation log message * Changing the deprecation log message to be more consistent with others * Updating kibana.index docs also * Using rename deprecation as the "standard" for the deprecation messages * /s/'/` --- docs/setup/settings.asciidoc | 6 ++- src/core/server/kibana_config.test.ts | 66 +++++++++++++++++++++++++++ src/core/server/kibana_config.ts | 14 ++++++ 3 files changed, 84 insertions(+), 2 deletions(-) create mode 100644 src/core/server/kibana_config.test.ts diff --git a/docs/setup/settings.asciidoc b/docs/setup/settings.asciidoc index efc7a1b93093..c22d4466ee09 100644 --- a/docs/setup/settings.asciidoc +++ b/docs/setup/settings.asciidoc @@ -214,10 +214,12 @@ Please use the `defaultRoute` advanced setting instead. The default application to load. *Default: `"home"`* |[[kibana-index]] `kibana.index:` - | {kib} uses an index in {es} to store saved searches, visualizations, and + | *deprecated* This setting is deprecated and will be removed in 8.0. Multitenancy by changing +`kibana.index` will not be supported starting in 8.0. See https://ela.st/kbn-remove-legacy-multitenancy[8.0 Breaking Changes] +for more details. {kib} uses an index in {es} to store saved searches, visualizations, and dashboards. {kib} creates a new index if the index doesn’t already exist. If you configure a custom index, the name must be lowercase, and conform to the -{es} {ref}/indices-create-index.html[index name limitations]. +{es} {ref}/indices-create-index.html[index name limitations]. *Default: `".kibana"`* | `kibana.autocompleteTimeout:` {ess-icon} diff --git a/src/core/server/kibana_config.test.ts b/src/core/server/kibana_config.test.ts new file mode 100644 index 000000000000..804c02ae99e4 --- /dev/null +++ b/src/core/server/kibana_config.test.ts @@ -0,0 +1,66 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +import { config } from './kibana_config'; +import { applyDeprecations, configDeprecationFactory } from '@kbn/config'; + +const CONFIG_PATH = 'kibana'; + +const applyKibanaDeprecations = (settings: Record = {}) => { + const deprecations = config.deprecations!(configDeprecationFactory); + const deprecationMessages: string[] = []; + const _config: any = {}; + _config[CONFIG_PATH] = settings; + const migrated = applyDeprecations( + _config, + deprecations.map((deprecation) => ({ + deprecation, + path: CONFIG_PATH, + })), + (msg) => deprecationMessages.push(msg) + ); + return { + messages: deprecationMessages, + migrated, + }; +}; + +it('set correct defaults ', () => { + const configValue = config.schema.validate({}); + expect(configValue).toMatchInlineSnapshot(` + Object { + "autocompleteTerminateAfter": "PT1M40S", + "autocompleteTimeout": "PT1S", + "enabled": true, + "index": ".kibana", + } + `); +}); + +describe('deprecations', () => { + ['.foo', '.kibana'].forEach((index) => { + it('logs a warning if index is set', () => { + const { messages } = applyKibanaDeprecations({ index }); + expect(messages).toMatchInlineSnapshot(` + Array [ + "\\"kibana.index\\" is deprecated. Multitenancy by changing \\"kibana.index\\" will not be supported starting in 8.0. See https://ela.st/kbn-remove-legacy-multitenancy for more details", + ] + `); + }); + }); +}); diff --git a/src/core/server/kibana_config.ts b/src/core/server/kibana_config.ts index 17f77a6e9328..ae6897b6a6ad 100644 --- a/src/core/server/kibana_config.ts +++ b/src/core/server/kibana_config.ts @@ -18,9 +18,22 @@ */ import { schema, TypeOf } from '@kbn/config-schema'; +import { ConfigDeprecationProvider } from '@kbn/config'; export type KibanaConfigType = TypeOf; +const deprecations: ConfigDeprecationProvider = () => [ + (settings, fromPath, log) => { + const kibana = settings[fromPath]; + if (kibana?.index) { + log( + `"kibana.index" is deprecated. Multitenancy by changing "kibana.index" will not be supported starting in 8.0. 
See https://ela.st/kbn-remove-legacy-multitenancy for more details` + ); + } + return settings; + }, +]; + export const config = { path: 'kibana', schema: schema.object({ @@ -29,4 +42,5 @@ export const config = { autocompleteTerminateAfter: schema.duration({ defaultValue: 100000 }), autocompleteTimeout: schema.duration({ defaultValue: 1000 }), }), + deprecations, }; From 115916956d875bb56e9b57961b0bbb4cf090e3e9 Mon Sep 17 00:00:00 2001 From: Thomas Watson Date: Tue, 24 Nov 2020 22:15:35 +0100 Subject: [PATCH 18/18] Use correct version of Podium (#84270) --- package.json | 1 - .../src/legacy_logging_server.ts | 2 +- yarn.lock | 17 ----------------- 3 files changed, 1 insertion(+), 19 deletions(-) diff --git a/package.json b/package.json index af80102641db..571dc7302f92 100644 --- a/package.json +++ b/package.json @@ -261,7 +261,6 @@ "pdfmake": "^0.1.65", "pegjs": "0.10.0", "pngjs": "^3.4.0", - "podium": "^3.1.2", "prop-types": "^15.7.2", "proper-lockfile": "^3.2.0", "proxy-from-env": "1.0.0", diff --git a/packages/kbn-legacy-logging/src/legacy_logging_server.ts b/packages/kbn-legacy-logging/src/legacy_logging_server.ts index 45e4bda0b007..1b13eda44fff 100644 --- a/packages/kbn-legacy-logging/src/legacy_logging_server.ts +++ b/packages/kbn-legacy-logging/src/legacy_logging_server.ts @@ -18,7 +18,7 @@ */ import { ServerExtType, Server } from '@hapi/hapi'; -import Podium from 'podium'; +import Podium from '@hapi/podium'; import { setupLogging } from './setup_logging'; import { attachMetaData } from './metadata'; import { legacyLoggingConfigSchema } from './schema'; diff --git a/yarn.lock b/yarn.lock index 8d47d3e84378..dc171a44dca1 100644 --- a/yarn.lock +++ b/yarn.lock @@ -17975,15 +17975,6 @@ joi@13.x.x, joi@^13.5.2: isemail "3.x.x" topo "3.x.x" -joi@14.x.x: - version "14.3.1" - resolved "https://registry.yarnpkg.com/joi/-/joi-14.3.1.tgz#164a262ec0b855466e0c35eea2a885ae8b6c703c" - integrity sha512-LQDdM+pkOrpAn4Lp+neNIFV3axv1Vna3j38bisbQhETPMANYRbFJFUyOZcOClYvM/hppMhGWuKSFEK9vjrB+bQ== - dependencies: - hoek "6.x.x" - isemail "3.x.x" - topo "3.x.x" - joi@^17.1.1: version "17.2.1" resolved "https://registry.yarnpkg.com/joi/-/joi-17.2.1.tgz#e5140fdf07e8fecf9bc977c2832d1bdb1e3f2a0a" @@ -22237,14 +22228,6 @@ pnp-webpack-plugin@1.6.4: dependencies: ts-pnp "^1.1.6" -podium@^3.1.2: - version "3.2.0" - resolved "https://registry.yarnpkg.com/podium/-/podium-3.2.0.tgz#2a7c579ddd5408f412d014c9ffac080c41d83477" - integrity sha512-rbwvxwVkI6gRRlxZQ1zUeafrpGxZ7QPHIheinehAvGATvGIPfWRkaTeWedc5P4YjXJXEV8ZbBxPtglNylF9hjw== - dependencies: - hoek "6.x.x" - joi "14.x.x" - polished@^1.9.2: version "1.9.2" resolved "https://registry.yarnpkg.com/polished/-/polished-1.9.2.tgz#d705cac66f3a3ed1bd38aad863e2c1e269baf6b6"
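One aside on the `kibana.index` deprecation patch above: its commit message calls the rename deprecation the "standard" for message wording, while the shipped code logs a hand-written warning through a custom deprecation function. For contrast, a sketch of the helper-based style, assuming the `rename` and `unused` helpers exposed by `configDeprecationFactory` (neither helper appears in this diff, and the setting names below are made up):

import { ConfigDeprecationProvider } from '@kbn/config';

// Hypothetical settings, shown only to contrast with the hand-written
// `kibana.index` warning above; `rename` and `unused` are assumed factory helpers.
export const exampleDeprecations: ConfigDeprecationProvider = ({ rename, unused }) => [
  rename('oldSettingName', 'newSettingName'), // emits the standard "renamed" warning
  unused('settingNoLongerRead'), // emits the standard "no longer read" warning
];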