From 946e9f09140ac7b5683eb3c85e78c2a39851212f Mon Sep 17 00:00:00 2001 From: Thomas Neirynck Date: Wed, 2 Sep 2020 08:56:23 -0400 Subject: [PATCH 01/21] [Maps] Remove obsolete link (#76419) --- .../routing/routes/list/maps_list_view.js | 24 ++++++++++++------- 1 file changed, 15 insertions(+), 9 deletions(-) diff --git a/x-pack/plugins/maps/public/routing/routes/list/maps_list_view.js b/x-pack/plugins/maps/public/routing/routes/list/maps_list_view.js index e9229883d708d..8fe6866cd7834 100644 --- a/x-pack/plugins/maps/public/routing/routes/list/maps_list_view.js +++ b/x-pack/plugins/maps/public/routing/routes/list/maps_list_view.js @@ -12,6 +12,7 @@ import { getUiSettings, getToasts, getCoreChrome, + getNavigateToApp, } from '../../../kibana_services'; import { EuiTitle, @@ -32,11 +33,18 @@ import { import { i18n } from '@kbn/i18n'; import { FormattedMessage } from '@kbn/i18n/react'; import { addHelpMenuToAppChrome } from '../../../help_menu_util'; -import { Link } from 'react-router-dom'; import { goToSpecifiedPath } from '../../maps_router'; +import { APP_ID, MAP_PATH } from '../../../../common/constants'; export const EMPTY_FILTER = ''; +function navigateToNewMap() { + const navigateToApp = getNavigateToApp(); + navigateToApp(APP_ID, { + path: MAP_PATH, + }); +} + export class MapsListView extends React.Component { state = { hasInitialFetchReturned: false, @@ -388,14 +396,12 @@ export class MapsListView extends React.Component { let createButton; if (!this.state.readOnly) { createButton = ( - - - - - + + + ); } return ( From 4762cf56f54b5f6fba17dbe4cdc8b18607b2dc61 Mon Sep 17 00:00:00 2001 From: James Gowdy Date: Wed, 2 Sep 2020 14:07:57 +0100 Subject: [PATCH 02/21] [ML] Switching to new es client (#74965) * [ML] Switching to new es client * further conversions * fixing tests * updating responses * test commit * refactoring shared services to removed context parameter * removing last scoped clients * removing ml js client * udating file data viz errors * fixing jest tests * fixing types after merge with master * error response changes * adding default sizes to some requests * adding custom error types for license checks * tidying up shared function checks * removing test data * removing need for DummyKibanaRequest * updating comment * fixing functional api tests * removing comments * fixing types after master merge * throw error rather than return it * removing placeholder error * changes based on review comments * fixing types after merge with master * fixing missing return Co-authored-by: Elastic Machine --- ...transaction_duration_anomaly_alert_type.ts | 7 +- .../apm/server/lib/helpers/setup_request.ts | 11 +- x-pack/plugins/infra/server/plugin.ts | 5 +- .../components/import_errors/errors.tsx | 18 +- .../common/chart_loader/chart_loader.ts | 8 +- .../ml/server/client/elasticsearch_ml.test.ts | 60 -- .../ml/server/client/elasticsearch_ml.ts | 929 ------------------ .../capabilities/check_capabilities.test.ts | 30 +- .../lib/capabilities/check_capabilities.ts | 6 +- .../ml/server/lib/capabilities/upgrade.ts | 8 +- .../ml/server/lib/check_annotations/index.ts | 12 +- .../server/lib/license/ml_server_license.ts | 3 - .../annotation_service/annotation.test.ts | 43 +- .../models/annotation_service/annotation.ts | 24 +- .../server/models/annotation_service/index.ts | 6 +- .../bucket_span_estimator.d.ts | 8 +- .../bucket_span_estimator.js | 90 +- .../bucket_span_estimator.test.ts | 17 +- .../polled_data_checker.js | 11 +- .../single_series_checker.js | 11 +- 
.../calculate_model_memory_limit.ts | 23 +- .../models/calendar/calendar_manager.ts | 39 +- .../server/models/calendar/event_manager.ts | 29 +- .../analytics_audit_messages.ts | 38 +- .../data_recognizer/data_recognizer.test.ts | 11 +- .../models/data_recognizer/data_recognizer.ts | 105 +- .../models/data_visualizer/data_visualizer.ts | 100 +- .../models/fields_service/fields_service.ts | 46 +- .../file_data_visualizer.ts | 12 +- .../file_data_visualizer/import_data.ts | 19 +- .../ml/server/models/filter/filter_manager.ts | 47 +- .../job_audit_messages.d.ts | 4 +- .../job_audit_messages/job_audit_messages.js | 248 +++-- .../ml/server/models/job_service/datafeeds.ts | 41 +- .../server/models/job_service/error_utils.ts | 6 +- .../ml/server/models/job_service/groups.ts | 19 +- .../ml/server/models/job_service/index.ts | 18 +- .../ml/server/models/job_service/jobs.ts | 135 ++- .../models/job_service/model_snapshots.ts | 34 +- .../new_job/categorization/examples.ts | 16 +- .../new_job/categorization/top_categories.ts | 19 +- .../categorization/validation_results.ts | 4 +- .../models/job_service/new_job/charts.ts | 8 +- .../models/job_service/new_job/line_chart.ts | 8 +- .../job_service/new_job/population_chart.ts | 18 +- .../job_service/new_job_caps/field_service.ts | 15 +- .../new_job_caps/new_job_caps.test.ts | 31 +- .../job_service/new_job_caps/new_job_caps.ts | 11 +- .../models/job_service/new_job_caps/rollup.ts | 8 +- .../job_validation/job_validation.test.ts | 47 +- .../models/job_validation/job_validation.ts | 18 +- .../job_validation/validate_bucket_span.js | 4 +- .../validate_bucket_span.test.ts | 6 +- .../validate_cardinality.test.ts | 28 +- .../job_validation/validate_cardinality.ts | 17 +- .../validate_model_memory_limit.test.ts | 44 +- .../validate_model_memory_limit.ts | 14 +- .../validate_time_range.test.ts | 19 +- .../job_validation/validate_time_range.ts | 11 +- .../get_partition_fields_values.ts | 12 +- .../models/results_service/results_service.ts | 48 +- x-pack/plugins/ml/server/plugin.ts | 38 +- .../plugins/ml/server/routes/annotations.ts | 16 +- .../ml/server/routes/anomaly_detectors.ts | 159 ++- x-pack/plugins/ml/server/routes/calendars.ts | 52 +- .../ml/server/routes/data_frame_analytics.ts | 133 +-- .../ml/server/routes/data_visualizer.ts | 26 +- x-pack/plugins/ml/server/routes/datafeeds.ts | 99 +- .../ml/server/routes/fields_service.ts | 18 +- .../ml/server/routes/file_data_visualizer.ts | 22 +- x-pack/plugins/ml/server/routes/filters.ts | 54 +- x-pack/plugins/ml/server/routes/indices.ts | 6 +- .../ml/server/routes/job_audit_messages.ts | 8 +- .../plugins/ml/server/routes/job_service.ts | 80 +- .../ml/server/routes/job_validation.ts | 22 +- x-pack/plugins/ml/server/routes/modules.ts | 38 +- .../ml/server/routes/notification_settings.ts | 13 +- .../ml/server/routes/results_service.ts | 66 +- x-pack/plugins/ml/server/routes/system.ts | 64 +- .../ml/server/shared_services/errors.ts | 12 + .../shared_services/license_checks/errors.ts | 21 + .../shared_services/license_checks/index.ts | 8 + .../{ => license_checks}/license_checks.ts | 7 +- .../providers/anomaly_detectors.ts | 35 +- .../shared_services/providers/job_service.ts | 30 +- .../shared_services/providers/modules.ts | 103 +- .../providers/results_service.ts | 31 +- .../shared_services/providers/system.ts | 95 +- .../server/shared_services/shared_services.ts | 105 +- .../signals/find_ml_signals.ts | 6 +- .../signals/signal_rule_alert_type.ts | 9 +- .../server/lib/machine_learning/authz.ts | 3 +- 
.../server/lib/machine_learning/index.ts | 5 +- .../usage/detections/detections_helpers.ts | 14 +- .../server/routes/api/field_histograms.ts | 2 +- .../server/lib/alerts/duration_anomaly.ts | 23 +- .../apis/ml/data_frame_analytics/update.ts | 2 +- .../data_visualizer/get_field_histograms.ts | 3 +- .../ml/data_visualizer/get_field_stats.ts | 3 +- .../ml/data_visualizer/get_overall_stats.ts | 3 +- .../ml/fields_service/field_cardinality.ts | 3 +- .../ml/fields_service/time_field_range.ts | 3 +- .../apis/ml/filters/create_filters.ts | 2 +- .../apis/ml/filters/get_filters.ts | 2 +- .../apis/ml/filters/update_filters.ts | 2 +- .../apis/ml/jobs/close_jobs.ts | 8 +- 106 files changed, 1573 insertions(+), 2568 deletions(-) delete mode 100644 x-pack/plugins/ml/server/client/elasticsearch_ml.test.ts delete mode 100644 x-pack/plugins/ml/server/client/elasticsearch_ml.ts create mode 100644 x-pack/plugins/ml/server/shared_services/errors.ts create mode 100644 x-pack/plugins/ml/server/shared_services/license_checks/errors.ts create mode 100644 x-pack/plugins/ml/server/shared_services/license_checks/index.ts rename x-pack/plugins/ml/server/shared_services/{ => license_checks}/license_checks.ts (66%) diff --git a/x-pack/plugins/apm/server/lib/alerts/register_transaction_duration_anomaly_alert_type.ts b/x-pack/plugins/apm/server/lib/alerts/register_transaction_duration_anomaly_alert_type.ts index 3abc89c470b21..e7eb7b8de65e3 100644 --- a/x-pack/plugins/apm/server/lib/alerts/register_transaction_duration_anomaly_alert_type.ts +++ b/x-pack/plugins/apm/server/lib/alerts/register_transaction_duration_anomaly_alert_type.ts @@ -73,10 +73,9 @@ export function registerTransactionDurationAnomalyAlertType({ return; } const alertParams = params as TypeOf; - const mlClient = services.getLegacyScopedClusterClient(ml.mlClient); - const request = { params: 'DummyKibanaRequest' } as KibanaRequest; - const { mlAnomalySearch } = ml.mlSystemProvider(mlClient, request); - const anomalyDetectors = ml.anomalyDetectorsProvider(mlClient, request); + const request = {} as KibanaRequest; + const { mlAnomalySearch } = ml.mlSystemProvider(request); + const anomalyDetectors = ml.anomalyDetectorsProvider(request); const mlJobIds = await getMLJobIds( anomalyDetectors, diff --git a/x-pack/plugins/apm/server/lib/helpers/setup_request.ts b/x-pack/plugins/apm/server/lib/helpers/setup_request.ts index a242a0adb6d4c..6b69e57389dff 100644 --- a/x-pack/plugins/apm/server/lib/helpers/setup_request.ts +++ b/x-pack/plugins/apm/server/lib/helpers/setup_request.ts @@ -122,13 +122,10 @@ function getMlSetup( if (!ml) { return; } - const mlClient = ml.mlClient.asScoped(request); - const mlSystem = ml.mlSystemProvider(mlClient, request); + return { - mlClient, - mlSystem, - modules: ml.modulesProvider(mlClient, request, savedObjectsClient), - anomalyDetectors: ml.anomalyDetectorsProvider(mlClient, request), - mlAnomalySearch: mlSystem.mlAnomalySearch, + mlSystem: ml.mlSystemProvider(request), + anomalyDetectors: ml.anomalyDetectorsProvider(request), + modules: ml.modulesProvider(request, savedObjectsClient), }; } diff --git a/x-pack/plugins/infra/server/plugin.ts b/x-pack/plugins/infra/server/plugin.ts index 7cd6383a9b2e5..51f91d7189db7 100644 --- a/x-pack/plugins/infra/server/plugin.ts +++ b/x-pack/plugins/infra/server/plugin.ts @@ -152,9 +152,8 @@ export class InfraServerPlugin { core.http.registerRouteHandlerContext( 'infra', (context, request): InfraRequestHandlerContext => { - const mlSystem = context.ml && 
plugins.ml?.mlSystemProvider(context.ml?.mlClient, request); - const mlAnomalyDetectors = - context.ml && plugins.ml?.anomalyDetectorsProvider(context.ml?.mlClient, request); + const mlSystem = plugins.ml?.mlSystemProvider(request); + const mlAnomalyDetectors = plugins.ml?.anomalyDetectorsProvider(request); const spaceId = plugins.spaces?.spacesService.getSpaceId(request) || 'default'; return { diff --git a/x-pack/plugins/ml/public/application/datavisualizer/file_based/components/import_errors/errors.tsx b/x-pack/plugins/ml/public/application/datavisualizer/file_based/components/import_errors/errors.tsx index f723ad1a752bf..bcbba67e9cc60 100644 --- a/x-pack/plugins/ml/public/application/datavisualizer/file_based/components/import_errors/errors.tsx +++ b/x-pack/plugins/ml/public/application/datavisualizer/file_based/components/import_errors/errors.tsx @@ -93,11 +93,11 @@ function title(statuses: Statuses) { } } -function ImportError(error: any, key: number) { +const ImportError: FC<{ error: any }> = ({ error }) => { const errorObj = toString(error); return ( - -

{errorObj.msg}

+ <> +

{errorObj.msg}

{errorObj.more !== undefined && ( )} -
+ ); -} +}; function toString(error: any): ImportError { if (typeof error === 'string') { @@ -127,11 +127,11 @@ function toString(error: any): ImportError { return { msg: error.msg }; } else if (error.error !== undefined) { if (typeof error.error === 'object') { - if (error.error.msg !== undefined) { + if (error.error.reason !== undefined) { // this will catch a bulk ingest failure - const errorObj: ImportError = { msg: error.error.msg }; - if (error.error.body !== undefined) { - errorObj.more = error.error.response; + const errorObj: ImportError = { msg: error.error.reason }; + if (error.error.root_cause !== undefined) { + errorObj.more = JSON.stringify(error.error.root_cause); } return errorObj; } diff --git a/x-pack/plugins/ml/public/application/jobs/new_job/common/chart_loader/chart_loader.ts b/x-pack/plugins/ml/public/application/jobs/new_job/common/chart_loader/chart_loader.ts index 328cd1a5ef8d7..d61e75fd21b5a 100644 --- a/x-pack/plugins/ml/public/application/jobs/new_job/common/chart_loader/chart_loader.ts +++ b/x-pack/plugins/ml/public/application/jobs/new_job/common/chart_loader/chart_loader.ts @@ -71,9 +71,7 @@ export class ChartLoader { splitFieldName, splitFieldValue ); - if (resp.error !== undefined) { - throw resp.error; - } + return resp.results; } return {}; @@ -105,9 +103,7 @@ export class ChartLoader { aggFieldPairNames, splitFieldName ); - if (resp.error !== undefined) { - throw resp.error; - } + return resp.results; } return {}; diff --git a/x-pack/plugins/ml/server/client/elasticsearch_ml.test.ts b/x-pack/plugins/ml/server/client/elasticsearch_ml.test.ts deleted file mode 100644 index 5ad0db3c58ce4..0000000000000 --- a/x-pack/plugins/ml/server/client/elasticsearch_ml.test.ts +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ - -import { elasticsearchJsPlugin } from './elasticsearch_ml'; - -interface Endpoint { - fmt: string; -} - -interface ClientAction { - urls?: Endpoint[]; - url: Endpoint; -} - -describe('ML - Endpoints', () => { - // Check all paths in the ML elasticsearchJsPlugin start with a leading forward slash - // so they work if Kibana is run behind a reverse proxy - const PATH_START: string = '/'; - const urls: string[] = []; - - // Stub objects - const Client = { - prototype: {}, - }; - - const components = { - clientAction: { - factory(obj: ClientAction) { - // add each endpoint URL to a list - if (obj.urls) { - obj.urls.forEach((url) => { - urls.push(url.fmt); - }); - } - if (obj.url) { - urls.push(obj.url.fmt); - } - }, - namespaceFactory() { - return { - prototype: {}, - }; - }, - }, - }; - - // Stub elasticsearchJsPlugin - elasticsearchJsPlugin(Client, null, components); - - describe('paths', () => { - it(`should start with ${PATH_START}`, () => { - urls.forEach((url) => { - expect(url[0]).toEqual(PATH_START); - }); - }); - }); -}); diff --git a/x-pack/plugins/ml/server/client/elasticsearch_ml.ts b/x-pack/plugins/ml/server/client/elasticsearch_ml.ts deleted file mode 100644 index 63153d18cb10b..0000000000000 --- a/x-pack/plugins/ml/server/client/elasticsearch_ml.ts +++ /dev/null @@ -1,929 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ - -export const elasticsearchJsPlugin = (Client: any, config: any, components: any) => { - const ca = components.clientAction.factory; - - Client.prototype.ml = components.clientAction.namespaceFactory(); - const ml = Client.prototype.ml.prototype; - - /** - * Perform a [ml.authenticate](Retrieve details about the currently authenticated user) request - * - * @param {Object} params - An object with parameters used to carry out this action - */ - ml.jobs = ca({ - urls: [ - { - fmt: '/_ml/anomaly_detectors/<%=jobId%>', - req: { - jobId: { - type: 'list', - }, - }, - }, - { - fmt: '/_ml/anomaly_detectors/', - }, - ], - method: 'GET', - }); - - ml.jobStats = ca({ - urls: [ - { - fmt: '/_ml/anomaly_detectors/<%=jobId%>/_stats', - req: { - jobId: { - type: 'list', - }, - }, - }, - { - fmt: '/_ml/anomaly_detectors/_stats', - }, - ], - method: 'GET', - }); - - ml.addJob = ca({ - urls: [ - { - fmt: '/_ml/anomaly_detectors/<%=jobId%>', - req: { - jobId: { - type: 'string', - }, - }, - }, - ], - needBody: true, - method: 'PUT', - }); - - ml.openJob = ca({ - urls: [ - { - fmt: '/_ml/anomaly_detectors/<%=jobId%>/_open', - req: { - jobId: { - type: 'string', - }, - }, - }, - ], - method: 'POST', - }); - - ml.closeJob = ca({ - urls: [ - { - fmt: '/_ml/anomaly_detectors/<%=jobId%>/_close?force=<%=force%>', - req: { - jobId: { - type: 'string', - }, - force: { - type: 'boolean', - }, - }, - }, - { - fmt: '/_ml/anomaly_detectors/<%=jobId%>/_close', - req: { - jobId: { - type: 'string', - }, - }, - }, - ], - method: 'POST', - }); - - // Currently the endpoint uses a default size of 100 unless a size is supplied. - // So until paging is supported in the UI, explicitly supply a size of 1000 - // to match the max number of docs that the endpoint can return. - ml.getDataFrameAnalytics = ca({ - urls: [ - { - fmt: '/_ml/data_frame/analytics/<%=analyticsId%>', - req: { - analyticsId: { - type: 'string', - }, - }, - }, - { - fmt: '/_ml/data_frame/analytics/_all?size=1000', - }, - ], - method: 'GET', - }); - - ml.getDataFrameAnalyticsStats = ca({ - urls: [ - { - fmt: '/_ml/data_frame/analytics/<%=analyticsId%>/_stats', - req: { - analyticsId: { - type: 'string', - }, - }, - }, - { - // Currently the endpoint uses a default size of 100 unless a size is supplied. - // So until paging is supported in the UI, explicitly supply a size of 1000 - // to match the max number of docs that the endpoint can return. 
- fmt: '/_ml/data_frame/analytics/_all/_stats?size=1000', - }, - ], - method: 'GET', - }); - - ml.createDataFrameAnalytics = ca({ - urls: [ - { - fmt: '/_ml/data_frame/analytics/<%=analyticsId%>', - req: { - analyticsId: { - type: 'string', - }, - }, - }, - ], - needBody: true, - method: 'PUT', - }); - - ml.evaluateDataFrameAnalytics = ca({ - urls: [ - { - fmt: '/_ml/data_frame/_evaluate', - }, - ], - needBody: true, - method: 'POST', - }); - - ml.explainDataFrameAnalytics = ca({ - urls: [ - { - fmt: '/_ml/data_frame/analytics/_explain', - }, - ], - needBody: true, - method: 'POST', - }); - - ml.deleteDataFrameAnalytics = ca({ - urls: [ - { - fmt: '/_ml/data_frame/analytics/<%=analyticsId%>', - req: { - analyticsId: { - type: 'string', - }, - }, - }, - ], - method: 'DELETE', - }); - - ml.startDataFrameAnalytics = ca({ - urls: [ - { - fmt: '/_ml/data_frame/analytics/<%=analyticsId%>/_start', - req: { - analyticsId: { - type: 'string', - }, - }, - }, - ], - method: 'POST', - }); - - ml.stopDataFrameAnalytics = ca({ - urls: [ - { - fmt: '/_ml/data_frame/analytics/<%=analyticsId%>/_stop?&force=<%=force%>', - req: { - analyticsId: { - type: 'string', - }, - force: { - type: 'boolean', - }, - }, - }, - ], - method: 'POST', - }); - - ml.updateDataFrameAnalytics = ca({ - urls: [ - { - fmt: '/_ml/data_frame/analytics/<%=analyticsId%>/_update', - req: { - analyticsId: { - type: 'string', - }, - }, - }, - ], - needBody: true, - method: 'POST', - }); - - ml.deleteJob = ca({ - urls: [ - { - fmt: '/_ml/anomaly_detectors/<%=jobId%>?&force=<%=force%>&wait_for_completion=false', - req: { - jobId: { - type: 'string', - }, - force: { - type: 'boolean', - }, - }, - }, - { - fmt: '/_ml/anomaly_detectors/<%=jobId%>?&wait_for_completion=false', - req: { - jobId: { - type: 'string', - }, - }, - }, - ], - method: 'DELETE', - }); - - ml.updateJob = ca({ - urls: [ - { - fmt: '/_ml/anomaly_detectors/<%=jobId%>/_update', - req: { - jobId: { - type: 'string', - }, - }, - }, - ], - needBody: true, - method: 'POST', - }); - - ml.datafeeds = ca({ - urls: [ - { - fmt: '/_ml/datafeeds/<%=datafeedId%>', - req: { - datafeedId: { - type: 'list', - }, - }, - }, - { - fmt: '/_ml/datafeeds/', - }, - ], - method: 'GET', - }); - - ml.datafeedStats = ca({ - urls: [ - { - fmt: '/_ml/datafeeds/<%=datafeedId%>/_stats', - req: { - datafeedId: { - type: 'list', - }, - }, - }, - { - fmt: '/_ml/datafeeds/_stats', - }, - ], - method: 'GET', - }); - - ml.addDatafeed = ca({ - urls: [ - { - fmt: '/_ml/datafeeds/<%=datafeedId%>', - req: { - datafeedId: { - type: 'string', - }, - }, - }, - ], - needBody: true, - method: 'PUT', - }); - - ml.updateDatafeed = ca({ - urls: [ - { - fmt: '/_ml/datafeeds/<%=datafeedId%>/_update', - req: { - datafeedId: { - type: 'string', - }, - }, - }, - ], - needBody: true, - method: 'POST', - }); - - ml.deleteDatafeed = ca({ - urls: [ - { - fmt: '/_ml/datafeeds/<%=datafeedId%>?force=<%=force%>', - req: { - datafeedId: { - type: 'string', - }, - force: { - type: 'boolean', - }, - }, - }, - { - fmt: '/_ml/datafeeds/<%=datafeedId%>', - req: { - datafeedId: { - type: 'string', - }, - }, - }, - ], - method: 'DELETE', - }); - - ml.startDatafeed = ca({ - urls: [ - { - fmt: '/_ml/datafeeds/<%=datafeedId%>/_start?&start=<%=start%>&end=<%=end%>', - req: { - datafeedId: { - type: 'string', - }, - start: { - type: 'string', - }, - end: { - type: 'string', - }, - }, - }, - { - fmt: '/_ml/datafeeds/<%=datafeedId%>/_start?&start=<%=start%>', - req: { - datafeedId: { - type: 'string', - }, - start: { - type: 'string', - }, - }, - 
}, - { - fmt: '/_ml/datafeeds/<%=datafeedId%>/_start', - req: { - datafeedId: { - type: 'string', - }, - }, - }, - ], - method: 'POST', - }); - - ml.stopDatafeed = ca({ - urls: [ - { - fmt: '/_ml/datafeeds/<%=datafeedId%>/_stop?force=<%=force%>', - req: { - datafeedId: { - type: 'string', - }, - force: { - type: 'boolean', - }, - }, - }, - { - fmt: '/_ml/datafeeds/<%=datafeedId%>/_stop', - req: { - datafeedId: { - type: 'string', - }, - }, - }, - ], - method: 'POST', - }); - - ml.validateDetector = ca({ - url: { - fmt: '/_ml/anomaly_detectors/_validate/detector', - }, - needBody: true, - method: 'POST', - }); - - ml.estimateModelMemory = ca({ - url: { - fmt: '/_ml/anomaly_detectors/_estimate_model_memory', - }, - needBody: true, - method: 'POST', - }); - - ml.datafeedPreview = ca({ - url: { - fmt: '/_ml/datafeeds/<%=datafeedId%>/_preview', - req: { - datafeedId: { - type: 'string', - }, - }, - }, - method: 'GET', - }); - - ml.forecast = ca({ - urls: [ - { - fmt: '/_ml/anomaly_detectors/<%=jobId%>/_forecast?&duration=<%=duration%>', - req: { - jobId: { - type: 'string', - }, - duration: { - type: 'string', - }, - }, - }, - { - fmt: '/_ml/anomaly_detectors/<%=jobId%>/_forecast', - req: { - jobId: { - type: 'string', - }, - }, - }, - ], - method: 'POST', - }); - - ml.records = ca({ - url: { - fmt: '/_ml/anomaly_detectors/<%=jobId%>/results/records', - req: { - jobId: { - type: 'string', - }, - }, - }, - method: 'POST', - }); - - ml.buckets = ca({ - urls: [ - { - fmt: '/_ml/anomaly_detectors/<%=jobId%>/results/buckets', - req: { - jobId: { - type: 'string', - }, - }, - }, - { - fmt: '/_ml/anomaly_detectors/<%=jobId%>/results/buckets/<%=timestamp%>', - req: { - jobId: { - type: 'string', - }, - timestamp: { - type: 'string', - }, - }, - }, - ], - method: 'POST', - }); - - ml.overallBuckets = ca({ - url: { - fmt: '/_ml/anomaly_detectors/<%=jobId%>/results/overall_buckets', - req: { - jobId: { - type: 'string', - }, - }, - }, - method: 'POST', - }); - - ml.privilegeCheck = ca({ - url: { - fmt: '/_security/user/_has_privileges', - }, - needBody: true, - method: 'POST', - }); - // Currently the endpoint uses a default size of 100 unless a size is supplied. 
So until paging is supported in the UI, explicitly supply a size of 1000 - ml.calendars = ca({ - urls: [ - { - fmt: '/_ml/calendars/<%=calendarId%>', - req: { - calendarId: { - type: 'string', - }, - }, - }, - { - fmt: '/_ml/calendars?size=1000', - }, - ], - method: 'GET', - }); - - ml.deleteCalendar = ca({ - url: { - fmt: '/_ml/calendars/<%=calendarId%>', - req: { - calendarId: { - type: 'string', - }, - }, - }, - method: 'DELETE', - }); - - ml.addCalendar = ca({ - url: { - fmt: '/_ml/calendars/<%=calendarId%>', - req: { - calendarId: { - type: 'string', - }, - }, - }, - needBody: true, - method: 'PUT', - }); - - ml.addJobToCalendar = ca({ - url: { - fmt: '/_ml/calendars/<%=calendarId%>/jobs/<%=jobId%>', - req: { - calendarId: { - type: 'string', - }, - jobId: { - type: 'string', - }, - }, - }, - method: 'PUT', - }); - - ml.removeJobFromCalendar = ca({ - url: { - fmt: '/_ml/calendars/<%=calendarId%>/jobs/<%=jobId%>', - req: { - calendarId: { - type: 'string', - }, - jobId: { - type: 'string', - }, - }, - }, - method: 'DELETE', - }); - - ml.events = ca({ - urls: [ - { - fmt: '/_ml/calendars/<%=calendarId%>/events', - req: { - calendarId: { - type: 'string', - }, - }, - }, - { - fmt: '/_ml/calendars/<%=calendarId%>/events?&job_id=<%=jobId%>', - req: { - calendarId: { - type: 'string', - }, - jobId: { - type: 'string', - }, - }, - }, - { - fmt: '/_ml/calendars/<%=calendarId%>/events?&after=<%=start%>&before=<%=end%>', - req: { - calendarId: { - type: 'string', - }, - start: { - type: 'string', - }, - end: { - type: 'string', - }, - }, - }, - { - fmt: - '/_ml/calendars/<%=calendarId%>/events?&after=<%=start%>&before=<%=end%>&job_id=<%=jobId%>', - req: { - calendarId: { - type: 'string', - }, - start: { - type: 'string', - }, - end: { - type: 'string', - }, - jobId: { - type: 'string', - }, - }, - }, - ], - method: 'GET', - }); - - ml.addEvent = ca({ - url: { - fmt: '/_ml/calendars/<%=calendarId%>/events', - req: { - calendarId: { - type: 'string', - }, - }, - }, - needBody: true, - method: 'POST', - }); - - ml.deleteEvent = ca({ - url: { - fmt: '/_ml/calendars/<%=calendarId%>/events/<%=eventId%>', - req: { - calendarId: { - type: 'string', - }, - eventId: { - type: 'string', - }, - }, - }, - method: 'DELETE', - }); - // Currently the endpoint uses a default size of 100 unless a size is supplied. 
So until paging is supported in the UI, explicitly supply a size of 1000 - ml.filters = ca({ - urls: [ - { - fmt: '/_ml/filters/<%=filterId%>', - req: { - filterId: { - type: 'string', - }, - }, - }, - { - fmt: '/_ml/filters?size=1000', - }, - ], - method: 'GET', - }); - - ml.addFilter = ca({ - url: { - fmt: '/_ml/filters/<%=filterId%>', - req: { - filterId: { - type: 'string', - }, - }, - }, - needBody: true, - method: 'PUT', - }); - - ml.updateFilter = ca({ - urls: [ - { - fmt: '/_ml/filters/<%=filterId%>/_update', - req: { - filterId: { - type: 'string', - }, - }, - }, - ], - needBody: true, - method: 'POST', - }); - - ml.deleteFilter = ca({ - url: { - fmt: '/_ml/filters/<%=filterId%>', - req: { - filterId: { - type: 'string', - }, - }, - }, - method: 'DELETE', - }); - - ml.info = ca({ - url: { - fmt: '/_ml/info', - }, - method: 'GET', - }); - - ml.fileStructure = ca({ - urls: [ - { - fmt: - '/_ml/find_file_structure?&explain=true&charset=<%=charset%>&format=<%=format%>&has_header_row=<%=has_header_row%>&column_names=<%=column_names%>&delimiter=<%=delimiter%>"e=<%=quote%>&should_trim_fields=<%=should_trim_fields%>&grok_pattern=<%=grok_pattern%>×tamp_field=<%=timestamp_field%>×tamp_format=<%=timestamp_format%>&lines_to_sample=<%=lines_to_sample%>', - req: { - charset: { - type: 'string', - }, - format: { - type: 'string', - }, - has_header_row: { - type: 'string', - }, - column_names: { - type: 'string', - }, - delimiter: { - type: 'string', - }, - quote: { - type: 'string', - }, - should_trim_fields: { - type: 'string', - }, - grok_pattern: { - type: 'string', - }, - timestamp_field: { - type: 'string', - }, - timestamp_format: { - type: 'string', - }, - lines_to_sample: { - type: 'string', - }, - }, - }, - { - fmt: '/_ml/find_file_structure?&explain=true', - }, - ], - needBody: true, - method: 'POST', - }); - - ml.rollupIndexCapabilities = ca({ - urls: [ - { - fmt: '/<%=indexPattern%>/_rollup/data', - req: { - indexPattern: { - type: 'string', - }, - }, - }, - ], - method: 'GET', - }); - - ml.categories = ca({ - urls: [ - { - fmt: '/_ml/anomaly_detectors/<%=jobId%>/results/categories/<%=categoryId%>', - req: { - jobId: { - type: 'string', - }, - categoryId: { - type: 'string', - }, - }, - }, - { - fmt: '/_ml/anomaly_detectors/<%=jobId%>/results/categories', - req: { - jobId: { - type: 'string', - }, - }, - }, - ], - method: 'GET', - }); - - ml.modelSnapshots = ca({ - urls: [ - { - fmt: '/_ml/anomaly_detectors/<%=jobId%>/model_snapshots/<%=snapshotId%>', - req: { - jobId: { - type: 'string', - }, - snapshotId: { - type: 'string', - }, - }, - }, - { - fmt: '/_ml/anomaly_detectors/<%=jobId%>/model_snapshots', - req: { - jobId: { - type: 'string', - }, - }, - }, - ], - method: 'GET', - }); - - ml.updateModelSnapshot = ca({ - urls: [ - { - fmt: '/_ml/anomaly_detectors/<%=jobId%>/model_snapshots/<%=snapshotId%>/_update', - req: { - jobId: { - type: 'string', - }, - snapshotId: { - type: 'string', - }, - }, - }, - ], - method: 'POST', - needBody: true, - }); - - ml.deleteModelSnapshot = ca({ - urls: [ - { - fmt: '/_ml/anomaly_detectors/<%=jobId%>/model_snapshots/<%=snapshotId%>', - req: { - jobId: { - type: 'string', - }, - snapshotId: { - type: 'string', - }, - }, - }, - ], - method: 'DELETE', - }); - - ml.revertModelSnapshot = ca({ - urls: [ - { - fmt: '/_ml/anomaly_detectors/<%=jobId%>/model_snapshots/<%=snapshotId%>/_revert', - req: { - jobId: { - type: 'string', - }, - snapshotId: { - type: 'string', - }, - }, - }, - ], - method: 'POST', - }); -}; diff --git 
a/x-pack/plugins/ml/server/lib/capabilities/check_capabilities.test.ts b/x-pack/plugins/ml/server/lib/capabilities/check_capabilities.test.ts index 21d32813c0d51..4dd17f8cf4889 100644 --- a/x-pack/plugins/ml/server/lib/capabilities/check_capabilities.test.ts +++ b/x-pack/plugins/ml/server/lib/capabilities/check_capabilities.test.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -import { ILegacyScopedClusterClient } from 'kibana/server'; +import { IScopedClusterClient } from 'kibana/server'; import { getAdminCapabilities, getUserCapabilities } from './__mocks__/ml_capabilities'; import { capabilitiesProvider } from './check_capabilities'; import { MlLicense } from '../../../common/license'; @@ -24,16 +24,28 @@ const mlIsEnabled = async () => true; const mlIsNotEnabled = async () => false; const mlClusterClientNonUpgrade = ({ - callAsInternalUser: async () => ({ - upgrade_mode: false, - }), -} as unknown) as ILegacyScopedClusterClient; + asInternalUser: { + ml: { + info: async () => ({ + body: { + upgrade_mode: false, + }, + }), + }, + }, +} as unknown) as IScopedClusterClient; const mlClusterClientUpgrade = ({ - callAsInternalUser: async () => ({ - upgrade_mode: true, - }), -} as unknown) as ILegacyScopedClusterClient; + asInternalUser: { + ml: { + info: async () => ({ + body: { + upgrade_mode: true, + }, + }), + }, + }, +} as unknown) as IScopedClusterClient; describe('check_capabilities', () => { describe('getCapabilities() - right number of capabilities', () => { diff --git a/x-pack/plugins/ml/server/lib/capabilities/check_capabilities.ts b/x-pack/plugins/ml/server/lib/capabilities/check_capabilities.ts index c976ab598b28c..c591ec07c7c3b 100644 --- a/x-pack/plugins/ml/server/lib/capabilities/check_capabilities.ts +++ b/x-pack/plugins/ml/server/lib/capabilities/check_capabilities.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -import { ILegacyScopedClusterClient, KibanaRequest } from 'kibana/server'; +import { IScopedClusterClient, KibanaRequest } from 'kibana/server'; import { mlLog } from '../../client/log'; import { MlCapabilities, @@ -22,12 +22,12 @@ import { } from './errors'; export function capabilitiesProvider( - mlClusterClient: ILegacyScopedClusterClient, + client: IScopedClusterClient, capabilities: MlCapabilities, mlLicense: MlLicense, isMlEnabledInSpace: () => Promise ) { - const { isUpgradeInProgress } = upgradeCheckProvider(mlClusterClient); + const { isUpgradeInProgress } = upgradeCheckProvider(client); async function getCapabilities(): Promise { const upgradeInProgress = await isUpgradeInProgress(); const isPlatinumOrTrialLicense = mlLicense.isFullLicense(); diff --git a/x-pack/plugins/ml/server/lib/capabilities/upgrade.ts b/x-pack/plugins/ml/server/lib/capabilities/upgrade.ts index 6df4d0c87ecf5..defb70429fa0c 100644 --- a/x-pack/plugins/ml/server/lib/capabilities/upgrade.ts +++ b/x-pack/plugins/ml/server/lib/capabilities/upgrade.ts @@ -4,17 +4,17 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { ILegacyScopedClusterClient } from 'kibana/server'; +import { IScopedClusterClient } from 'kibana/server'; import { mlLog } from '../../client/log'; -export function upgradeCheckProvider({ callAsInternalUser }: ILegacyScopedClusterClient) { +export function upgradeCheckProvider({ asInternalUser }: IScopedClusterClient) { async function isUpgradeInProgress(): Promise { let upgradeInProgress = false; try { - const info = await callAsInternalUser('ml.info'); + const { body } = await asInternalUser.ml.info(); // if ml indices are currently being migrated, upgrade_mode will be set to true // pass this back with the privileges to allow for the disabling of UI controls. - upgradeInProgress = info.upgrade_mode === true; + upgradeInProgress = body.upgrade_mode === true; } catch (error) { // if the ml.info check fails, it could be due to the user having insufficient privileges // most likely they do not have the ml_user role and therefore will be blocked from using diff --git a/x-pack/plugins/ml/server/lib/check_annotations/index.ts b/x-pack/plugins/ml/server/lib/check_annotations/index.ts index de19f0ead6791..964f9d0b92261 100644 --- a/x-pack/plugins/ml/server/lib/check_annotations/index.ts +++ b/x-pack/plugins/ml/server/lib/check_annotations/index.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -import { ILegacyScopedClusterClient } from 'kibana/server'; +import { IScopedClusterClient } from 'kibana/server'; import { mlLog } from '../../client/log'; import { @@ -17,18 +17,16 @@ import { // - ML_ANNOTATIONS_INDEX_PATTERN index is present // - ML_ANNOTATIONS_INDEX_ALIAS_READ alias is present // - ML_ANNOTATIONS_INDEX_ALIAS_WRITE alias is present -export async function isAnnotationsFeatureAvailable({ - callAsInternalUser, -}: ILegacyScopedClusterClient) { +export async function isAnnotationsFeatureAvailable({ asInternalUser }: IScopedClusterClient) { try { const indexParams = { index: ML_ANNOTATIONS_INDEX_PATTERN }; - const annotationsIndexExists = await callAsInternalUser('indices.exists', indexParams); + const { body: annotationsIndexExists } = await asInternalUser.indices.exists(indexParams); if (!annotationsIndexExists) { return false; } - const annotationsReadAliasExists = await callAsInternalUser('indices.existsAlias', { + const { body: annotationsReadAliasExists } = await asInternalUser.indices.existsAlias({ index: ML_ANNOTATIONS_INDEX_ALIAS_READ, name: ML_ANNOTATIONS_INDEX_ALIAS_READ, }); @@ -37,7 +35,7 @@ export async function isAnnotationsFeatureAvailable({ return false; } - const annotationsWriteAliasExists = await callAsInternalUser('indices.existsAlias', { + const { body: annotationsWriteAliasExists } = await asInternalUser.indices.existsAlias({ index: ML_ANNOTATIONS_INDEX_ALIAS_WRITE, name: ML_ANNOTATIONS_INDEX_ALIAS_WRITE, }); diff --git a/x-pack/plugins/ml/server/lib/license/ml_server_license.ts b/x-pack/plugins/ml/server/lib/license/ml_server_license.ts index bd0a29721248a..6e3019b303b88 100644 --- a/x-pack/plugins/ml/server/lib/license/ml_server_license.ts +++ b/x-pack/plugins/ml/server/lib/license/ml_server_license.ts @@ -7,7 +7,6 @@ import { KibanaRequest, KibanaResponseFactory, RequestHandlerContext, - ILegacyScopedClusterClient, IScopedClusterClient, RequestHandler, } from 'kibana/server'; @@ -15,7 +14,6 @@ import { import { MlLicense } from '../../../common/license'; type Handler = (handlerParams: { - legacyClient: ILegacyScopedClusterClient; client: IScopedClusterClient; request: KibanaRequest; response: 
KibanaResponseFactory; @@ -42,7 +40,6 @@ function guard(check: () => boolean, handler: Handler) { } return handler({ - legacyClient: context.ml!.mlClient, client: context.core.elasticsearch.client, request, response, diff --git a/x-pack/plugins/ml/server/models/annotation_service/annotation.test.ts b/x-pack/plugins/ml/server/models/annotation_service/annotation.test.ts index 5be443266ffe1..4c511b567615d 100644 --- a/x-pack/plugins/ml/server/models/annotation_service/annotation.test.ts +++ b/x-pack/plugins/ml/server/models/annotation_service/annotation.test.ts @@ -22,19 +22,15 @@ describe('annotation_service', () => { let mlClusterClientSpy = {} as any; beforeEach(() => { - const callAs = jest.fn((action: string) => { - switch (action) { - case 'delete': - case 'index': - return Promise.resolve(acknowledgedResponseMock); - case 'search': - return Promise.resolve(getAnnotationsResponseMock); - } - }); + const callAs = { + delete: jest.fn(() => Promise.resolve({ body: acknowledgedResponseMock })), + index: jest.fn(() => Promise.resolve({ body: acknowledgedResponseMock })), + search: jest.fn(() => Promise.resolve({ body: getAnnotationsResponseMock })), + }; mlClusterClientSpy = { - callAsCurrentUser: callAs, - callAsInternalUser: callAs, + asCurrentUser: callAs, + asInternalUser: callAs, }; }); @@ -52,8 +48,7 @@ describe('annotation_service', () => { const response = await deleteAnnotation(annotationMockId); - expect(mockFunct.callAsInternalUser.mock.calls[0][0]).toBe('delete'); - expect(mockFunct.callAsInternalUser.mock.calls[0][1]).toEqual(deleteParamsMock); + expect(mockFunct.asInternalUser.delete.mock.calls[0][0]).toStrictEqual(deleteParamsMock); expect(response).toBe(acknowledgedResponseMock); done(); }); @@ -73,8 +68,9 @@ describe('annotation_service', () => { const response: GetResponse = await getAnnotations(indexAnnotationArgsMock); - expect(mockFunct.callAsInternalUser.mock.calls[0][0]).toBe('search'); - expect(mockFunct.callAsInternalUser.mock.calls[0][1]).toEqual(getAnnotationsRequestMock); + expect(mockFunct.asInternalUser.search.mock.calls[0][0]).toStrictEqual( + getAnnotationsRequestMock + ); expect(Object.keys(response.annotations)).toHaveLength(1); expect(response.annotations[jobIdMock]).toHaveLength(2); expect(isAnnotations(response.annotations[jobIdMock])).toBeTruthy(); @@ -89,9 +85,9 @@ describe('annotation_service', () => { }; const mlClusterClientSpyError: any = { - callAsInternalUser: jest.fn(() => { - return Promise.resolve(mockEsError); - }), + asInternalUser: { + search: jest.fn(() => Promise.resolve({ body: mockEsError })), + }, }; const { getAnnotations } = annotationServiceProvider(mlClusterClientSpyError); @@ -124,10 +120,8 @@ describe('annotation_service', () => { const response = await indexAnnotation(annotationMock, usernameMock); - expect(mockFunct.callAsInternalUser.mock.calls[0][0]).toBe('index'); - // test if the annotation has been correctly augmented - const indexParamsCheck = mockFunct.callAsInternalUser.mock.calls[0][1]; + const indexParamsCheck = mockFunct.asInternalUser.index.mock.calls[0][0]; const annotation = indexParamsCheck.body; expect(annotation.create_username).toBe(usernameMock); expect(annotation.modified_username).toBe(usernameMock); @@ -154,10 +148,8 @@ describe('annotation_service', () => { const response = await indexAnnotation(annotationMock, usernameMock); - expect(mockFunct.callAsInternalUser.mock.calls[0][0]).toBe('index'); - // test if the annotation has been correctly augmented - const indexParamsCheck = 
mockFunct.callAsInternalUser.mock.calls[0][1]; + const indexParamsCheck = mockFunct.asInternalUser.index.mock.calls[0][0]; const annotation = indexParamsCheck.body; expect(annotation.create_username).toBe(usernameMock); expect(annotation.modified_username).toBe(usernameMock); @@ -196,9 +188,8 @@ describe('annotation_service', () => { await indexAnnotation(annotation, modifiedUsernameMock); - expect(mockFunct.callAsInternalUser.mock.calls[1][0]).toBe('index'); // test if the annotation has been correctly updated - const indexParamsCheck = mockFunct.callAsInternalUser.mock.calls[1][1]; + const indexParamsCheck = mockFunct.asInternalUser.index.mock.calls[0][0]; const modifiedAnnotation = indexParamsCheck.body; expect(modifiedAnnotation.annotation).toBe(modifiedAnnotationText); expect(modifiedAnnotation.create_username).toBe(originalUsernameMock); diff --git a/x-pack/plugins/ml/server/models/annotation_service/annotation.ts b/x-pack/plugins/ml/server/models/annotation_service/annotation.ts index a585449db0a25..24f1d6951c940 100644 --- a/x-pack/plugins/ml/server/models/annotation_service/annotation.ts +++ b/x-pack/plugins/ml/server/models/annotation_service/annotation.ts @@ -7,7 +7,7 @@ import Boom from 'boom'; import each from 'lodash/each'; import get from 'lodash/get'; -import { ILegacyScopedClusterClient } from 'kibana/server'; +import { IScopedClusterClient } from 'kibana/server'; import { ANNOTATION_EVENT_USER, ANNOTATION_TYPE } from '../../../common/constants/annotations'; import { PARTITION_FIELDS } from '../../../common/constants/anomalies'; @@ -67,17 +67,17 @@ export interface GetResponse { export interface IndexParams { index: string; body: Annotation; - refresh?: string; + refresh: boolean | 'wait_for' | undefined; id?: string; } export interface DeleteParams { index: string; - refresh?: string; + refresh: boolean | 'wait_for' | undefined; id: string; } -export function annotationProvider({ callAsInternalUser }: ILegacyScopedClusterClient) { +export function annotationProvider({ asInternalUser }: IScopedClusterClient) { async function indexAnnotation(annotation: Annotation, username: string) { if (isAnnotation(annotation) === false) { // No need to translate, this will not be exposed in the UI. @@ -104,7 +104,8 @@ export function annotationProvider({ callAsInternalUser }: ILegacyScopedClusterC delete params.body.key; } - return await callAsInternalUser('index', params); + const { body } = await asInternalUser.index(params); + return body; } async function getAnnotations({ @@ -287,14 +288,14 @@ export function annotationProvider({ callAsInternalUser }: ILegacyScopedClusterC }; try { - const resp = await callAsInternalUser('search', params); + const { body } = await asInternalUser.search(params); - if (resp.error !== undefined && resp.message !== undefined) { + if (body.error !== undefined && body.message !== undefined) { // No need to translate, this will not be exposed in the UI. throw new Error(`Annotations couldn't be retrieved from Elasticsearch.`); } - const docs: Annotations = get(resp, ['hits', 'hits'], []).map((d: EsResult) => { + const docs: Annotations = get(body, ['hits', 'hits'], []).map((d: EsResult) => { // get the original source document and the document id, we need it // to identify the annotation when editing/deleting it. 
// if original `event` is undefined then substitute with 'user` by default @@ -306,7 +307,7 @@ export function annotationProvider({ callAsInternalUser }: ILegacyScopedClusterC } as Annotation; }); - const aggregations = get(resp, ['aggregations'], {}) as EsAggregationResult; + const aggregations = get(body, ['aggregations'], {}) as EsAggregationResult; if (fields) { obj.aggregations = aggregations; } @@ -330,13 +331,14 @@ export function annotationProvider({ callAsInternalUser }: ILegacyScopedClusterC } async function deleteAnnotation(id: string) { - const param: DeleteParams = { + const params: DeleteParams = { index: ML_ANNOTATIONS_INDEX_ALIAS_WRITE, id, refresh: 'wait_for', }; - return await callAsInternalUser('delete', param); + const { body } = await asInternalUser.delete(params); + return body; } return { diff --git a/x-pack/plugins/ml/server/models/annotation_service/index.ts b/x-pack/plugins/ml/server/models/annotation_service/index.ts index e17af2a154b87..9fcb84e2938ae 100644 --- a/x-pack/plugins/ml/server/models/annotation_service/index.ts +++ b/x-pack/plugins/ml/server/models/annotation_service/index.ts @@ -4,11 +4,11 @@ * you may not use this file except in compliance with the Elastic License. */ -import { ILegacyScopedClusterClient } from 'kibana/server'; +import { IScopedClusterClient } from 'kibana/server'; import { annotationProvider } from './annotation'; -export function annotationServiceProvider(mlClusterClient: ILegacyScopedClusterClient) { +export function annotationServiceProvider(client: IScopedClusterClient) { return { - ...annotationProvider(mlClusterClient), + ...annotationProvider(client), }; } diff --git a/x-pack/plugins/ml/server/models/bucket_span_estimator/bucket_span_estimator.d.ts b/x-pack/plugins/ml/server/models/bucket_span_estimator/bucket_span_estimator.d.ts index eeabb24d9be3b..5b52414d6753a 100644 --- a/x-pack/plugins/ml/server/models/bucket_span_estimator/bucket_span_estimator.d.ts +++ b/x-pack/plugins/ml/server/models/bucket_span_estimator/bucket_span_estimator.d.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { ILegacyScopedClusterClient } from 'kibana/server'; +import { IScopedClusterClient } from 'kibana/server'; import { ES_AGGREGATION } from '../../../common/constants/aggregation_types'; export interface BucketSpanEstimatorData { @@ -21,6 +21,6 @@ export interface BucketSpanEstimatorData { } export function estimateBucketSpanFactory({ - callAsCurrentUser, - callAsInternalUser, -}: ILegacyScopedClusterClient): (config: BucketSpanEstimatorData) => Promise; + asCurrentUser, + asInternalUser, +}: IScopedClusterClient): (config: BucketSpanEstimatorData) => Promise; diff --git a/x-pack/plugins/ml/server/models/bucket_span_estimator/bucket_span_estimator.js b/x-pack/plugins/ml/server/models/bucket_span_estimator/bucket_span_estimator.js index 381c615051e3b..1d59db8fa564f 100644 --- a/x-pack/plugins/ml/server/models/bucket_span_estimator/bucket_span_estimator.js +++ b/x-pack/plugins/ml/server/models/bucket_span_estimator/bucket_span_estimator.js @@ -16,10 +16,10 @@ import { INTERVALS } from './intervals'; import { singleSeriesCheckerFactory } from './single_series_checker'; import { polledDataCheckerFactory } from './polled_data_checker'; -export function estimateBucketSpanFactory(mlClusterClient) { - const { callAsCurrentUser, callAsInternalUser } = mlClusterClient; - const PolledDataChecker = polledDataCheckerFactory(mlClusterClient); - const SingleSeriesChecker = singleSeriesCheckerFactory(mlClusterClient); +export function estimateBucketSpanFactory(client) { + const { asCurrentUser, asInternalUser } = client; + const PolledDataChecker = polledDataCheckerFactory(client); + const SingleSeriesChecker = singleSeriesCheckerFactory(client); class BucketSpanEstimator { constructor( @@ -246,21 +246,22 @@ export function estimateBucketSpanFactory(mlClusterClient) { const getFieldCardinality = function (index, field) { return new Promise((resolve, reject) => { - callAsCurrentUser('search', { - index, - size: 0, - body: { - aggs: { - field_count: { - cardinality: { - field, + asCurrentUser + .search({ + index, + size: 0, + body: { + aggs: { + field_count: { + cardinality: { + field, + }, }, }, }, - }, - }) - .then((resp) => { - const value = get(resp, ['aggregations', 'field_count', 'value'], 0); + }) + .then(({ body }) => { + const value = get(body, ['aggregations', 'field_count', 'value'], 0); resolve(value); }) .catch((resp) => { @@ -278,28 +279,29 @@ export function estimateBucketSpanFactory(mlClusterClient) { getFieldCardinality(index, field) .then((value) => { const numPartitions = Math.floor(value / NUM_PARTITIONS) || 1; - callAsCurrentUser('search', { - index, - size: 0, - body: { - query, - aggs: { - fields_bucket_counts: { - terms: { - field, - include: { - partition: 0, - num_partitions: numPartitions, + asCurrentUser + .search({ + index, + size: 0, + body: { + query, + aggs: { + fields_bucket_counts: { + terms: { + field, + include: { + partition: 0, + num_partitions: numPartitions, + }, }, }, }, }, - }, - }) - .then((partitionResp) => { + }) + .then(({ body }) => { // eslint-disable-next-line camelcase - if (partitionResp.aggregations?.fields_bucket_counts?.buckets !== undefined) { - const buckets = partitionResp.aggregations.fields_bucket_counts.buckets; + if (body.aggregations?.fields_bucket_counts?.buckets !== undefined) { + const buckets = body.aggregations.fields_bucket_counts.buckets; fieldValues = buckets.map((b) => b.key); } resolve(fieldValues); @@ -338,21 +340,21 @@ export function estimateBucketSpanFactory(mlClusterClient) { return new Promise((resolve, reject) => { // 
fetch the `search.max_buckets` cluster setting so we're able to // adjust aggregations to not exceed that limit. - callAsInternalUser('cluster.getSettings', { - flatSettings: true, - includeDefaults: true, - filterPath: '*.*max_buckets', - }) - .then((settings) => { - if (typeof settings !== 'object') { + asInternalUser.cluster + .getSettings({ + flat_settings: true, + include_defaults: true, + filter_path: '*.*max_buckets', + }) + .then(({ body }) => { + if (typeof body !== 'object') { reject('Unable to retrieve cluster settings'); } // search.max_buckets could exist in default, persistent or transient cluster settings - const maxBucketsSetting = (settings.defaults || - settings.persistent || - settings.transient || - {})['search.max_buckets']; + const maxBucketsSetting = (body.defaults || body.persistent || body.transient || {})[ + 'search.max_buckets' + ]; if (maxBucketsSetting === undefined) { reject('Unable to retrieve cluster setting search.max_buckets'); diff --git a/x-pack/plugins/ml/server/models/bucket_span_estimator/bucket_span_estimator.test.ts b/x-pack/plugins/ml/server/models/bucket_span_estimator/bucket_span_estimator.test.ts index f7c7dd8172ea5..35c4f1a0a741b 100644 --- a/x-pack/plugins/ml/server/models/bucket_span_estimator/bucket_span_estimator.test.ts +++ b/x-pack/plugins/ml/server/models/bucket_span_estimator/bucket_span_estimator.test.ts @@ -4,22 +4,21 @@ * you may not use this file except in compliance with the Elastic License. */ -import { ILegacyScopedClusterClient } from 'kibana/server'; +import { IScopedClusterClient } from 'kibana/server'; import { ES_AGGREGATION } from '../../../common/constants/aggregation_types'; import { estimateBucketSpanFactory, BucketSpanEstimatorData } from './bucket_span_estimator'; -const callAs = () => { - return new Promise((resolve) => { - resolve({}); - }) as Promise; +const callAs = { + search: () => Promise.resolve({ body: {} }), + cluster: { getSettings: () => Promise.resolve({ body: {} }) }, }; -const mlClusterClient: ILegacyScopedClusterClient = { - callAsCurrentUser: callAs, - callAsInternalUser: callAs, -}; +const mlClusterClient = ({ + asCurrentUser: callAs, + asInternalUser: callAs, +} as unknown) as IScopedClusterClient; // mock configuration to be passed to the estimator const formConfig: BucketSpanEstimatorData = { diff --git a/x-pack/plugins/ml/server/models/bucket_span_estimator/polled_data_checker.js b/x-pack/plugins/ml/server/models/bucket_span_estimator/polled_data_checker.js index d3bbd59f3cf9b..fd0cab7c0625d 100644 --- a/x-pack/plugins/ml/server/models/bucket_span_estimator/polled_data_checker.js +++ b/x-pack/plugins/ml/server/models/bucket_span_estimator/polled_data_checker.js @@ -12,7 +12,7 @@ import get from 'lodash/get'; -export function polledDataCheckerFactory({ callAsCurrentUser }) { +export function polledDataCheckerFactory({ asCurrentUser }) { class PolledDataChecker { constructor(index, timeField, duration, query) { this.index = index; @@ -65,14 +65,15 @@ export function polledDataCheckerFactory({ callAsCurrentUser }) { return search; } - performSearch(intervalMs) { - const body = this.createSearch(intervalMs); + async performSearch(intervalMs) { + const searchBody = this.createSearch(intervalMs); - return callAsCurrentUser('search', { + const { body } = await asCurrentUser.search({ index: this.index, size: 0, - body, + body: searchBody, }); + return body; } // test that the coefficient of variation of time difference between non-empty buckets is small diff --git 
a/x-pack/plugins/ml/server/models/bucket_span_estimator/single_series_checker.js b/x-pack/plugins/ml/server/models/bucket_span_estimator/single_series_checker.js index a5449395501dc..750f0cfc0b4a8 100644 --- a/x-pack/plugins/ml/server/models/bucket_span_estimator/single_series_checker.js +++ b/x-pack/plugins/ml/server/models/bucket_span_estimator/single_series_checker.js @@ -13,7 +13,7 @@ import { mlLog } from '../../client/log'; import { INTERVALS, LONG_INTERVALS } from './intervals'; -export function singleSeriesCheckerFactory({ callAsCurrentUser }) { +export function singleSeriesCheckerFactory({ asCurrentUser }) { const REF_DATA_INTERVAL = { name: '1h', ms: 3600000 }; class SingleSeriesChecker { @@ -184,14 +184,15 @@ export function singleSeriesCheckerFactory({ callAsCurrentUser }) { return search; } - performSearch(intervalMs) { - const body = this.createSearch(intervalMs); + async performSearch(intervalMs) { + const searchBody = this.createSearch(intervalMs); - return callAsCurrentUser('search', { + const { body } = await asCurrentUser.search({ index: this.index, size: 0, - body, + body: searchBody, }); + return body; } getFullBuckets(buckets) { diff --git a/x-pack/plugins/ml/server/models/calculate_model_memory_limit/calculate_model_memory_limit.ts b/x-pack/plugins/ml/server/models/calculate_model_memory_limit/calculate_model_memory_limit.ts index bc3c326e7d070..2a2c30601e2ca 100644 --- a/x-pack/plugins/ml/server/models/calculate_model_memory_limit/calculate_model_memory_limit.ts +++ b/x-pack/plugins/ml/server/models/calculate_model_memory_limit/calculate_model_memory_limit.ts @@ -5,13 +5,13 @@ */ import numeral from '@elastic/numeral'; -import { ILegacyScopedClusterClient } from 'kibana/server'; +import { IScopedClusterClient } from 'kibana/server'; import { MLCATEGORY } from '../../../common/constants/field_types'; import { AnalysisConfig } from '../../../common/types/anomaly_detection_jobs'; import { fieldsServiceProvider } from '../fields_service'; import { MlInfoResponse } from '../../../common/types/ml_server_info'; -interface ModelMemoryEstimationResult { +export interface ModelMemoryEstimationResult { /** * Result model memory limit */ @@ -29,15 +29,15 @@ interface ModelMemoryEstimationResult { /** * Response of the _estimate_model_memory endpoint. */ -export interface ModelMemoryEstimate { +export interface ModelMemoryEstimateResponse { model_memory_estimate: string; } /** * Retrieves overall and max bucket cardinalities. 
*/ -const cardinalityCheckProvider = (mlClusterClient: ILegacyScopedClusterClient) => { - const fieldsService = fieldsServiceProvider(mlClusterClient); +const cardinalityCheckProvider = (client: IScopedClusterClient) => { + const fieldsService = fieldsServiceProvider(client); return async ( analysisConfig: AnalysisConfig, @@ -123,9 +123,9 @@ const cardinalityCheckProvider = (mlClusterClient: ILegacyScopedClusterClient) = }; }; -export function calculateModelMemoryLimitProvider(mlClusterClient: ILegacyScopedClusterClient) { - const { callAsInternalUser } = mlClusterClient; - const getCardinalities = cardinalityCheckProvider(mlClusterClient); +export function calculateModelMemoryLimitProvider(client: IScopedClusterClient) { + const { asInternalUser } = client; + const getCardinalities = cardinalityCheckProvider(client); /** * Retrieves an estimated size of the model memory limit used in the job config @@ -141,7 +141,7 @@ export function calculateModelMemoryLimitProvider(mlClusterClient: ILegacyScoped latestMs: number, allowMMLGreaterThanMax = false ): Promise { - const info = (await callAsInternalUser('ml.info')) as MlInfoResponse; + const { body: info } = await asInternalUser.ml.info(); const maxModelMemoryLimit = info.limits.max_model_memory_limit?.toUpperCase(); const effectiveMaxModelMemoryLimit = info.limits.effective_max_model_memory_limit?.toUpperCase(); @@ -154,13 +154,14 @@ export function calculateModelMemoryLimitProvider(mlClusterClient: ILegacyScoped latestMs ); - const estimatedModelMemoryLimit = ((await callAsInternalUser('ml.estimateModelMemory', { + const { body } = await asInternalUser.ml.estimateModelMemory({ body: { analysis_config: analysisConfig, overall_cardinality: overallCardinality, max_bucket_cardinality: maxBucketCardinality, }, - })) as ModelMemoryEstimate).model_memory_estimate.toUpperCase(); + }); + const estimatedModelMemoryLimit = body.model_memory_estimate.toUpperCase(); let modelMemoryLimit = estimatedModelMemoryLimit; let mmlCappedAtMax = false; diff --git a/x-pack/plugins/ml/server/models/calendar/calendar_manager.ts b/x-pack/plugins/ml/server/models/calendar/calendar_manager.ts index b461e71c006ff..95684e790636f 100644 --- a/x-pack/plugins/ml/server/models/calendar/calendar_manager.ts +++ b/x-pack/plugins/ml/server/models/calendar/calendar_manager.ts @@ -5,7 +5,7 @@ */ import { difference } from 'lodash'; -import { ILegacyScopedClusterClient } from 'kibana/server'; +import { IScopedClusterClient } from 'kibana/server'; import { EventManager, CalendarEvent } from './event_manager'; interface BasicCalendar { @@ -23,30 +23,30 @@ export interface FormCalendar extends BasicCalendar { } export class CalendarManager { - private _callAsInternalUser: ILegacyScopedClusterClient['callAsInternalUser']; + private _asInternalUser: IScopedClusterClient['asInternalUser']; private _eventManager: EventManager; - constructor(mlClusterClient: ILegacyScopedClusterClient) { - this._callAsInternalUser = mlClusterClient.callAsInternalUser; - this._eventManager = new EventManager(mlClusterClient); + constructor(client: IScopedClusterClient) { + this._asInternalUser = client.asInternalUser; + this._eventManager = new EventManager(client); } async getCalendar(calendarId: string) { - const resp = await this._callAsInternalUser('ml.calendars', { - calendarId, + const { body } = await this._asInternalUser.ml.getCalendars({ + calendar_id: calendarId, }); - const calendars = resp.calendars; + const calendars = body.calendars; const calendar = calendars[0]; // Endpoint throws a 404 if 
calendar is not found. calendar.events = await this._eventManager.getCalendarEvents(calendarId); return calendar; } async getAllCalendars() { - const calendarsResp = await this._callAsInternalUser('ml.calendars'); + const { body } = await this._asInternalUser.ml.getCalendars({ size: 1000 }); const events: CalendarEvent[] = await this._eventManager.getAllEvents(); - const calendars: Calendar[] = calendarsResp.calendars; + const calendars: Calendar[] = body.calendars; calendars.forEach((cal) => (cal.events = [])); // loop events and combine with related calendars @@ -71,8 +71,8 @@ export class CalendarManager { async newCalendar(calendar: FormCalendar) { const { calendarId, events, ...newCalendar } = calendar; - await this._callAsInternalUser('ml.addCalendar', { - calendarId, + await this._asInternalUser.ml.putCalendar({ + calendar_id: calendarId, body: newCalendar, }); @@ -106,17 +106,17 @@ export class CalendarManager { // add all new jobs if (jobsToAdd.length) { - await this._callAsInternalUser('ml.addJobToCalendar', { - calendarId, - jobId: jobsToAdd.join(','), + await this._asInternalUser.ml.putCalendarJob({ + calendar_id: calendarId, + job_id: jobsToAdd.join(','), }); } // remove all removed jobs if (jobsToRemove.length) { - await this._callAsInternalUser('ml.removeJobFromCalendar', { - calendarId, - jobId: jobsToRemove.join(','), + await this._asInternalUser.ml.deleteCalendarJob({ + calendar_id: calendarId, + job_id: jobsToRemove.join(','), }); } @@ -137,6 +137,7 @@ export class CalendarManager { } async deleteCalendar(calendarId: string) { - return this._callAsInternalUser('ml.deleteCalendar', { calendarId }); + const { body } = await this._asInternalUser.ml.deleteCalendar({ calendar_id: calendarId }); + return body; } } diff --git a/x-pack/plugins/ml/server/models/calendar/event_manager.ts b/x-pack/plugins/ml/server/models/calendar/event_manager.ts index b670bbe187544..b42068e748394 100644 --- a/x-pack/plugins/ml/server/models/calendar/event_manager.ts +++ b/x-pack/plugins/ml/server/models/calendar/event_manager.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { ILegacyScopedClusterClient } from 'kibana/server'; +import { IScopedClusterClient } from 'kibana/server'; import { GLOBAL_CALENDAR } from '../../../common/constants/calendars'; export interface CalendarEvent { @@ -16,39 +16,42 @@ export interface CalendarEvent { } export class EventManager { - private _callAsInternalUser: ILegacyScopedClusterClient['callAsInternalUser']; - constructor({ callAsInternalUser }: ILegacyScopedClusterClient) { - this._callAsInternalUser = callAsInternalUser; + private _asInternalUser: IScopedClusterClient['asInternalUser']; + constructor({ asInternalUser }: IScopedClusterClient) { + this._asInternalUser = asInternalUser; } async getCalendarEvents(calendarId: string) { - const resp = await this._callAsInternalUser('ml.events', { calendarId }); + const { body } = await this._asInternalUser.ml.getCalendarEvents({ calendar_id: calendarId }); - return resp.events; + return body.events; } // jobId is optional async getAllEvents(jobId?: string) { const calendarId = GLOBAL_CALENDAR; - const resp = await this._callAsInternalUser('ml.events', { - calendarId, - jobId, + const { body } = await this._asInternalUser.ml.getCalendarEvents({ + calendar_id: calendarId, + job_id: jobId, }); - return resp.events; + return body.events; } async addEvents(calendarId: string, events: CalendarEvent[]) { const body = { events }; - return await this._callAsInternalUser('ml.addEvent', { - calendarId, + return await this._asInternalUser.ml.postCalendarEvents({ + calendar_id: calendarId, body, }); } async deleteEvent(calendarId: string, eventId: string) { - return this._callAsInternalUser('ml.deleteEvent', { calendarId, eventId }); + return this._asInternalUser.ml.deleteCalendarEvent({ + calendar_id: calendarId, + event_id: eventId, + }); } isEqual(ev1: CalendarEvent, ev2: CalendarEvent) { diff --git a/x-pack/plugins/ml/server/models/data_frame_analytics/analytics_audit_messages.ts b/x-pack/plugins/ml/server/models/data_frame_analytics/analytics_audit_messages.ts index 1cb0656e88a0b..0f4cac37d2e8f 100644 --- a/x-pack/plugins/ml/server/models/data_frame_analytics/analytics_audit_messages.ts +++ b/x-pack/plugins/ml/server/models/data_frame_analytics/analytics_audit_messages.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -import { ILegacyScopedClusterClient } from 'kibana/server'; +import { IScopedClusterClient } from 'kibana/server'; import { ML_NOTIFICATION_INDEX_PATTERN } from '../../../common/constants/index_patterns'; import { JobMessage } from '../../../common/types/audit_message'; @@ -23,7 +23,7 @@ interface BoolQuery { bool: { [key: string]: any }; } -export function analyticsAuditMessagesProvider({ callAsInternalUser }: ILegacyScopedClusterClient) { +export function analyticsAuditMessagesProvider({ asInternalUser }: IScopedClusterClient) { // search for audit messages, // analyticsId is optional. without it, all analytics will be listed. 
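Most of the hunks in this patch apply the same mechanical translation: the legacy callAsInternalUser / callAsCurrentUser calls (e.g. callAsInternalUser('search', params)) resolved directly to the Elasticsearch response body, while the new IScopedClusterClient exposes namespaced methods (asInternalUser.search, asInternalUser.ml.*) that resolve to a transport wrapper, so callers destructure body from the result. A minimal standalone sketch of the before/after shape; LegacyCaller, NewClient, totalHitsLegacy and totalHitsNew are illustrative stand-ins, not the real Kibana types or functions:

// Illustrative stand-ins for the two client shapes; not the actual Kibana interfaces.
type LegacyCaller = (endpoint: string, params: object) => Promise<any>;

interface NewClient {
  search: (params: object) => Promise<{ body: any }>;
}

// Legacy pattern (the removed lines): the resolved value is the response body itself.
async function totalHitsLegacy(callAsInternalUser: LegacyCaller, index: string): Promise<number> {
  const resp = await callAsInternalUser('search', { index, size: 0, rest_total_hits_as_int: true });
  return resp.hits.total;
}

// New pattern (the added lines): the resolved value wraps the body, so destructure it first.
async function totalHitsNew(asInternalUser: NewClient, index: string): Promise<number> {
  const { body } = await asInternalUser.search({ index, size: 0, rest_total_hits_as_int: true });
  return body.hits.total;
}

The other recurring change is that request parameters move from the legacy camelCase names (jobId, calendarId, datafeedId) to the snake_case names accepted by the new client (job_id, calendar_id, datafeed_id).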
async function getAnalyticsAuditMessages(analyticsId: string) { @@ -68,27 +68,23 @@ export function analyticsAuditMessagesProvider({ callAsInternalUser }: ILegacySc }); } - try { - const resp = await callAsInternalUser('search', { - index: ML_NOTIFICATION_INDEX_PATTERN, - ignore_unavailable: true, - rest_total_hits_as_int: true, - size: SIZE, - body: { - sort: [{ timestamp: { order: 'desc' } }, { job_id: { order: 'asc' } }], - query, - }, - }); + const { body } = await asInternalUser.search({ + index: ML_NOTIFICATION_INDEX_PATTERN, + ignore_unavailable: true, + rest_total_hits_as_int: true, + size: SIZE, + body: { + sort: [{ timestamp: { order: 'desc' } }, { job_id: { order: 'asc' } }], + query, + }, + }); - let messages = []; - if (resp.hits.total !== 0) { - messages = resp.hits.hits.map((hit: Message) => hit._source); - messages.reverse(); - } - return messages; - } catch (e) { - throw e; + let messages = []; + if (body.hits.total !== 0) { + messages = body.hits.hits.map((hit: Message) => hit._source); + messages.reverse(); } + return messages; } return { diff --git a/x-pack/plugins/ml/server/models/data_recognizer/data_recognizer.test.ts b/x-pack/plugins/ml/server/models/data_recognizer/data_recognizer.test.ts index 82d7707464308..ab879d4c93b29 100644 --- a/x-pack/plugins/ml/server/models/data_recognizer/data_recognizer.test.ts +++ b/x-pack/plugins/ml/server/models/data_recognizer/data_recognizer.test.ts @@ -4,13 +4,20 @@ * you may not use this file except in compliance with the Elastic License. */ -import { SavedObjectsClientContract, KibanaRequest } from 'kibana/server'; +import { SavedObjectsClientContract, KibanaRequest, IScopedClusterClient } from 'kibana/server'; import { Module } from '../../../common/types/modules'; import { DataRecognizer } from '../data_recognizer'; +const callAs = () => Promise.resolve({ body: {} }); + +const mlClusterClient = ({ + asCurrentUser: callAs, + asInternalUser: callAs, +} as unknown) as IScopedClusterClient; + describe('ML - data recognizer', () => { const dr = new DataRecognizer( - { callAsCurrentUser: jest.fn(), callAsInternalUser: jest.fn() }, + mlClusterClient, ({ find: jest.fn(), bulkCreate: jest.fn(), diff --git a/x-pack/plugins/ml/server/models/data_recognizer/data_recognizer.ts b/x-pack/plugins/ml/server/models/data_recognizer/data_recognizer.ts index 206baacd98322..820fcfa9253b6 100644 --- a/x-pack/plugins/ml/server/models/data_recognizer/data_recognizer.ts +++ b/x-pack/plugins/ml/server/models/data_recognizer/data_recognizer.ts @@ -7,15 +7,11 @@ import fs from 'fs'; import Boom from 'boom'; import numeral from '@elastic/numeral'; -import { - KibanaRequest, - ILegacyScopedClusterClient, - SavedObjectsClientContract, -} from 'kibana/server'; +import { KibanaRequest, IScopedClusterClient, SavedObjectsClientContract } from 'kibana/server'; import moment from 'moment'; import { IndexPatternAttributes } from 'src/plugins/data/server'; import { merge } from 'lodash'; -import { AnalysisLimits, CombinedJobWithStats } from '../../../common/types/anomaly_detection_jobs'; +import { AnalysisLimits } from '../../../common/types/anomaly_detection_jobs'; import { getAuthorizationHeader } from '../../lib/request_authorization'; import { MlInfoResponse } from '../../../common/types/ml_server_info'; import { @@ -46,6 +42,7 @@ import { fieldsServiceProvider } from '../fields_service'; import { jobServiceProvider } from '../job_service'; import { resultsServiceProvider } from '../results_service'; import { JobExistResult, JobStat } from 
'../../../common/types/data_recognizer'; +import { MlJobsStatsResponse } from '../job_service/jobs'; const ML_DIR = 'ml'; const KIBANA_DIR = 'kibana'; @@ -74,10 +71,6 @@ interface RawModuleConfig { }; } -interface MlJobStats { - jobs: CombinedJobWithStats[]; -} - interface Config { dirName: any; json: RawModuleConfig; @@ -111,9 +104,9 @@ interface SaveResults { } export class DataRecognizer { - private _callAsCurrentUser: ILegacyScopedClusterClient['callAsCurrentUser']; - private _callAsInternalUser: ILegacyScopedClusterClient['callAsInternalUser']; - private _mlClusterClient: ILegacyScopedClusterClient; + private _asCurrentUser: IScopedClusterClient['asCurrentUser']; + private _asInternalUser: IScopedClusterClient['asInternalUser']; + private _client: IScopedClusterClient; private _authorizationHeader: object; private _modulesDir = `${__dirname}/modules`; private _indexPatternName: string = ''; @@ -124,13 +117,13 @@ export class DataRecognizer { jobsForModelMemoryEstimation: Array<{ job: ModuleJob; query: any }> = []; constructor( - mlClusterClient: ILegacyScopedClusterClient, + mlClusterClient: IScopedClusterClient, private savedObjectsClient: SavedObjectsClientContract, request: KibanaRequest ) { - this._mlClusterClient = mlClusterClient; - this._callAsCurrentUser = mlClusterClient.callAsCurrentUser; - this._callAsInternalUser = mlClusterClient.callAsInternalUser; + this._client = mlClusterClient; + this._asCurrentUser = mlClusterClient.asCurrentUser; + this._asInternalUser = mlClusterClient.asInternalUser; this._authorizationHeader = getAuthorizationHeader(request); } @@ -249,18 +242,18 @@ export class DataRecognizer { const index = indexPattern; const size = 0; - const body = { + const searchBody = { query: moduleConfig.query, }; - const resp = await this._callAsCurrentUser('search', { + const { body } = await this._asCurrentUser.search({ index, rest_total_hits_as_int: true, size, - body, + body: searchBody, }); - return resp.hits.total !== 0; + return body.hits.total !== 0; } async listModules() { @@ -518,7 +511,7 @@ export class DataRecognizer { // Add a wildcard at the front of each of the job IDs in the module, // as a prefix may have been supplied when creating the jobs in the module. const jobIds = module.jobs.map((job) => `*${job.id}`); - const { jobsExist } = jobServiceProvider(this._mlClusterClient); + const { jobsExist } = jobServiceProvider(this._client); const jobInfo = await jobsExist(jobIds); // Check if the value for any of the jobs is false. @@ -527,16 +520,16 @@ export class DataRecognizer { if (doJobsExist === true) { // Get the IDs of the jobs created from the module, and their earliest / latest timestamps. 
- const jobStats: MlJobStats = await this._callAsInternalUser('ml.jobStats', { - jobId: jobIds, + const { body } = await this._asInternalUser.ml.getJobStats({ + job_id: jobIds.join(), }); const jobStatsJobs: JobStat[] = []; - if (jobStats.jobs && jobStats.jobs.length > 0) { - const foundJobIds = jobStats.jobs.map((job) => job.job_id); - const { getLatestBucketTimestampByJob } = resultsServiceProvider(this._mlClusterClient); + if (body.jobs && body.jobs.length > 0) { + const foundJobIds = body.jobs.map((job) => job.job_id); + const { getLatestBucketTimestampByJob } = resultsServiceProvider(this._client); const latestBucketTimestampsByJob = await getLatestBucketTimestampByJob(foundJobIds); - jobStats.jobs.forEach((job) => { + body.jobs.forEach((job) => { const jobStat = { id: job.job_id, } as JobStat; @@ -704,16 +697,15 @@ export class DataRecognizer { job.id = jobId; await this.saveJob(job); return { id: jobId, success: true }; - } catch (error) { - return { id: jobId, success: false, error }; + } catch ({ body }) { + return { id: jobId, success: false, error: body }; } }) ); } async saveJob(job: ModuleJob) { - const { id: jobId, config: body } = job; - return this._callAsInternalUser('ml.addJob', { jobId, body }); + return this._asInternalUser.ml.putJob({ job_id: job.id, body: job.config }); } // save the datafeeds. @@ -725,20 +717,21 @@ export class DataRecognizer { try { await this.saveDatafeed(datafeed); return { id: datafeed.id, success: true, started: false }; - } catch (error) { - return { id: datafeed.id, success: false, started: false, error }; + } catch ({ body }) { + return { id: datafeed.id, success: false, started: false, error: body }; } }) ); } async saveDatafeed(datafeed: ModuleDataFeed) { - const { id: datafeedId, config: body } = datafeed; - return this._callAsInternalUser('ml.addDatafeed', { - datafeedId, - body, - ...this._authorizationHeader, - }); + return this._asInternalUser.ml.putDatafeed( + { + datafeed_id: datafeed.id, + body: datafeed.config, + }, + this._authorizationHeader + ); } async startDatafeeds( @@ -761,10 +754,10 @@ export class DataRecognizer { const result = { started: false } as DatafeedResponse; let opened = false; try { - const openResult = await this._callAsInternalUser('ml.openJob', { - jobId: datafeed.config.job_id, + const { body } = await this._asInternalUser.ml.openJob({ + job_id: datafeed.config.job_id, }); - opened = openResult.opened; + opened = body.opened; } catch (error) { // if the job is already open, a 409 will be returned. 
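// A note on the error shape assumed by these handlers: the new client rejects with a
// ResponseError-like object, so the HTTP status is read from error.statusCode and the
// parsed Elasticsearch error payload from error.body; that is why the catch blocks in
// this file now keep error.body (or destructure { body }) instead of the whole error.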
if (error.statusCode === 409) { @@ -772,27 +765,27 @@ export class DataRecognizer { } else { opened = false; result.started = false; - result.error = error; + result.error = error.body; } } if (opened) { try { - const duration: { start: number; end?: number } = { start: 0 }; + const duration: { start: string; end?: string } = { start: '0' }; if (start !== undefined) { - duration.start = start; + duration.start = (start as unknown) as string; } if (end !== undefined) { - duration.end = end; + duration.end = (end as unknown) as string; } - await this._callAsInternalUser('ml.startDatafeed', { - datafeedId: datafeed.id, + await this._asInternalUser.ml.startDatafeed({ + datafeed_id: datafeed.id, ...duration, }); result.started = true; - } catch (error) { + } catch ({ body }) { result.started = false; - result.error = error; + result.error = body; } } return result; @@ -995,7 +988,7 @@ export class DataRecognizer { timeField: string, query?: any ): Promise<{ start: number; end: number }> { - const fieldsService = fieldsServiceProvider(this._mlClusterClient); + const fieldsService = fieldsServiceProvider(this._client); const timeFieldRange = await fieldsService.getTimeFieldRange( this._indexPatternName, @@ -1025,7 +1018,7 @@ export class DataRecognizer { if (estimateMML && this.jobsForModelMemoryEstimation.length > 0) { try { - const calculateModelMemoryLimit = calculateModelMemoryLimitProvider(this._mlClusterClient); + const calculateModelMemoryLimit = calculateModelMemoryLimitProvider(this._client); // Checks if all jobs in the module have the same time field configured const firstJobTimeField = this.jobsForModelMemoryEstimation[0].job.config.data_description @@ -1074,11 +1067,13 @@ export class DataRecognizer { job.config.analysis_limits.model_memory_limit = modelMemoryLimit; } } catch (error) { - mlLog.warn(`Data recognizer could not estimate model memory limit ${error}`); + mlLog.warn(`Data recognizer could not estimate model memory limit ${error.body}`); } } - const { limits } = (await this._callAsInternalUser('ml.info')) as MlInfoResponse; + const { + body: { limits }, + } = await this._asInternalUser.ml.info(); const maxMml = limits.max_model_memory_limit; if (!maxMml) { diff --git a/x-pack/plugins/ml/server/models/data_visualizer/data_visualizer.ts b/x-pack/plugins/ml/server/models/data_visualizer/data_visualizer.ts index 838315d8d272c..dbfa4b5656e5f 100644 --- a/x-pack/plugins/ml/server/models/data_visualizer/data_visualizer.ts +++ b/x-pack/plugins/ml/server/models/data_visualizer/data_visualizer.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -import { ILegacyScopedClusterClient } from 'kibana/server'; +import { IScopedClusterClient } from 'kibana/server'; import get from 'lodash/get'; import each from 'lodash/each'; import last from 'lodash/last'; @@ -183,7 +183,7 @@ type BatchStats = | FieldExamples; const getAggIntervals = async ( - { callAsCurrentUser }: ILegacyScopedClusterClient, + { asCurrentUser }: IScopedClusterClient, indexPatternTitle: string, query: any, fields: HistogramField[], @@ -207,7 +207,7 @@ const getAggIntervals = async ( return aggs; }, {} as Record); - const respStats = await callAsCurrentUser('search', { + const { body } = await asCurrentUser.search({ index: indexPatternTitle, size: 0, body: { @@ -218,8 +218,7 @@ const getAggIntervals = async ( }); const aggsPath = getSamplerAggregationsResponsePath(samplerShardSize); - const aggregations = - aggsPath.length > 0 ? 
get(respStats.aggregations, aggsPath) : respStats.aggregations; + const aggregations = aggsPath.length > 0 ? get(body.aggregations, aggsPath) : body.aggregations; return Object.keys(aggregations).reduce((p, aggName) => { const stats = [aggregations[aggName].min, aggregations[aggName].max]; @@ -241,15 +240,15 @@ const getAggIntervals = async ( // export for re-use by transforms plugin export const getHistogramsForFields = async ( - mlClusterClient: ILegacyScopedClusterClient, + client: IScopedClusterClient, indexPatternTitle: string, query: any, fields: HistogramField[], samplerShardSize: number ) => { - const { callAsCurrentUser } = mlClusterClient; + const { asCurrentUser } = client; const aggIntervals = await getAggIntervals( - mlClusterClient, + client, indexPatternTitle, query, fields, @@ -291,7 +290,7 @@ export const getHistogramsForFields = async ( return []; } - const respChartsData = await callAsCurrentUser('search', { + const { body } = await asCurrentUser.search({ index: indexPatternTitle, size: 0, body: { @@ -302,8 +301,7 @@ export const getHistogramsForFields = async ( }); const aggsPath = getSamplerAggregationsResponsePath(samplerShardSize); - const aggregations = - aggsPath.length > 0 ? get(respChartsData.aggregations, aggsPath) : respChartsData.aggregations; + const aggregations = aggsPath.length > 0 ? get(body.aggregations, aggsPath) : body.aggregations; const chartsData: ChartData[] = fields.map( (field): ChartData => { @@ -350,12 +348,12 @@ export const getHistogramsForFields = async ( }; export class DataVisualizer { - private _mlClusterClient: ILegacyScopedClusterClient; - private _callAsCurrentUser: ILegacyScopedClusterClient['callAsCurrentUser']; + private _client: IScopedClusterClient; + private _asCurrentUser: IScopedClusterClient['asCurrentUser']; - constructor(mlClusterClient: ILegacyScopedClusterClient) { - this._callAsCurrentUser = mlClusterClient.callAsCurrentUser; - this._mlClusterClient = mlClusterClient; + constructor(client: IScopedClusterClient) { + this._asCurrentUser = client.asCurrentUser; + this._client = client; } // Obtains overall stats on the fields in the supplied index pattern, returning an object @@ -451,7 +449,7 @@ export class DataVisualizer { samplerShardSize: number ): Promise { return await getHistogramsForFields( - this._mlClusterClient, + this._client, indexPatternTitle, query, fields, @@ -621,7 +619,7 @@ export class DataVisualizer { }; }); - const body = { + const searchBody = { query: { bool: { filter: filterCriteria, @@ -630,14 +628,14 @@ export class DataVisualizer { aggs: buildSamplerAggregation(aggs, samplerShardSize), }; - const resp = await this._callAsCurrentUser('search', { + const { body } = await this._asCurrentUser.search({ index, rest_total_hits_as_int: true, size, - body, + body: searchBody, }); - const aggregations = resp.aggregations; - const totalCount = get(resp, ['hits', 'total'], 0); + const aggregations = body.aggregations; + const totalCount = get(body, ['hits', 'total'], 0); const stats = { totalCount, aggregatableExistsFields: [] as FieldData[], @@ -688,7 +686,7 @@ export class DataVisualizer { const size = 0; const filterCriteria = buildBaseFilterCriteria(timeFieldName, earliestMs, latestMs, query); - const body = { + const searchBody = { query: { bool: { filter: filterCriteria, @@ -697,13 +695,13 @@ export class DataVisualizer { }; filterCriteria.push({ exists: { field } }); - const resp = await this._callAsCurrentUser('search', { + const { body } = await this._asCurrentUser.search({ index, 
rest_total_hits_as_int: true, size, - body, + body: searchBody, }); - return resp.hits.total > 0; + return body.hits.total > 0; } async getDocumentCountStats( @@ -730,7 +728,7 @@ export class DataVisualizer { }, }; - const body = { + const searchBody = { query: { bool: { filter: filterCriteria, @@ -739,15 +737,15 @@ export class DataVisualizer { aggs, }; - const resp = await this._callAsCurrentUser('search', { + const { body } = await this._asCurrentUser.search({ index, size, - body, + body: searchBody, }); const buckets: { [key: string]: number } = {}; const dataByTimeBucket: Array<{ key: string; doc_count: number }> = get( - resp, + body, ['aggregations', 'eventRate', 'buckets'], [] ); @@ -833,7 +831,7 @@ export class DataVisualizer { } }); - const body = { + const searchBody = { query: { bool: { filter: filterCriteria, @@ -842,12 +840,12 @@ export class DataVisualizer { aggs: buildSamplerAggregation(aggs, samplerShardSize), }; - const resp = await this._callAsCurrentUser('search', { + const { body } = await this._asCurrentUser.search({ index, size, - body, + body: searchBody, }); - const aggregations = resp.aggregations; + const aggregations = body.aggregations; const aggsPath = getSamplerAggregationsResponsePath(samplerShardSize); const batchStats: NumericFieldStats[] = []; fields.forEach((field, i) => { @@ -954,7 +952,7 @@ export class DataVisualizer { } }); - const body = { + const searchBody = { query: { bool: { filter: filterCriteria, @@ -963,12 +961,12 @@ export class DataVisualizer { aggs: buildSamplerAggregation(aggs, samplerShardSize), }; - const resp = await this._callAsCurrentUser('search', { + const { body } = await this._asCurrentUser.search({ index, size, - body, + body: searchBody, }); - const aggregations = resp.aggregations; + const aggregations = body.aggregations; const aggsPath = getSamplerAggregationsResponsePath(samplerShardSize); const batchStats: StringFieldStats[] = []; fields.forEach((field, i) => { @@ -1028,7 +1026,7 @@ export class DataVisualizer { }; }); - const body = { + const searchBody = { query: { bool: { filter: filterCriteria, @@ -1037,12 +1035,12 @@ export class DataVisualizer { aggs: buildSamplerAggregation(aggs, samplerShardSize), }; - const resp = await this._callAsCurrentUser('search', { + const { body } = await this._asCurrentUser.search({ index, size, - body, + body: searchBody, }); - const aggregations = resp.aggregations; + const aggregations = body.aggregations; const aggsPath = getSamplerAggregationsResponsePath(samplerShardSize); const batchStats: DateFieldStats[] = []; fields.forEach((field, i) => { @@ -1095,7 +1093,7 @@ export class DataVisualizer { }; }); - const body = { + const searchBody = { query: { bool: { filter: filterCriteria, @@ -1104,12 +1102,12 @@ export class DataVisualizer { aggs: buildSamplerAggregation(aggs, samplerShardSize), }; - const resp = await this._callAsCurrentUser('search', { + const { body } = await this._asCurrentUser.search({ index, size, - body, + body: searchBody, }); - const aggregations = resp.aggregations; + const aggregations = body.aggregations; const aggsPath = getSamplerAggregationsResponsePath(samplerShardSize); const batchStats: BooleanFieldStats[] = []; fields.forEach((field, i) => { @@ -1157,7 +1155,7 @@ export class DataVisualizer { exists: { field }, }); - const body = { + const searchBody = { _source: field, query: { bool: { @@ -1166,18 +1164,18 @@ export class DataVisualizer { }, }; - const resp = await this._callAsCurrentUser('search', { + const { body } = await this._asCurrentUser.search({ 
index, rest_total_hits_as_int: true, size, - body, + body: searchBody, }); const stats = { fieldName: field, examples: [] as any[], }; - if (resp.hits.total !== 0) { - const hits = resp.hits.hits; + if (body.hits.total !== 0) { + const hits = body.hits.hits; for (let i = 0; i < hits.length; i++) { // Look in the _source for the field value. // If the field is not in the _source (as will happen if the diff --git a/x-pack/plugins/ml/server/models/fields_service/fields_service.ts b/x-pack/plugins/ml/server/models/fields_service/fields_service.ts index 43a6876f76c49..68427e98750eb 100644 --- a/x-pack/plugins/ml/server/models/fields_service/fields_service.ts +++ b/x-pack/plugins/ml/server/models/fields_service/fields_service.ts @@ -5,7 +5,7 @@ */ import Boom from 'boom'; -import { ILegacyScopedClusterClient } from 'kibana/server'; +import { IScopedClusterClient } from 'kibana/server'; import { duration } from 'moment'; import { parseInterval } from '../../../common/util/parse_interval'; import { initCardinalityFieldsCache } from './fields_aggs_cache'; @@ -14,7 +14,7 @@ import { initCardinalityFieldsCache } from './fields_aggs_cache'; * Service for carrying out queries to obtain data * specific to fields in Elasticsearch indices. */ -export function fieldsServiceProvider({ callAsCurrentUser }: ILegacyScopedClusterClient) { +export function fieldsServiceProvider({ asCurrentUser }: IScopedClusterClient) { const fieldsAggsCache = initCardinalityFieldsCache(); /** @@ -37,13 +37,13 @@ export function fieldsServiceProvider({ callAsCurrentUser }: ILegacyScopedCluste index: string | string[], fieldNames: string[] ): Promise { - const fieldCapsResp = await callAsCurrentUser('fieldCaps', { + const { body } = await asCurrentUser.fieldCaps({ index, fields: fieldNames, }); const aggregatableFields: string[] = []; fieldNames.forEach((fieldName) => { - const fieldInfo = fieldCapsResp.fields[fieldName]; + const fieldInfo = body.fields[fieldName]; const typeKeys = fieldInfo !== undefined ? 
Object.keys(fieldInfo) : []; if (typeKeys.length > 0) { const fieldType = typeKeys[0]; @@ -130,12 +130,12 @@ export function fieldsServiceProvider({ callAsCurrentUser }: ILegacyScopedCluste aggs, }; - const aggregations = ( - await callAsCurrentUser('search', { - index, - body, - }) - )?.aggregations; + const { + body: { aggregations }, + } = await asCurrentUser.search({ + index, + body, + }); if (!aggregations) { return {}; @@ -170,7 +170,9 @@ export function fieldsServiceProvider({ callAsCurrentUser }: ILegacyScopedCluste }> { const obj = { success: true, start: { epoch: 0, string: '' }, end: { epoch: 0, string: '' } }; - const resp = await callAsCurrentUser('search', { + const { + body: { aggregations }, + } = await asCurrentUser.search({ index, size: 0, body: { @@ -190,12 +192,12 @@ export function fieldsServiceProvider({ callAsCurrentUser }: ILegacyScopedCluste }, }); - if (resp.aggregations && resp.aggregations.earliest && resp.aggregations.latest) { - obj.start.epoch = resp.aggregations.earliest.value; - obj.start.string = resp.aggregations.earliest.value_as_string; + if (aggregations && aggregations.earliest && aggregations.latest) { + obj.start.epoch = aggregations.earliest.value; + obj.start.string = aggregations.earliest.value_as_string; - obj.end.epoch = resp.aggregations.latest.value; - obj.end.string = resp.aggregations.latest.value_as_string; + obj.end.epoch = aggregations.latest.value; + obj.end.string = aggregations.latest.value_as_string; } return obj; } @@ -338,12 +340,12 @@ export function fieldsServiceProvider({ callAsCurrentUser }: ILegacyScopedCluste }, }; - const aggregations = ( - await callAsCurrentUser('search', { - index, - body, - }) - )?.aggregations; + const { + body: { aggregations }, + } = await asCurrentUser.search({ + index, + body, + }); if (!aggregations) { return cachedValues; diff --git a/x-pack/plugins/ml/server/models/file_data_visualizer/file_data_visualizer.ts b/x-pack/plugins/ml/server/models/file_data_visualizer/file_data_visualizer.ts index 9cd71c046b66c..9e502c04fbb7b 100644 --- a/x-pack/plugins/ml/server/models/file_data_visualizer/file_data_visualizer.ts +++ b/x-pack/plugins/ml/server/models/file_data_visualizer/file_data_visualizer.ts @@ -4,18 +4,20 @@ * you may not use this file except in compliance with the Elastic License. */ -import { ILegacyScopedClusterClient } from 'kibana/server'; +import { IScopedClusterClient } from 'kibana/server'; import { AnalysisResult, FormattedOverrides, InputOverrides, + FindFileStructureResponse, } from '../../../common/types/file_datavisualizer'; export type InputData = any[]; -export function fileDataVisualizerProvider({ callAsInternalUser }: ILegacyScopedClusterClient) { - async function analyzeFile(data: any, overrides: any): Promise { - const results = await callAsInternalUser('ml.fileStructure', { +export function fileDataVisualizerProvider({ asInternalUser }: IScopedClusterClient) { + async function analyzeFile(data: InputData, overrides: InputOverrides): Promise { + overrides.explain = overrides.explain === undefined ? 
'true' : overrides.explain; + const { body } = await asInternalUser.ml.findFileStructure({ body: data, ...overrides, }); @@ -24,7 +26,7 @@ export function fileDataVisualizerProvider({ callAsInternalUser }: ILegacyScoped return { ...(hasOverrides && { overrides: reducedOverrides }), - results, + results: body, }; } diff --git a/x-pack/plugins/ml/server/models/file_data_visualizer/import_data.ts b/x-pack/plugins/ml/server/models/file_data_visualizer/import_data.ts index fc9b333298c9d..6108454c08aa7 100644 --- a/x-pack/plugins/ml/server/models/file_data_visualizer/import_data.ts +++ b/x-pack/plugins/ml/server/models/file_data_visualizer/import_data.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -import { ILegacyScopedClusterClient } from 'kibana/server'; +import { IScopedClusterClient } from 'kibana/server'; import { INDEX_META_DATA_CREATED_BY } from '../../../common/constants/file_datavisualizer'; import { ImportResponse, @@ -15,7 +15,7 @@ import { } from '../../../common/types/file_datavisualizer'; import { InputData } from './file_data_visualizer'; -export function importDataProvider({ callAsCurrentUser }: ILegacyScopedClusterClient) { +export function importDataProvider({ asCurrentUser }: IScopedClusterClient) { async function importData( id: string, index: string, @@ -40,9 +40,9 @@ export function importDataProvider({ callAsCurrentUser }: ILegacyScopedClusterCl // create the pipeline if one has been supplied if (pipelineId !== undefined) { - const success = await createPipeline(pipelineId, pipeline); - if (success.acknowledged !== true) { - throw success; + const resp = await createPipeline(pipelineId, pipeline); + if (resp.acknowledged !== true) { + throw resp; } } createdPipelineId = pipelineId; @@ -80,7 +80,7 @@ export function importDataProvider({ callAsCurrentUser }: ILegacyScopedClusterCl id, index: createdIndex, pipelineId: createdPipelineId, - error: error.error !== undefined ? error.error : error, + error: error.body !== undefined ? 
error.body : error, docCount, ingestError: error.ingestError, failures: error.failures || [], @@ -102,7 +102,7 @@ export function importDataProvider({ callAsCurrentUser }: ILegacyScopedClusterCl body.settings = settings; } - await callAsCurrentUser('indices.create', { index, body }); + await asCurrentUser.indices.create({ index, body }); } async function indexData(index: string, pipelineId: string, data: InputData) { @@ -118,7 +118,7 @@ export function importDataProvider({ callAsCurrentUser }: ILegacyScopedClusterCl settings.pipeline = pipelineId; } - const resp = await callAsCurrentUser('bulk', settings); + const { body: resp } = await asCurrentUser.bulk(settings); if (resp.errors) { throw resp; } else { @@ -151,7 +151,8 @@ export function importDataProvider({ callAsCurrentUser }: ILegacyScopedClusterCl } async function createPipeline(id: string, pipeline: any) { - return await callAsCurrentUser('ingest.putPipeline', { id, body: pipeline }); + const { body } = await asCurrentUser.ingest.putPipeline({ id, body: pipeline }); + return body; } function getFailures(items: any[], data: InputData): ImportFailure[] { diff --git a/x-pack/plugins/ml/server/models/filter/filter_manager.ts b/x-pack/plugins/ml/server/models/filter/filter_manager.ts index 768ca1f893b68..19ba1b76f8a60 100644 --- a/x-pack/plugins/ml/server/models/filter/filter_manager.ts +++ b/x-pack/plugins/ml/server/models/filter/filter_manager.ts @@ -5,7 +5,7 @@ */ import Boom from 'boom'; -import { ILegacyScopedClusterClient } from 'kibana/server'; +import { IScopedClusterClient } from 'kibana/server'; import { DetectorRule, DetectorRuleScope } from '../../../common/types/detector_rules'; @@ -58,26 +58,26 @@ interface PartialJob { } export class FilterManager { - private _callAsInternalUser: ILegacyScopedClusterClient['callAsInternalUser']; - constructor({ callAsInternalUser }: ILegacyScopedClusterClient) { - this._callAsInternalUser = callAsInternalUser; + private _asInternalUser: IScopedClusterClient['asInternalUser']; + constructor({ asInternalUser }: IScopedClusterClient) { + this._asInternalUser = asInternalUser; } async getFilter(filterId: string) { try { const [JOBS, FILTERS] = [0, 1]; const results = await Promise.all([ - this._callAsInternalUser('ml.jobs'), - this._callAsInternalUser('ml.filters', { filterId }), + this._asInternalUser.ml.getJobs(), + this._asInternalUser.ml.getFilters({ filter_id: filterId }), ]); - if (results[FILTERS] && results[FILTERS].filters.length) { + if (results[FILTERS] && results[FILTERS].body.filters.length) { let filtersInUse: FiltersInUse = {}; - if (results[JOBS] && results[JOBS].jobs) { - filtersInUse = this.buildFiltersInUse(results[JOBS].jobs); + if (results[JOBS] && results[JOBS].body.jobs) { + filtersInUse = this.buildFiltersInUse(results[JOBS].body.jobs); } - const filter = results[FILTERS].filters[0]; + const filter = results[FILTERS].body.filters[0]; filter.used_by = filtersInUse[filter.filter_id]; return filter; } else { @@ -90,8 +90,8 @@ export class FilterManager { async getAllFilters() { try { - const filtersResp = await this._callAsInternalUser('ml.filters'); - return filtersResp.filters; + const { body } = await this._asInternalUser.ml.getFilters({ size: 1000 }); + return body.filters; } catch (error) { throw Boom.badRequest(error); } @@ -101,14 +101,14 @@ export class FilterManager { try { const [JOBS, FILTERS] = [0, 1]; const results = await Promise.all([ - this._callAsInternalUser('ml.jobs'), - this._callAsInternalUser('ml.filters'), + this._asInternalUser.ml.getJobs(), + 
this._asInternalUser.ml.getFilters({ size: 1000 }), ]); // Build a map of filter_ids against jobs and detectors using that filter. let filtersInUse: FiltersInUse = {}; - if (results[JOBS] && results[JOBS].jobs) { - filtersInUse = this.buildFiltersInUse(results[JOBS].jobs); + if (results[JOBS] && results[JOBS].body.jobs) { + filtersInUse = this.buildFiltersInUse(results[JOBS].body.jobs); } // For each filter, return just @@ -117,8 +117,8 @@ export class FilterManager { // item_count // jobs using the filter const filterStats: FilterStats[] = []; - if (results[FILTERS] && results[FILTERS].filters) { - results[FILTERS].filters.forEach((filter: Filter) => { + if (results[FILTERS] && results[FILTERS].body.filters) { + results[FILTERS].body.filters.forEach((filter: Filter) => { const stats: FilterStats = { filter_id: filter.filter_id, description: filter.description, @@ -139,7 +139,8 @@ export class FilterManager { const { filterId, ...body } = filter; try { // Returns the newly created filter. - return await this._callAsInternalUser('ml.addFilter', { filterId, body }); + const { body: resp } = await this._asInternalUser.ml.putFilter({ filter_id: filterId, body }); + return resp; } catch (error) { throw Boom.badRequest(error); } @@ -159,17 +160,19 @@ export class FilterManager { } // Returns the newly updated filter. - return await this._callAsInternalUser('ml.updateFilter', { - filterId, + const { body: resp } = await this._asInternalUser.ml.updateFilter({ + filter_id: filterId, body, }); + return resp; } catch (error) { throw Boom.badRequest(error); } } async deleteFilter(filterId: string) { - return this._callAsInternalUser('ml.deleteFilter', { filterId }); + const { body } = await this._asInternalUser.ml.deleteFilter({ filter_id: filterId }); + return body; } buildFiltersInUse(jobsList: PartialJob[]) { diff --git a/x-pack/plugins/ml/server/models/job_audit_messages/job_audit_messages.d.ts b/x-pack/plugins/ml/server/models/job_audit_messages/job_audit_messages.d.ts index d72552b548b82..afdd3e9bb8ce9 100644 --- a/x-pack/plugins/ml/server/models/job_audit_messages/job_audit_messages.d.ts +++ b/x-pack/plugins/ml/server/models/job_audit_messages/job_audit_messages.d.ts @@ -4,10 +4,10 @@ * you may not use this file except in compliance with the Elastic License. */ -import { ILegacyScopedClusterClient } from 'kibana/server'; +import { IScopedClusterClient } from 'kibana/server'; export function jobAuditMessagesProvider( - mlClusterClient: ILegacyScopedClusterClient + client: IScopedClusterClient ): { getJobAuditMessages: (jobId?: string, from?: string) => any; getAuditMessagesSummary: (jobIds?: string[]) => any; diff --git a/x-pack/plugins/ml/server/models/job_audit_messages/job_audit_messages.js b/x-pack/plugins/ml/server/models/job_audit_messages/job_audit_messages.js index 86d80c394137f..3fd5ebf3f68f4 100644 --- a/x-pack/plugins/ml/server/models/job_audit_messages/job_audit_messages.js +++ b/x-pack/plugins/ml/server/models/job_audit_messages/job_audit_messages.js @@ -34,14 +34,14 @@ const anomalyDetectorTypeFilter = { }, }; -export function jobAuditMessagesProvider({ callAsInternalUser }) { +export function jobAuditMessagesProvider({ asInternalUser }) { // search for audit messages, // jobId is optional. without it, all jobs will be listed. // from is optional and should be a string formatted in ES time units. e.g. 
12h, 1d, 7d async function getJobAuditMessages(jobId, from) { let gte = null; if (jobId !== undefined && from === undefined) { - const jobs = await callAsInternalUser('ml.jobs', { jobId }); + const jobs = await asInternalUser.ml.getJobs({ job_id: jobId }); if (jobs.count > 0 && jobs.jobs !== undefined) { gte = moment(jobs.jobs[0].create_time).valueOf(); } @@ -99,26 +99,22 @@ export function jobAuditMessagesProvider({ callAsInternalUser }) { }); } - try { - const resp = await callAsInternalUser('search', { - index: ML_NOTIFICATION_INDEX_PATTERN, - ignore_unavailable: true, - rest_total_hits_as_int: true, - size: SIZE, - body: { - sort: [{ timestamp: { order: 'desc' } }, { job_id: { order: 'asc' } }], - query, - }, - }); + const { body } = await asInternalUser.search({ + index: ML_NOTIFICATION_INDEX_PATTERN, + ignore_unavailable: true, + rest_total_hits_as_int: true, + size: SIZE, + body: { + sort: [{ timestamp: { order: 'desc' } }, { job_id: { order: 'asc' } }], + query, + }, + }); - let messages = []; - if (resp.hits.total !== 0) { - messages = resp.hits.hits.map((hit) => hit._source); - } - return messages; - } catch (e) { - throw e; + let messages = []; + if (body.hits.total !== 0) { + messages = body.hits.hits.map((hit) => hit._source); } + return messages; } // search highest, most recent audit messages for all jobs for the last 24hrs. @@ -128,65 +124,63 @@ export function jobAuditMessagesProvider({ callAsInternalUser }) { const maxBuckets = 10000; let levelsPerJobAggSize = maxBuckets; - try { - const query = { - bool: { - filter: [ - { - range: { - timestamp: { - gte: 'now-1d', - }, + const query = { + bool: { + filter: [ + { + range: { + timestamp: { + gte: 'now-1d', }, }, - anomalyDetectorTypeFilter, - ], - }, - }; - - // If the jobIds arg is supplied, add a query filter - // to only include those jobIds in the aggregations. - if (Array.isArray(jobIds) && jobIds.length > 0) { - query.bool.filter.push({ - terms: { - job_id: jobIds, }, - }); - levelsPerJobAggSize = jobIds.length; - } + anomalyDetectorTypeFilter, + ], + }, + }; - const resp = await callAsInternalUser('search', { - index: ML_NOTIFICATION_INDEX_PATTERN, - ignore_unavailable: true, - rest_total_hits_as_int: true, - size: 0, - body: { - query, - aggs: { - levelsPerJob: { - terms: { - field: 'job_id', - size: levelsPerJobAggSize, - }, - aggs: { - levels: { - terms: { - field: 'level', - }, - aggs: { - latestMessage: { - terms: { - field: 'message.raw', - size: 1, - order: { - latestMessage: 'desc', - }, + // If the jobIds arg is supplied, add a query filter + // to only include those jobIds in the aggregations. 
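// In that case the per-job terms aggregation below only needs one bucket per requested
// job, so levelsPerJobAggSize is narrowed from the maxBuckets default to jobIds.length.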
+ if (Array.isArray(jobIds) && jobIds.length > 0) { + query.bool.filter.push({ + terms: { + job_id: jobIds, + }, + }); + levelsPerJobAggSize = jobIds.length; + } + + const { body } = await asInternalUser.search({ + index: ML_NOTIFICATION_INDEX_PATTERN, + ignore_unavailable: true, + rest_total_hits_as_int: true, + size: 0, + body: { + query, + aggs: { + levelsPerJob: { + terms: { + field: 'job_id', + size: levelsPerJobAggSize, + }, + aggs: { + levels: { + terms: { + field: 'level', + }, + aggs: { + latestMessage: { + terms: { + field: 'message.raw', + size: 1, + order: { + latestMessage: 'desc', }, - aggs: { - latestMessage: { - max: { - field: 'timestamp', - }, + }, + aggs: { + latestMessage: { + max: { + field: 'timestamp', }, }, }, @@ -196,67 +190,65 @@ export function jobAuditMessagesProvider({ callAsInternalUser }) { }, }, }, - }); + }, + }); - let messagesPerJob = []; - const jobMessages = []; - if ( - resp.hits.total !== 0 && - resp.aggregations && - resp.aggregations.levelsPerJob && - resp.aggregations.levelsPerJob.buckets && - resp.aggregations.levelsPerJob.buckets.length - ) { - messagesPerJob = resp.aggregations.levelsPerJob.buckets; - } + let messagesPerJob = []; + const jobMessages = []; + if ( + body.hits.total !== 0 && + body.aggregations && + body.aggregations.levelsPerJob && + body.aggregations.levelsPerJob.buckets && + body.aggregations.levelsPerJob.buckets.length + ) { + messagesPerJob = body.aggregations.levelsPerJob.buckets; + } - messagesPerJob.forEach((job) => { - // ignore system messages (id==='') - if (job.key !== '' && job.levels && job.levels.buckets && job.levels.buckets.length) { - let highestLevel = 0; - let highestLevelText = ''; - let msgTime = 0; + messagesPerJob.forEach((job) => { + // ignore system messages (id==='') + if (job.key !== '' && job.levels && job.levels.buckets && job.levels.buckets.length) { + let highestLevel = 0; + let highestLevelText = ''; + let msgTime = 0; - job.levels.buckets.forEach((level) => { - const label = level.key; - // note the highest message level - if (LEVEL[label] > highestLevel) { - highestLevel = LEVEL[label]; - if ( - level.latestMessage && - level.latestMessage.buckets && - level.latestMessage.buckets.length - ) { - level.latestMessage.buckets.forEach((msg) => { - // there should only be one result here. - highestLevelText = msg.key; + job.levels.buckets.forEach((level) => { + const label = level.key; + // note the highest message level + if (LEVEL[label] > highestLevel) { + highestLevel = LEVEL[label]; + if ( + level.latestMessage && + level.latestMessage.buckets && + level.latestMessage.buckets.length + ) { + level.latestMessage.buckets.forEach((msg) => { + // there should only be one result here. + highestLevelText = msg.key; - // note the time in ms for the highest level - // so we can filter them out later if they're earlier than the - // job's create time. - if (msg.latestMessage && msg.latestMessage.value_as_string) { - const time = moment(msg.latestMessage.value_as_string); - msgTime = time.valueOf(); - } - }); - } + // note the time in ms for the highest level + // so we can filter them out later if they're earlier than the + // job's create time. 
+ if (msg.latestMessage && msg.latestMessage.value_as_string) { + const time = moment(msg.latestMessage.value_as_string); + msgTime = time.valueOf(); + } + }); } - }); - - if (msgTime !== 0 && highestLevel !== 0) { - jobMessages.push({ - job_id: job.key, - highestLevelText, - highestLevel: levelToText(highestLevel), - msgTime, - }); } + }); + + if (msgTime !== 0 && highestLevel !== 0) { + jobMessages.push({ + job_id: job.key, + highestLevelText, + highestLevel: levelToText(highestLevel), + msgTime, + }); } - }); - return jobMessages; - } catch (e) { - throw e; - } + } + }); + return jobMessages; } function levelToText(level) { diff --git a/x-pack/plugins/ml/server/models/job_service/datafeeds.ts b/x-pack/plugins/ml/server/models/job_service/datafeeds.ts index 98e1be48bb766..c0eb1b72825df 100644 --- a/x-pack/plugins/ml/server/models/job_service/datafeeds.ts +++ b/x-pack/plugins/ml/server/models/job_service/datafeeds.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -import { ILegacyScopedClusterClient } from 'kibana/server'; +import { IScopedClusterClient } from 'kibana/server'; import { i18n } from '@kbn/i18n'; import { JOB_STATE, DATAFEED_STATE } from '../../../common/constants/states'; import { fillResultsWithTimeouts, isRequestTimeout } from './error_utils'; @@ -26,7 +26,7 @@ interface Results { }; } -export function datafeedsProvider({ callAsInternalUser }: ILegacyScopedClusterClient) { +export function datafeedsProvider({ asInternalUser }: IScopedClusterClient) { async function forceStartDatafeeds(datafeedIds: string[], start?: number, end?: number) { const jobIds = await getJobIdsByDatafeedId(); const doStartsCalled = datafeedIds.reduce((acc, cur) => { @@ -42,8 +42,8 @@ export function datafeedsProvider({ callAsInternalUser }: ILegacyScopedClusterCl try { await startDatafeed(datafeedId, start, end); return { started: true }; - } catch (error) { - return { started: false, error }; + } catch ({ body }) { + return { started: false, error: body }; } } else { return { started: true }; @@ -66,7 +66,7 @@ export function datafeedsProvider({ callAsInternalUser }: ILegacyScopedClusterCl results[datafeedId] = await doStart(datafeedId); return fillResultsWithTimeouts(results, datafeedId, datafeedIds, JOB_STATE.OPENED); } - results[datafeedId] = { started: false, error }; + results[datafeedId] = { started: false, error: error.body }; } } else { results[datafeedId] = { @@ -84,8 +84,8 @@ export function datafeedsProvider({ callAsInternalUser }: ILegacyScopedClusterCl async function openJob(jobId: string) { let opened = false; try { - const resp = await callAsInternalUser('ml.openJob', { jobId }); - opened = resp.opened; + const { body } = await asInternalUser.ml.openJob({ job_id: jobId }); + opened = body.opened; } catch (error) { if (error.statusCode === 409) { opened = true; @@ -97,7 +97,11 @@ export function datafeedsProvider({ callAsInternalUser }: ILegacyScopedClusterCl } async function startDatafeed(datafeedId: string, start?: number, end?: number) { - return callAsInternalUser('ml.startDatafeed', { datafeedId, start, end }); + return asInternalUser.ml.startDatafeed({ + datafeed_id: datafeedId, + start: (start as unknown) as string, + end: (end as unknown) as string, + }); } async function stopDatafeeds(datafeedIds: string[]) { @@ -105,7 +109,12 @@ export function datafeedsProvider({ callAsInternalUser }: ILegacyScopedClusterCl for (const datafeedId of datafeedIds) { try { - results[datafeedId] = await callAsInternalUser('ml.stopDatafeed', { 
datafeedId }); + const { body } = await asInternalUser.ml.stopDatafeed<{ + started: boolean; + }>({ + datafeed_id: datafeedId, + }); + results[datafeedId] = body; } catch (error) { if (isRequestTimeout(error)) { return fillResultsWithTimeouts(results, datafeedId, datafeedIds, DATAFEED_STATE.STOPPED); @@ -117,11 +126,17 @@ export function datafeedsProvider({ callAsInternalUser }: ILegacyScopedClusterCl } async function forceDeleteDatafeed(datafeedId: string) { - return callAsInternalUser('ml.deleteDatafeed', { datafeedId, force: true }); + const { body } = await asInternalUser.ml.deleteDatafeed({ + datafeed_id: datafeedId, + force: true, + }); + return body; } async function getDatafeedIdsByJobId() { - const { datafeeds } = (await callAsInternalUser('ml.datafeeds')) as MlDatafeedsResponse; + const { + body: { datafeeds }, + } = await asInternalUser.ml.getDatafeeds(); return datafeeds.reduce((acc, cur) => { acc[cur.job_id] = cur.datafeed_id; return acc; @@ -129,7 +144,9 @@ export function datafeedsProvider({ callAsInternalUser }: ILegacyScopedClusterCl } async function getJobIdsByDatafeedId() { - const { datafeeds } = (await callAsInternalUser('ml.datafeeds')) as MlDatafeedsResponse; + const { + body: { datafeeds }, + } = await asInternalUser.ml.getDatafeeds(); return datafeeds.reduce((acc, cur) => { acc[cur.datafeed_id] = cur.job_id; return acc; diff --git a/x-pack/plugins/ml/server/models/job_service/error_utils.ts b/x-pack/plugins/ml/server/models/job_service/error_utils.ts index 8a47993546fb8..dc871a9dce805 100644 --- a/x-pack/plugins/ml/server/models/job_service/error_utils.ts +++ b/x-pack/plugins/ml/server/models/job_service/error_utils.ts @@ -7,11 +7,11 @@ import { i18n } from '@kbn/i18n'; import { JOB_STATE, DATAFEED_STATE } from '../../../common/constants/states'; -const REQUEST_TIMEOUT = 'RequestTimeout'; +const REQUEST_TIMEOUT_NAME = 'RequestTimeout'; type ACTION_STATE = DATAFEED_STATE | JOB_STATE; -export function isRequestTimeout(error: { displayName: string }) { - return error.displayName === REQUEST_TIMEOUT; +export function isRequestTimeout(error: { name: string }) { + return error.name === REQUEST_TIMEOUT_NAME; } interface Results { diff --git a/x-pack/plugins/ml/server/models/job_service/groups.ts b/x-pack/plugins/ml/server/models/job_service/groups.ts index c4ea854c14f87..0f53d27f2eddf 100644 --- a/x-pack/plugins/ml/server/models/job_service/groups.ts +++ b/x-pack/plugins/ml/server/models/job_service/groups.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { ILegacyScopedClusterClient } from 'kibana/server'; +import { IScopedClusterClient } from 'kibana/server'; import { CalendarManager } from '../calendar'; import { GLOBAL_CALENDAR } from '../../../common/constants/calendars'; import { Job } from '../../../common/types/anomaly_detection_jobs'; @@ -23,18 +23,19 @@ interface Results { }; } -export function groupsProvider(mlClusterClient: ILegacyScopedClusterClient) { - const calMngr = new CalendarManager(mlClusterClient); - const { callAsInternalUser } = mlClusterClient; +export function groupsProvider(client: IScopedClusterClient) { + const calMngr = new CalendarManager(client); + const { asInternalUser } = client; async function getAllGroups() { const groups: { [id: string]: Group } = {}; const jobIds: { [id: string]: undefined | null } = {}; - const [{ jobs }, calendars] = await Promise.all([ - callAsInternalUser('ml.jobs') as Promise, + const [{ body }, calendars] = await Promise.all([ + asInternalUser.ml.getJobs(), calMngr.getAllCalendars(), ]); + const { jobs } = body; if (jobs) { jobs.forEach((job) => { jobIds[job.job_id] = null; @@ -80,10 +81,10 @@ export function groupsProvider(mlClusterClient: ILegacyScopedClusterClient) { for (const job of jobs) { const { job_id: jobId, groups } = job; try { - await callAsInternalUser('ml.updateJob', { jobId, body: { groups } }); + await asInternalUser.ml.updateJob({ job_id: jobId, body: { groups } }); results[jobId] = { success: true }; - } catch (error) { - results[jobId] = { success: false, error }; + } catch ({ body }) { + results[jobId] = { success: false, error: body }; } } return results; diff --git a/x-pack/plugins/ml/server/models/job_service/index.ts b/x-pack/plugins/ml/server/models/job_service/index.ts index 1ff33a7b00f0b..6fea5d3b5a491 100644 --- a/x-pack/plugins/ml/server/models/job_service/index.ts +++ b/x-pack/plugins/ml/server/models/job_service/index.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { ILegacyScopedClusterClient } from 'kibana/server'; +import { IScopedClusterClient } from 'kibana/server'; import { datafeedsProvider } from './datafeeds'; import { jobsProvider } from './jobs'; import { groupsProvider } from './groups'; @@ -12,14 +12,14 @@ import { newJobCapsProvider } from './new_job_caps'; import { newJobChartsProvider, topCategoriesProvider } from './new_job'; import { modelSnapshotProvider } from './model_snapshots'; -export function jobServiceProvider(mlClusterClient: ILegacyScopedClusterClient) { +export function jobServiceProvider(client: IScopedClusterClient) { return { - ...datafeedsProvider(mlClusterClient), - ...jobsProvider(mlClusterClient), - ...groupsProvider(mlClusterClient), - ...newJobCapsProvider(mlClusterClient), - ...newJobChartsProvider(mlClusterClient), - ...topCategoriesProvider(mlClusterClient), - ...modelSnapshotProvider(mlClusterClient), + ...datafeedsProvider(client), + ...jobsProvider(client), + ...groupsProvider(client), + ...newJobCapsProvider(client), + ...newJobChartsProvider(client), + ...topCategoriesProvider(client), + ...modelSnapshotProvider(client), }; } diff --git a/x-pack/plugins/ml/server/models/job_service/jobs.ts b/x-pack/plugins/ml/server/models/job_service/jobs.ts index 0aa1cfdae13c7..e047d31ba6eb7 100644 --- a/x-pack/plugins/ml/server/models/job_service/jobs.ts +++ b/x-pack/plugins/ml/server/models/job_service/jobs.ts @@ -7,7 +7,7 @@ import { i18n } from '@kbn/i18n'; import { uniq } from 'lodash'; import Boom from 'boom'; -import { ILegacyScopedClusterClient } from 'kibana/server'; +import { IScopedClusterClient } from 'kibana/server'; import { parseTimeIntervalForJob } from '../../../common/util/job_utils'; import { JOB_STATE, DATAFEED_STATE } from '../../../common/constants/states'; import { @@ -22,7 +22,7 @@ import { GLOBAL_CALENDAR } from '../../../common/constants/calendars'; import { datafeedsProvider, MlDatafeedsResponse, MlDatafeedsStatsResponse } from './datafeeds'; import { jobAuditMessagesProvider } from '../job_audit_messages'; import { resultsServiceProvider } from '../results_service'; -import { CalendarManager, Calendar } from '../calendar'; +import { CalendarManager } from '../calendar'; import { fillResultsWithTimeouts, isRequestTimeout } from './error_utils'; import { getEarliestDatafeedStartTime, @@ -47,16 +47,16 @@ interface Results { }; } -export function jobsProvider(mlClusterClient: ILegacyScopedClusterClient) { - const { callAsInternalUser } = mlClusterClient; +export function jobsProvider(client: IScopedClusterClient) { + const { asInternalUser } = client; - const { forceDeleteDatafeed, getDatafeedIdsByJobId } = datafeedsProvider(mlClusterClient); - const { getAuditMessagesSummary } = jobAuditMessagesProvider(mlClusterClient); - const { getLatestBucketTimestampByJob } = resultsServiceProvider(mlClusterClient); - const calMngr = new CalendarManager(mlClusterClient); + const { forceDeleteDatafeed, getDatafeedIdsByJobId } = datafeedsProvider(client); + const { getAuditMessagesSummary } = jobAuditMessagesProvider(client); + const { getLatestBucketTimestampByJob } = resultsServiceProvider(client); + const calMngr = new CalendarManager(client); async function forceDeleteJob(jobId: string) { - return callAsInternalUser('ml.deleteJob', { jobId, force: true }); + return asInternalUser.ml.deleteJob({ job_id: jobId, force: true, wait_for_completion: false }); } async function deleteJobs(jobIds: string[]) { @@ -78,7 +78,7 @@ export function jobsProvider(mlClusterClient: ILegacyScopedClusterClient) { if 
(isRequestTimeout(error)) { return fillResultsWithTimeouts(results, jobId, jobIds, DATAFEED_STATE.DELETED); } - results[jobId] = { deleted: false, error }; + results[jobId] = { deleted: false, error: error.body }; } } } catch (error) { @@ -90,7 +90,7 @@ export function jobsProvider(mlClusterClient: ILegacyScopedClusterClient) { DATAFEED_STATE.DELETED ); } - results[jobId] = { deleted: false, error }; + results[jobId] = { deleted: false, error: error.body }; } } return results; @@ -100,7 +100,7 @@ export function jobsProvider(mlClusterClient: ILegacyScopedClusterClient) { const results: Results = {}; for (const jobId of jobIds) { try { - await callAsInternalUser('ml.closeJob', { jobId }); + await asInternalUser.ml.closeJob({ job_id: jobId }); results[jobId] = { closed: true }; } catch (error) { if (isRequestTimeout(error)) { @@ -109,23 +109,23 @@ export function jobsProvider(mlClusterClient: ILegacyScopedClusterClient) { if ( error.statusCode === 409 && - error.response && - error.response.includes('datafeed') === false + error.body.error?.reason && + error.body.error.reason.includes('datafeed') === false ) { // the close job request may fail (409) if the job has failed or if the datafeed hasn't been stopped. // if the job has failed we want to attempt a force close. // however, if we received a 409 due to the datafeed being started we should not attempt a force close. try { - await callAsInternalUser('ml.closeJob', { jobId, force: true }); + await asInternalUser.ml.closeJob({ job_id: jobId, force: true }); results[jobId] = { closed: true }; } catch (error2) { - if (isRequestTimeout(error)) { + if (isRequestTimeout(error2)) { return fillResultsWithTimeouts(results, jobId, jobIds, JOB_STATE.CLOSED); } - results[jobId] = { closed: false, error: error2 }; + results[jobId] = { closed: false, error: error2.body }; } } else { - results[jobId] = { closed: false, error }; + results[jobId] = { closed: false, error: error.body }; } } } @@ -139,12 +139,12 @@ export function jobsProvider(mlClusterClient: ILegacyScopedClusterClient) { throw Boom.notFound(`Cannot find datafeed for job ${jobId}`); } - const dfResult = await callAsInternalUser('ml.stopDatafeed', { datafeedId, force: true }); - if (!dfResult || dfResult.stopped !== true) { + const { body } = await asInternalUser.ml.stopDatafeed({ datafeed_id: datafeedId, force: true }); + if (body.stopped !== true) { return { success: false }; } - await callAsInternalUser('ml.closeJob', { jobId, force: true }); + await asInternalUser.ml.closeJob({ job_id: jobId, force: true }); return { success: true }; } @@ -256,41 +256,26 @@ export function jobsProvider(mlClusterClient: ILegacyScopedClusterClient) { const calendarsByJobId: { [jobId: string]: string[] } = {}; const globalCalendars: string[] = []; - const requests: [ - Promise, - Promise, - Promise, - Promise, - Promise, - Promise<{ [id: string]: number | undefined }> - ] = [ - jobIds.length > 0 - ? (callAsInternalUser('ml.jobs', { jobId: jobIds }) as Promise) // move length check in side call - : (callAsInternalUser('ml.jobs') as Promise), - jobIds.length > 0 - ? 
(callAsInternalUser('ml.jobStats', { jobId: jobIds }) as Promise) - : (callAsInternalUser('ml.jobStats') as Promise), - callAsInternalUser('ml.datafeeds') as Promise, - callAsInternalUser('ml.datafeedStats') as Promise, - calMngr.getAllCalendars(), - getLatestBucketTimestampByJob(), - ]; - + const jobIdsString = jobIds.join(); const [ - jobResults, - jobStatsResults, - datafeedResults, - datafeedStatsResults, + { body: jobResults }, + { body: jobStatsResults }, + { body: datafeedResults }, + { body: datafeedStatsResults }, calendarResults, latestBucketTimestampByJob, - ] = await Promise.all< - MlJobsResponse, - MlJobsStatsResponse, - MlDatafeedsResponse, - MlDatafeedsStatsResponse, - Calendar[], - { [id: string]: number | undefined } - >(requests); + ] = await Promise.all([ + asInternalUser.ml.getJobs( + jobIds.length > 0 ? { job_id: jobIdsString } : undefined + ), + asInternalUser.ml.getJobStats( + jobIds.length > 0 ? { job_id: jobIdsString } : undefined + ), + asInternalUser.ml.getDatafeeds(), + asInternalUser.ml.getDatafeedStats(), + calMngr.getAllCalendars(), + getLatestBucketTimestampByJob(), + ]); if (datafeedResults && datafeedResults.datafeeds) { datafeedResults.datafeeds.forEach((datafeed) => { @@ -400,9 +385,9 @@ export function jobsProvider(mlClusterClient: ILegacyScopedClusterClient) { const detailed = true; const jobIds = []; try { - const tasksList = await callAsInternalUser('tasks.list', { actions, detailed }); - Object.keys(tasksList.nodes).forEach((nodeId) => { - const tasks = tasksList.nodes[nodeId].tasks; + const { body } = await asInternalUser.tasks.list({ actions, detailed }); + Object.keys(body.nodes).forEach((nodeId) => { + const tasks = body.nodes[nodeId].tasks; Object.keys(tasks).forEach((taskId) => { jobIds.push(tasks[taskId].description.replace(/^delete-job-/, '')); }); @@ -410,7 +395,9 @@ export function jobsProvider(mlClusterClient: ILegacyScopedClusterClient) { } catch (e) { // if the user doesn't have permission to load the task list, // use the jobs list to get the ids of deleting jobs - const { jobs } = (await callAsInternalUser('ml.jobs')) as MlJobsResponse; + const { + body: { jobs }, + } = await asInternalUser.ml.getJobs(); jobIds.push(...jobs.filter((j) => j.deleting === true).map((j) => j.job_id)); } return { jobIds }; @@ -421,13 +408,13 @@ export function jobsProvider(mlClusterClient: ILegacyScopedClusterClient) { // e.g. *_low_request_rate_ecs async function jobsExist(jobIds: string[] = []) { // Get the list of job IDs. - const jobsInfo = (await callAsInternalUser('ml.jobs', { - jobId: jobIds, - })) as MlJobsResponse; + const { body } = await asInternalUser.ml.getJobs({ + job_id: jobIds.join(), + }); const results: { [id: string]: boolean } = {}; - if (jobsInfo.count > 0) { - const allJobIds = jobsInfo.jobs.map((job) => job.job_id); + if (body.count > 0) { + const allJobIds = body.jobs.map((job) => job.job_id); // Check if each of the supplied IDs match existing jobs. 
jobIds.forEach((jobId) => { @@ -446,9 +433,9 @@ export function jobsProvider(mlClusterClient: ILegacyScopedClusterClient) { } async function getAllJobAndGroupIds() { - const { getAllGroups } = groupsProvider(mlClusterClient); - const jobs = (await callAsInternalUser('ml.jobs')) as MlJobsResponse; - const jobIds = jobs.jobs.map((job) => job.job_id); + const { getAllGroups } = groupsProvider(client); + const { body } = await asInternalUser.ml.getJobs(); + const jobIds = body.jobs.map((job) => job.job_id); const groups = await getAllGroups(); const groupIds = groups.map((group) => group.id); @@ -460,13 +447,13 @@ export function jobsProvider(mlClusterClient: ILegacyScopedClusterClient) { async function getLookBackProgress(jobId: string, start: number, end: number) { const datafeedId = `datafeed-${jobId}`; - const [jobStats, isRunning] = await Promise.all([ - callAsInternalUser('ml.jobStats', { jobId: [jobId] }) as Promise, + const [{ body }, isRunning] = await Promise.all([ + asInternalUser.ml.getJobStats({ job_id: jobId }), isDatafeedRunning(datafeedId), ]); - if (jobStats.jobs.length) { - const statsForJob = jobStats.jobs[0]; + if (body.jobs.length) { + const statsForJob = body.jobs[0]; const time = statsForJob.data_counts.latest_record_timestamp; const progress = (time - start) / (end - start); const isJobClosed = statsForJob.state === JOB_STATE.CLOSED; @@ -480,11 +467,11 @@ export function jobsProvider(mlClusterClient: ILegacyScopedClusterClient) { } async function isDatafeedRunning(datafeedId: string) { - const stats = (await callAsInternalUser('ml.datafeedStats', { - datafeedId: [datafeedId], - })) as MlDatafeedsStatsResponse; - if (stats.datafeeds.length) { - const state = stats.datafeeds[0].state; + const { body } = await asInternalUser.ml.getDatafeedStats({ + datafeed_id: datafeedId, + }); + if (body.datafeeds.length) { + const state = body.datafeeds[0].state; return ( state === DATAFEED_STATE.STARTED || state === DATAFEED_STATE.STARTING || diff --git a/x-pack/plugins/ml/server/models/job_service/model_snapshots.ts b/x-pack/plugins/ml/server/models/job_service/model_snapshots.ts index 576d6f8cbb160..34206a68ffeb9 100644 --- a/x-pack/plugins/ml/server/models/job_service/model_snapshots.ts +++ b/x-pack/plugins/ml/server/models/job_service/model_snapshots.ts @@ -6,7 +6,7 @@ import Boom from 'boom'; import { i18n } from '@kbn/i18n'; -import { ILegacyScopedClusterClient } from 'kibana/server'; +import { IScopedClusterClient } from 'kibana/server'; import { ModelSnapshot } from '../../../common/types/anomaly_detection_jobs'; import { datafeedsProvider } from './datafeeds'; import { FormCalendar, CalendarManager } from '../calendar'; @@ -19,9 +19,9 @@ export interface RevertModelSnapshotResponse { model: ModelSnapshot; } -export function modelSnapshotProvider(mlClusterClient: ILegacyScopedClusterClient) { - const { callAsInternalUser } = mlClusterClient; - const { forceStartDatafeeds, getDatafeedIdsByJobId } = datafeedsProvider(mlClusterClient); +export function modelSnapshotProvider(client: IScopedClusterClient) { + const { asInternalUser } = client; + const { forceStartDatafeeds, getDatafeedIdsByJobId } = datafeedsProvider(client); async function revertModelSnapshot( jobId: string, @@ -33,13 +33,13 @@ export function modelSnapshotProvider(mlClusterClient: ILegacyScopedClusterClien ) { let datafeedId = `datafeed-${jobId}`; // ensure job exists - await callAsInternalUser('ml.jobs', { jobId: [jobId] }); + await asInternalUser.ml.getJobs({ job_id: jobId }); try { // ensure the datafeed exists 
// the datafeed is probably called datafeed- - await callAsInternalUser('ml.datafeeds', { - datafeedId: [datafeedId], + await asInternalUser.ml.getDatafeeds({ + datafeed_id: datafeedId, }); } catch (e) { // if the datafeed isn't called datafeed- @@ -52,19 +52,21 @@ export function modelSnapshotProvider(mlClusterClient: ILegacyScopedClusterClien } // ensure the snapshot exists - const snapshot = (await callAsInternalUser('ml.modelSnapshots', { - jobId, - snapshotId, - })) as ModelSnapshotsResponse; + const { body: snapshot } = await asInternalUser.ml.getModelSnapshots({ + job_id: jobId, + snapshot_id: snapshotId, + }); // apply the snapshot revert - const { model } = (await callAsInternalUser('ml.revertModelSnapshot', { - jobId, - snapshotId, + const { + body: { model }, + } = await asInternalUser.ml.revertModelSnapshot({ + job_id: jobId, + snapshot_id: snapshotId, body: { delete_intervening_results: deleteInterveningResults, }, - })) as RevertModelSnapshotResponse; + }); // create calendar (if specified) and replay datafeed if (replay && model.snapshot_id === snapshotId && snapshot.model_snapshots.length) { @@ -85,7 +87,7 @@ export function modelSnapshotProvider(mlClusterClient: ILegacyScopedClusterClien end_time: s.end, })), }; - const cm = new CalendarManager(mlClusterClient); + const cm = new CalendarManager(client); await cm.newCalendar(calendar); } diff --git a/x-pack/plugins/ml/server/models/job_service/new_job/categorization/examples.ts b/x-pack/plugins/ml/server/models/job_service/new_job/categorization/examples.ts index ca3e0cef21049..6b9f30b2ae00b 100644 --- a/x-pack/plugins/ml/server/models/job_service/new_job/categorization/examples.ts +++ b/x-pack/plugins/ml/server/models/job_service/new_job/categorization/examples.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
 */
 
-import { ILegacyScopedClusterClient } from 'kibana/server';
+import { IScopedClusterClient } from 'kibana/server';
 import { chunk } from 'lodash';
 import { SearchResponse } from 'elasticsearch';
 import { CATEGORY_EXAMPLES_SAMPLE_SIZE } from '../../../../../common/constants/categorization_job';
@@ -18,9 +18,9 @@ import { ValidationResults } from './validation_results';
 const CHUNK_SIZE = 100;
 
 export function categorizationExamplesProvider({
-  callAsCurrentUser,
-  callAsInternalUser,
-}: ILegacyScopedClusterClient) {
+  asCurrentUser,
+  asInternalUser,
+}: IScopedClusterClient) {
   const validationResults = new ValidationResults();
 
   async function categorizationExamples(
@@ -57,7 +57,7 @@ export function categorizationExamplesProvider({
       }
     }
 
-    const results: SearchResponse<{ [id: string]: string }> = await callAsCurrentUser('search', {
+    const { body } = await asCurrentUser.search<SearchResponse<{ [id: string]: string }>>({
       index: indexPatternTitle,
       size,
       body: {
@@ -67,7 +67,7 @@ export function categorizationExamplesProvider({
       },
     });
 
-    const tempExamples = results.hits.hits.map(({ _source }) => _source[categorizationFieldName]);
+    const tempExamples = body.hits.hits.map(({ _source }) => _source[categorizationFieldName]);
 
     validationResults.createNullValueResult(tempExamples);
 
@@ -112,7 +112,9 @@ export function categorizationExamplesProvider({
   }
 
   async function loadTokens(examples: string[], analyzer: CategorizationAnalyzer) {
-    const { tokens }: { tokens: Token[] } = await callAsInternalUser('indices.analyze', {
+    const {
+      body: { tokens },
+    } = await asInternalUser.indices.analyze<{ tokens: Token[] }>({
       body: {
         ...getAnalyzer(analyzer),
         text: examples,
diff --git a/x-pack/plugins/ml/server/models/job_service/new_job/categorization/top_categories.ts b/x-pack/plugins/ml/server/models/job_service/new_job/categorization/top_categories.ts
index 5ade86806f383..347afec8ef73c 100644
--- a/x-pack/plugins/ml/server/models/job_service/new_job/categorization/top_categories.ts
+++ b/x-pack/plugins/ml/server/models/job_service/new_job/categorization/top_categories.ts
@@ -5,13 +5,13 @@
  */
 
 import { SearchResponse } from 'elasticsearch';
-import { ILegacyScopedClusterClient } from 'kibana/server';
+import { IScopedClusterClient } from 'kibana/server';
 import { ML_RESULTS_INDEX_PATTERN } from '../../../../../common/constants/index_patterns';
 import { CategoryId, Category } from '../../../../../common/types/categories';
 
-export function topCategoriesProvider({ callAsInternalUser }: ILegacyScopedClusterClient) {
-  async function getTotalCategories(jobId: string): Promise<{ total: number }> {
-    const totalResp = await callAsInternalUser('search', {
+export function topCategoriesProvider({ asInternalUser }: IScopedClusterClient) {
+  async function getTotalCategories(jobId: string): Promise<number> {
+    const { body } = await asInternalUser.search<SearchResponse<any>>({
       index: ML_RESULTS_INDEX_PATTERN,
       size: 0,
       body: {
@@ -33,11 +33,12 @@ export function topCategoriesProvider({ callAsInternalUser }: ILegacyScopedClust
           },
         },
       },
     });
-    return totalResp?.hits?.total?.value ?? 0;
+    // @ts-ignore total is an object here
+    return body?.hits?.total?.value ?? 0;
   }
   async function getTopCategoryCounts(jobId: string, numberOfCategories: number) {
-    const top: SearchResponse<any> = await callAsInternalUser('search', {
+    const { body } = await asInternalUser.search<SearchResponse<any>>({
       index: ML_RESULTS_INDEX_PATTERN,
       size: 0,
       body: {
@@ -76,7 +77,7 @@ export function topCategoriesProvider({ callAsInternalUser }: ILegacyScopedClust
     const catCounts: Array<{
       id: CategoryId;
       count: number;
-    }> = top.aggregations?.cat_count?.buckets.map((c: any) => ({
+    }> = body.aggregations?.cat_count?.buckets.map((c: any) => ({
       id: c.key,
       count: c.doc_count,
     }));
@@ -99,7 +100,7 @@ export function topCategoriesProvider({ callAsInternalUser }: ILegacyScopedClust
         field: 'category_id',
       },
     };
-    const result: SearchResponse<any> = await callAsInternalUser('search', {
+    const { body } = await asInternalUser.search<SearchResponse<any>>({
       index: ML_RESULTS_INDEX_PATTERN,
       size,
       body: {
@@ -118,7 +119,7 @@ export function topCategoriesProvider({ callAsInternalUser }: ILegacyScopedClust
       },
     });
 
-    return result.hits.hits?.map((c: { _source: Category }) => c._source) || [];
+    return body.hits.hits?.map((c: { _source: Category }) => c._source) || [];
   }
 
   async function topCategories(jobId: string, numberOfCategories: number) {
diff --git a/x-pack/plugins/ml/server/models/job_service/new_job/categorization/validation_results.ts b/x-pack/plugins/ml/server/models/job_service/new_job/categorization/validation_results.ts
index 4b90283a3a966..60595ccedff45 100644
--- a/x-pack/plugins/ml/server/models/job_service/new_job/categorization/validation_results.ts
+++ b/x-pack/plugins/ml/server/models/job_service/new_job/categorization/validation_results.ts
@@ -144,7 +144,7 @@ export class ValidationResults {
       this.createPrivilegesErrorResult(error);
       return;
     }
-    const message: string = error.message;
+    const message: string = error.body.error?.reason;
     if (message) {
       const rxp = /exceeded the allowed maximum of \[(\d+?)\]/;
       const match = rxp.exec(message);
@@ -170,7 +170,7 @@ export class ValidationResults {
   }
 
   public createPrivilegesErrorResult(error: any) {
-    const message: string = error.message;
+    const message: string = error.body.error?.reason;
     if (message) {
       this._results.push({
         id: VALIDATION_RESULT.INSUFFICIENT_PRIVILEGES,
diff --git a/x-pack/plugins/ml/server/models/job_service/new_job/charts.ts b/x-pack/plugins/ml/server/models/job_service/new_job/charts.ts
index 63ae2c624ac38..da7d8d0577e4e 100644
--- a/x-pack/plugins/ml/server/models/job_service/new_job/charts.ts
+++ b/x-pack/plugins/ml/server/models/job_service/new_job/charts.ts
@@ -4,13 +4,13 @@
  * you may not use this file except in compliance with the Elastic License.
*/ -import { ILegacyScopedClusterClient } from 'kibana/server'; +import { IScopedClusterClient } from 'kibana/server'; import { newJobLineChartProvider } from './line_chart'; import { newJobPopulationChartProvider } from './population_chart'; -export function newJobChartsProvider(mlClusterClient: ILegacyScopedClusterClient) { - const { newJobLineChart } = newJobLineChartProvider(mlClusterClient); - const { newJobPopulationChart } = newJobPopulationChartProvider(mlClusterClient); +export function newJobChartsProvider(client: IScopedClusterClient) { + const { newJobLineChart } = newJobLineChartProvider(client); + const { newJobPopulationChart } = newJobPopulationChartProvider(client); return { newJobLineChart, diff --git a/x-pack/plugins/ml/server/models/job_service/new_job/line_chart.ts b/x-pack/plugins/ml/server/models/job_service/new_job/line_chart.ts index 3080b37867de5..9eea1ea2a28ae 100644 --- a/x-pack/plugins/ml/server/models/job_service/new_job/line_chart.ts +++ b/x-pack/plugins/ml/server/models/job_service/new_job/line_chart.ts @@ -5,7 +5,7 @@ */ import { get } from 'lodash'; -import { ILegacyScopedClusterClient } from 'kibana/server'; +import { IScopedClusterClient } from 'kibana/server'; import { AggFieldNamePair, EVENT_RATE_FIELD_ID } from '../../../../common/types/fields'; import { ML_MEDIAN_PERCENTS } from '../../../../common/util/job_utils'; @@ -23,7 +23,7 @@ interface ProcessedResults { totalResults: number; } -export function newJobLineChartProvider({ callAsCurrentUser }: ILegacyScopedClusterClient) { +export function newJobLineChartProvider({ asCurrentUser }: IScopedClusterClient) { async function newJobLineChart( indexPatternTitle: string, timeField: string, @@ -47,9 +47,9 @@ export function newJobLineChartProvider({ callAsCurrentUser }: ILegacyScopedClus splitFieldValue ); - const results = await callAsCurrentUser('search', json); + const { body } = await asCurrentUser.search(json); return processSearchResults( - results, + body, aggFieldNamePairs.map((af) => af.field) ); } diff --git a/x-pack/plugins/ml/server/models/job_service/new_job/population_chart.ts b/x-pack/plugins/ml/server/models/job_service/new_job/population_chart.ts index ab75787a0069f..567afec809405 100644 --- a/x-pack/plugins/ml/server/models/job_service/new_job/population_chart.ts +++ b/x-pack/plugins/ml/server/models/job_service/new_job/population_chart.ts @@ -5,7 +5,7 @@ */ import { get } from 'lodash'; -import { ILegacyScopedClusterClient } from 'kibana/server'; +import { IScopedClusterClient } from 'kibana/server'; import { AggFieldNamePair, EVENT_RATE_FIELD_ID } from '../../../../common/types/fields'; import { ML_MEDIAN_PERCENTS } from '../../../../common/util/job_utils'; @@ -29,7 +29,7 @@ interface ProcessedResults { totalResults: number; } -export function newJobPopulationChartProvider({ callAsCurrentUser }: ILegacyScopedClusterClient) { +export function newJobPopulationChartProvider({ asCurrentUser }: IScopedClusterClient) { async function newJobPopulationChart( indexPatternTitle: string, timeField: string, @@ -51,15 +51,11 @@ export function newJobPopulationChartProvider({ callAsCurrentUser }: ILegacyScop splitFieldName ); - try { - const results = await callAsCurrentUser('search', json); - return processSearchResults( - results, - aggFieldNamePairs.map((af) => af.field) - ); - } catch (error) { - return { error }; - } + const { body } = await asCurrentUser.search(json); + return processSearchResults( + body, + aggFieldNamePairs.map((af) => af.field) + ); } return { diff --git 
a/x-pack/plugins/ml/server/models/job_service/new_job_caps/field_service.ts b/x-pack/plugins/ml/server/models/job_service/new_job_caps/field_service.ts index fd20610450cc1..c3b1de64c3eb5 100644 --- a/x-pack/plugins/ml/server/models/job_service/new_job_caps/field_service.ts +++ b/x-pack/plugins/ml/server/models/job_service/new_job_caps/field_service.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -import { ILegacyScopedClusterClient } from 'kibana/server'; +import { IScopedClusterClient } from 'kibana/server'; import { cloneDeep } from 'lodash'; import { SavedObjectsClientContract } from 'kibana/server'; import { @@ -40,35 +40,36 @@ const supportedTypes: string[] = [ export function fieldServiceProvider( indexPattern: string, isRollup: boolean, - mlClusterClient: ILegacyScopedClusterClient, + client: IScopedClusterClient, savedObjectsClient: SavedObjectsClientContract ) { - return new FieldsService(indexPattern, isRollup, mlClusterClient, savedObjectsClient); + return new FieldsService(indexPattern, isRollup, client, savedObjectsClient); } class FieldsService { private _indexPattern: string; private _isRollup: boolean; - private _mlClusterClient: ILegacyScopedClusterClient; + private _mlClusterClient: IScopedClusterClient; private _savedObjectsClient: SavedObjectsClientContract; constructor( indexPattern: string, isRollup: boolean, - mlClusterClient: ILegacyScopedClusterClient, + client: IScopedClusterClient, savedObjectsClient: SavedObjectsClientContract ) { this._indexPattern = indexPattern; this._isRollup = isRollup; - this._mlClusterClient = mlClusterClient; + this._mlClusterClient = client; this._savedObjectsClient = savedObjectsClient; } private async loadFieldCaps(): Promise { - return this._mlClusterClient.callAsCurrentUser('fieldCaps', { + const { body } = await this._mlClusterClient.asCurrentUser.fieldCaps({ index: this._indexPattern, fields: '*', }); + return body; } // create field object from the results from _field_caps diff --git a/x-pack/plugins/ml/server/models/job_service/new_job_caps/new_job_caps.test.ts b/x-pack/plugins/ml/server/models/job_service/new_job_caps/new_job_caps.test.ts index 38d6481e02a74..891cb2e0d1e64 100644 --- a/x-pack/plugins/ml/server/models/job_service/new_job_caps/new_job_caps.test.ts +++ b/x-pack/plugins/ml/server/models/job_service/new_job_caps/new_job_caps.test.ts @@ -21,28 +21,23 @@ describe('job_service - job_caps', () => { let savedObjectsClientMock: any; beforeEach(() => { - const callAsNonRollupMock = jest.fn((action: string) => { - switch (action) { - case 'fieldCaps': - return farequoteFieldCaps; - } - }); + const asNonRollupMock = { + fieldCaps: jest.fn(() => ({ body: farequoteFieldCaps })), + }; + mlClusterClientNonRollupMock = { - callAsCurrentUser: callAsNonRollupMock, - callAsInternalUser: callAsNonRollupMock, + asCurrentUser: asNonRollupMock, + asInternalUser: asNonRollupMock, + }; + + const callAsRollupMock = { + fieldCaps: jest.fn(() => ({ body: cloudwatchFieldCaps })), + rollup: { getRollupIndexCaps: jest.fn(() => Promise.resolve({ body: rollupCaps })) }, }; - const callAsRollupMock = jest.fn((action: string) => { - switch (action) { - case 'fieldCaps': - return cloudwatchFieldCaps; - case 'ml.rollupIndexCapabilities': - return Promise.resolve(rollupCaps); - } - }); mlClusterClientRollupMock = { - callAsCurrentUser: callAsRollupMock, - callAsInternalUser: callAsRollupMock, + asCurrentUser: callAsRollupMock, + asInternalUser: callAsRollupMock, }; savedObjectsClientMock = { diff --git 
a/x-pack/plugins/ml/server/models/job_service/new_job_caps/new_job_caps.ts b/x-pack/plugins/ml/server/models/job_service/new_job_caps/new_job_caps.ts index 5616dade53a78..7559111d012d0 100644 --- a/x-pack/plugins/ml/server/models/job_service/new_job_caps/new_job_caps.ts +++ b/x-pack/plugins/ml/server/models/job_service/new_job_caps/new_job_caps.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -import { ILegacyScopedClusterClient, SavedObjectsClientContract } from 'kibana/server'; +import { IScopedClusterClient, SavedObjectsClientContract } from 'kibana/server'; import { Aggregation, Field, NewJobCaps } from '../../../../common/types/fields'; import { fieldServiceProvider } from './field_service'; @@ -12,18 +12,13 @@ interface NewJobCapsResponse { [indexPattern: string]: NewJobCaps; } -export function newJobCapsProvider(mlClusterClient: ILegacyScopedClusterClient) { +export function newJobCapsProvider(client: IScopedClusterClient) { async function newJobCaps( indexPattern: string, isRollup: boolean = false, savedObjectsClient: SavedObjectsClientContract ): Promise { - const fieldService = fieldServiceProvider( - indexPattern, - isRollup, - mlClusterClient, - savedObjectsClient - ); + const fieldService = fieldServiceProvider(indexPattern, isRollup, client, savedObjectsClient); const { aggs, fields } = await fieldService.getData(); convertForStringify(aggs, fields); diff --git a/x-pack/plugins/ml/server/models/job_service/new_job_caps/rollup.ts b/x-pack/plugins/ml/server/models/job_service/new_job_caps/rollup.ts index f3a9bd49c27d6..b7f4c8af62283 100644 --- a/x-pack/plugins/ml/server/models/job_service/new_job_caps/rollup.ts +++ b/x-pack/plugins/ml/server/models/job_service/new_job_caps/rollup.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -import { ILegacyScopedClusterClient } from 'kibana/server'; +import { IScopedClusterClient } from 'kibana/server'; import { SavedObject } from 'kibana/server'; import { IndexPatternAttributes } from 'src/plugins/data/server'; import { SavedObjectsClientContract } from 'kibana/server'; @@ -22,7 +22,7 @@ export interface RollupJob { export async function rollupServiceProvider( indexPattern: string, - { callAsCurrentUser }: ILegacyScopedClusterClient, + { asCurrentUser }: IScopedClusterClient, savedObjectsClient: SavedObjectsClientContract ) { const rollupIndexPatternObject = await loadRollupIndexPattern(indexPattern, savedObjectsClient); @@ -32,8 +32,8 @@ export async function rollupServiceProvider( if (rollupIndexPatternObject !== null) { const parsedTypeMetaData = JSON.parse(rollupIndexPatternObject.attributes.typeMeta); const rollUpIndex: string = parsedTypeMetaData.params.rollup_index; - const rollupCaps = await callAsCurrentUser('ml.rollupIndexCapabilities', { - indexPattern: rollUpIndex, + const { body: rollupCaps } = await asCurrentUser.rollup.getRollupIndexCaps({ + index: rollUpIndex, }); const indexRollupCaps = rollupCaps[rollUpIndex]; diff --git a/x-pack/plugins/ml/server/models/job_validation/job_validation.test.ts b/x-pack/plugins/ml/server/models/job_validation/job_validation.test.ts index 1c74953e4dda9..810d0ae9dcd87 100644 --- a/x-pack/plugins/ml/server/models/job_validation/job_validation.test.ts +++ b/x-pack/plugins/ml/server/models/job_validation/job_validation.test.ts @@ -4,48 +4,31 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { ILegacyScopedClusterClient } from 'kibana/server'; +import { IScopedClusterClient } from 'kibana/server'; import { validateJob, ValidateJobPayload } from './job_validation'; import { JobValidationMessage } from '../../../common/constants/messages'; -const mlClusterClient = ({ - // mock callAsCurrentUser - callAsCurrentUser: (method: string) => { - return new Promise((resolve) => { - if (method === 'fieldCaps') { - resolve({ fields: [] }); - return; - } else if (method === 'ml.info') { - resolve({ +const callAs = { + fieldCaps: () => Promise.resolve({ body: { fields: [] } }), + ml: { + info: () => + Promise.resolve({ + body: { limits: { effective_max_model_memory_limit: '100MB', max_model_memory_limit: '1GB', }, - }); - } - resolve({}); - }) as Promise; + }, + }), }, + search: () => Promise.resolve({ body: {} }), +}; - // mock callAsInternalUser - callAsInternalUser: (method: string) => { - return new Promise((resolve) => { - if (method === 'fieldCaps') { - resolve({ fields: [] }); - return; - } else if (method === 'ml.info') { - resolve({ - limits: { - effective_max_model_memory_limit: '100MB', - max_model_memory_limit: '1GB', - }, - }); - } - resolve({}); - }) as Promise; - }, -} as unknown) as ILegacyScopedClusterClient; +const mlClusterClient = ({ + asCurrentUser: callAs, + asInternalUser: callAs, +} as unknown) as IScopedClusterClient; // Note: The tests cast `payload` as any // so we can simulate possible runtime payloads diff --git a/x-pack/plugins/ml/server/models/job_validation/job_validation.ts b/x-pack/plugins/ml/server/models/job_validation/job_validation.ts index 6692ecb22bd9e..9e272f1f770fc 100644 --- a/x-pack/plugins/ml/server/models/job_validation/job_validation.ts +++ b/x-pack/plugins/ml/server/models/job_validation/job_validation.ts @@ -6,7 +6,7 @@ import { i18n } from '@kbn/i18n'; import Boom from 'boom'; -import { ILegacyScopedClusterClient } from 'kibana/server'; +import { IScopedClusterClient } from 'kibana/server'; import { TypeOf } from '@kbn/config-schema'; import { fieldsServiceProvider } from '../fields_service'; import { renderTemplate } from '../../../common/util/string_utils'; @@ -34,7 +34,7 @@ export type ValidateJobPayload = TypeOf; * @kbn/config-schema has checked the payload {@link validateJobSchema}. 
*/ export async function validateJob( - mlClusterClient: ILegacyScopedClusterClient, + client: IScopedClusterClient, payload: ValidateJobPayload, kbnVersion = 'current', isSecurityDisabled?: boolean @@ -63,8 +63,8 @@ export async function validateJob( // if no duration was part of the request, fall back to finding out // the time range of the time field of the index, but also check first // if the time field is a valid field of type 'date' using isValidTimeField() - if (typeof duration === 'undefined' && (await isValidTimeField(mlClusterClient, job))) { - const fs = fieldsServiceProvider(mlClusterClient); + if (typeof duration === 'undefined' && (await isValidTimeField(client, job))) { + const fs = fieldsServiceProvider(client); const index = job.datafeed_config.indices.join(','); const timeField = job.data_description.time_field; const timeRange = await fs.getTimeFieldRange(index, timeField, job.datafeed_config.query); @@ -79,24 +79,22 @@ export async function validateJob( // next run only the cardinality tests to find out if they trigger an error // so we can decide later whether certain additional tests should be run - const cardinalityMessages = await validateCardinality(mlClusterClient, job); + const cardinalityMessages = await validateCardinality(client, job); validationMessages.push(...cardinalityMessages); const cardinalityError = cardinalityMessages.some((m) => { return messages[m.id as MessageId].status === VALIDATION_STATUS.ERROR; }); validationMessages.push( - ...(await validateBucketSpan(mlClusterClient, job, duration, isSecurityDisabled)) + ...(await validateBucketSpan(client, job, duration, isSecurityDisabled)) ); - validationMessages.push(...(await validateTimeRange(mlClusterClient, job, duration))); + validationMessages.push(...(await validateTimeRange(client, job, duration))); // only run the influencer and model memory limit checks // if cardinality checks didn't return a message with an error level if (cardinalityError === false) { validationMessages.push(...(await validateInfluencers(job))); - validationMessages.push( - ...(await validateModelMemoryLimit(mlClusterClient, job, duration)) - ); + validationMessages.push(...(await validateModelMemoryLimit(client, job, duration))); } } else { validationMessages = basicValidation.messages; diff --git a/x-pack/plugins/ml/server/models/job_validation/validate_bucket_span.js b/x-pack/plugins/ml/server/models/job_validation/validate_bucket_span.js index 11f8d8967c4e0..315ad09176571 100644 --- a/x-pack/plugins/ml/server/models/job_validation/validate_bucket_span.js +++ b/x-pack/plugins/ml/server/models/job_validation/validate_bucket_span.js @@ -45,7 +45,7 @@ const pickBucketSpan = (bucketSpans) => { return bucketSpans[i]; }; -export async function validateBucketSpan(mlClusterClient, job, duration) { +export async function validateBucketSpan(client, job, duration) { validateJobObject(job); // if there is no duration, do not run the estimate test @@ -117,7 +117,7 @@ export async function validateBucketSpan(mlClusterClient, job, duration) { try { const estimations = estimatorConfigs.map((data) => { return new Promise((resolve) => { - estimateBucketSpanFactory(mlClusterClient)(data) + estimateBucketSpanFactory(client)(data) .then(resolve) // this catch gets triggered when the estimation code runs without error // but isn't able to come up with a bucket span estimation. 
diff --git a/x-pack/plugins/ml/server/models/job_validation/validate_bucket_span.test.ts b/x-pack/plugins/ml/server/models/job_validation/validate_bucket_span.test.ts index f9145ab576d71..80418d590af76 100644 --- a/x-pack/plugins/ml/server/models/job_validation/validate_bucket_span.test.ts +++ b/x-pack/plugins/ml/server/models/job_validation/validate_bucket_span.test.ts @@ -24,12 +24,12 @@ import mockItSearchResponse from './__mocks__/mock_it_search_response.json'; const mlClusterClientFactory = (mockSearchResponse: any) => { const callAs = () => { return new Promise((resolve) => { - resolve(mockSearchResponse); + resolve({ body: mockSearchResponse }); }); }; return { - callAsCurrentUser: callAs, - callAsInternalUser: callAs, + asCurrentUser: callAs, + asInternalUser: callAs, }; }; diff --git a/x-pack/plugins/ml/server/models/job_validation/validate_cardinality.test.ts b/x-pack/plugins/ml/server/models/job_validation/validate_cardinality.test.ts index 16ee70ad9efde..1be0751e15f22 100644 --- a/x-pack/plugins/ml/server/models/job_validation/validate_cardinality.test.ts +++ b/x-pack/plugins/ml/server/models/job_validation/validate_cardinality.test.ts @@ -6,7 +6,7 @@ import cloneDeep from 'lodash/cloneDeep'; -import { ILegacyScopedClusterClient } from 'kibana/server'; +import { IScopedClusterClient } from 'kibana/server'; import { CombinedJob } from '../../../common/types/anomaly_detection_jobs'; @@ -24,21 +24,21 @@ const mockResponses = { const mlClusterClientFactory = ( responses: Record, fail = false -): ILegacyScopedClusterClient => { - const callAs = (requestName: string) => { - return new Promise((resolve, reject) => { - const response = responses[requestName]; - if (fail) { - reject(response); - } else { - resolve(response); - } - }) as Promise; +): IScopedClusterClient => { + const callAs = { + search: () => Promise.resolve({ body: responses.search }), + fieldCaps: () => Promise.resolve({ body: responses.fieldCaps }), }; - return { - callAsCurrentUser: callAs, - callAsInternalUser: callAs, + + const callAsFail = { + search: () => Promise.reject({ body: {} }), + fieldCaps: () => Promise.reject({ body: {} }), }; + + return ({ + asCurrentUser: fail === false ? callAs : callAsFail, + asInternalUser: fail === false ? callAs : callAsFail, + } as unknown) as IScopedClusterClient; }; describe('ML - validateCardinality', () => { diff --git a/x-pack/plugins/ml/server/models/job_validation/validate_cardinality.ts b/x-pack/plugins/ml/server/models/job_validation/validate_cardinality.ts index 1545c4c0062ec..c5822b863c83d 100644 --- a/x-pack/plugins/ml/server/models/job_validation/validate_cardinality.ts +++ b/x-pack/plugins/ml/server/models/job_validation/validate_cardinality.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -import { ILegacyScopedClusterClient } from 'kibana/server'; +import { IScopedClusterClient } from 'kibana/server'; import { DataVisualizer } from '../data_visualizer'; import { validateJobObject } from './validate_job_object'; @@ -43,12 +43,9 @@ type Validator = (obj: { messages: Messages; }>; -const validateFactory = ( - mlClusterClient: ILegacyScopedClusterClient, - job: CombinedJob -): Validator => { - const { callAsCurrentUser } = mlClusterClient; - const dv = new DataVisualizer(mlClusterClient); +const validateFactory = (client: IScopedClusterClient, job: CombinedJob): Validator => { + const { asCurrentUser } = client; + const dv = new DataVisualizer(client); const modelPlotConfigTerms = job?.model_plot_config?.terms ?? 
''; const modelPlotConfigFieldCount = @@ -77,7 +74,7 @@ const validateFactory = ( ] as string[]; // use fieldCaps endpoint to get data about whether fields are aggregatable - const fieldCaps = await callAsCurrentUser('fieldCaps', { + const { body: fieldCaps } = await asCurrentUser.fieldCaps({ index: job.datafeed_config.indices.join(','), fields: uniqueFieldNames, }); @@ -154,7 +151,7 @@ const validateFactory = ( }; export async function validateCardinality( - mlClusterClient: ILegacyScopedClusterClient, + client: IScopedClusterClient, job?: CombinedJob ): Promise | never { const messages: Messages = []; @@ -174,7 +171,7 @@ export async function validateCardinality( } // validate({ type, isInvalid }) asynchronously returns an array of validation messages - const validate = validateFactory(mlClusterClient, job); + const validate = validateFactory(client, job); const modelPlotEnabled = job.model_plot_config?.enabled ?? false; diff --git a/x-pack/plugins/ml/server/models/job_validation/validate_model_memory_limit.test.ts b/x-pack/plugins/ml/server/models/job_validation/validate_model_memory_limit.test.ts index 6ffb0e320982b..35792c66e66ec 100644 --- a/x-pack/plugins/ml/server/models/job_validation/validate_model_memory_limit.test.ts +++ b/x-pack/plugins/ml/server/models/job_validation/validate_model_memory_limit.test.ts @@ -4,9 +4,9 @@ * you may not use this file except in compliance with the Elastic License. */ -import { ILegacyScopedClusterClient } from 'kibana/server'; +import { IScopedClusterClient } from 'kibana/server'; import { CombinedJob, Detector } from '../../../common/types/anomaly_detection_jobs'; -import { ModelMemoryEstimate } from '../calculate_model_memory_limit/calculate_model_memory_limit'; +import { ModelMemoryEstimateResponse } from '../calculate_model_memory_limit/calculate_model_memory_limit'; import { validateModelMemoryLimit } from './validate_model_memory_limit'; describe('ML - validateModelMemoryLimit', () => { @@ -65,44 +65,36 @@ describe('ML - validateModelMemoryLimit', () => { }; // mock estimate model memory - const modelMemoryEstimateResponse: ModelMemoryEstimate = { + const modelMemoryEstimateResponse: ModelMemoryEstimateResponse = { model_memory_estimate: '40mb', }; interface MockAPICallResponse { - 'ml.estimateModelMemory'?: ModelMemoryEstimate; + 'ml.estimateModelMemory'?: ModelMemoryEstimateResponse; } - // mock callAsCurrentUser + // mock asCurrentUser // used in three places: // - to retrieve the info endpoint // - to search for cardinality of split field // - to retrieve field capabilities used in search for split field cardinality const getMockMlClusterClient = ({ 'ml.estimateModelMemory': estimateModelMemory, - }: MockAPICallResponse = {}): ILegacyScopedClusterClient => { - const callAs = (call: string) => { - if (typeof call === undefined) { - return Promise.reject(); - } - - let response = {}; - if (call === 'ml.info') { - response = mlInfoResponse; - } else if (call === 'search') { - response = cardinalitySearchResponse; - } else if (call === 'fieldCaps') { - response = fieldCapsResponse; - } else if (call === 'ml.estimateModelMemory') { - response = estimateModelMemory || modelMemoryEstimateResponse; - } - return Promise.resolve(response); + }: MockAPICallResponse = {}): IScopedClusterClient => { + const callAs = { + ml: { + info: () => Promise.resolve({ body: mlInfoResponse }), + estimateModelMemory: () => + Promise.resolve({ body: estimateModelMemory || modelMemoryEstimateResponse }), + }, + search: () => Promise.resolve({ body: 
cardinalitySearchResponse }), + fieldCaps: () => Promise.resolve({ body: fieldCapsResponse }), }; - return { - callAsCurrentUser: callAs, - callAsInternalUser: callAs, - }; + return ({ + asCurrentUser: callAs, + asInternalUser: callAs, + } as unknown) as IScopedClusterClient; }; function getJobConfig(influencers: string[] = [], detectors: Detector[] = []) { diff --git a/x-pack/plugins/ml/server/models/job_validation/validate_model_memory_limit.ts b/x-pack/plugins/ml/server/models/job_validation/validate_model_memory_limit.ts index 728342294c424..9733e17e0f379 100644 --- a/x-pack/plugins/ml/server/models/job_validation/validate_model_memory_limit.ts +++ b/x-pack/plugins/ml/server/models/job_validation/validate_model_memory_limit.ts @@ -5,7 +5,7 @@ */ import numeral from '@elastic/numeral'; -import { ILegacyScopedClusterClient } from 'kibana/server'; +import { IScopedClusterClient } from 'kibana/server'; import { CombinedJob } from '../../../common/types/anomaly_detection_jobs'; import { validateJobObject } from './validate_job_object'; import { calculateModelMemoryLimitProvider } from '../calculate_model_memory_limit'; @@ -16,11 +16,11 @@ import { MlInfoResponse } from '../../../common/types/ml_server_info'; const MODEL_MEMORY_LIMIT_MINIMUM_BYTES = 1048576; export async function validateModelMemoryLimit( - mlClusterClient: ILegacyScopedClusterClient, + client: IScopedClusterClient, job: CombinedJob, duration?: { start?: number; end?: number } ) { - const { callAsInternalUser } = mlClusterClient; + const { asInternalUser } = client; validateJobObject(job); // retrieve the model memory limit specified by the user in the job config. @@ -52,12 +52,12 @@ export async function validateModelMemoryLimit( // retrieve the max_model_memory_limit value from the server // this will be unset unless the user has set this on their cluster - const info = (await callAsInternalUser('ml.info')) as MlInfoResponse; - const maxModelMemoryLimit = info.limits.max_model_memory_limit?.toUpperCase(); - const effectiveMaxModelMemoryLimit = info.limits.effective_max_model_memory_limit?.toUpperCase(); + const { body } = await asInternalUser.ml.info(); + const maxModelMemoryLimit = body.limits.max_model_memory_limit?.toUpperCase(); + const effectiveMaxModelMemoryLimit = body.limits.effective_max_model_memory_limit?.toUpperCase(); if (runCalcModelMemoryTest) { - const { modelMemoryLimit } = await calculateModelMemoryLimitProvider(mlClusterClient)( + const { modelMemoryLimit } = await calculateModelMemoryLimitProvider(client)( job.analysis_config, job.datafeed_config.indices.join(','), job.datafeed_config.query, diff --git a/x-pack/plugins/ml/server/models/job_validation/validate_time_range.test.ts b/x-pack/plugins/ml/server/models/job_validation/validate_time_range.test.ts index a45be189ba3d8..12458af0521a9 100644 --- a/x-pack/plugins/ml/server/models/job_validation/validate_time_range.test.ts +++ b/x-pack/plugins/ml/server/models/job_validation/validate_time_range.test.ts @@ -6,7 +6,7 @@ import cloneDeep from 'lodash/cloneDeep'; -import { ILegacyScopedClusterClient } from 'kibana/server'; +import { IScopedClusterClient } from 'kibana/server'; import { CombinedJob } from '../../../common/types/anomaly_detection_jobs'; @@ -21,16 +21,15 @@ const mockSearchResponse = { search: mockTimeRange, }; -const mlClusterClientFactory = (resp: any): ILegacyScopedClusterClient => { - const callAs = (path: string) => { - return new Promise((resolve) => { - resolve(resp[path]); - }) as Promise; - }; - return { - callAsCurrentUser: callAs, 
- callAsInternalUser: callAs, +const mlClusterClientFactory = (response: any): IScopedClusterClient => { + const callAs = { + fieldCaps: () => Promise.resolve({ body: response.fieldCaps }), + search: () => Promise.resolve({ body: response.search }), }; + return ({ + asCurrentUser: callAs, + asInternalUser: callAs, + } as unknown) as IScopedClusterClient; }; function getMinimalValidJob() { diff --git a/x-pack/plugins/ml/server/models/job_validation/validate_time_range.ts b/x-pack/plugins/ml/server/models/job_validation/validate_time_range.ts index a94ceffa90273..83d9621898f96 100644 --- a/x-pack/plugins/ml/server/models/job_validation/validate_time_range.ts +++ b/x-pack/plugins/ml/server/models/job_validation/validate_time_range.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -import { ILegacyScopedClusterClient } from 'kibana/server'; +import { IScopedClusterClient } from 'kibana/server'; import { ES_FIELD_TYPES } from '../../../../../../src/plugins/data/server'; import { parseInterval } from '../../../common/util/parse_interval'; import { CombinedJob } from '../../../common/types/anomaly_detection_jobs'; @@ -26,15 +26,12 @@ const BUCKET_SPAN_COMPARE_FACTOR = 25; const MIN_TIME_SPAN_MS = 7200000; const MIN_TIME_SPAN_READABLE = '2 hours'; -export async function isValidTimeField( - { callAsCurrentUser }: ILegacyScopedClusterClient, - job: CombinedJob -) { +export async function isValidTimeField({ asCurrentUser }: IScopedClusterClient, job: CombinedJob) { const index = job.datafeed_config.indices.join(','); const timeField = job.data_description.time_field; // check if time_field is of type 'date' or 'date_nanos' - const fieldCaps = await callAsCurrentUser('fieldCaps', { + const { body: fieldCaps } = await asCurrentUser.fieldCaps({ index, fields: [timeField], }); @@ -47,7 +44,7 @@ export async function isValidTimeField( } export async function validateTimeRange( - mlClientCluster: ILegacyScopedClusterClient, + mlClientCluster: IScopedClusterClient, job: CombinedJob, timeRange?: Partial ) { diff --git a/x-pack/plugins/ml/server/models/results_service/get_partition_fields_values.ts b/x-pack/plugins/ml/server/models/results_service/get_partition_fields_values.ts index 9c0efe259844c..76dc68d2b59e3 100644 --- a/x-pack/plugins/ml/server/models/results_service/get_partition_fields_values.ts +++ b/x-pack/plugins/ml/server/models/results_service/get_partition_fields_values.ts @@ -5,7 +5,7 @@ */ import Boom from 'boom'; -import { ILegacyScopedClusterClient } from 'kibana/server'; +import { IScopedClusterClient } from 'kibana/server'; import { PARTITION_FIELDS } from '../../../common/constants/anomalies'; import { PartitionFieldsType } from '../../../common/types/anomalies'; import { ML_RESULTS_INDEX_PATTERN } from '../../../common/constants/index_patterns'; @@ -74,9 +74,7 @@ function getFieldObject(fieldType: PartitionFieldsType, aggs: any) { : {}; } -export const getPartitionFieldsValuesFactory = ({ - callAsInternalUser, -}: ILegacyScopedClusterClient) => +export const getPartitionFieldsValuesFactory = ({ asInternalUser }: IScopedClusterClient) => /** * Gets the record of partition fields with possible values that fit the provided queries. 
* @param jobId - Job ID @@ -92,7 +90,7 @@ export const getPartitionFieldsValuesFactory = ({ earliestMs: number, latestMs: number ) { - const jobsResponse = await callAsInternalUser('ml.jobs', { jobId: [jobId] }); + const { body: jobsResponse } = await asInternalUser.ml.getJobs({ job_id: jobId }); if (jobsResponse.count === 0 || jobsResponse.jobs === undefined) { throw Boom.notFound(`Job with the id "${jobId}" not found`); } @@ -101,7 +99,7 @@ export const getPartitionFieldsValuesFactory = ({ const isModelPlotEnabled = job?.model_plot_config?.enabled; - const resp = await callAsInternalUser('search', { + const { body } = await asInternalUser.search({ index: ML_RESULTS_INDEX_PATTERN, size: 0, body: { @@ -151,7 +149,7 @@ export const getPartitionFieldsValuesFactory = ({ return PARTITION_FIELDS.reduce((acc, key) => { return { ...acc, - ...getFieldObject(key, resp.aggregations), + ...getFieldObject(key, body.aggregations), }; }, {}); }; diff --git a/x-pack/plugins/ml/server/models/results_service/results_service.ts b/x-pack/plugins/ml/server/models/results_service/results_service.ts index 7be8bac61e69d..190b5d99309d7 100644 --- a/x-pack/plugins/ml/server/models/results_service/results_service.ts +++ b/x-pack/plugins/ml/server/models/results_service/results_service.ts @@ -9,7 +9,7 @@ import slice from 'lodash/slice'; import get from 'lodash/get'; import moment from 'moment'; import { SearchResponse } from 'elasticsearch'; -import { ILegacyScopedClusterClient } from 'kibana/server'; +import { IScopedClusterClient } from 'kibana/server'; import Boom from 'boom'; import { buildAnomalyTableItems } from './build_anomaly_table_items'; import { ML_RESULTS_INDEX_PATTERN } from '../../../common/constants/index_patterns'; @@ -40,8 +40,8 @@ interface Influencer { fieldValue: any; } -export function resultsServiceProvider(mlClusterClient: ILegacyScopedClusterClient) { - const { callAsInternalUser } = mlClusterClient; +export function resultsServiceProvider(client: IScopedClusterClient) { + const { asInternalUser } = client; // Obtains data for the anomalies table, aggregating anomalies by day or hour as requested. // Return an Object with properties 'anomalies' and 'interval' (interval used to aggregate anomalies, // one of day, hour or second. 
Note 'auto' can be provided as the aggregationInterval in the request, @@ -144,7 +144,7 @@ export function resultsServiceProvider(mlClusterClient: ILegacyScopedClusterClie }); } - const resp: SearchResponse = await callAsInternalUser('search', { + const { body } = await asInternalUser.search>({ index: ML_RESULTS_INDEX_PATTERN, rest_total_hits_as_int: true, size: maxRecords, @@ -178,9 +178,9 @@ export function resultsServiceProvider(mlClusterClient: ILegacyScopedClusterClie anomalies: [], interval: 'second', }; - if (resp.hits.total !== 0) { + if (body.hits.total !== 0) { let records: AnomalyRecordDoc[] = []; - resp.hits.hits.forEach((hit) => { + body.hits.hits.forEach((hit) => { records.push(hit._source); }); @@ -298,8 +298,8 @@ export function resultsServiceProvider(mlClusterClient: ILegacyScopedClusterClie }, }; - const resp = await callAsInternalUser('search', query); - const maxScore = get(resp, ['aggregations', 'max_score', 'value'], null); + const { body } = await asInternalUser.search(query); + const maxScore = get(body, ['aggregations', 'max_score', 'value'], null); return { maxScore }; } @@ -336,7 +336,7 @@ export function resultsServiceProvider(mlClusterClient: ILegacyScopedClusterClie // Size of job terms agg, consistent with maximum number of jobs supported by Java endpoints. const maxJobs = 10000; - const resp = await callAsInternalUser('search', { + const { body } = await asInternalUser.search({ index: ML_RESULTS_INDEX_PATTERN, size: 0, body: { @@ -364,7 +364,7 @@ export function resultsServiceProvider(mlClusterClient: ILegacyScopedClusterClie }); const bucketsByJobId: Array<{ key: string; maxTimestamp: { value?: number } }> = get( - resp, + body, ['aggregations', 'byJobId', 'buckets'], [] ); @@ -380,7 +380,7 @@ export function resultsServiceProvider(mlClusterClient: ILegacyScopedClusterClie // from the given index and job ID. // Returned response consists of a list of examples against category ID. async function getCategoryExamples(jobId: string, categoryIds: any, maxExamples: number) { - const resp = await callAsInternalUser('search', { + const { body } = await asInternalUser.search({ index: ML_RESULTS_INDEX_PATTERN, rest_total_hits_as_int: true, size: ANOMALIES_TABLE_DEFAULT_QUERY_SIZE, // Matches size of records in anomaly summary table. @@ -394,8 +394,8 @@ export function resultsServiceProvider(mlClusterClient: ILegacyScopedClusterClie }); const examplesByCategoryId: { [key: string]: any } = {}; - if (resp.hits.total !== 0) { - resp.hits.hits.forEach((hit: any) => { + if (body.hits.total !== 0) { + body.hits.hits.forEach((hit: any) => { if (maxExamples) { examplesByCategoryId[hit._source.category_id] = slice( hit._source.examples, @@ -415,7 +415,7 @@ export function resultsServiceProvider(mlClusterClient: ILegacyScopedClusterClie // Returned response contains four properties - categoryId, regex, examples // and terms (space delimited String of the common tokens matched in values of the category). 
async function getCategoryDefinition(jobId: string, categoryId: string) { - const resp = await callAsInternalUser('search', { + const { body } = await asInternalUser.search({ index: ML_RESULTS_INDEX_PATTERN, rest_total_hits_as_int: true, size: 1, @@ -429,8 +429,8 @@ export function resultsServiceProvider(mlClusterClient: ILegacyScopedClusterClie }); const definition = { categoryId, terms: null, regex: null, examples: [] }; - if (resp.hits.total !== 0) { - const source = resp.hits.hits[0]._source; + if (body.hits.total !== 0) { + const source = body.hits.hits[0]._source; definition.categoryId = source.category_id; definition.regex = source.regex; definition.terms = source.terms; @@ -456,7 +456,7 @@ export function resultsServiceProvider(mlClusterClient: ILegacyScopedClusterClie }, }); } - const results: SearchResponse = await callAsInternalUser('search', { + const { body } = await asInternalUser.search>({ index: ML_RESULTS_INDEX_PATTERN, body: { query: { @@ -473,7 +473,7 @@ export function resultsServiceProvider(mlClusterClient: ILegacyScopedClusterClie }, }, }); - return results ? results.hits.hits.map((r) => r._source) : []; + return body ? body.hits.hits.map((r) => r._source) : []; } async function getCategoryStoppedPartitions( @@ -485,15 +485,15 @@ export function resultsServiceProvider(mlClusterClient: ILegacyScopedClusterClie }; // first determine from job config if stop_on_warn is true // if false return [] - const jobConfigResponse: MlJobsResponse = await callAsInternalUser('ml.jobs', { - jobId: jobIds, + const { body } = await asInternalUser.ml.getJobs({ + job_id: jobIds.join(), }); - if (!jobConfigResponse || jobConfigResponse.jobs.length < 1) { + if (!body || body.jobs.length < 1) { throw Boom.notFound(`Unable to find anomaly detector jobs ${jobIds.join(', ')}`); } - const jobIdsWithStopOnWarnSet = jobConfigResponse.jobs + const jobIdsWithStopOnWarnSet = body.jobs .filter( (jobConfig) => jobConfig.analysis_config?.per_partition_categorization?.stop_on_warn === true @@ -543,7 +543,7 @@ export function resultsServiceProvider(mlClusterClient: ILegacyScopedClusterClie }, }, ]; - const results: SearchResponse = await callAsInternalUser('search', { + const { body: results } = await asInternalUser.search>({ index: ML_RESULTS_INDEX_PATTERN, size: 0, body: { @@ -594,7 +594,7 @@ export function resultsServiceProvider(mlClusterClient: ILegacyScopedClusterClie getCategoryExamples, getLatestBucketTimestampByJob, getMaxAnomalyScore, - getPartitionFieldsValues: getPartitionFieldsValuesFactory(mlClusterClient), + getPartitionFieldsValues: getPartitionFieldsValuesFactory(client), getCategorizerStats, getCategoryStoppedPartitions, }; diff --git a/x-pack/plugins/ml/server/plugin.ts b/x-pack/plugins/ml/server/plugin.ts index 76128341e6ddc..39672f5b188bc 100644 --- a/x-pack/plugins/ml/server/plugin.ts +++ b/x-pack/plugins/ml/server/plugin.ts @@ -9,18 +9,16 @@ import { CoreSetup, CoreStart, Plugin, - ILegacyScopedClusterClient, KibanaRequest, Logger, PluginInitializerContext, - ILegacyCustomClusterClient, CapabilitiesStart, + IClusterClient, } from 'kibana/server'; import { PluginsSetup, RouteInitialization } from './types'; import { PLUGIN_ID, PLUGIN_ICON } from '../common/constants/app'; import { MlCapabilities } from '../common/types/capabilities'; -import { elasticsearchJsPlugin } from './client/elasticsearch_ml'; import { initMlTelemetry } from './lib/telemetry'; import { initMlServerLog } from './client/log'; import { initSampleDataSets } from './lib/sample_data_sets'; @@ -50,17 +48,7 @@ import 
{ setupCapabilitiesSwitcher } from './lib/capabilities'; import { registerKibanaSettings } from './lib/register_settings'; import { inferenceRoutes } from './routes/inference'; -declare module 'kibana/server' { - interface RequestHandlerContext { - [PLUGIN_ID]?: { - mlClient: ILegacyScopedClusterClient; - }; - } -} - -export interface MlPluginSetup extends SharedServices { - mlClient: ILegacyCustomClusterClient; -} +export type MlPluginSetup = SharedServices; export type MlPluginStart = void; export class MlServerPlugin implements Plugin { @@ -68,6 +56,7 @@ export class MlServerPlugin implements Plugin { - return { - mlClient: mlClient.asScoped(request), - }; - }); - const routeInit: RouteInitialization = { router: coreSetup.http.createRouter(), mlLicense: this.mlLicense, @@ -176,13 +154,19 @@ export class MlServerPlugin implements Plugin this.clusterClient + ), }; } public start(coreStart: CoreStart): MlPluginStart { this.capabilities = coreStart.capabilities; + this.clusterClient = coreStart.elasticsearch.client; } public stop() { diff --git a/x-pack/plugins/ml/server/routes/annotations.ts b/x-pack/plugins/ml/server/routes/annotations.ts index a6de80bb7e5e2..5c4b36164fbb0 100644 --- a/x-pack/plugins/ml/server/routes/annotations.ts +++ b/x-pack/plugins/ml/server/routes/annotations.ts @@ -58,9 +58,9 @@ export function annotationRoutes( tags: ['access:ml:canGetAnnotations'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { - const { getAnnotations } = annotationServiceProvider(legacyClient); + const { getAnnotations } = annotationServiceProvider(client); const resp = await getAnnotations(request.body); return response.ok({ @@ -91,14 +91,14 @@ export function annotationRoutes( tags: ['access:ml:canCreateAnnotation'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { - const annotationsFeatureAvailable = await isAnnotationsFeatureAvailable(legacyClient); + const annotationsFeatureAvailable = await isAnnotationsFeatureAvailable(client); if (annotationsFeatureAvailable === false) { throw getAnnotationsFeatureUnavailableErrorMessage(); } - const { indexAnnotation } = annotationServiceProvider(legacyClient); + const { indexAnnotation } = annotationServiceProvider(client); const currentUser = securityPlugin !== undefined ? 
securityPlugin.authc.getCurrentUser(request) : {}; @@ -134,15 +134,15 @@ export function annotationRoutes( tags: ['access:ml:canDeleteAnnotation'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { - const annotationsFeatureAvailable = await isAnnotationsFeatureAvailable(legacyClient); + const annotationsFeatureAvailable = await isAnnotationsFeatureAvailable(client); if (annotationsFeatureAvailable === false) { throw getAnnotationsFeatureUnavailableErrorMessage(); } const annotationId = request.params.annotationId; - const { deleteAnnotation } = annotationServiceProvider(legacyClient); + const { deleteAnnotation } = annotationServiceProvider(client); const resp = await deleteAnnotation(annotationId); return response.ok({ diff --git a/x-pack/plugins/ml/server/routes/anomaly_detectors.ts b/x-pack/plugins/ml/server/routes/anomaly_detectors.ts index 0027bec910134..251e465eafccc 100644 --- a/x-pack/plugins/ml/server/routes/anomaly_detectors.ts +++ b/x-pack/plugins/ml/server/routes/anomaly_detectors.ts @@ -5,6 +5,7 @@ */ import { schema } from '@kbn/config-schema'; +import { RequestParams } from '@elastic/elasticsearch'; import { wrapError } from '../client/error_wrapper'; import { RouteInitialization } from '../types'; import { @@ -20,6 +21,7 @@ import { getModelSnapshotsSchema, updateModelSnapshotSchema, } from './schemas/anomaly_detectors_schema'; + /** * Routes for the anomaly detectors */ @@ -42,11 +44,11 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canGetJobs'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ response, client }) => { try { - const results = await legacyClient.callAsInternalUser('ml.jobs'); + const { body } = await client.asInternalUser.ml.getJobs(); return response.ok({ - body: results, + body, }); } catch (e) { return response.customError(wrapError(e)); @@ -73,12 +75,12 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canGetJobs'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { const { jobId } = request.params; - const results = await legacyClient.callAsInternalUser('ml.jobs', { jobId }); + const { body } = await client.asInternalUser.ml.getJobs({ job_id: jobId }); return response.ok({ - body: results, + body, }); } catch (e) { return response.customError(wrapError(e)); @@ -104,11 +106,11 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canGetJobs'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, response }) => { try { - const results = await legacyClient.callAsInternalUser('ml.jobStats'); + const { body } = await client.asInternalUser.ml.getJobStats(); return response.ok({ - body: results, + body, }); } catch (e) { return response.customError(wrapError(e)); @@ -135,12 +137,12 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canGetJobs'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { const { jobId } = request.params; - const results = await legacyClient.callAsInternalUser('ml.jobStats', { 
jobId }); + const { body } = await client.asInternalUser.ml.getJobStats({ job_id: jobId }); return response.ok({ - body: results, + body, }); } catch (e) { return response.customError(wrapError(e)); @@ -171,15 +173,15 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canCreateJob'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { const { jobId } = request.params; - const results = await legacyClient.callAsInternalUser('ml.addJob', { - jobId, + const { body } = await client.asInternalUser.ml.putJob({ + job_id: jobId, body: request.body, }); return response.ok({ - body: results, + body, }); } catch (e) { return response.customError(wrapError(e)); @@ -208,15 +210,15 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canUpdateJob'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { const { jobId } = request.params; - const results = await legacyClient.callAsInternalUser('ml.updateJob', { - jobId, + const { body } = await client.asInternalUser.ml.updateJob({ + job_id: jobId, body: request.body, }); return response.ok({ - body: results, + body, }); } catch (e) { return response.customError(wrapError(e)); @@ -243,14 +245,12 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canOpenJob'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { const { jobId } = request.params; - const results = await legacyClient.callAsInternalUser('ml.openJob', { - jobId, - }); + const { body } = await client.asInternalUser.ml.openJob({ job_id: jobId }); return response.ok({ - body: results, + body, }); } catch (e) { return response.customError(wrapError(e)); @@ -277,18 +277,18 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canCloseJob'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { - const options: { jobId: string; force?: boolean } = { - jobId: request.params.jobId, + const options: RequestParams.MlCloseJob = { + job_id: request.params.jobId, }; const force = request.query.force; if (force !== undefined) { options.force = force; } - const results = await legacyClient.callAsInternalUser('ml.closeJob', options); + const { body } = await client.asInternalUser.ml.closeJob(options); return response.ok({ - body: results, + body, }); } catch (e) { return response.customError(wrapError(e)); @@ -315,18 +315,19 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canDeleteJob'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { - const options: { jobId: string; force?: boolean } = { - jobId: request.params.jobId, + const options: RequestParams.MlDeleteJob = { + job_id: request.params.jobId, + wait_for_completion: false, }; const force = request.query.force; if (force !== undefined) { options.force = force; } - const results = await legacyClient.callAsInternalUser('ml.deleteJob', options); + const { body } = await 
client.asInternalUser.ml.deleteJob(options); return response.ok({ - body: results, + body, }); } catch (e) { return response.customError(wrapError(e)); @@ -351,13 +352,11 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canCreateJob'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { - const results = await legacyClient.callAsInternalUser('ml.validateDetector', { - body: request.body, - }); + const { body } = await client.asInternalUser.ml.validateDetector({ body: request.body }); return response.ok({ - body: results, + body, }); } catch (e) { return response.customError(wrapError(e)); @@ -386,16 +385,16 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canForecastJob'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { const jobId = request.params.jobId; const duration = request.body.duration; - const results = await legacyClient.callAsInternalUser('ml.forecast', { - jobId, + const { body } = await client.asInternalUser.ml.forecast({ + job_id: jobId, duration, }); return response.ok({ - body: results, + body, }); } catch (e) { return response.customError(wrapError(e)); @@ -427,14 +426,14 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canGetJobs'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { - const results = await legacyClient.callAsInternalUser('ml.records', { - jobId: request.params.jobId, + const { body } = await client.asInternalUser.ml.getRecords({ + job_id: request.params.jobId, body: request.body, }); return response.ok({ - body: results, + body, }); } catch (e) { return response.customError(wrapError(e)); @@ -466,15 +465,15 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canGetJobs'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { - const results = await legacyClient.callAsInternalUser('ml.buckets', { - jobId: request.params.jobId, + const { body } = await client.asInternalUser.ml.getBuckets({ + job_id: request.params.jobId, timestamp: request.params.timestamp, body: request.body, }); return response.ok({ - body: results, + body, }); } catch (e) { return response.customError(wrapError(e)); @@ -506,17 +505,17 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canGetJobs'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { - const results = await legacyClient.callAsInternalUser('ml.overallBuckets', { - jobId: request.params.jobId, + const { body } = await client.asInternalUser.ml.getOverallBuckets({ + job_id: request.params.jobId, top_n: request.body.topN, bucket_span: request.body.bucketSpan, start: request.body.start, end: request.body.end, }); return response.ok({ - body: results, + body, }); } catch (e) { return response.customError(wrapError(e)); @@ -543,14 +542,14 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canGetJobs'], }, 
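The anomaly-detector hunks above all follow the same migration shape: `legacyClient.callAsInternalUser('ml.*', { jobId })` becomes a typed call on `client.asInternalUser.ml.*()` with snake_case parameters, and the handler destructures `body` from the client's full response instead of returning the raw result. A minimal sketch of that pattern, assuming the 7.x `@elastic/elasticsearch` client surfaced by core as `IScopedClusterClient`; the helper name is illustrative only, since the real handlers inline this logic:

```ts
import { RequestParams } from '@elastic/elasticsearch';
import { IScopedClusterClient } from 'kibana/server';

// Hypothetical helper mirroring the close-job route body above.
async function closeAnomalyJob(client: IScopedClusterClient, jobId: string, force?: boolean) {
  // Option objects are now typed via RequestParams.* and use snake_case keys.
  const options: RequestParams.MlCloseJob = { job_id: jobId };
  if (force !== undefined) {
    options.force = force;
  }
  // The new client resolves to an ApiResponse; the routes destructure body and return it as-is.
  const { body } = await client.asInternalUser.ml.closeJob(options);
  return body;
}
```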
}, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { - const results = await legacyClient.callAsInternalUser('ml.categories', { - jobId: request.params.jobId, - categoryId: request.params.categoryId, + const { body } = await client.asInternalUser.ml.getCategories({ + job_id: request.params.jobId, + category_id: request.params.categoryId, }); return response.ok({ - body: results, + body, }); } catch (e) { return response.customError(wrapError(e)); @@ -577,13 +576,13 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canGetJobs'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { - const results = await legacyClient.callAsInternalUser('ml.modelSnapshots', { - jobId: request.params.jobId, + const { body } = await client.asInternalUser.ml.getModelSnapshots({ + job_id: request.params.jobId, }); return response.ok({ - body: results, + body, }); } catch (e) { return response.customError(wrapError(e)); @@ -610,14 +609,14 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canGetJobs'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { - const results = await legacyClient.callAsInternalUser('ml.modelSnapshots', { - jobId: request.params.jobId, - snapshotId: request.params.snapshotId, + const { body } = await client.asInternalUser.ml.getModelSnapshots({ + job_id: request.params.jobId, + snapshot_id: request.params.snapshotId, }); return response.ok({ - body: results, + body, }); } catch (e) { return response.customError(wrapError(e)); @@ -646,15 +645,15 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canCreateJob'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { - const results = await legacyClient.callAsInternalUser('ml.updateModelSnapshot', { - jobId: request.params.jobId, - snapshotId: request.params.snapshotId, + const { body } = await client.asInternalUser.ml.updateModelSnapshot({ + job_id: request.params.jobId, + snapshot_id: request.params.snapshotId, body: request.body, }); return response.ok({ - body: results, + body, }); } catch (e) { return response.customError(wrapError(e)); @@ -681,14 +680,14 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canCreateJob'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { - const results = await legacyClient.callAsInternalUser('ml.deleteModelSnapshot', { - jobId: request.params.jobId, - snapshotId: request.params.snapshotId, + const { body } = await client.asInternalUser.ml.deleteModelSnapshot({ + job_id: request.params.jobId, + snapshot_id: request.params.snapshotId, }); return response.ok({ - body: results, + body, }); } catch (e) { return response.customError(wrapError(e)); diff --git a/x-pack/plugins/ml/server/routes/calendars.ts b/x-pack/plugins/ml/server/routes/calendars.ts index 3beb6e437b2ee..2c95ce6fb59ec 100644 --- a/x-pack/plugins/ml/server/routes/calendars.ts +++ 
b/x-pack/plugins/ml/server/routes/calendars.ts @@ -4,43 +4,39 @@ * you may not use this file except in compliance with the Elastic License. */ -import { ILegacyScopedClusterClient } from 'kibana/server'; +import { IScopedClusterClient } from 'kibana/server'; import { wrapError } from '../client/error_wrapper'; import { RouteInitialization } from '../types'; import { calendarSchema, calendarIdSchema, calendarIdsSchema } from './schemas/calendars_schema'; import { CalendarManager, Calendar, FormCalendar } from '../models/calendar'; -function getAllCalendars(legacyClient: ILegacyScopedClusterClient) { - const cal = new CalendarManager(legacyClient); +function getAllCalendars(client: IScopedClusterClient) { + const cal = new CalendarManager(client); return cal.getAllCalendars(); } -function getCalendar(legacyClient: ILegacyScopedClusterClient, calendarId: string) { - const cal = new CalendarManager(legacyClient); +function getCalendar(client: IScopedClusterClient, calendarId: string) { + const cal = new CalendarManager(client); return cal.getCalendar(calendarId); } -function newCalendar(legacyClient: ILegacyScopedClusterClient, calendar: FormCalendar) { - const cal = new CalendarManager(legacyClient); +function newCalendar(client: IScopedClusterClient, calendar: FormCalendar) { + const cal = new CalendarManager(client); return cal.newCalendar(calendar); } -function updateCalendar( - legacyClient: ILegacyScopedClusterClient, - calendarId: string, - calendar: Calendar -) { - const cal = new CalendarManager(legacyClient); +function updateCalendar(client: IScopedClusterClient, calendarId: string, calendar: Calendar) { + const cal = new CalendarManager(client); return cal.updateCalendar(calendarId, calendar); } -function deleteCalendar(legacyClient: ILegacyScopedClusterClient, calendarId: string) { - const cal = new CalendarManager(legacyClient); +function deleteCalendar(client: IScopedClusterClient, calendarId: string) { + const cal = new CalendarManager(client); return cal.deleteCalendar(calendarId); } -function getCalendarsByIds(legacyClient: ILegacyScopedClusterClient, calendarIds: string) { - const cal = new CalendarManager(legacyClient); +function getCalendarsByIds(client: IScopedClusterClient, calendarIds: string) { + const cal = new CalendarManager(client); return cal.getCalendarsByIds(calendarIds); } @@ -60,9 +56,9 @@ export function calendars({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canGetCalendars'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, response }) => { try { - const resp = await getAllCalendars(legacyClient); + const resp = await getAllCalendars(client); return response.ok({ body: resp, @@ -92,15 +88,15 @@ export function calendars({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canGetCalendars'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { let returnValue; try { const calendarIds = request.params.calendarIds.split(','); if (calendarIds.length === 1) { - returnValue = await getCalendar(legacyClient, calendarIds[0]); + returnValue = await getCalendar(client, calendarIds[0]); } else { - returnValue = await getCalendarsByIds(legacyClient, calendarIds); + returnValue = await getCalendarsByIds(client, calendarIds); } return response.ok({ @@ -131,10 +127,10 @@ export function calendars({ router, mlLicense }: RouteInitialization) { tags: 
['access:ml:canCreateCalendar'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { const body = request.body; - const resp = await newCalendar(legacyClient, body); + const resp = await newCalendar(client, body); return response.ok({ body: resp, @@ -166,11 +162,11 @@ export function calendars({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canCreateCalendar'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { const { calendarId } = request.params; const body = request.body; - const resp = await updateCalendar(legacyClient, calendarId, body); + const resp = await updateCalendar(client, calendarId, body); return response.ok({ body: resp, @@ -200,10 +196,10 @@ export function calendars({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canDeleteCalendar'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { const { calendarId } = request.params; - const resp = await deleteCalendar(legacyClient, calendarId); + const resp = await deleteCalendar(client, calendarId); return response.ok({ body: resp, diff --git a/x-pack/plugins/ml/server/routes/data_frame_analytics.ts b/x-pack/plugins/ml/server/routes/data_frame_analytics.ts index 75d48056cf458..dea4803e8275e 100644 --- a/x-pack/plugins/ml/server/routes/data_frame_analytics.ts +++ b/x-pack/plugins/ml/server/routes/data_frame_analytics.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -import { RequestHandlerContext, ILegacyScopedClusterClient } from 'kibana/server'; +import { RequestHandlerContext, IScopedClusterClient } from 'kibana/server'; import { wrapError } from '../client/error_wrapper'; import { analyticsAuditMessagesProvider } from '../models/data_frame_analytics/analytics_audit_messages'; import { RouteInitialization } from '../types'; @@ -36,13 +36,14 @@ function deleteDestIndexPatternById(context: RequestHandlerContext, indexPattern */ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitialization) { async function userCanDeleteIndex( - legacyClient: ILegacyScopedClusterClient, + client: IScopedClusterClient, destinationIndex: string ): Promise { if (!mlLicense.isSecurityEnabled()) { return true; } - const privilege = await legacyClient.callAsCurrentUser('ml.privilegeCheck', { + + const { body } = await client.asCurrentUser.security.hasPrivileges({ body: { index: [ { @@ -52,10 +53,8 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat ], }, }); - if (!privilege) { - return false; - } - return privilege.has_all_requested === true; + + return body?.has_all_requested === true; } /** @@ -76,11 +75,11 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat tags: ['access:ml:canGetDataFrameAnalytics'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, response }) => { try { - const results = await legacyClient.callAsInternalUser('ml.getDataFrameAnalytics'); + const { body } = await client.asInternalUser.ml.getDataFrameAnalytics({ size: 1000 }); return response.ok({ - body: results, + body, }); } catch (e) { return response.customError(wrapError(e)); @@ -107,14 +106,14 
@@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat tags: ['access:ml:canGetDataFrameAnalytics'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { const { analyticsId } = request.params; - const results = await legacyClient.callAsInternalUser('ml.getDataFrameAnalytics', { - analyticsId, + const { body } = await client.asInternalUser.ml.getDataFrameAnalytics({ + id: analyticsId, }); return response.ok({ - body: results, + body, }); } catch (e) { return response.customError(wrapError(e)); @@ -137,11 +136,11 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat tags: ['access:ml:canGetDataFrameAnalytics'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, response }) => { try { - const results = await legacyClient.callAsInternalUser('ml.getDataFrameAnalyticsStats'); + const { body } = await client.asInternalUser.ml.getDataFrameAnalyticsStats({ size: 1000 }); return response.ok({ - body: results, + body, }); } catch (e) { return response.customError(wrapError(e)); @@ -168,14 +167,14 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat tags: ['access:ml:canGetDataFrameAnalytics'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { const { analyticsId } = request.params; - const results = await legacyClient.callAsInternalUser('ml.getDataFrameAnalyticsStats', { - analyticsId, + const { body } = await client.asInternalUser.ml.getDataFrameAnalyticsStats({ + id: analyticsId, }); return response.ok({ - body: results, + body, }); } catch (e) { return response.customError(wrapError(e)); @@ -205,16 +204,18 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat tags: ['access:ml:canCreateDataFrameAnalytics'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { const { analyticsId } = request.params; - const results = await legacyClient.callAsInternalUser('ml.createDataFrameAnalytics', { - body: request.body, - analyticsId, - ...getAuthorizationHeader(request), - }); + const { body } = await client.asInternalUser.ml.putDataFrameAnalytics( + { + id: analyticsId, + body: request.body, + }, + getAuthorizationHeader(request) + ); return response.ok({ - body: results, + body, }); } catch (e) { return response.customError(wrapError(e)); @@ -241,14 +242,16 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat tags: ['access:ml:canGetDataFrameAnalytics'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { - const results = await legacyClient.callAsInternalUser('ml.evaluateDataFrameAnalytics', { - body: request.body, - ...getAuthorizationHeader(request), - }); + const { body } = await client.asInternalUser.ml.evaluateDataFrame( + { + body: request.body, + }, + getAuthorizationHeader(request) + ); return response.ok({ - body: results, + body, }); } catch (e) { return response.customError(wrapError(e)); @@ -276,13 +279,13 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat tags: 
['access:ml:canCreateDataFrameAnalytics'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { - const results = await legacyClient.callAsInternalUser('ml.explainDataFrameAnalytics', { + const { body } = await client.asInternalUser.ml.explainDataFrameAnalytics({ body: request.body, }); return response.ok({ - body: results, + body, }); } catch (e) { return response.customError(wrapError(e)); @@ -310,7 +313,7 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat tags: ['access:ml:canDeleteDataFrameAnalytics'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response, context }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response, context }) => { try { const { analyticsId } = request.params; const { deleteDestIndex, deleteDestIndexPattern } = request.query; @@ -324,11 +327,11 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat // Check if analyticsId is valid and get destination index if (deleteDestIndex || deleteDestIndexPattern) { try { - const dfa = await legacyClient.callAsInternalUser('ml.getDataFrameAnalytics', { - analyticsId, + const { body } = await client.asInternalUser.ml.getDataFrameAnalytics({ + id: analyticsId, }); - if (Array.isArray(dfa.data_frame_analytics) && dfa.data_frame_analytics.length > 0) { - destinationIndex = dfa.data_frame_analytics[0].dest.index; + if (Array.isArray(body.data_frame_analytics) && body.data_frame_analytics.length > 0) { + destinationIndex = body.data_frame_analytics[0].dest.index; } } catch (e) { return response.customError(wrapError(e)); @@ -337,11 +340,11 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat // If user checks box to delete the destinationIndex associated with the job if (destinationIndex && deleteDestIndex) { // Verify if user has privilege to delete the destination index - const userCanDeleteDestIndex = await userCanDeleteIndex(legacyClient, destinationIndex); + const userCanDeleteDestIndex = await userCanDeleteIndex(client, destinationIndex); // If user does have privilege to delete the index, then delete the index if (userCanDeleteDestIndex) { try { - await legacyClient.callAsCurrentUser('indices.delete', { + await client.asCurrentUser.indices.delete({ index: destinationIndex, }); destIndexDeleted.success = true; @@ -370,8 +373,8 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat // Delete the data frame analytics try { - await legacyClient.callAsInternalUser('ml.deleteDataFrameAnalytics', { - analyticsId, + await client.asInternalUser.ml.deleteDataFrameAnalytics({ + id: analyticsId, }); analyticsJobDeleted.success = true; } catch (deleteDFAError) { @@ -413,14 +416,14 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat tags: ['access:ml:canStartStopDataFrameAnalytics'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { const { analyticsId } = request.params; - const results = await legacyClient.callAsInternalUser('ml.startDataFrameAnalytics', { - analyticsId, + const { body } = await client.asInternalUser.ml.startDataFrameAnalytics({ + id: analyticsId, }); return response.ok({ - body: results, + body, }); } catch (e) { return response.customError(wrapError(e)); @@ -449,10 +452,10 @@ export function 
dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat tags: ['access:ml:canStartStopDataFrameAnalytics'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { - const options: { analyticsId: string; force?: boolean | undefined } = { - analyticsId: request.params.analyticsId, + const options: { id: string; force?: boolean | undefined } = { + id: request.params.analyticsId, }; // @ts-expect-error TODO: update types if (request.url?.query?.force !== undefined) { @@ -460,9 +463,9 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat options.force = request.url.query.force; } - const results = await legacyClient.callAsInternalUser('ml.stopDataFrameAnalytics', options); + const { body } = await client.asInternalUser.ml.stopDataFrameAnalytics(options); return response.ok({ - body: results, + body, }); } catch (e) { return response.customError(wrapError(e)); @@ -490,16 +493,18 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat tags: ['access:ml:canCreateDataFrameAnalytics'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { const { analyticsId } = request.params; - const results = await legacyClient.callAsInternalUser('ml.updateDataFrameAnalytics', { - body: request.body, - analyticsId, - ...getAuthorizationHeader(request), - }); + const { body } = await client.asInternalUser.ml.updateDataFrameAnalytics( + { + id: analyticsId, + body: request.body, + }, + getAuthorizationHeader(request) + ); return response.ok({ - body: results, + body, }); } catch (e) { return response.customError(wrapError(e)); @@ -526,10 +531,10 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat tags: ['access:ml:canGetDataFrameAnalytics'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { const { analyticsId } = request.params; - const { getAnalyticsAuditMessages } = analyticsAuditMessagesProvider(legacyClient); + const { getAnalyticsAuditMessages } = analyticsAuditMessagesProvider(client); const results = await getAnalyticsAuditMessages(analyticsId); return response.ok({ diff --git a/x-pack/plugins/ml/server/routes/data_visualizer.ts b/x-pack/plugins/ml/server/routes/data_visualizer.ts index 6355285127f06..a697fe017f192 100644 --- a/x-pack/plugins/ml/server/routes/data_visualizer.ts +++ b/x-pack/plugins/ml/server/routes/data_visualizer.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
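The data frame analytics delete flow above also illustrates how the scoped client splits user contexts: ML APIs keep running as the internal user, while the destination-index privilege check and deletion run as the requesting user via `asCurrentUser`. A trimmed, hedged sketch of that split (helper name and error handling are illustrative, not the actual route code):

```ts
import { IScopedClusterClient } from 'kibana/server';

// Hypothetical helper condensing the privilege check + index delete seen in the hunks above.
async function deleteDestIndexIfAllowed(client: IScopedClusterClient, destinationIndex: string) {
  // Ask Elasticsearch whether the current user may delete the destination index.
  const { body } = await client.asCurrentUser.security.hasPrivileges({
    body: {
      index: [{ names: [destinationIndex], privileges: ['delete_index'] }],
    },
  });

  if (body?.has_all_requested !== true) {
    return false;
  }

  // Delete as the current user so the action is authorized against their own privileges.
  await client.asCurrentUser.indices.delete({ index: destinationIndex });
  return true;
}
```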
*/ -import { ILegacyScopedClusterClient } from 'kibana/server'; +import { IScopedClusterClient } from 'kibana/server'; import { wrapError } from '../client/error_wrapper'; import { DataVisualizer } from '../models/data_visualizer'; import { Field, HistogramField } from '../models/data_visualizer/data_visualizer'; @@ -17,7 +17,7 @@ import { import { RouteInitialization } from '../types'; function getOverallStats( - legacyClient: ILegacyScopedClusterClient, + client: IScopedClusterClient, indexPatternTitle: string, query: object, aggregatableFields: string[], @@ -27,7 +27,7 @@ function getOverallStats( earliestMs: number, latestMs: number ) { - const dv = new DataVisualizer(legacyClient); + const dv = new DataVisualizer(client); return dv.getOverallStats( indexPatternTitle, query, @@ -41,7 +41,7 @@ function getOverallStats( } function getStatsForFields( - legacyClient: ILegacyScopedClusterClient, + client: IScopedClusterClient, indexPatternTitle: string, query: any, fields: Field[], @@ -52,7 +52,7 @@ function getStatsForFields( interval: number, maxExamples: number ) { - const dv = new DataVisualizer(legacyClient); + const dv = new DataVisualizer(client); return dv.getStatsForFields( indexPatternTitle, query, @@ -67,13 +67,13 @@ function getStatsForFields( } function getHistogramsForFields( - legacyClient: ILegacyScopedClusterClient, + client: IScopedClusterClient, indexPatternTitle: string, query: any, fields: HistogramField[], samplerShardSize: number ) { - const dv = new DataVisualizer(legacyClient); + const dv = new DataVisualizer(client); return dv.getHistogramsForFields(indexPatternTitle, query, fields, samplerShardSize); } @@ -104,7 +104,7 @@ export function dataVisualizerRoutes({ router, mlLicense }: RouteInitialization) tags: ['access:ml:canAccessML'], }, }, - mlLicense.basicLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.basicLicenseAPIGuard(async ({ client, request, response }) => { try { const { params: { indexPatternTitle }, @@ -112,7 +112,7 @@ export function dataVisualizerRoutes({ router, mlLicense }: RouteInitialization) } = request; const results = await getHistogramsForFields( - legacyClient, + client, indexPatternTitle, query, fields, @@ -151,7 +151,7 @@ export function dataVisualizerRoutes({ router, mlLicense }: RouteInitialization) tags: ['access:ml:canAccessML'], }, }, - mlLicense.basicLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.basicLicenseAPIGuard(async ({ client, request, response }) => { try { const { params: { indexPatternTitle }, @@ -168,7 +168,7 @@ export function dataVisualizerRoutes({ router, mlLicense }: RouteInitialization) } = request; const results = await getStatsForFields( - legacyClient, + client, indexPatternTitle, query, fields, @@ -216,7 +216,7 @@ export function dataVisualizerRoutes({ router, mlLicense }: RouteInitialization) tags: ['access:ml:canAccessML'], }, }, - mlLicense.basicLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.basicLicenseAPIGuard(async ({ client, request, response }) => { try { const { params: { indexPatternTitle }, @@ -232,7 +232,7 @@ export function dataVisualizerRoutes({ router, mlLicense }: RouteInitialization) } = request; const results = await getOverallStats( - legacyClient, + client, indexPatternTitle, query, aggregatableFields, diff --git a/x-pack/plugins/ml/server/routes/datafeeds.ts b/x-pack/plugins/ml/server/routes/datafeeds.ts index 47a9afc2244d9..df2aa9e79d71b 100644 --- a/x-pack/plugins/ml/server/routes/datafeeds.ts +++ 
b/x-pack/plugins/ml/server/routes/datafeeds.ts @@ -4,6 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ +import { RequestParams } from '@elastic/elasticsearch'; import { wrapError } from '../client/error_wrapper'; import { RouteInitialization } from '../types'; import { @@ -33,12 +34,12 @@ export function dataFeedRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canGetDatafeeds'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, response }) => { try { - const resp = await legacyClient.callAsInternalUser('ml.datafeeds'); + const { body } = await client.asInternalUser.ml.getDatafeeds(); return response.ok({ - body: resp, + body, }); } catch (e) { return response.customError(wrapError(e)); @@ -65,13 +66,13 @@ export function dataFeedRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canGetDatafeeds'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { const datafeedId = request.params.datafeedId; - const resp = await legacyClient.callAsInternalUser('ml.datafeeds', { datafeedId }); + const { body } = await client.asInternalUser.ml.getDatafeeds({ datafeed_id: datafeedId }); return response.ok({ - body: resp, + body, }); } catch (e) { return response.customError(wrapError(e)); @@ -94,12 +95,12 @@ export function dataFeedRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canGetDatafeeds'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { - const resp = await legacyClient.callAsInternalUser('ml.datafeedStats'); + const { body } = await client.asInternalUser.ml.getDatafeedStats(); return response.ok({ - body: resp, + body, }); } catch (e) { return response.customError(wrapError(e)); @@ -126,15 +127,15 @@ export function dataFeedRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canGetDatafeeds'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { const datafeedId = request.params.datafeedId; - const resp = await legacyClient.callAsInternalUser('ml.datafeedStats', { - datafeedId, + const { body } = await client.asInternalUser.ml.getDatafeedStats({ + datafeed_id: datafeedId, }); return response.ok({ - body: resp, + body, }); } catch (e) { return response.customError(wrapError(e)); @@ -163,17 +164,19 @@ export function dataFeedRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canCreateDatafeed'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { const datafeedId = request.params.datafeedId; - const resp = await legacyClient.callAsInternalUser('ml.addDatafeed', { - datafeedId, - body: request.body, - ...getAuthorizationHeader(request), - }); + const { body } = await client.asInternalUser.ml.putDatafeed( + { + datafeed_id: datafeedId, + body: request.body, + }, + getAuthorizationHeader(request) + ); return response.ok({ - body: resp, + body, }); } catch (e) { return response.customError(wrapError(e)); @@ -202,17 +205,19 @@ export function dataFeedRoutes({ router, mlLicense }: RouteInitialization) { tags: 
['access:ml:canUpdateDatafeed'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { const datafeedId = request.params.datafeedId; - const resp = await legacyClient.callAsInternalUser('ml.updateDatafeed', { - datafeedId, - body: request.body, - ...getAuthorizationHeader(request), - }); + const { body } = await client.asInternalUser.ml.updateDatafeed( + { + datafeed_id: datafeedId, + body: request.body, + }, + getAuthorizationHeader(request) + ); return response.ok({ - body: resp, + body, }); } catch (e) { return response.customError(wrapError(e)); @@ -241,20 +246,20 @@ export function dataFeedRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canDeleteDatafeed'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { - const options: { datafeedId: string; force?: boolean } = { - datafeedId: request.params.jobId, + const options: RequestParams.MlDeleteDatafeed = { + datafeed_id: request.params.jobId, }; const force = request.query.force; if (force !== undefined) { options.force = force; } - const resp = await legacyClient.callAsInternalUser('ml.deleteDatafeed', options); + const { body } = await client.asInternalUser.ml.deleteDatafeed(options); return response.ok({ - body: resp, + body, }); } catch (e) { return response.customError(wrapError(e)); @@ -283,19 +288,19 @@ export function dataFeedRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canStartStopDatafeed'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { const datafeedId = request.params.datafeedId; const { start, end } = request.body; - const resp = await legacyClient.callAsInternalUser('ml.startDatafeed', { - datafeedId, + const { body } = await client.asInternalUser.ml.startDatafeed({ + datafeed_id: datafeedId, start, end, }); return response.ok({ - body: resp, + body, }); } catch (e) { return response.customError(wrapError(e)); @@ -322,16 +327,16 @@ export function dataFeedRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canStartStopDatafeed'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { const datafeedId = request.params.datafeedId; - const resp = await legacyClient.callAsInternalUser('ml.stopDatafeed', { - datafeedId, + const { body } = await client.asInternalUser.ml.stopDatafeed({ + datafeed_id: datafeedId, }); return response.ok({ - body: resp, + body, }); } catch (e) { return response.customError(wrapError(e)); @@ -358,16 +363,18 @@ export function dataFeedRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canPreviewDatafeed'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { const datafeedId = request.params.datafeedId; - const resp = await legacyClient.callAsInternalUser('ml.datafeedPreview', { - datafeedId, - ...getAuthorizationHeader(request), - }); + const { body } = await client.asInternalUser.ml.previewDatafeed( + { + datafeed_id: datafeedId, + }, + getAuthorizationHeader(request) + ); return response.ok({ - body: resp, + body, }); } catch (e) { return 
response.customError(wrapError(e)); diff --git a/x-pack/plugins/ml/server/routes/fields_service.ts b/x-pack/plugins/ml/server/routes/fields_service.ts index 0595b31d5bbbc..e1bd8a5736d82 100644 --- a/x-pack/plugins/ml/server/routes/fields_service.ts +++ b/x-pack/plugins/ml/server/routes/fields_service.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -import { ILegacyScopedClusterClient } from 'kibana/server'; +import { IScopedClusterClient } from 'kibana/server'; import { wrapError } from '../client/error_wrapper'; import { RouteInitialization } from '../types'; import { @@ -13,14 +13,14 @@ import { } from './schemas/fields_service_schema'; import { fieldsServiceProvider } from '../models/fields_service'; -function getCardinalityOfFields(legacyClient: ILegacyScopedClusterClient, payload: any) { - const fs = fieldsServiceProvider(legacyClient); +function getCardinalityOfFields(client: IScopedClusterClient, payload: any) { + const fs = fieldsServiceProvider(client); const { index, fieldNames, query, timeFieldName, earliestMs, latestMs } = payload; return fs.getCardinalityOfFields(index, fieldNames, query, timeFieldName, earliestMs, latestMs); } -function getTimeFieldRange(legacyClient: ILegacyScopedClusterClient, payload: any) { - const fs = fieldsServiceProvider(legacyClient); +function getTimeFieldRange(client: IScopedClusterClient, payload: any) { + const fs = fieldsServiceProvider(client); const { index, timeFieldName, query } = payload; return fs.getTimeFieldRange(index, timeFieldName, query); } @@ -50,9 +50,9 @@ export function fieldsService({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canAccessML'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { - const resp = await getCardinalityOfFields(legacyClient, request.body); + const resp = await getCardinalityOfFields(client, request.body); return response.ok({ body: resp, @@ -85,9 +85,9 @@ export function fieldsService({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canAccessML'], }, }, - mlLicense.basicLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.basicLicenseAPIGuard(async ({ client, request, response }) => { try { - const resp = await getTimeFieldRange(legacyClient, request.body); + const resp = await getTimeFieldRange(client, request.body); return response.ok({ body: resp, diff --git a/x-pack/plugins/ml/server/routes/file_data_visualizer.ts b/x-pack/plugins/ml/server/routes/file_data_visualizer.ts index 88949fecbc7df..4c1ee87e96fc5 100644 --- a/x-pack/plugins/ml/server/routes/file_data_visualizer.ts +++ b/x-pack/plugins/ml/server/routes/file_data_visualizer.ts @@ -5,7 +5,7 @@ */ import { schema } from '@kbn/config-schema'; -import { ILegacyScopedClusterClient } from 'kibana/server'; +import { IScopedClusterClient } from 'kibana/server'; import { MAX_FILE_SIZE_BYTES } from '../../common/constants/file_datavisualizer'; import { InputOverrides, @@ -28,17 +28,13 @@ import { importFileQuerySchema, } from './schemas/file_data_visualizer_schema'; -function analyzeFiles( - legacyClient: ILegacyScopedClusterClient, - data: InputData, - overrides: InputOverrides -) { - const { analyzeFile } = fileDataVisualizerProvider(legacyClient); +function analyzeFiles(client: IScopedClusterClient, data: InputData, overrides: InputOverrides) { + const { analyzeFile } = fileDataVisualizerProvider(client); return analyzeFile(data, 
overrides); } function importData( - legacyClient: ILegacyScopedClusterClient, + client: IScopedClusterClient, id: string, index: string, settings: Settings, @@ -46,7 +42,7 @@ function importData( ingestPipeline: IngestPipelineWrapper, data: InputData ) { - const { importData: importDataFunc } = importDataProvider(legacyClient); + const { importData: importDataFunc } = importDataProvider(client); return importDataFunc(id, index, settings, mappings, ingestPipeline, data); } @@ -78,9 +74,9 @@ export function fileDataVisualizerRoutes({ router, mlLicense }: RouteInitializat tags: ['access:ml:canFindFileStructure'], }, }, - mlLicense.basicLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.basicLicenseAPIGuard(async ({ client, request, response }) => { try { - const result = await analyzeFiles(legacyClient, request.body, request.query); + const result = await analyzeFiles(client, request.body, request.query); return response.ok({ body: result }); } catch (e) { return response.customError(wrapError(e)); @@ -113,7 +109,7 @@ export function fileDataVisualizerRoutes({ router, mlLicense }: RouteInitializat tags: ['access:ml:canFindFileStructure'], }, }, - mlLicense.basicLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.basicLicenseAPIGuard(async ({ client, request, response }) => { try { const { id } = request.query; const { index, data, settings, mappings, ingestPipeline } = request.body; @@ -126,7 +122,7 @@ export function fileDataVisualizerRoutes({ router, mlLicense }: RouteInitializat } const result = await importData( - legacyClient, + client, id, index, settings, diff --git a/x-pack/plugins/ml/server/routes/filters.ts b/x-pack/plugins/ml/server/routes/filters.ts index bb4f8a2bebaa9..efb4acfa432f9 100644 --- a/x-pack/plugins/ml/server/routes/filters.ts +++ b/x-pack/plugins/ml/server/routes/filters.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -import { ILegacyScopedClusterClient } from 'kibana/server'; +import { IScopedClusterClient } from 'kibana/server'; import { wrapError } from '../client/error_wrapper'; import { RouteInitialization } from '../types'; import { createFilterSchema, filterIdSchema, updateFilterSchema } from './schemas/filters_schema'; @@ -12,37 +12,33 @@ import { FilterManager, FormFilter } from '../models/filter'; // TODO - add function for returning a list of just the filter IDs. // TODO - add function for returning a list of filter IDs plus item count. 
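In the datafeed and data frame analytics routes above, the secondary-authorization header is no longer spread into the request parameters; `getAuthorizationHeader(request)` is now passed as the transport options in the new client's second argument, where its headers are merged into the outgoing Elasticsearch request. A sketch of that call shape, assuming the import path for `getAuthorizationHeader` (not shown in these hunks) and a hypothetical helper name:

```ts
import { IScopedClusterClient, KibanaRequest } from 'kibana/server';
// Path assumed for illustration; the routes above import this helper but the import is not shown here.
import { getAuthorizationHeader } from '../lib/request_authorization';

async function createDatafeed(
  client: IScopedClusterClient,
  request: KibanaRequest,
  datafeedId: string,
  config: Record<string, unknown>
) {
  const { body } = await client.asInternalUser.ml.putDatafeed(
    { datafeed_id: datafeedId, body: config },
    // Transport options: these headers ride along with this single Elasticsearch request.
    getAuthorizationHeader(request)
  );
  return body;
}
```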
-function getAllFilters(legacyClient: ILegacyScopedClusterClient) { - const mgr = new FilterManager(legacyClient); +function getAllFilters(client: IScopedClusterClient) { + const mgr = new FilterManager(client); return mgr.getAllFilters(); } -function getAllFilterStats(legacyClient: ILegacyScopedClusterClient) { - const mgr = new FilterManager(legacyClient); +function getAllFilterStats(client: IScopedClusterClient) { + const mgr = new FilterManager(client); return mgr.getAllFilterStats(); } -function getFilter(legacyClient: ILegacyScopedClusterClient, filterId: string) { - const mgr = new FilterManager(legacyClient); +function getFilter(client: IScopedClusterClient, filterId: string) { + const mgr = new FilterManager(client); return mgr.getFilter(filterId); } -function newFilter(legacyClient: ILegacyScopedClusterClient, filter: FormFilter) { - const mgr = new FilterManager(legacyClient); +function newFilter(client: IScopedClusterClient, filter: FormFilter) { + const mgr = new FilterManager(client); return mgr.newFilter(filter); } -function updateFilter( - legacyClient: ILegacyScopedClusterClient, - filterId: string, - filter: FormFilter -) { - const mgr = new FilterManager(legacyClient); +function updateFilter(client: IScopedClusterClient, filterId: string, filter: FormFilter) { + const mgr = new FilterManager(client); return mgr.updateFilter(filterId, filter); } -function deleteFilter(legacyClient: ILegacyScopedClusterClient, filterId: string) { - const mgr = new FilterManager(legacyClient); +function deleteFilter(client: IScopedClusterClient, filterId: string) { + const mgr = new FilterManager(client); return mgr.deleteFilter(filterId); } @@ -65,9 +61,9 @@ export function filtersRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canGetFilters'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, response }) => { try { - const resp = await getAllFilters(legacyClient); + const resp = await getAllFilters(client); return response.ok({ body: resp, @@ -100,9 +96,9 @@ export function filtersRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canGetFilters'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { - const resp = await getFilter(legacyClient, request.params.filterId); + const resp = await getFilter(client, request.params.filterId); return response.ok({ body: resp, }); @@ -134,10 +130,10 @@ export function filtersRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canCreateFilter'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { const body = request.body; - const resp = await newFilter(legacyClient, body); + const resp = await newFilter(client, body); return response.ok({ body: resp, @@ -172,11 +168,11 @@ export function filtersRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canCreateFilter'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { const { filterId } = request.params; const body = request.body; - const resp = await updateFilter(legacyClient, filterId, body); + const resp = await updateFilter(client, filterId, body); return response.ok({ body: resp, @@ -206,10 +202,10 @@ 
export function filtersRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canDeleteFilter'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { const { filterId } = request.params; - const resp = await deleteFilter(legacyClient, filterId); + const resp = await deleteFilter(client, filterId); return response.ok({ body: resp, @@ -239,9 +235,9 @@ export function filtersRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canGetFilters'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, response }) => { try { - const resp = await getAllFilterStats(legacyClient); + const resp = await getAllFilterStats(client); return response.ok({ body: resp, diff --git a/x-pack/plugins/ml/server/routes/indices.ts b/x-pack/plugins/ml/server/routes/indices.ts index 6a759cb97f308..ee817c492dbd4 100644 --- a/x-pack/plugins/ml/server/routes/indices.ts +++ b/x-pack/plugins/ml/server/routes/indices.ts @@ -31,7 +31,7 @@ export function indicesRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canAccessML'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { const { body: { index, fields: requestFields }, @@ -40,8 +40,8 @@ export function indicesRoutes({ router, mlLicense }: RouteInitialization) { requestFields !== undefined && Array.isArray(requestFields) ? requestFields.join(',') : '*'; - const result = await legacyClient.callAsCurrentUser('fieldCaps', { index, fields }); - return response.ok({ body: result }); + const { body } = await client.asInternalUser.fieldCaps({ index, fields }); + return response.ok({ body }); } catch (e) { return response.customError(wrapError(e)); } diff --git a/x-pack/plugins/ml/server/routes/job_audit_messages.ts b/x-pack/plugins/ml/server/routes/job_audit_messages.ts index 2313decfabd5b..0c90081f8e755 100644 --- a/x-pack/plugins/ml/server/routes/job_audit_messages.ts +++ b/x-pack/plugins/ml/server/routes/job_audit_messages.ts @@ -37,9 +37,9 @@ export function jobAuditMessagesRoutes({ router, mlLicense }: RouteInitializatio tags: ['access:ml:canGetJobs'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { - const { getJobAuditMessages } = jobAuditMessagesProvider(legacyClient); + const { getJobAuditMessages } = jobAuditMessagesProvider(client); const { jobId } = request.params; const { from } = request.query; const resp = await getJobAuditMessages(jobId, from); @@ -72,9 +72,9 @@ export function jobAuditMessagesRoutes({ router, mlLicense }: RouteInitializatio tags: ['access:ml:canGetJobs'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { - const { getJobAuditMessages } = jobAuditMessagesProvider(legacyClient); + const { getJobAuditMessages } = jobAuditMessagesProvider(client); const { from } = request.query; const resp = await getJobAuditMessages(undefined, from); diff --git a/x-pack/plugins/ml/server/routes/job_service.ts b/x-pack/plugins/ml/server/routes/job_service.ts index 3d560fc857e95..3c7f35b871b10 100644 --- a/x-pack/plugins/ml/server/routes/job_service.ts +++ 
b/x-pack/plugins/ml/server/routes/job_service.ts @@ -48,9 +48,9 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canStartStopDatafeed'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { - const { forceStartDatafeeds } = jobServiceProvider(legacyClient); + const { forceStartDatafeeds } = jobServiceProvider(client); const { datafeedIds, start, end } = request.body; const resp = await forceStartDatafeeds(datafeedIds, start, end); @@ -82,9 +82,9 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canStartStopDatafeed'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { - const { stopDatafeeds } = jobServiceProvider(legacyClient); + const { stopDatafeeds } = jobServiceProvider(client); const { datafeedIds } = request.body; const resp = await stopDatafeeds(datafeedIds); @@ -116,9 +116,9 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canDeleteJob'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { - const { deleteJobs } = jobServiceProvider(legacyClient); + const { deleteJobs } = jobServiceProvider(client); const { jobIds } = request.body; const resp = await deleteJobs(jobIds); @@ -150,9 +150,9 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canCloseJob'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { - const { closeJobs } = jobServiceProvider(legacyClient); + const { closeJobs } = jobServiceProvider(client); const { jobIds } = request.body; const resp = await closeJobs(jobIds); @@ -184,9 +184,9 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canCloseJob', 'access:ml:canStartStopDatafeed'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { - const { forceStopAndCloseJob } = jobServiceProvider(legacyClient); + const { forceStopAndCloseJob } = jobServiceProvider(client); const { jobId } = request.body; const resp = await forceStopAndCloseJob(jobId); @@ -223,9 +223,9 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canGetJobs'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { - const { jobsSummary } = jobServiceProvider(legacyClient); + const { jobsSummary } = jobServiceProvider(client); const { jobIds } = request.body; const resp = await jobsSummary(jobIds); @@ -257,9 +257,9 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canGetJobs'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, response }) => { try { - const { jobsWithTimerange } = jobServiceProvider(legacyClient); + const { jobsWithTimerange } = jobServiceProvider(client); const resp = await 
jobsWithTimerange(); return response.ok({ @@ -290,9 +290,9 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canGetJobs'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { - const { createFullJobsList } = jobServiceProvider(legacyClient); + const { createFullJobsList } = jobServiceProvider(client); const { jobIds } = request.body; const resp = await createFullJobsList(jobIds); @@ -320,9 +320,9 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canGetJobs'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, response }) => { try { - const { getAllGroups } = jobServiceProvider(legacyClient); + const { getAllGroups } = jobServiceProvider(client); const resp = await getAllGroups(); return response.ok({ @@ -353,9 +353,9 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canUpdateJob'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { - const { updateGroups } = jobServiceProvider(legacyClient); + const { updateGroups } = jobServiceProvider(client); const { jobs } = request.body; const resp = await updateGroups(jobs); @@ -383,9 +383,9 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canGetJobs'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, response }) => { try { - const { deletingJobTasks } = jobServiceProvider(legacyClient); + const { deletingJobTasks } = jobServiceProvider(client); const resp = await deletingJobTasks(); return response.ok({ @@ -416,9 +416,9 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canGetJobs'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { - const { jobsExist } = jobServiceProvider(legacyClient); + const { jobsExist } = jobServiceProvider(client); const { jobIds } = request.body; const resp = await jobsExist(jobIds); @@ -449,12 +449,12 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canGetJobs'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response, context }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response, context }) => { try { const { indexPattern } = request.params; const isRollup = request.query.rollup === 'true'; const savedObjectsClient = context.core.savedObjects.client; - const { newJobCaps } = jobServiceProvider(legacyClient); + const { newJobCaps } = jobServiceProvider(client); const resp = await newJobCaps(indexPattern, isRollup, savedObjectsClient); return response.ok({ @@ -485,7 +485,7 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canGetJobs'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { const { indexPatternTitle, @@ -499,7 +499,7 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) { splitFieldValue, 
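The job service, filter, and audit-message routes above all hand the whole `IScopedClusterClient` to a provider or manager, which then picks the user context per call. A hypothetical, trimmed-down provider shape to show the idea; it is not the real `jobServiceProvider`, which lives under `../models` and exposes a much larger API:

```ts
import { IScopedClusterClient } from 'kibana/server';

export function exampleJobServiceProvider(client: IScopedClusterClient) {
  return {
    // ML APIs keep running as the internal user, mirroring the former callAsInternalUser calls.
    async jobsExist(jobIds: string[]) {
      const { body } = await client.asInternalUser.ml.getJobs({
        job_id: jobIds.join(','),
        allow_no_jobs: true,
      });
      return (body.jobs ?? []).map((job: { job_id: string }) => job.job_id);
    },
  };
}
```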
} = request.body; - const { newJobLineChart } = jobServiceProvider(legacyClient); + const { newJobLineChart } = jobServiceProvider(client); const resp = await newJobLineChart( indexPatternTitle, timeField, @@ -540,7 +540,7 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canGetJobs'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { const { indexPatternTitle, @@ -553,7 +553,7 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) { splitFieldName, } = request.body; - const { newJobPopulationChart } = jobServiceProvider(legacyClient); + const { newJobPopulationChart } = jobServiceProvider(client); const resp = await newJobPopulationChart( indexPatternTitle, timeField, @@ -589,9 +589,9 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canGetJobs'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, response }) => { try { - const { getAllJobAndGroupIds } = jobServiceProvider(legacyClient); + const { getAllJobAndGroupIds } = jobServiceProvider(client); const resp = await getAllJobAndGroupIds(); return response.ok({ @@ -622,9 +622,9 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canCreateJob'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { - const { getLookBackProgress } = jobServiceProvider(legacyClient); + const { getLookBackProgress } = jobServiceProvider(client); const { jobId, start, end } = request.body; const resp = await getLookBackProgress(jobId, start, end); @@ -656,9 +656,9 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canCreateJob'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { - const { validateCategoryExamples } = categorizationExamplesProvider(legacyClient); + const { validateCategoryExamples } = categorizationExamplesProvider(client); const { indexPatternTitle, timeField, @@ -709,9 +709,9 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canGetJobs'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { - const { topCategories } = jobServiceProvider(legacyClient); + const { topCategories } = jobServiceProvider(client); const { jobId, count } = request.body; const resp = await topCategories(jobId, count); @@ -743,9 +743,9 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canCreateJob', 'access:ml:canStartStopDatafeed'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { - const { revertModelSnapshot } = jobServiceProvider(legacyClient); + const { revertModelSnapshot } = jobServiceProvider(client); const { jobId, snapshotId, diff --git a/x-pack/plugins/ml/server/routes/job_validation.ts b/x-pack/plugins/ml/server/routes/job_validation.ts index 6da052663a002..b52043595327b 100644 --- 
a/x-pack/plugins/ml/server/routes/job_validation.ts +++ b/x-pack/plugins/ml/server/routes/job_validation.ts @@ -5,7 +5,7 @@ */ import Boom from 'boom'; -import { ILegacyScopedClusterClient } from 'kibana/server'; +import { IScopedClusterClient } from 'kibana/server'; import { TypeOf } from '@kbn/config-schema'; import { AnalysisConfig } from '../../common/types/anomaly_detection_jobs'; import { wrapError } from '../client/error_wrapper'; @@ -27,12 +27,12 @@ type CalculateModelMemoryLimitPayload = TypeOf; */ export function jobValidationRoutes({ router, mlLicense }: RouteInitialization, version: string) { function calculateModelMemoryLimit( - legacyClient: ILegacyScopedClusterClient, + client: IScopedClusterClient, payload: CalculateModelMemoryLimitPayload ) { const { analysisConfig, indexPattern, query, timeFieldName, earliestMs, latestMs } = payload; - return calculateModelMemoryLimitProvider(legacyClient)( + return calculateModelMemoryLimitProvider(client)( analysisConfig as AnalysisConfig, indexPattern, query, @@ -61,10 +61,10 @@ export function jobValidationRoutes({ router, mlLicense }: RouteInitialization, tags: ['access:ml:canCreateJob'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { let errorResp; - const resp = await estimateBucketSpanFactory(legacyClient)(request.body) + const resp = await estimateBucketSpanFactory(client)(request.body) // this catch gets triggered when the estimation code runs without error // but isn't able to come up with a bucket span estimation. // this doesn't return a HTTP error but an object with an error message @@ -109,9 +109,9 @@ export function jobValidationRoutes({ router, mlLicense }: RouteInitialization, tags: ['access:ml:canCreateJob'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { - const resp = await calculateModelMemoryLimit(legacyClient, request.body); + const resp = await calculateModelMemoryLimit(client, request.body); return response.ok({ body: resp, @@ -141,9 +141,9 @@ export function jobValidationRoutes({ router, mlLicense }: RouteInitialization, tags: ['access:ml:canCreateJob'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { - const resp = await validateCardinality(legacyClient, request.body); + const resp = await validateCardinality(client, request.body); return response.ok({ body: resp, @@ -173,11 +173,11 @@ export function jobValidationRoutes({ router, mlLicense }: RouteInitialization, tags: ['access:ml:canCreateJob'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { // version corresponds to the version used in documentation links. 
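Every route file touched above follows the same mechanical migration: the license guard now hands the handler an IScopedClusterClient named `client` instead of `legacyClient`, and the provider factories accept that client unchanged. A minimal shape sketch of a migrated handler, reusing the patch's own guard and error helpers; `exampleRoutes`, `exampleProvider`, `exampleMethod` and the `/api/ml/example` path are placeholders, not code from this patch:

import { schema } from '@kbn/config-schema';
import { IScopedClusterClient } from 'kibana/server';
import { wrapError } from '../client/error_wrapper';
import { RouteInitialization } from '../types';

// Placeholder provider, standing in for jobServiceProvider, jobValidationProvider, etc.
declare function exampleProvider(
  client: IScopedClusterClient
): { exampleMethod(jobIds: string[]): Promise<unknown> };

export function exampleRoutes({ router, mlLicense }: RouteInitialization) {
  router.post(
    {
      path: '/api/ml/example',
      validate: { body: schema.object({ jobIds: schema.arrayOf(schema.string()) }) },
      options: { tags: ['access:ml:canGetJobs'] },
    },
    mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
      try {
        const { exampleMethod } = exampleProvider(client);
        return response.ok({ body: await exampleMethod(request.body.jobIds) });
      } catch (e) {
        return response.customError(wrapError(e));
      }
    })
  );
}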
const resp = await validateJob( - legacyClient, + client, request.body, version, mlLicense.isSecurityEnabled() === false diff --git a/x-pack/plugins/ml/server/routes/modules.ts b/x-pack/plugins/ml/server/routes/modules.ts index 23e37d2213029..72a4c5e428c2b 100644 --- a/x-pack/plugins/ml/server/routes/modules.ts +++ b/x-pack/plugins/ml/server/routes/modules.ts @@ -6,11 +6,7 @@ import { TypeOf } from '@kbn/config-schema'; -import { - ILegacyScopedClusterClient, - KibanaRequest, - SavedObjectsClientContract, -} from 'kibana/server'; +import { IScopedClusterClient, KibanaRequest, SavedObjectsClientContract } from 'kibana/server'; import { DatafeedOverride, JobOverride } from '../../common/types/modules'; import { wrapError } from '../client/error_wrapper'; import { DataRecognizer } from '../models/data_recognizer'; @@ -23,22 +19,22 @@ import { import { RouteInitialization } from '../types'; function recognize( - legacyClient: ILegacyScopedClusterClient, + client: IScopedClusterClient, savedObjectsClient: SavedObjectsClientContract, request: KibanaRequest, indexPatternTitle: string ) { - const dr = new DataRecognizer(legacyClient, savedObjectsClient, request); + const dr = new DataRecognizer(client, savedObjectsClient, request); return dr.findMatches(indexPatternTitle); } function getModule( - legacyClient: ILegacyScopedClusterClient, + client: IScopedClusterClient, savedObjectsClient: SavedObjectsClientContract, request: KibanaRequest, moduleId: string ) { - const dr = new DataRecognizer(legacyClient, savedObjectsClient, request); + const dr = new DataRecognizer(client, savedObjectsClient, request); if (moduleId === undefined) { return dr.listModules(); } else { @@ -47,7 +43,7 @@ function getModule( } function setup( - legacyClient: ILegacyScopedClusterClient, + client: IScopedClusterClient, savedObjectsClient: SavedObjectsClientContract, request: KibanaRequest, moduleId: string, @@ -63,7 +59,7 @@ function setup( datafeedOverrides?: DatafeedOverride | DatafeedOverride[], estimateModelMemory?: boolean ) { - const dr = new DataRecognizer(legacyClient, savedObjectsClient, request); + const dr = new DataRecognizer(client, savedObjectsClient, request); return dr.setup( moduleId, prefix, @@ -81,12 +77,12 @@ function setup( } function dataRecognizerJobsExist( - legacyClient: ILegacyScopedClusterClient, + client: IScopedClusterClient, savedObjectsClient: SavedObjectsClientContract, request: KibanaRequest, moduleId: string ) { - const dr = new DataRecognizer(legacyClient, savedObjectsClient, request); + const dr = new DataRecognizer(client, savedObjectsClient, request); return dr.dataRecognizerJobsExist(moduleId); } @@ -131,11 +127,11 @@ export function dataRecognizer({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canCreateJob'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response, context }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response, context }) => { try { const { indexPatternTitle } = request.params; const results = await recognize( - legacyClient, + client, context.core.savedObjects.client, request, indexPatternTitle @@ -266,7 +262,7 @@ export function dataRecognizer({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canGetJobs'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response, context }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response, context }) => { try { let { moduleId } = request.params; if (moduleId === '') { @@ -275,7 +271,7 @@ export function 
dataRecognizer({ router, mlLicense }: RouteInitialization) { moduleId = undefined; } const results = await getModule( - legacyClient, + client, context.core.savedObjects.client, request, moduleId @@ -439,7 +435,7 @@ export function dataRecognizer({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canCreateJob'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response, context }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response, context }) => { try { const { moduleId } = request.params; @@ -458,7 +454,7 @@ export function dataRecognizer({ router, mlLicense }: RouteInitialization) { } = request.body as TypeOf; const result = await setup( - legacyClient, + client, context.core.savedObjects.client, request, moduleId, @@ -544,11 +540,11 @@ export function dataRecognizer({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canGetJobs'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response, context }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response, context }) => { try { const { moduleId } = request.params; const result = await dataRecognizerJobsExist( - legacyClient, + client, context.core.savedObjects.client, request, moduleId diff --git a/x-pack/plugins/ml/server/routes/notification_settings.ts b/x-pack/plugins/ml/server/routes/notification_settings.ts index 09c145d6257a8..5bb51bf9e596c 100644 --- a/x-pack/plugins/ml/server/routes/notification_settings.ts +++ b/x-pack/plugins/ml/server/routes/notification_settings.ts @@ -26,16 +26,15 @@ export function notificationRoutes({ router, mlLicense }: RouteInitialization) { tags: ['access:ml:canAccessML'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, response }) => { try { - const params = { - includeDefaults: true, - filterPath: '**.xpack.notification', - }; - const resp = await legacyClient.callAsCurrentUser('cluster.getSettings', params); + const { body } = await client.asCurrentUser.cluster.getSettings({ + include_defaults: true, + filter_path: '**.xpack.notification', + }); return response.ok({ - body: resp, + body, }); } catch (e) { return response.customError(wrapError(e)); diff --git a/x-pack/plugins/ml/server/routes/results_service.ts b/x-pack/plugins/ml/server/routes/results_service.ts index 2af37c17f714a..4e34320d51333 100644 --- a/x-pack/plugins/ml/server/routes/results_service.ts +++ b/x-pack/plugins/ml/server/routes/results_service.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
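The notification_settings hunk above is the clearest illustration of the client swap this patch repeats everywhere: the legacy client took an untyped method name with camelCase parameters and resolved straight to the response body, while the new client exposes namespaced methods, expects Elasticsearch's snake_case parameter names, and wraps the payload in a response object. A side-by-side sketch; both client instances are assumed to come from a guarded route handler:

import { ILegacyScopedClusterClient, IScopedClusterClient } from 'kibana/server';

async function getNotificationSettings(
  legacyClient: ILegacyScopedClusterClient,
  client: IScopedClusterClient
) {
  // Before: string method name, camelCase params, resolves directly to the body.
  const before = await legacyClient.callAsCurrentUser('cluster.getSettings', {
    includeDefaults: true,
    filterPath: '**.xpack.notification',
  });

  // After: namespaced API, snake_case params, payload sits under `body`.
  const { body: after } = await client.asCurrentUser.cluster.getSettings({
    include_defaults: true,
    filter_path: '**.xpack.notification',
  });

  return { before, after };
}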
*/ -import { ILegacyScopedClusterClient } from 'kibana/server'; +import { IScopedClusterClient } from 'kibana/server'; import { schema } from '@kbn/config-schema'; import { wrapError } from '../client/error_wrapper'; import { RouteInitialization } from '../types'; @@ -23,8 +23,8 @@ import { getCategorizerStoppedPartitionsSchema, } from './schemas/results_service_schema'; -function getAnomaliesTableData(legacyClient: ILegacyScopedClusterClient, payload: any) { - const rs = resultsServiceProvider(legacyClient); +function getAnomaliesTableData(client: IScopedClusterClient, payload: any) { + const rs = resultsServiceProvider(client); const { jobIds, criteriaFields, @@ -53,39 +53,39 @@ function getAnomaliesTableData(legacyClient: ILegacyScopedClusterClient, payload ); } -function getCategoryDefinition(legacyClient: ILegacyScopedClusterClient, payload: any) { - const rs = resultsServiceProvider(legacyClient); +function getCategoryDefinition(client: IScopedClusterClient, payload: any) { + const rs = resultsServiceProvider(client); return rs.getCategoryDefinition(payload.jobId, payload.categoryId); } -function getCategoryExamples(legacyClient: ILegacyScopedClusterClient, payload: any) { - const rs = resultsServiceProvider(legacyClient); +function getCategoryExamples(client: IScopedClusterClient, payload: any) { + const rs = resultsServiceProvider(client); const { jobId, categoryIds, maxExamples } = payload; return rs.getCategoryExamples(jobId, categoryIds, maxExamples); } -function getMaxAnomalyScore(legacyClient: ILegacyScopedClusterClient, payload: any) { - const rs = resultsServiceProvider(legacyClient); +function getMaxAnomalyScore(client: IScopedClusterClient, payload: any) { + const rs = resultsServiceProvider(client); const { jobIds, earliestMs, latestMs } = payload; return rs.getMaxAnomalyScore(jobIds, earliestMs, latestMs); } -function getPartitionFieldsValues(legacyClient: ILegacyScopedClusterClient, payload: any) { - const rs = resultsServiceProvider(legacyClient); +function getPartitionFieldsValues(client: IScopedClusterClient, payload: any) { + const rs = resultsServiceProvider(client); const { jobId, searchTerm, criteriaFields, earliestMs, latestMs } = payload; return rs.getPartitionFieldsValues(jobId, searchTerm, criteriaFields, earliestMs, latestMs); } -function getCategorizerStats(legacyClient: ILegacyScopedClusterClient, params: any, query: any) { +function getCategorizerStats(client: IScopedClusterClient, params: any, query: any) { const { jobId } = params; const { partitionByValue } = query; - const rs = resultsServiceProvider(legacyClient); + const rs = resultsServiceProvider(client); return rs.getCategorizerStats(jobId, partitionByValue); } -function getCategoryStoppedPartitions(legacyClient: ILegacyScopedClusterClient, payload: any) { +function getCategoryStoppedPartitions(client: IScopedClusterClient, payload: any) { const { jobIds, fieldToBucket } = payload; - const rs = resultsServiceProvider(legacyClient); + const rs = resultsServiceProvider(client); return rs.getCategoryStoppedPartitions(jobIds, fieldToBucket); } @@ -112,9 +112,9 @@ export function resultsServiceRoutes({ router, mlLicense }: RouteInitialization) tags: ['access:ml:canGetJobs'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { - const resp = await getAnomaliesTableData(legacyClient, request.body); + const resp = await getAnomaliesTableData(client, request.body); return response.ok({ body: 
resp, @@ -144,9 +144,9 @@ export function resultsServiceRoutes({ router, mlLicense }: RouteInitialization) tags: ['access:ml:canGetJobs'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { - const resp = await getCategoryDefinition(legacyClient, request.body); + const resp = await getCategoryDefinition(client, request.body); return response.ok({ body: resp, @@ -176,9 +176,9 @@ export function resultsServiceRoutes({ router, mlLicense }: RouteInitialization) tags: ['access:ml:canGetJobs'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { - const resp = await getMaxAnomalyScore(legacyClient, request.body); + const resp = await getMaxAnomalyScore(client, request.body); return response.ok({ body: resp, @@ -208,9 +208,9 @@ export function resultsServiceRoutes({ router, mlLicense }: RouteInitialization) tags: ['access:ml:canGetJobs'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { - const resp = await getCategoryExamples(legacyClient, request.body); + const resp = await getCategoryExamples(client, request.body); return response.ok({ body: resp, @@ -240,9 +240,9 @@ export function resultsServiceRoutes({ router, mlLicense }: RouteInitialization) tags: ['access:ml:canGetJobs'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { - const resp = await getPartitionFieldsValues(legacyClient, request.body); + const resp = await getPartitionFieldsValues(client, request.body); return response.ok({ body: resp, @@ -269,14 +269,14 @@ export function resultsServiceRoutes({ router, mlLicense }: RouteInitialization) tags: ['access:ml:canGetJobs'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { - const body = { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { + const { body } = await client.asInternalUser.search({ ...request.body, index: ML_RESULTS_INDEX_PATTERN, - }; + }); try { return response.ok({ - body: await legacyClient.callAsInternalUser('search', body), + body, }); } catch (error) { return response.customError(wrapError(error)); @@ -304,9 +304,9 @@ export function resultsServiceRoutes({ router, mlLicense }: RouteInitialization) tags: ['access:ml:canGetJobs'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { - const resp = await getCategorizerStats(legacyClient, request.params, request.query); + const resp = await getCategorizerStats(client, request.params, request.query); return response.ok({ body: resp, }); @@ -334,9 +334,9 @@ export function resultsServiceRoutes({ router, mlLicense }: RouteInitialization) tags: ['access:ml:canGetJobs'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { - const resp = await getCategoryStoppedPartitions(legacyClient, request.body); + const resp = await getCategoryStoppedPartitions(client, request.body); return response.ok({ body: resp, }); diff --git a/x-pack/plugins/ml/server/routes/system.ts 
b/x-pack/plugins/ml/server/routes/system.ts index 273b86163245f..3a66f60943bb3 100644 --- a/x-pack/plugins/ml/server/routes/system.ts +++ b/x-pack/plugins/ml/server/routes/system.ts @@ -7,7 +7,7 @@ import { schema } from '@kbn/config-schema'; import { Request } from 'hapi'; -import { ILegacyScopedClusterClient } from 'kibana/server'; +import { IScopedClusterClient } from 'kibana/server'; import { wrapError } from '../client/error_wrapper'; import { mlLog } from '../client/log'; import { capabilitiesProvider } from '../lib/capabilities'; @@ -21,17 +21,16 @@ export function systemRoutes( { router, mlLicense }: RouteInitialization, { spaces, cloud, resolveMlCapabilities }: SystemRouteDeps ) { - async function getNodeCount(legacyClient: ILegacyScopedClusterClient) { - const filterPath = 'nodes.*.attributes'; - const resp = await legacyClient.callAsInternalUser('nodes.info', { - filterPath, + async function getNodeCount(client: IScopedClusterClient) { + const { body } = await client.asInternalUser.nodes.info({ + filter_path: 'nodes.*.attributes', }); let count = 0; - if (typeof resp.nodes === 'object') { - Object.keys(resp.nodes).forEach((k) => { - if (resp.nodes[k].attributes !== undefined) { - const maxOpenJobs = resp.nodes[k].attributes['ml.max_open_jobs']; + if (typeof body.nodes === 'object') { + Object.keys(body.nodes).forEach((k) => { + if (body.nodes[k].attributes !== undefined) { + const maxOpenJobs = body.nodes[k].attributes['ml.max_open_jobs']; if (maxOpenJobs !== null && maxOpenJobs > 0) { count++; } @@ -58,15 +57,15 @@ export function systemRoutes( tags: ['access:ml:canAccessML'], }, }, - mlLicense.basicLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.basicLicenseAPIGuard(async ({ client, request, response }) => { try { - const { callAsCurrentUser, callAsInternalUser } = legacyClient; + const { asCurrentUser, asInternalUser } = client; let upgradeInProgress = false; try { - const info = await callAsInternalUser('ml.info'); + const { body } = await asInternalUser.ml.info(); // if ml indices are currently being migrated, upgrade_mode will be set to true // pass this back with the privileges to allow for the disabling of UI controls. 
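In getNodeCount above, `filter_path: 'nodes.*.attributes'` trims the nodes.info response down to just the per-node attribute maps before the loop counts nodes advertising `ml.max_open_jobs`. A hedged example of the shape the loop iterates over; the node ids and attribute sets are invented and vary by deployment, and attribute values arrive as strings:

// Roughly what `body` looks like after filter_path has been applied.
const exampleNodesInfoBody = {
  nodes: {
    'node-a': { attributes: { 'ml.machine_memory': '8589934592', 'ml.max_open_jobs': '20' } },
    'node-b': { attributes: { 'transform.node': 'true' } }, // no ml.max_open_jobs, so not counted
  },
};
// For this body, getNodeCount() would report 1 ML-capable node.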
- upgradeInProgress = info.upgrade_mode === true; + upgradeInProgress = body.upgrade_mode === true; } catch (error) { // if the ml.info check fails, it could be due to the user having insufficient privileges // most likely they do not have the ml_user role and therefore will be blocked from using @@ -90,11 +89,12 @@ export function systemRoutes( }, }); } else { - const body = request.body; - const resp = await callAsCurrentUser('ml.privilegeCheck', { body }); - resp.upgradeInProgress = upgradeInProgress; + const { body } = await asCurrentUser.security.hasPrivileges({ body: request.body }); return response.ok({ - body: resp, + body: { + ...body, + upgradeInProgress, + }, }); } } catch (error) { @@ -115,7 +115,7 @@ export function systemRoutes( path: '/api/ml/ml_capabilities', validate: false, }, - mlLicense.basicLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.basicLicenseAPIGuard(async ({ client, request, response }) => { try { // if spaces is disabled force isMlEnabledInSpace to be true const { isMlEnabledInSpace } = @@ -129,7 +129,7 @@ export function systemRoutes( } const { getCapabilities } = capabilitiesProvider( - legacyClient, + client, mlCapabilities, mlLicense, isMlEnabledInSpace @@ -159,10 +159,10 @@ export function systemRoutes( }, }, - mlLicense.basicLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.basicLicenseAPIGuard(async ({ client, request, response }) => { try { return response.ok({ - body: await getNodeCount(legacyClient), + body: await getNodeCount(client), }); } catch (e) { return response.customError(wrapError(e)); @@ -185,12 +185,12 @@ export function systemRoutes( tags: ['access:ml:canAccessML'], }, }, - mlLicense.basicLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.basicLicenseAPIGuard(async ({ client, request, response }) => { try { - const info = await legacyClient.callAsInternalUser('ml.info'); + const { body } = await client.asInternalUser.ml.info(); const cloudId = cloud && cloud.cloudId; return response.ok({ - body: { ...info, cloudId }, + body: { ...body, cloudId }, }); } catch (error) { return response.customError(wrapError(error)); @@ -216,10 +216,11 @@ export function systemRoutes( tags: ['access:ml:canGetJobs'], }, }, - mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => { try { + const { body } = await client.asCurrentUser.search(request.body); return response.ok({ - body: await legacyClient.callAsCurrentUser('search', request.body), + body, }); } catch (error) { return response.customError(wrapError(error)); @@ -243,22 +244,21 @@ export function systemRoutes( tags: ['access:ml:canAccessML'], }, }, - mlLicense.basicLicenseAPIGuard(async ({ legacyClient, request, response }) => { + mlLicense.basicLicenseAPIGuard(async ({ client, request, response }) => { try { const { index } = request.body; const options = { index: [index], fields: ['*'], - ignoreUnavailable: true, - allowNoIndices: true, - ignore: 404, + ignore_unavailable: true, + allow_no_indices: true, }; - const fieldsResult = await legacyClient.callAsCurrentUser('fieldCaps', options); + const { body } = await client.asCurrentUser.fieldCaps(options); const result = { exists: false }; - if (Array.isArray(fieldsResult.indices) && fieldsResult.indices.length !== 0) { + if (Array.isArray(body.indices) && body.indices.length !== 0) { result.exists = true; } diff --git a/x-pack/plugins/ml/server/shared_services/errors.ts 
b/x-pack/plugins/ml/server/shared_services/errors.ts new file mode 100644 index 0000000000000..f15a85a490a46 --- /dev/null +++ b/x-pack/plugins/ml/server/shared_services/errors.ts @@ -0,0 +1,12 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +export class MLClusterClientUninitialized extends Error { + constructor(message?: string) { + super(message); + Object.setPrototypeOf(this, new.target.prototype); + } +} diff --git a/x-pack/plugins/ml/server/shared_services/license_checks/errors.ts b/x-pack/plugins/ml/server/shared_services/license_checks/errors.ts new file mode 100644 index 0000000000000..18e7dab43fda7 --- /dev/null +++ b/x-pack/plugins/ml/server/shared_services/license_checks/errors.ts @@ -0,0 +1,21 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +/* eslint-disable max-classes-per-file */ + +export class InsufficientFullLicenseError extends Error { + constructor(message?: string) { + super(message); + Object.setPrototypeOf(this, new.target.prototype); + } +} + +export class InsufficientBasicLicenseError extends Error { + constructor(message?: string) { + super(message); + Object.setPrototypeOf(this, new.target.prototype); + } +} diff --git a/x-pack/plugins/ml/server/shared_services/license_checks/index.ts b/x-pack/plugins/ml/server/shared_services/license_checks/index.ts new file mode 100644 index 0000000000000..6b837dadf5c0d --- /dev/null +++ b/x-pack/plugins/ml/server/shared_services/license_checks/index.ts @@ -0,0 +1,8 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +export { LicenseCheck, licenseChecks } from './license_checks'; +export { InsufficientBasicLicenseError, InsufficientFullLicenseError } from './errors'; diff --git a/x-pack/plugins/ml/server/shared_services/license_checks.ts b/x-pack/plugins/ml/server/shared_services/license_checks/license_checks.ts similarity index 66% rename from x-pack/plugins/ml/server/shared_services/license_checks.ts rename to x-pack/plugins/ml/server/shared_services/license_checks/license_checks.ts index 191124ffa5f3a..3d9de1ef70f2d 100644 --- a/x-pack/plugins/ml/server/shared_services/license_checks.ts +++ b/x-pack/plugins/ml/server/shared_services/license_checks/license_checks.ts @@ -4,7 +4,8 @@ * you may not use this file except in compliance with the Elastic License. 
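The `Object.setPrototypeOf(this, new.target.prototype)` call in these new error classes is the usual workaround for subclassing Error when TypeScript targets ES5: without it, `instanceof` checks against the subclass fail at runtime. A hedged sketch of the kind of check this enables; the mapping to a 403 is an assumption for illustration, not behaviour defined in this patch:

import {
  InsufficientBasicLicenseError,
  InsufficientFullLicenseError,
} from './license_checks'; // path assumes the caller sits next to the new shared_services folder

function licenseErrorToStatusCode(error: unknown): number {
  // Works only because the constructors restore the prototype chain.
  if (
    error instanceof InsufficientFullLicenseError ||
    error instanceof InsufficientBasicLicenseError
  ) {
    return 403; // assumption: license failures surface as "forbidden"
  }
  return 500;
}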
*/ -import { MlServerLicense } from '../lib/license'; +import { MlServerLicense } from '../../lib/license'; +import { InsufficientFullLicenseError, InsufficientBasicLicenseError } from './errors'; export type LicenseCheck = () => void; @@ -14,12 +15,12 @@ export function licenseChecks( return { isFullLicense() { if (mlLicense.isFullLicense() === false) { - throw Error('Platinum, Enterprise or trial license needed'); + throw new InsufficientFullLicenseError('Platinum, Enterprise or trial license needed'); } }, isMinimumLicense() { if (mlLicense.isMinimumLicense() === false) { - throw Error('Basic license needed'); + throw new InsufficientBasicLicenseError('Basic license needed'); } }, }; diff --git a/x-pack/plugins/ml/server/shared_services/providers/anomaly_detectors.ts b/x-pack/plugins/ml/server/shared_services/providers/anomaly_detectors.ts index 603b4fba17adb..53898cb64d07f 100644 --- a/x-pack/plugins/ml/server/shared_services/providers/anomaly_detectors.ts +++ b/x-pack/plugins/ml/server/shared_services/providers/anomaly_detectors.ts @@ -4,40 +4,33 @@ * you may not use this file except in compliance with the Elastic License. */ -import { ILegacyScopedClusterClient, KibanaRequest } from 'kibana/server'; +import { KibanaRequest } from 'kibana/server'; import { Job } from '../../../common/types/anomaly_detection_jobs'; -import { SharedServicesChecks } from '../shared_services'; +import { GetGuards } from '../shared_services'; export interface AnomalyDetectorsProvider { anomalyDetectorsProvider( - mlClusterClient: ILegacyScopedClusterClient, request: KibanaRequest ): { jobs(jobId?: string): Promise<{ count: number; jobs: Job[] }>; }; } -export function getAnomalyDetectorsProvider({ - isFullLicense, - getHasMlCapabilities, -}: SharedServicesChecks): AnomalyDetectorsProvider { +export function getAnomalyDetectorsProvider(getGuards: GetGuards): AnomalyDetectorsProvider { return { - anomalyDetectorsProvider(mlClusterClient: ILegacyScopedClusterClient, request: KibanaRequest) { - // APM is using this service in anomaly alert, kibana alerting doesn't provide request object - // So we are adding a dummy request for now - // TODO: Remove this once kibana alerting provides request object - const hasMlCapabilities = - request.params !== 'DummyKibanaRequest' - ? getHasMlCapabilities(request) - : (_caps: string[]) => Promise.resolve(); + anomalyDetectorsProvider(request: KibanaRequest) { return { async jobs(jobId?: string) { - isFullLicense(); - await hasMlCapabilities(['canGetJobs']); - return mlClusterClient.callAsInternalUser( - 'ml.jobs', - jobId !== undefined ? { jobId } : {} - ); + return await getGuards(request) + .isFullLicense() + .hasMlCapabilities(['canGetJobs']) + .ok(async ({ scopedClient }) => { + const { body } = await scopedClient.asInternalUser.ml.getJobs<{ + count: number; + jobs: Job[]; + }>(jobId !== undefined ? { job_id: jobId } : undefined); + return body; + }); }, }; }, diff --git a/x-pack/plugins/ml/server/shared_services/providers/job_service.ts b/x-pack/plugins/ml/server/shared_services/providers/job_service.ts index c734dcc1583a1..2897bf08717f8 100644 --- a/x-pack/plugins/ml/server/shared_services/providers/job_service.ts +++ b/x-pack/plugins/ml/server/shared_services/providers/job_service.ts @@ -4,38 +4,32 @@ * you may not use this file except in compliance with the Elastic License. 
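After this provider refactor, consumers of the shared anomaly detectors service no longer build or pass a cluster client; they hand over only the request, and the guard chain resolves the license, the capabilities and the scoped client internally, throwing the new error classes when a check fails. A minimal consumption sketch; the import path and the `countAnomalyJobs` helper are illustrative:

import { KibanaRequest } from 'kibana/server';
import { MlPluginSetup } from '../../../ml/server'; // relative path depends on the consuming plugin

async function countAnomalyJobs(ml: MlPluginSetup, request: KibanaRequest): Promise<number> {
  const { jobs } = ml.anomalyDetectorsProvider(request);
  // Throws InsufficientFullLicenseError or a capability error instead of silently proceeding.
  const { count } = await jobs();
  return count;
}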
*/ -import { ILegacyScopedClusterClient, KibanaRequest } from 'kibana/server'; +import { KibanaRequest } from 'kibana/server'; import { jobServiceProvider } from '../../models/job_service'; -import { SharedServicesChecks } from '../shared_services'; +import { GetGuards } from '../shared_services'; type OrigJobServiceProvider = ReturnType; export interface JobServiceProvider { jobServiceProvider( - mlClusterClient: ILegacyScopedClusterClient, request: KibanaRequest ): { jobsSummary: OrigJobServiceProvider['jobsSummary']; }; } -export function getJobServiceProvider({ - isFullLicense, - getHasMlCapabilities, -}: SharedServicesChecks): JobServiceProvider { +export function getJobServiceProvider(getGuards: GetGuards): JobServiceProvider { return { - jobServiceProvider(mlClusterClient: ILegacyScopedClusterClient, request: KibanaRequest) { - // const hasMlCapabilities = getHasMlCapabilities(request); - const { jobsSummary } = jobServiceProvider(mlClusterClient); + jobServiceProvider(request: KibanaRequest) { return { - async jobsSummary(...args) { - isFullLicense(); - // Removed while https://github.com/elastic/kibana/issues/64588 exists. - // SIEM are calling this endpoint with a dummy request object from their alerting - // integration and currently alerting does not supply a request object. - // await hasMlCapabilities(['canGetJobs']); - - return jobsSummary(...args); + jobsSummary: async (...args) => { + return await getGuards(request) + .isFullLicense() + .hasMlCapabilities(['canGetJobs']) + .ok(async ({ scopedClient }) => { + const { jobsSummary } = jobServiceProvider(scopedClient); + return jobsSummary(...args); + }); }, }; }, diff --git a/x-pack/plugins/ml/server/shared_services/providers/modules.ts b/x-pack/plugins/ml/server/shared_services/providers/modules.ts index fb7d59f9c8218..a727d96433f1d 100644 --- a/x-pack/plugins/ml/server/shared_services/providers/modules.ts +++ b/x-pack/plugins/ml/server/shared_services/providers/modules.ts @@ -4,23 +4,17 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { - ILegacyScopedClusterClient, - KibanaRequest, - SavedObjectsClientContract, -} from 'kibana/server'; +import { IScopedClusterClient, KibanaRequest, SavedObjectsClientContract } from 'kibana/server'; import { TypeOf } from '@kbn/config-schema'; import { DataRecognizer } from '../../models/data_recognizer'; -import { SharedServicesChecks } from '../shared_services'; +import { GetGuards } from '../shared_services'; import { moduleIdParamSchema, setupModuleBodySchema } from '../../routes/schemas/modules'; -import { HasMlCapabilities } from '../../lib/capabilities'; export type ModuleSetupPayload = TypeOf & TypeOf; export interface ModulesProvider { modulesProvider( - mlClusterClient: ILegacyScopedClusterClient, request: KibanaRequest, savedObjectsClient: SavedObjectsClientContract ): { @@ -31,61 +25,58 @@ export interface ModulesProvider { }; } -export function getModulesProvider({ - isFullLicense, - getHasMlCapabilities, -}: SharedServicesChecks): ModulesProvider { +export function getModulesProvider(getGuards: GetGuards): ModulesProvider { return { - modulesProvider( - mlClusterClient: ILegacyScopedClusterClient, - request: KibanaRequest, - savedObjectsClient: SavedObjectsClientContract - ) { - let hasMlCapabilities: HasMlCapabilities; - if (request.params === 'DummyKibanaRequest') { - hasMlCapabilities = () => Promise.resolve(); - } else { - hasMlCapabilities = getHasMlCapabilities(request); - } - const dr = dataRecognizerFactory(mlClusterClient, savedObjectsClient, request); - + modulesProvider(request: KibanaRequest, savedObjectsClient: SavedObjectsClientContract) { return { async recognize(...args) { - isFullLicense(); - await hasMlCapabilities(['canCreateJob']); - - return dr.findMatches(...args); + return await getGuards(request) + .isFullLicense() + .hasMlCapabilities(['canGetJobs']) + .ok(async ({ scopedClient }) => { + const dr = dataRecognizerFactory(scopedClient, savedObjectsClient, request); + return dr.findMatches(...args); + }); }, async getModule(moduleId: string) { - isFullLicense(); - await hasMlCapabilities(['canGetJobs']); - - return dr.getModule(moduleId); + return await getGuards(request) + .isFullLicense() + .hasMlCapabilities(['canGetJobs']) + .ok(async ({ scopedClient }) => { + const dr = dataRecognizerFactory(scopedClient, savedObjectsClient, request); + return dr.getModule(moduleId); + }); }, async listModules() { - isFullLicense(); - await hasMlCapabilities(['canGetJobs']); - - return dr.listModules(); + return await getGuards(request) + .isFullLicense() + .hasMlCapabilities(['canGetJobs']) + .ok(async ({ scopedClient }) => { + const dr = dataRecognizerFactory(scopedClient, savedObjectsClient, request); + return dr.listModules(); + }); }, async setup(payload: ModuleSetupPayload) { - isFullLicense(); - await hasMlCapabilities(['canCreateJob']); - - return dr.setup( - payload.moduleId, - payload.prefix, - payload.groups, - payload.indexPatternName, - payload.query, - payload.useDedicatedIndex, - payload.startDatafeed, - payload.start, - payload.end, - payload.jobOverrides, - payload.datafeedOverrides, - payload.estimateModelMemory - ); + return await getGuards(request) + .isFullLicense() + .hasMlCapabilities(['canCreateJob']) + .ok(async ({ scopedClient }) => { + const dr = dataRecognizerFactory(scopedClient, savedObjectsClient, request); + return dr.setup( + payload.moduleId, + payload.prefix, + payload.groups, + payload.indexPatternName, + payload.query, + payload.useDedicatedIndex, + payload.startDatafeed, + payload.start, + payload.end, + 
payload.jobOverrides, + payload.datafeedOverrides, + payload.estimateModelMemory + ); + }); }, }; }, @@ -93,9 +84,9 @@ export function getModulesProvider({ } function dataRecognizerFactory( - mlClusterClient: ILegacyScopedClusterClient, + client: IScopedClusterClient, savedObjectsClient: SavedObjectsClientContract, request: KibanaRequest ) { - return new DataRecognizer(mlClusterClient, savedObjectsClient, request); + return new DataRecognizer(client, savedObjectsClient, request); } diff --git a/x-pack/plugins/ml/server/shared_services/providers/results_service.ts b/x-pack/plugins/ml/server/shared_services/providers/results_service.ts index 6af4eb008567a..5536765cc376a 100644 --- a/x-pack/plugins/ml/server/shared_services/providers/results_service.ts +++ b/x-pack/plugins/ml/server/shared_services/providers/results_service.ts @@ -4,41 +4,32 @@ * you may not use this file except in compliance with the Elastic License. */ -import { ILegacyScopedClusterClient, KibanaRequest } from 'kibana/server'; +import { KibanaRequest } from 'kibana/server'; import { resultsServiceProvider } from '../../models/results_service'; -import { SharedServicesChecks } from '../shared_services'; +import { GetGuards } from '../shared_services'; type OrigResultsServiceProvider = ReturnType; export interface ResultsServiceProvider { resultsServiceProvider( - mlClusterClient: ILegacyScopedClusterClient, request: KibanaRequest ): { getAnomaliesTableData: OrigResultsServiceProvider['getAnomaliesTableData']; }; } -export function getResultsServiceProvider({ - isFullLicense, - getHasMlCapabilities, -}: SharedServicesChecks): ResultsServiceProvider { +export function getResultsServiceProvider(getGuards: GetGuards): ResultsServiceProvider { return { - resultsServiceProvider(mlClusterClient: ILegacyScopedClusterClient, request: KibanaRequest) { - // Uptime is using this service in anomaly alert, kibana alerting doesn't provide request object - // So we are adding a dummy request for now - // TODO: Remove this once kibana alerting provides request object - const hasMlCapabilities = - request.params !== 'DummyKibanaRequest' - ? getHasMlCapabilities(request) - : (_caps: string[]) => Promise.resolve(); - - const { getAnomaliesTableData } = resultsServiceProvider(mlClusterClient); + resultsServiceProvider(request: KibanaRequest) { return { async getAnomaliesTableData(...args) { - isFullLicense(); - await hasMlCapabilities(['canGetJobs']); - return getAnomaliesTableData(...args); + return await getGuards(request) + .isFullLicense() + .hasMlCapabilities(['canGetJobs']) + .ok(async ({ scopedClient }) => { + const { getAnomaliesTableData } = resultsServiceProvider(scopedClient); + return getAnomaliesTableData(...args); + }); }, }; }, diff --git a/x-pack/plugins/ml/server/shared_services/providers/system.ts b/x-pack/plugins/ml/server/shared_services/providers/system.ts index d292abc438a2f..3217ff13787b0 100644 --- a/x-pack/plugins/ml/server/shared_services/providers/system.ts +++ b/x-pack/plugins/ml/server/shared_services/providers/system.ts @@ -4,8 +4,9 @@ * you may not use this file except in compliance with the Elastic License. 
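The modules provider keeps its two-argument entry point (request plus a saved objects client) but now builds a fresh DataRecognizer inside each guarded call with the scoped client. From another plugin's route handler, consumption looks roughly like this; the index pattern, helper name and import path are illustrative:

import { KibanaRequest, RequestHandlerContext } from 'kibana/server';
import { MlPluginSetup } from '../../../ml/server'; // relative path depends on the consuming plugin

async function listMlModules(
  ml: MlPluginSetup,
  request: KibanaRequest,
  context: RequestHandlerContext
) {
  const modules = ml.modulesProvider(request, context.core.savedObjects.client);
  const allModules = await modules.listModules();
  const matches = await modules.recognize('filebeat-*');
  return { allModules, matches };
}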
*/ -import { ILegacyScopedClusterClient, KibanaRequest } from 'kibana/server'; -import { SearchResponse, SearchParams } from 'elasticsearch'; +import { KibanaRequest } from 'kibana/server'; +import { SearchResponse } from 'elasticsearch'; +import { RequestParams } from '@elastic/elasticsearch'; import { MlServerLicense } from '../../lib/license'; import { CloudSetup } from '../../../../cloud/server'; import { spacesUtilsProvider } from '../../lib/spaces_utils'; @@ -14,73 +15,79 @@ import { capabilitiesProvider } from '../../lib/capabilities'; import { MlInfoResponse } from '../../../common/types/ml_server_info'; import { ML_RESULTS_INDEX_PATTERN } from '../../../common/constants/index_patterns'; import { MlCapabilitiesResponse, ResolveMlCapabilities } from '../../../common/types/capabilities'; -import { SharedServicesChecks } from '../shared_services'; +import { GetGuards } from '../shared_services'; export interface MlSystemProvider { mlSystemProvider( - mlClusterClient: ILegacyScopedClusterClient, request: KibanaRequest ): { mlCapabilities(): Promise; mlInfo(): Promise; - mlAnomalySearch(searchParams: SearchParams): Promise>; + mlAnomalySearch(searchParams: RequestParams.Search): Promise>; }; } export function getMlSystemProvider( - { isMinimumLicense, isFullLicense, getHasMlCapabilities }: SharedServicesChecks, + getGuards: GetGuards, mlLicense: MlServerLicense, spaces: SpacesPluginSetup | undefined, cloud: CloudSetup | undefined, resolveMlCapabilities: ResolveMlCapabilities ): MlSystemProvider { return { - mlSystemProvider(mlClusterClient: ILegacyScopedClusterClient, request: KibanaRequest) { - // const hasMlCapabilities = getHasMlCapabilities(request); - const { callAsInternalUser } = mlClusterClient; + mlSystemProvider(request: KibanaRequest) { return { async mlCapabilities() { - isMinimumLicense(); + return await getGuards(request) + .isMinimumLicense() + .ok(async ({ scopedClient }) => { + const { isMlEnabledInSpace } = + spaces !== undefined + ? spacesUtilsProvider(spaces, request) + : { isMlEnabledInSpace: async () => true }; - const { isMlEnabledInSpace } = - spaces !== undefined - ? spacesUtilsProvider(spaces, request) - : { isMlEnabledInSpace: async () => true }; + const mlCapabilities = await resolveMlCapabilities(request); + if (mlCapabilities === null) { + throw new Error('mlCapabilities is not defined'); + } - const mlCapabilities = await resolveMlCapabilities(request); - if (mlCapabilities === null) { - throw new Error('mlCapabilities is not defined'); - } - - const { getCapabilities } = capabilitiesProvider( - mlClusterClient, - mlCapabilities, - mlLicense, - isMlEnabledInSpace - ); - return getCapabilities(); + const { getCapabilities } = capabilitiesProvider( + scopedClient, + mlCapabilities, + mlLicense, + isMlEnabledInSpace + ); + return getCapabilities(); + }); }, async mlInfo(): Promise { - isMinimumLicense(); + return await getGuards(request) + .isMinimumLicense() + .ok(async ({ scopedClient }) => { + const { asInternalUser } = scopedClient; - const info = await callAsInternalUser('ml.info'); - const cloudId = cloud && cloud.cloudId; - return { - ...info, - cloudId, - }; + const { body: info } = await asInternalUser.ml.info(); + const cloudId = cloud && cloud.cloudId; + return { + ...info, + cloudId, + }; + }); }, - async mlAnomalySearch(searchParams: SearchParams): Promise> { - isFullLicense(); - // Removed while https://github.com/elastic/kibana/issues/64588 exists. 
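`mlAnomalySearch` still pins every query to the ML results index pattern, but it now also enforces the `canAccessML` capability through the guard chain, where the old code had that check commented out while issue 64588 was open. A hedged example of the kind of request a consumer such as the detection engine might pass; the query itself is illustrative, although result_type, job_id and record_score are standard ML result fields:

import { KibanaRequest } from 'kibana/server';
import { MlPluginSetup } from '../../../ml/server'; // relative path depends on the consuming plugin

async function recentAnomalies(ml: MlPluginSetup, request: KibanaRequest, jobId: string) {
  const { mlAnomalySearch } = ml.mlSystemProvider(request);
  // The ML results index is appended by the service, so only the query is supplied here.
  return mlAnomalySearch({
    size: 100,
    body: {
      query: {
        bool: {
          filter: [
            { term: { result_type: 'record' } },
            { term: { job_id: jobId } },
            { range: { record_score: { gte: 75 } } },
          ],
        },
      },
    },
  });
}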
- // SIEM are calling this endpoint with a dummy request object from their alerting - // integration and currently alerting does not supply a request object. - // await hasMlCapabilities(['canAccessML']); - - return callAsInternalUser('search', { - ...searchParams, - index: ML_RESULTS_INDEX_PATTERN, - }); + async mlAnomalySearch( + searchParams: RequestParams.Search + ): Promise> { + return await getGuards(request) + .isFullLicense() + .hasMlCapabilities(['canAccessML']) + .ok(async ({ scopedClient }) => { + const { asInternalUser } = scopedClient; + const { body } = await asInternalUser.search>({ + ...searchParams, + index: ML_RESULTS_INDEX_PATTERN, + }); + return body; + }); }, }; }, diff --git a/x-pack/plugins/ml/server/shared_services/shared_services.ts b/x-pack/plugins/ml/server/shared_services/shared_services.ts index 3345111fad4ae..4c568e4515a27 100644 --- a/x-pack/plugins/ml/server/shared_services/shared_services.ts +++ b/x-pack/plugins/ml/server/shared_services/shared_services.ts @@ -4,7 +4,11 @@ * you may not use this file except in compliance with the Elastic License. */ -import { KibanaRequest } from 'kibana/server'; +import { IClusterClient, IScopedClusterClient } from 'kibana/server'; +// including KibanaRequest from 'kibana/server' causes an error +// when being used with instanceof +// eslint-disable-next-line @kbn/eslint/no-restricted-paths +import { KibanaRequest } from '../../.././../../src/core/server/http'; import { MlServerLicense } from '../lib/license'; import { SpacesPluginSetup } from '../../../spaces/server'; @@ -18,8 +22,9 @@ import { AnomalyDetectorsProvider, getAnomalyDetectorsProvider, } from './providers/anomaly_detectors'; -import { ResolveMlCapabilities } from '../../common/types/capabilities'; +import { ResolveMlCapabilities, MlCapabilitiesKey } from '../../common/types/capabilities'; import { hasMlCapabilitiesProvider, HasMlCapabilities } from '../lib/capabilities'; +import { MLClusterClientUninitialized } from './errors'; export type SharedServices = JobServiceProvider & AnomalyDetectorsProvider & @@ -27,31 +32,97 @@ export type SharedServices = JobServiceProvider & ModulesProvider & ResultsServiceProvider; +interface Guards { + isMinimumLicense(): Guards; + isFullLicense(): Guards; + hasMlCapabilities: (caps: MlCapabilitiesKey[]) => Guards; + ok(callback: OkCallback): any; +} + +export type GetGuards = (request: KibanaRequest) => Guards; + export interface SharedServicesChecks { - isFullLicense(): void; - isMinimumLicense(): void; - getHasMlCapabilities(request: KibanaRequest): HasMlCapabilities; + getGuards(request: KibanaRequest): Guards; } +interface OkParams { + scopedClient: IScopedClusterClient; +} + +type OkCallback = (okParams: OkParams) => any; + export function createSharedServices( mlLicense: MlServerLicense, spaces: SpacesPluginSetup | undefined, cloud: CloudSetup, - resolveMlCapabilities: ResolveMlCapabilities + resolveMlCapabilities: ResolveMlCapabilities, + getClusterClient: () => IClusterClient | null ): SharedServices { + const getRequestItems = getRequestItemsProvider(resolveMlCapabilities, getClusterClient); const { isFullLicense, isMinimumLicense } = licenseChecks(mlLicense); - const getHasMlCapabilities = hasMlCapabilitiesProvider(resolveMlCapabilities); - const checks: SharedServicesChecks = { - isFullLicense, - isMinimumLicense, - getHasMlCapabilities, - }; + + function getGuards(request: KibanaRequest): Guards { + const { hasMlCapabilities, scopedClient } = getRequestItems(request); + const asyncGuards: Array> = []; + + const 
guards: Guards = { + isMinimumLicense: () => { + isMinimumLicense(); + return guards; + }, + isFullLicense: () => { + isFullLicense(); + return guards; + }, + hasMlCapabilities: (caps: MlCapabilitiesKey[]) => { + asyncGuards.push(hasMlCapabilities(caps)); + return guards; + }, + async ok(callback: OkCallback) { + await Promise.all(asyncGuards); + return callback({ scopedClient }); + }, + }; + return guards; + } return { - ...getJobServiceProvider(checks), - ...getAnomalyDetectorsProvider(checks), - ...getModulesProvider(checks), - ...getResultsServiceProvider(checks), - ...getMlSystemProvider(checks, mlLicense, spaces, cloud, resolveMlCapabilities), + ...getJobServiceProvider(getGuards), + ...getAnomalyDetectorsProvider(getGuards), + ...getModulesProvider(getGuards), + ...getResultsServiceProvider(getGuards), + ...getMlSystemProvider(getGuards, mlLicense, spaces, cloud, resolveMlCapabilities), + }; +} + +function getRequestItemsProvider( + resolveMlCapabilities: ResolveMlCapabilities, + getClusterClient: () => IClusterClient | null +) { + return (request: KibanaRequest) => { + const getHasMlCapabilities = hasMlCapabilitiesProvider(resolveMlCapabilities); + let hasMlCapabilities: HasMlCapabilities; + let scopedClient: IScopedClusterClient; + // While https://github.com/elastic/kibana/issues/64588 exists we + // will not receive a real request object when being called from an alert. + // instead a dummy request object will be supplied + const clusterClient = getClusterClient(); + + if (clusterClient === null) { + throw new MLClusterClientUninitialized(`ML's cluster client has not been initialized`); + } + + if (request instanceof KibanaRequest) { + hasMlCapabilities = getHasMlCapabilities(request); + scopedClient = clusterClient.asScoped(request); + } else { + hasMlCapabilities = () => Promise.resolve(); + const { asInternalUser } = clusterClient; + scopedClient = { + asInternalUser, + asCurrentUser: asInternalUser, + }; + } + return { hasMlCapabilities, scopedClient }; }; } diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/signals/find_ml_signals.ts b/x-pack/plugins/security_solution/server/lib/detection_engine/signals/find_ml_signals.ts index 18a64a12431b8..bd9bf50688b58 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/signals/find_ml_signals.ts +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/signals/find_ml_signals.ts @@ -6,13 +6,12 @@ import dateMath from '@elastic/datemath'; -import { ILegacyScopedClusterClient, KibanaRequest } from '../../../../../../../src/core/server'; +import { KibanaRequest } from '../../../../../../../src/core/server'; import { MlPluginSetup } from '../../../../../ml/server'; import { getAnomalies } from '../../machine_learning'; export const findMlSignals = async ({ ml, - clusterClient, request, jobId, anomalyThreshold, @@ -20,14 +19,13 @@ export const findMlSignals = async ({ to, }: { ml: MlPluginSetup; - clusterClient: ILegacyScopedClusterClient; request: KibanaRequest; jobId: string; anomalyThreshold: number; from: string; to: string; }) => { - const { mlAnomalySearch } = ml.mlSystemProvider(clusterClient, request); + const { mlAnomalySearch } = ml.mlSystemProvider(request); const params = { jobIds: [jobId], threshold: anomalyThreshold, diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/signals/signal_rule_alert_type.ts b/x-pack/plugins/security_solution/server/lib/detection_engine/signals/signal_rule_alert_type.ts index c5124edcaf187..9d688736a9846 100644 --- 
a/x-pack/plugins/security_solution/server/lib/detection_engine/signals/signal_rule_alert_type.ts +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/signals/signal_rule_alert_type.ts @@ -185,12 +185,12 @@ export const signalRulesAlertType = ({ ); } - const scopedClusterClient = services.getLegacyScopedClusterClient(ml.mlClient); // Using fake KibanaRequest as it is needed to satisfy the ML Services API, but can be empty as it is // currently unused by the jobsSummary function. - const summaryJobs = await ( - await ml.jobServiceProvider(scopedClusterClient, ({} as unknown) as KibanaRequest) - ).jobsSummary([machineLearningJobId]); + const fakeRequest = {} as KibanaRequest; + const summaryJobs = await ml + .jobServiceProvider(fakeRequest) + .jobsSummary([machineLearningJobId]); const jobSummary = summaryJobs.find((job) => job.id === machineLearningJobId); if (jobSummary == null || !isJobStarted(jobSummary.jobState, jobSummary.datafeedState)) { @@ -207,7 +207,6 @@ export const signalRulesAlertType = ({ const anomalyResults = await findMlSignals({ ml, - clusterClient: scopedClusterClient, // Using fake KibanaRequest as it is needed to satisfy the ML Services API, but can be empty as it is // currently unused by the mlAnomalySearch function. request: ({} as unknown) as KibanaRequest, diff --git a/x-pack/plugins/security_solution/server/lib/machine_learning/authz.ts b/x-pack/plugins/security_solution/server/lib/machine_learning/authz.ts index 98de9536b1baa..386eda5281f0c 100644 --- a/x-pack/plugins/security_solution/server/lib/machine_learning/authz.ts +++ b/x-pack/plugins/security_solution/server/lib/machine_learning/authz.ts @@ -114,7 +114,6 @@ export const isMlAdmin = async ({ request: KibanaRequest; ml: MlPluginSetup; }): Promise => { - const scopedMlClient = ml.mlClient.asScoped(request); - const mlCapabilities = await ml.mlSystemProvider(scopedMlClient, request).mlCapabilities(); + const mlCapabilities = await ml.mlSystemProvider(request).mlCapabilities(); return hasMlAdminPermissions(mlCapabilities); }; diff --git a/x-pack/plugins/security_solution/server/lib/machine_learning/index.ts b/x-pack/plugins/security_solution/server/lib/machine_learning/index.ts index ad2f1e5a8285c..c17d9ab1bb46e 100644 --- a/x-pack/plugins/security_solution/server/lib/machine_learning/index.ts +++ b/x-pack/plugins/security_solution/server/lib/machine_learning/index.ts @@ -4,13 +4,14 @@ * you may not use this file except in compliance with the Elastic License. 
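The security_solution changes above rely on the fallback added in shared_services.ts: when the object passed in is not a real KibanaRequest instance (the `{} as KibanaRequest` that alerting has to use while it cannot supply a request), the guards skip the capability check and scope both asCurrentUser and asInternalUser to the internal user. Reduced to its essentials, the alert executor call becomes something like the following; `getJobSummary` is an illustrative wrapper, not a function in the patch:

import { KibanaRequest } from 'kibana/server';
import { MlPluginSetup } from '../../../../../ml/server';

async function getJobSummary(ml: MlPluginSetup, machineLearningJobId: string) {
  // No real request is available from the alert executor, so a fake one is passed;
  // the ML shared services detect this and fall back to the internal user without
  // running hasMlCapabilities().
  const fakeRequest = {} as KibanaRequest;
  const summaryJobs = await ml.jobServiceProvider(fakeRequest).jobsSummary([machineLearningJobId]);
  return summaryJobs.find((job) => job.id === machineLearningJobId);
}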
*/ -import { SearchResponse, SearchParams } from 'elasticsearch'; +import { SearchResponse } from 'elasticsearch'; +import { RequestParams } from '@elastic/elasticsearch'; import { AnomalyRecordDoc as Anomaly } from '../../../../ml/server'; export { Anomaly }; export type AnomalyResults = SearchResponse; -type MlAnomalySearch = (searchParams: SearchParams) => Promise>; +type MlAnomalySearch = (searchParams: RequestParams.Search) => Promise>; export interface AnomaliesSearchParams { jobIds: string[]; diff --git a/x-pack/plugins/security_solution/server/usage/detections/detections_helpers.ts b/x-pack/plugins/security_solution/server/usage/detections/detections_helpers.ts index a6d4dc7a38e14..5cf17af2fa9c0 100644 --- a/x-pack/plugins/security_solution/server/usage/detections/detections_helpers.ts +++ b/x-pack/plugins/security_solution/server/usage/detections/detections_helpers.ts @@ -172,18 +172,12 @@ export const getMlJobsUsage = async (ml: MlPluginSetup | undefined): Promise module.jobs); - const jobs = await ml.jobServiceProvider(internalMlClient, fakeRequest).jobsSummary(); + const jobs = await ml.jobServiceProvider(fakeRequest).jobsSummary(); jobsUsage = jobs.filter(isSecurityJob).reduce((usage, job) => { const isElastic = moduleJobs.some((moduleJob) => moduleJob.id === job.id); diff --git a/x-pack/plugins/transform/server/routes/api/field_histograms.ts b/x-pack/plugins/transform/server/routes/api/field_histograms.ts index f2fd81368ec17..2642040c4cd0d 100644 --- a/x-pack/plugins/transform/server/routes/api/field_histograms.ts +++ b/x-pack/plugins/transform/server/routes/api/field_histograms.ts @@ -34,7 +34,7 @@ export function registerFieldHistogramsRoutes({ router, license }: RouteDependen try { const resp = await getHistogramsForFields( - ctx.transform!.dataClient, + ctx.core.elasticsearch.client, indexPatternTitle, query, fields, diff --git a/x-pack/plugins/uptime/server/lib/alerts/duration_anomaly.ts b/x-pack/plugins/uptime/server/lib/alerts/duration_anomaly.ts index 9ed453d286285..568b0873b8dbb 100644 --- a/x-pack/plugins/uptime/server/lib/alerts/duration_anomaly.ts +++ b/x-pack/plugins/uptime/server/lib/alerts/duration_anomaly.ts @@ -4,9 +4,9 @@ * you may not use this file except in compliance with the Elastic License. 
*/ +import { KibanaRequest } from 'kibana/server'; import moment from 'moment'; import { schema } from '@kbn/config-schema'; -import { ILegacyScopedClusterClient } from 'kibana/server'; import { updateState } from './common'; import { ACTION_GROUP_DEFINITIONS } from '../../../common/constants/alerts'; import { commonStateTranslations, durationAnomalyTranslations } from './translations'; @@ -36,13 +36,11 @@ export const getAnomalySummary = (anomaly: AnomaliesTableRecord, monitorInfo: Pi const getAnomalies = async ( plugins: UptimeCorePlugins, - mlClusterClient: ILegacyScopedClusterClient, params: Record, lastCheckedAt: string ) => { - const { getAnomaliesTableData } = plugins.ml.resultsServiceProvider(mlClusterClient, { - params: 'DummyKibanaRequest', - } as any); + const fakeRequest = {} as KibanaRequest; + const { getAnomaliesTableData } = plugins.ml.resultsServiceProvider(fakeRequest); return await getAnomaliesTableData( [getMLJobId(params.monitorId)], @@ -82,23 +80,12 @@ export const durationAnomalyAlertFactory: UptimeAlertTypeFactory = (_server, _li producer: 'uptime', async executor(options) { const { - services: { - alertInstanceFactory, - callCluster, - savedObjectsClient, - getLegacyScopedClusterClient, - }, + services: { alertInstanceFactory, callCluster, savedObjectsClient }, state, params, } = options; - const { anomalies } = - (await getAnomalies( - plugins, - getLegacyScopedClusterClient(plugins.ml.mlClient), - params, - state.lastCheckedAt - )) ?? {}; + const { anomalies } = (await getAnomalies(plugins, params, state.lastCheckedAt)) ?? {}; const foundAnomalies = anomalies?.length > 0; diff --git a/x-pack/test/api_integration/apis/ml/data_frame_analytics/update.ts b/x-pack/test/api_integration/apis/ml/data_frame_analytics/update.ts index f4964308cd8c9..7aa5c180c5a02 100644 --- a/x-pack/test/api_integration/apis/ml/data_frame_analytics/update.ts +++ b/x-pack/test/api_integration/apis/ml/data_frame_analytics/update.ts @@ -258,7 +258,7 @@ export default ({ getService }: FtrProviderContext) => { description: 'Not found', }; const id = `${jobId}_invalid`; - const message = `[resource_not_found_exception] No known data frame analytics with id [${id}]`; + const message = 'resource_not_found_exception'; const { body } = await supertest .post(`/api/ml/data_frame/analytics/${id}/_update`) diff --git a/x-pack/test/api_integration/apis/ml/data_visualizer/get_field_histograms.ts b/x-pack/test/api_integration/apis/ml/data_visualizer/get_field_histograms.ts index 299f5f93fd281..a5969bdffbf8a 100644 --- a/x-pack/test/api_integration/apis/ml/data_visualizer/get_field_histograms.ts +++ b/x-pack/test/api_integration/apis/ml/data_visualizer/get_field_histograms.ts @@ -60,8 +60,7 @@ export default ({ getService }: FtrProviderContext) => { responseBody: { statusCode: 404, error: 'Not Found', - message: - '[index_not_found_exception] no such index [ft_farequote_not_exists], with { resource.type="index_or_alias" & resource.id="ft_farequote_not_exists" & index_uuid="_na_" & index="ft_farequote_not_exists" }', + message: 'index_not_found_exception', }, }, }; diff --git a/x-pack/test/api_integration/apis/ml/data_visualizer/get_field_stats.ts b/x-pack/test/api_integration/apis/ml/data_visualizer/get_field_stats.ts index 5795eac9637b1..4ec8ae0429a6b 100644 --- a/x-pack/test/api_integration/apis/ml/data_visualizer/get_field_stats.ts +++ b/x-pack/test/api_integration/apis/ml/data_visualizer/get_field_stats.ts @@ -152,8 +152,7 @@ export default ({ getService }: FtrProviderContext) => { responseBody: { 
statusCode: 404, error: 'Not Found', - message: - '[index_not_found_exception] no such index [ft_farequote_not_exists], with { resource.type="index_or_alias" & resource.id="ft_farequote_not_exists" & index_uuid="_na_" & index="ft_farequote_not_exists" }', + message: 'index_not_found_exception', }, }, }; diff --git a/x-pack/test/api_integration/apis/ml/data_visualizer/get_overall_stats.ts b/x-pack/test/api_integration/apis/ml/data_visualizer/get_overall_stats.ts index fa83807be161a..af2c53c577253 100644 --- a/x-pack/test/api_integration/apis/ml/data_visualizer/get_overall_stats.ts +++ b/x-pack/test/api_integration/apis/ml/data_visualizer/get_overall_stats.ts @@ -116,8 +116,7 @@ export default ({ getService }: FtrProviderContext) => { responseBody: { statusCode: 404, error: 'Not Found', - message: - '[index_not_found_exception] no such index [ft_farequote_not_exist], with { resource.type="index_or_alias" & resource.id="ft_farequote_not_exist" & index_uuid="_na_" & index="ft_farequote_not_exist" }', + message: 'index_not_found_exception', }, }, }, diff --git a/x-pack/test/api_integration/apis/ml/fields_service/field_cardinality.ts b/x-pack/test/api_integration/apis/ml/fields_service/field_cardinality.ts index 627d9454beeb6..7dbb4a9d03e4f 100644 --- a/x-pack/test/api_integration/apis/ml/fields_service/field_cardinality.ts +++ b/x-pack/test/api_integration/apis/ml/fields_service/field_cardinality.ts @@ -78,8 +78,7 @@ export default ({ getService }: FtrProviderContext) => { responseBody: { statusCode: 404, error: 'Not Found', - message: - '[index_not_found_exception] no such index [ft_ecommerce_not_exist], with { resource.type="index_or_alias" & resource.id="ft_ecommerce_not_exist" & index_uuid="_na_" & index="ft_ecommerce_not_exist" }', + message: 'index_not_found_exception', }, }, }, diff --git a/x-pack/test/api_integration/apis/ml/fields_service/time_field_range.ts b/x-pack/test/api_integration/apis/ml/fields_service/time_field_range.ts index b1c086ddbb456..6588d4df570b7 100644 --- a/x-pack/test/api_integration/apis/ml/fields_service/time_field_range.ts +++ b/x-pack/test/api_integration/apis/ml/fields_service/time_field_range.ts @@ -81,8 +81,7 @@ export default ({ getService }: FtrProviderContext) => { responseBody: { statusCode: 404, error: 'Not Found', - message: - '[index_not_found_exception] no such index [ft_ecommerce_not_exist], with { resource.type="index_or_alias" & resource.id="ft_ecommerce_not_exist" & index_uuid="_na_" & index="ft_ecommerce_not_exist" }', + message: 'index_not_found_exception', }, }, }, diff --git a/x-pack/test/api_integration/apis/ml/filters/create_filters.ts b/x-pack/test/api_integration/apis/ml/filters/create_filters.ts index dfec7798ffc0c..598fab2cb7fa6 100644 --- a/x-pack/test/api_integration/apis/ml/filters/create_filters.ts +++ b/x-pack/test/api_integration/apis/ml/filters/create_filters.ts @@ -73,7 +73,7 @@ export default ({ getService }: FtrProviderContext) => { responseCode: 400, responseBody: { error: 'Bad Request', - message: 'Invalid filter_id', + message: 'status_exception', }, }, }, diff --git a/x-pack/test/api_integration/apis/ml/filters/get_filters.ts b/x-pack/test/api_integration/apis/ml/filters/get_filters.ts index 5d7900ea5e9d9..4a3589634ded9 100644 --- a/x-pack/test/api_integration/apis/ml/filters/get_filters.ts +++ b/x-pack/test/api_integration/apis/ml/filters/get_filters.ts @@ -91,7 +91,7 @@ export default ({ getService }: FtrProviderContext) => { .set(COMMON_REQUEST_HEADERS) .expect(400); expect(body.error).to.eql('Bad Request'); - 
expect(body.message).to.contain('Unable to find filter'); + expect(body.message).to.contain('resource_not_found_exception'); }); }); }; diff --git a/x-pack/test/api_integration/apis/ml/filters/update_filters.ts b/x-pack/test/api_integration/apis/ml/filters/update_filters.ts index fbbb94d54c035..6f421ad120b51 100644 --- a/x-pack/test/api_integration/apis/ml/filters/update_filters.ts +++ b/x-pack/test/api_integration/apis/ml/filters/update_filters.ts @@ -111,7 +111,7 @@ export default ({ getService }: FtrProviderContext) => { .send(updateFilterRequestBody) .expect(400); - expect(body.message).to.contain('No filter with id'); + expect(body.message).to.contain('resource_not_found_exception'); }); }); }; diff --git a/x-pack/test/api_integration/apis/ml/jobs/close_jobs.ts b/x-pack/test/api_integration/apis/ml/jobs/close_jobs.ts index 5b9c5393e81d9..0b7f9cf927d26 100644 --- a/x-pack/test/api_integration/apis/ml/jobs/close_jobs.ts +++ b/x-pack/test/api_integration/apis/ml/jobs/close_jobs.ts @@ -47,8 +47,8 @@ export default ({ getService }: FtrProviderContext) => { responseCode: 200, responseBody: { - [SINGLE_METRIC_JOB_CONFIG.job_id]: { closed: false, error: { statusCode: 409 } }, - [MULTI_METRIC_JOB_CONFIG.job_id]: { closed: false, error: { statusCode: 409 } }, + [SINGLE_METRIC_JOB_CONFIG.job_id]: { closed: false, error: { status: 409 } }, + [MULTI_METRIC_JOB_CONFIG.job_id]: { closed: false, error: { status: 409 } }, }, }, }, @@ -162,9 +162,7 @@ export default ({ getService }: FtrProviderContext) => { expectedRspJobIds.forEach((id) => { expect(body[id].closed).to.eql(testData.expected.responseBody[id].closed); - expect(body[id].error.statusCode).to.eql( - testData.expected.responseBody[id].error.statusCode - ); + expect(body[id].error.status).to.eql(testData.expected.responseBody[id].error.status); }); // ensure jobs are still open From 70cea4871846e5487cd977a13ef6b488e57ecdfa Mon Sep 17 00:00:00 2001 From: Melissa Alvarez Date: Wed, 2 Sep 2020 09:11:05 -0400 Subject: [PATCH 03/21] [ML] DF Analytics jobs list: persist pagination through refresh interval (#75996) * wip: switch analyticsList inMemoryTable to basic and implement search bar * move basicTable settings to custom hook and update types * update types * add types for empty prompt * ensure sorting works * add refresh to analytics management list * ensure table still updates editing job --- .../analytics_list/analytics_list.tsx | 245 ++++++++---------- .../components/analytics_list/common.ts | 1 + .../analytics_list/empty_prompt.tsx | 51 ++++ .../components/analytics_list/use_columns.tsx | 2 - .../analytics_list/use_table_settings.ts | 119 +++++++++ .../analytics_search_bar.tsx | 157 +++++++++++ .../components/analytics_search_bar/index.ts | 7 + .../components/jobs_list/jobs_list.js | 2 +- 8 files changed, 446 insertions(+), 138 deletions(-) create mode 100644 x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/components/analytics_list/empty_prompt.tsx create mode 100644 x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/components/analytics_list/use_table_settings.ts create mode 100644 x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/components/analytics_search_bar/analytics_search_bar.tsx create mode 100644 x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/components/analytics_search_bar/index.ts diff --git 
a/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/components/analytics_list/analytics_list.tsx b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/components/analytics_list/analytics_list.tsx index c4c7a8a4ca11a..88287b963a028 100644 --- a/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/components/analytics_list/analytics_list.tsx +++ b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/components/analytics_list/analytics_list.tsx @@ -5,17 +5,13 @@ */ import React, { FC, useCallback, useState, useEffect } from 'react'; - import { i18n } from '@kbn/i18n'; - import { - Direction, - EuiButton, EuiCallOut, - EuiEmptyPrompt, EuiFlexGroup, EuiFlexItem, - EuiInMemoryTable, + EuiBasicTable, + EuiSearchBar, EuiSearchBarProps, EuiSpacer, } from '@elastic/eui'; @@ -43,6 +39,39 @@ import { getGroupQueryText, } from '../../../../../jobs/jobs_list/components/utils'; import { SourceSelection } from '../source_selection'; +import { filterAnalytics, AnalyticsSearchBar } from '../analytics_search_bar'; +import { AnalyticsEmptyPrompt } from './empty_prompt'; +import { useTableSettings } from './use_table_settings'; +import { RefreshAnalyticsListButton } from '../refresh_analytics_list_button'; + +const filters: EuiSearchBarProps['filters'] = [ + { + type: 'field_value_selection', + field: 'job_type', + name: i18n.translate('xpack.ml.dataframe.analyticsList.typeFilter', { + defaultMessage: 'Type', + }), + multiSelect: 'or', + options: Object.values(ANALYSIS_CONFIG_TYPE).map((val) => ({ + value: val, + name: val, + view: getJobTypeBadge(val), + })), + }, + { + type: 'field_value_selection', + field: 'state', + name: i18n.translate('xpack.ml.dataframe.analyticsList.statusFilter', { + defaultMessage: 'Status', + }), + multiSelect: 'or', + options: Object.values(DATA_FRAME_TASK_STATE).map((val) => ({ + value: val, + name: val, + view: getTaskStateBadge(val), + })), + }, +]; function getItemIdToExpandedRowMap( itemIds: DataFrameAnalyticsId[], @@ -70,23 +99,23 @@ export const DataFrameAnalyticsList: FC = ({ const [isInitialized, setIsInitialized] = useState(false); const [isSourceIndexModalVisible, setIsSourceIndexModalVisible] = useState(false); const [isLoading, setIsLoading] = useState(false); - + const [filteredAnalytics, setFilteredAnalytics] = useState<{ + active: boolean; + items: DataFrameAnalyticsListRow[]; + }>({ + active: false, + items: [], + }); const [searchQueryText, setSearchQueryText] = useState(''); - const [analytics, setAnalytics] = useState([]); const [analyticsStats, setAnalyticsStats] = useState( undefined ); const [expandedRowItemIds, setExpandedRowItemIds] = useState([]); - const [errorMessage, setErrorMessage] = useState(undefined); - const [searchError, setSearchError] = useState(undefined); - - const [pageIndex, setPageIndex] = useState(0); - const [pageSize, setPageSize] = useState(10); - - const [sortField, setSortField] = useState(DataFrameAnalyticsListColumn.id); - const [sortDirection, setSortDirection] = useState('asc'); + // Query text/job_id based on url but only after getAnalytics is done first + // selectedJobIdFromUrlInitialized makes sure the query is only run once since analytics is being refreshed constantly + const [selectedIdFromUrlInitialized, setSelectedIdFromUrlInitialized] = useState(false); const disabled = !checkPermission('canCreateDataFrameAnalytics') || @@ -100,9 +129,29 @@ export const DataFrameAnalyticsList: FC = ({ 
blockRefresh ); - // Query text/job_id based on url but only after getAnalytics is done first - // selectedJobIdFromUrlInitialized makes sure the query is only run once since analytics is being refreshed constantly - const [selectedIdFromUrlInitialized, setSelectedIdFromUrlInitialized] = useState(false); + const setQueryClauses = (queryClauses: any) => { + if (queryClauses.length) { + const filtered = filterAnalytics(analytics, queryClauses); + setFilteredAnalytics({ active: true, items: filtered }); + } else { + setFilteredAnalytics({ active: false, items: [] }); + } + }; + + const filterList = () => { + if (searchQueryText !== '' && selectedIdFromUrlInitialized === true) { + // trigger table filtering with query for job id to trigger table filter + const query = EuiSearchBar.Query.parse(searchQueryText); + let clauses: any = []; + if (query && query.ast !== undefined && query.ast.clauses !== undefined) { + clauses = query.ast.clauses; + } + setQueryClauses(clauses); + } else { + setQueryClauses([]); + } + }; + useEffect(() => { if (selectedIdFromUrlInitialized === false && analytics.length > 0) { const { jobId, groupIds } = getSelectedIdFromUrl(window.location.href); @@ -116,9 +165,15 @@ export const DataFrameAnalyticsList: FC = ({ setSelectedIdFromUrlInitialized(true); setSearchQueryText(queryText); + } else { + filterList(); } }, [selectedIdFromUrlInitialized, analytics]); + useEffect(() => { + filterList(); + }, [selectedIdFromUrlInitialized, searchQueryText]); + const getAnalyticsCallback = useCallback(() => getAnalytics(true), []); // Subscribe to the refresh observable to trigger reloading the analytics list. @@ -137,6 +192,10 @@ export const DataFrameAnalyticsList: FC = ({ isMlEnabledInSpace ); + const { onTableChange, pageOfItems, pagination, sorting } = useTableSettings( + filteredAnalytics.active ? filteredAnalytics.items : analytics + ); + // Before the analytics have been loaded for the first time, display the loading indicator only. // Otherwise a user would see 'No data frame analytics found' during the initial loading. if (!isInitialized) { @@ -160,34 +219,10 @@ export const DataFrameAnalyticsList: FC = ({ if (analytics.length === 0) { return ( <> - - {i18n.translate('xpack.ml.dataFrame.analyticsList.emptyPromptTitle', { - defaultMessage: 'Create your first data frame analytics job', - })} - - } - actions={ - !isManagementTable - ? 
[ - setIsSourceIndexModalVisible(true)} - isDisabled={disabled} - color="primary" - iconType="plusInCircle" - fill - data-test-subj="mlAnalyticsCreateFirstButton" - > - {i18n.translate('xpack.ml.dataFrame.analyticsList.emptyPromptButtonText', { - defaultMessage: 'Create job', - })} - , - ] - : [] - } - data-test-subj="mlNoDataFrameAnalyticsFound" + setIsSourceIndexModalVisible(true)} /> {isSourceIndexModalVisible === true && ( setIsSourceIndexModalVisible(false)} /> @@ -196,95 +231,32 @@ export const DataFrameAnalyticsList: FC = ({ ); } - const sorting = { - sort: { - field: sortField, - direction: sortDirection, - }, - }; - const itemIdToExpandedRowMap = getItemIdToExpandedRowMap(expandedRowItemIds, analytics); - const pagination = { - initialPageIndex: pageIndex, - initialPageSize: pageSize, - totalItemCount: analytics.length, - pageSizeOptions: [10, 20, 50], - hidePerPageOptions: false, - }; - - const handleSearchOnChange: EuiSearchBarProps['onChange'] = (search) => { - if (search.error !== null) { - setSearchError(search.error.message); - return false; - } - - setSearchError(undefined); - setSearchQueryText(search.queryText); - return true; - }; - - const search: EuiSearchBarProps = { - query: searchQueryText, - onChange: handleSearchOnChange, - box: { - incremental: true, - }, - filters: [ - { - type: 'field_value_selection', - field: 'job_type', - name: i18n.translate('xpack.ml.dataframe.analyticsList.typeFilter', { - defaultMessage: 'Type', - }), - multiSelect: 'or', - options: Object.values(ANALYSIS_CONFIG_TYPE).map((val) => ({ - value: val, - name: val, - view: getJobTypeBadge(val), - })), - }, - { - type: 'field_value_selection', - field: 'state', - name: i18n.translate('xpack.ml.dataframe.analyticsList.statusFilter', { - defaultMessage: 'Status', - }), - multiSelect: 'or', - options: Object.values(DATA_FRAME_TASK_STATE).map((val) => ({ - value: val, - name: val, - view: getTaskStateBadge(val), - })), - }, - ], - }; - - const onTableChange: EuiInMemoryTable['onTableChange'] = ({ - page = { index: 0, size: 10 }, - sort = { field: DataFrameAnalyticsListColumn.id, direction: 'asc' }, - }) => { - const { index, size } = page; - setPageIndex(index); - setPageSize(size); + const stats = analyticsStats && ( + + + + ); - const { field, direction } = sort; - setSortField(field); - setSortDirection(direction); - }; + const managementStats = ( + + + {stats} + + + + + + ); return ( <> {modals} - + {!isManagementTable && } - - {analyticsStats && ( - - - - )} - + {!isManagementTable && stats} + {isManagementTable && managementStats} {!isManagementTable && ( @@ -300,22 +272,25 @@ export const DataFrameAnalyticsList: FC = ({
- + + className="mlAnalyticsTable" columns={columns} - error={searchError} hasActions={false} isExpandable={true} isSelectable={false} - items={analytics} + items={pageOfItems} itemId={DataFrameAnalyticsListColumn.id} itemIdToExpandedRowMap={itemIdToExpandedRowMap} loading={isLoading} - onTableChange={onTableChange} - pagination={pagination} + onChange={onTableChange} + pagination={pagination!} sorting={sorting} - search={search} data-test-subj={isLoading ? 'mlAnalyticsTable loading' : 'mlAnalyticsTable loaded'} rowProps={(item) => ({ 'data-test-subj': `mlAnalyticsTableRow row-${item.id}`, diff --git a/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/components/analytics_list/common.ts b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/components/analytics_list/common.ts index 774864ae964a8..994357412510d 100644 --- a/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/components/analytics_list/common.ts +++ b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/components/analytics_list/common.ts @@ -26,6 +26,7 @@ export type Clause = Parameters[0]; type ExtractClauseType = T extends (x: any) => x is infer Type ? Type : never; export type TermClause = ExtractClauseType; export type FieldClause = ExtractClauseType; +export type Value = Parameters[0]; interface ProgressSection { phase: string; diff --git a/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/components/analytics_list/empty_prompt.tsx b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/components/analytics_list/empty_prompt.tsx new file mode 100644 index 0000000000000..fb173697b4572 --- /dev/null +++ b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/components/analytics_list/empty_prompt.tsx @@ -0,0 +1,51 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import React, { FC } from 'react'; +import { EuiButton, EuiEmptyPrompt } from '@elastic/eui'; +import { i18n } from '@kbn/i18n'; + +interface Props { + disabled: boolean; + isManagementTable: boolean; + onCreateFirstJobClick: () => void; +} + +export const AnalyticsEmptyPrompt: FC = ({ + disabled, + isManagementTable, + onCreateFirstJobClick, +}) => ( + + {i18n.translate('xpack.ml.dataFrame.analyticsList.emptyPromptTitle', { + defaultMessage: 'Create your first data frame analytics job', + })} + + } + actions={ + !isManagementTable + ? 
[ + + {i18n.translate('xpack.ml.dataFrame.analyticsList.emptyPromptButtonText', { + defaultMessage: 'Create job', + })} + , + ] + : [] + } + data-test-subj="mlNoDataFrameAnalyticsFound" + /> +); diff --git a/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/components/analytics_list/use_columns.tsx b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/components/analytics_list/use_columns.tsx index 7001681b6917a..ef1d373a55a12 100644 --- a/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/components/analytics_list/use_columns.tsx +++ b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/components/analytics_list/use_columns.tsx @@ -23,7 +23,6 @@ import { getJobIdUrl, TAB_IDS } from '../../../../../util/get_selected_ids_url'; import { getAnalysisType, DataFrameAnalyticsId } from '../../../../common'; import { - getDataFrameAnalyticsProgress, getDataFrameAnalyticsProgressPhase, isDataFrameAnalyticsFailed, isDataFrameAnalyticsRunning, @@ -76,7 +75,6 @@ export const progressColumn = { name: i18n.translate('xpack.ml.dataframe.analyticsList.progress', { defaultMessage: 'Progress', }), - sortable: (item: DataFrameAnalyticsListRow) => getDataFrameAnalyticsProgress(item.stats), truncateText: true, render(item: DataFrameAnalyticsListRow) { const { currentPhase, progress, totalPhases } = getDataFrameAnalyticsProgressPhase(item.stats); diff --git a/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/components/analytics_list/use_table_settings.ts b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/components/analytics_list/use_table_settings.ts new file mode 100644 index 0000000000000..57eb9f6857053 --- /dev/null +++ b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/components/analytics_list/use_table_settings.ts @@ -0,0 +1,119 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import { useState } from 'react'; +import { Direction, EuiBasicTableProps, EuiTableSortingType } from '@elastic/eui'; +import sortBy from 'lodash/sortBy'; +import get from 'lodash/get'; +import { DataFrameAnalyticsListColumn, DataFrameAnalyticsListRow } from './common'; + +const PAGE_SIZE = 10; +const PAGE_SIZE_OPTIONS = [10, 25, 50]; + +const jobPropertyMap = { + ID: 'id', + Status: 'state', + Type: 'job_type', +}; + +interface AnalyticsBasicTableSettings { + pageIndex: number; + pageSize: number; + totalItemCount: number; + hidePerPageOptions: boolean; + sortField: string; + sortDirection: Direction; +} + +interface UseTableSettingsReturnValue { + onTableChange: EuiBasicTableProps['onChange']; + pageOfItems: DataFrameAnalyticsListRow[]; + pagination: EuiBasicTableProps['pagination']; + sorting: EuiTableSortingType; +} + +export function useTableSettings(items: DataFrameAnalyticsListRow[]): UseTableSettingsReturnValue { + const [tableSettings, setTableSettings] = useState({ + pageIndex: 0, + pageSize: PAGE_SIZE, + totalItemCount: 0, + hidePerPageOptions: false, + sortField: DataFrameAnalyticsListColumn.id, + sortDirection: 'asc', + }); + + const getPageOfItems = ( + list: any[], + index: number, + size: number, + sortField: string, + sortDirection: Direction + ) => { + list = sortBy(list, (item) => + get(item, jobPropertyMap[sortField as keyof typeof jobPropertyMap] || sortField) + ); + list = sortDirection === 'asc' ? list : list.reverse(); + const listLength = list.length; + + let pageStart = index * size; + if (pageStart >= listLength && listLength !== 0) { + // if the page start is larger than the number of items due to + // filters being applied or items being deleted, calculate a new page start + pageStart = Math.floor((listLength - 1) / size) * size; + + setTableSettings({ ...tableSettings, pageIndex: pageStart / size }); + } + return { + pageOfItems: list.slice(pageStart, pageStart + size), + totalItemCount: listLength, + }; + }; + + const onTableChange = ({ + page = { index: 0, size: PAGE_SIZE }, + sort = { field: DataFrameAnalyticsListColumn.id, direction: 'asc' }, + }: { + page?: { index: number; size: number }; + sort?: { field: string; direction: Direction }; + }) => { + const { index, size } = page; + const { field, direction } = sort; + + setTableSettings({ + ...tableSettings, + pageIndex: index, + pageSize: size, + sortField: field, + sortDirection: direction, + }); + }; + + const { pageIndex, pageSize, sortField, sortDirection } = tableSettings; + + const { pageOfItems, totalItemCount } = getPageOfItems( + items, + pageIndex, + pageSize, + sortField, + sortDirection + ); + + const pagination = { + pageIndex, + pageSize, + totalItemCount, + pageSizeOptions: PAGE_SIZE_OPTIONS, + }; + + const sorting = { + sort: { + field: sortField, + direction: sortDirection, + }, + }; + + return { onTableChange, pageOfItems, pagination, sorting }; +} diff --git a/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/components/analytics_search_bar/analytics_search_bar.tsx b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/components/analytics_search_bar/analytics_search_bar.tsx new file mode 100644 index 0000000000000..44a6572a3766c --- /dev/null +++ b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/components/analytics_search_bar/analytics_search_bar.tsx @@ -0,0 +1,157 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import React, { Dispatch, SetStateAction, FC, Fragment, useState } from 'react'; +import { + EuiSearchBar, + EuiSearchBarProps, + EuiFlexGroup, + EuiFlexItem, + EuiFormRow, +} from '@elastic/eui'; +import { i18n } from '@kbn/i18n'; +import { stringMatch } from '../../../../../util/string_utils'; +import { + TermClause, + FieldClause, + Value, + DataFrameAnalyticsListRow, +} from '../analytics_list/common'; + +export function filterAnalytics( + items: DataFrameAnalyticsListRow[], + clauses: Array +) { + if (clauses.length === 0) { + return items; + } + + // keep count of the number of matches we make as we're looping over the clauses + // we only want to return items which match all clauses, i.e. each search term is ANDed + const matches: Record = items.reduce((p: Record, c) => { + p[c.id] = { + job: c, + count: 0, + }; + return p; + }, {}); + + clauses.forEach((c) => { + // the search term could be negated with a minus, e.g. -bananas + const bool = c.match === 'must'; + let js = []; + + if (c.type === 'term') { + // filter term based clauses, e.g. bananas + // match on id, description and memory_status + // if the term has been negated, AND the matches + if (bool === true) { + js = items.filter( + (item) => + stringMatch(item.id, c.value) === bool || + stringMatch(item.config.description, c.value) === bool || + stringMatch(item.stats?.memory_usage?.status, c.value) === bool + ); + } else { + js = items.filter( + (item) => + stringMatch(item.id, c.value) === bool && + stringMatch(item.config.description, c.value) === bool && + stringMatch(item.stats?.memory_usage?.status, c.value) === bool + ); + } + } else { + // filter other clauses, i.e. the filters for type and status + if (Array.isArray(c.value)) { + // job type value and status value are an array of string(s) e.g. 
c.value => ['failed', 'stopped'] + js = items.filter((item) => + (c.value as Value[]).includes( + item[c.field as keyof Pick] + ) + ); + } else { + js = items.filter( + (item) => item[c.field as keyof Pick] === c.value + ); + } + } + + js.forEach((j) => matches[j.id].count++); + }); + + // loop through the matches and return only those items which have match all the clauses + const filtered = Object.values(matches) + .filter((m) => (m && m.count) >= clauses.length) + .map((m) => m.job); + + return filtered; +} + +function getError(errorMessage: string | null) { + if (errorMessage) { + return i18n.translate('xpack.ml.analyticList.searchBar.invalidSearchErrorMessage', { + defaultMessage: 'Invalid search: {errorMessage}', + values: { errorMessage }, + }); + } + + return ''; +} + +interface Props { + filters: EuiSearchBarProps['filters']; + searchQueryText: string; + setSearchQueryText: Dispatch>; +} + +export const AnalyticsSearchBar: FC = ({ filters, searchQueryText, setSearchQueryText }) => { + const [errorMessage, setErrorMessage] = useState(null); + + const onChange: EuiSearchBarProps['onChange'] = ({ query, error }) => { + if (error) { + setErrorMessage(error.message); + } else if (query !== null && query.text !== undefined) { + setSearchQueryText(query.text); + setErrorMessage(null); + } + }; + + return ( + + + {searchQueryText === undefined && ( + + )} + {searchQueryText !== undefined && ( + + )} + + + + + + ); +}; diff --git a/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/components/analytics_search_bar/index.ts b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/components/analytics_search_bar/index.ts new file mode 100644 index 0000000000000..3b901f5063eb1 --- /dev/null +++ b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/components/analytics_search_bar/index.ts @@ -0,0 +1,7 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +export { AnalyticsSearchBar, filterAnalytics } from './analytics_search_bar'; diff --git a/x-pack/plugins/ml/public/application/jobs/jobs_list/components/jobs_list/jobs_list.js b/x-pack/plugins/ml/public/application/jobs/jobs_list/components/jobs_list/jobs_list.js index f90bbf3cf3fe6..fa4ea09b89ff9 100644 --- a/x-pack/plugins/ml/public/application/jobs/jobs_list/components/jobs_list/jobs_list.js +++ b/x-pack/plugins/ml/public/application/jobs/jobs_list/components/jobs_list/jobs_list.js @@ -7,7 +7,7 @@ import PropTypes from 'prop-types'; import React, { Component } from 'react'; -import { sortBy } from 'lodash'; +import sortBy from 'lodash/sortBy'; import moment from 'moment'; import { toLocaleString } from '../../../../util/string_utils'; From 093f58872054c8d0b8601ad94865ba3bf50a4886 Mon Sep 17 00:00:00 2001 From: Shahzad Date: Wed, 2 Sep 2020 15:35:33 +0200 Subject: [PATCH 04/21] [RUM Dashboard] Use median values instead of averages to display page load times (#76407) --- .../elasticsearch_fieldnames.test.ts.snap | 12 ++++++++ .../apm/common/elasticsearch_fieldnames.ts | 5 ++++ .../RumDashboard/Charts/PageLoadDistChart.tsx | 2 +- .../PercentileAnnotations.tsx | 4 +-- .../__snapshots__/queries.test.ts.snap | 18 +++++++++--- .../lib/rum_client/get_client_metrics.ts | 28 +++++++++++++------ .../server/lib/transaction_groups/fetcher.ts | 4 +-- .../apm/typings/elasticsearch/aggregations.ts | 2 +- 8 files changed, 57 insertions(+), 18 deletions(-) diff --git a/x-pack/plugins/apm/common/__snapshots__/elasticsearch_fieldnames.test.ts.snap b/x-pack/plugins/apm/common/__snapshots__/elasticsearch_fieldnames.test.ts.snap index 7c42fb6f12a54..aecf4af667603 100644 --- a/x-pack/plugins/apm/common/__snapshots__/elasticsearch_fieldnames.test.ts.snap +++ b/x-pack/plugins/apm/common/__snapshots__/elasticsearch_fieldnames.test.ts.snap @@ -120,6 +120,8 @@ exports[`Error TRACE_ID 1`] = `"trace id"`; exports[`Error TRANSACTION_BREAKDOWN_COUNT 1`] = `undefined`; +exports[`Error TRANSACTION_DOM_INTERACTIVE 1`] = `undefined`; + exports[`Error TRANSACTION_DURATION 1`] = `undefined`; exports[`Error TRANSACTION_ID 1`] = `"transaction id"`; @@ -132,6 +134,8 @@ exports[`Error TRANSACTION_RESULT 1`] = `undefined`; exports[`Error TRANSACTION_SAMPLED 1`] = `undefined`; +exports[`Error TRANSACTION_TIME_TO_FIRST_BYTE 1`] = `undefined`; + exports[`Error TRANSACTION_TYPE 1`] = `"request"`; exports[`Error TRANSACTION_URL 1`] = `undefined`; @@ -268,6 +272,8 @@ exports[`Span TRACE_ID 1`] = `"trace id"`; exports[`Span TRANSACTION_BREAKDOWN_COUNT 1`] = `undefined`; +exports[`Span TRANSACTION_DOM_INTERACTIVE 1`] = `undefined`; + exports[`Span TRANSACTION_DURATION 1`] = `undefined`; exports[`Span TRANSACTION_ID 1`] = `"transaction id"`; @@ -280,6 +286,8 @@ exports[`Span TRANSACTION_RESULT 1`] = `undefined`; exports[`Span TRANSACTION_SAMPLED 1`] = `undefined`; +exports[`Span TRANSACTION_TIME_TO_FIRST_BYTE 1`] = `undefined`; + exports[`Span TRANSACTION_TYPE 1`] = `undefined`; exports[`Span TRANSACTION_URL 1`] = `undefined`; @@ -416,6 +424,8 @@ exports[`Transaction TRACE_ID 1`] = `"trace id"`; exports[`Transaction TRANSACTION_BREAKDOWN_COUNT 1`] = `undefined`; +exports[`Transaction TRANSACTION_DOM_INTERACTIVE 1`] = `undefined`; + exports[`Transaction TRANSACTION_DURATION 1`] = `1337`; exports[`Transaction TRANSACTION_ID 1`] = `"transaction id"`; @@ -428,6 +438,8 @@ exports[`Transaction TRANSACTION_RESULT 1`] = `"transaction result"`; exports[`Transaction TRANSACTION_SAMPLED 1`] = `true`; +exports[`Transaction 
TRANSACTION_TIME_TO_FIRST_BYTE 1`] = `undefined`; + exports[`Transaction TRANSACTION_TYPE 1`] = `"transaction type"`; exports[`Transaction TRANSACTION_URL 1`] = `undefined`; diff --git a/x-pack/plugins/apm/common/elasticsearch_fieldnames.ts b/x-pack/plugins/apm/common/elasticsearch_fieldnames.ts index 610a32e8e9b99..4aa68de9b8b32 100644 --- a/x-pack/plugins/apm/common/elasticsearch_fieldnames.ts +++ b/x-pack/plugins/apm/common/elasticsearch_fieldnames.ts @@ -99,3 +99,8 @@ export const TRANSACTION_URL = 'transaction.page.url'; export const CLIENT_GEO = 'client.geo'; export const USER_AGENT_DEVICE = 'user_agent.device.name'; export const USER_AGENT_OS = 'user_agent.os.name'; + +export const TRANSACTION_TIME_TO_FIRST_BYTE = + 'transaction.marks.agent.timeToFirstByte'; +export const TRANSACTION_DOM_INTERACTIVE = + 'transaction.marks.agent.domInteractive'; diff --git a/x-pack/plugins/apm/public/components/app/RumDashboard/Charts/PageLoadDistChart.tsx b/x-pack/plugins/apm/public/components/app/RumDashboard/Charts/PageLoadDistChart.tsx index b2b5e66d06ac6..33573052dbcbb 100644 --- a/x-pack/plugins/apm/public/components/app/RumDashboard/Charts/PageLoadDistChart.tsx +++ b/x-pack/plugins/apm/public/components/app/RumDashboard/Charts/PageLoadDistChart.tsx @@ -35,7 +35,7 @@ import { BreakdownSeries } from '../PageLoadDistribution/BreakdownSeries'; interface PageLoadData { pageLoadDistribution: Array<{ x: number; y: number }>; - percentiles: Record | undefined; + percentiles: Record | undefined; minDuration: number; maxDuration: number; } diff --git a/x-pack/plugins/apm/public/components/app/RumDashboard/PageLoadDistribution/PercentileAnnotations.tsx b/x-pack/plugins/apm/public/components/app/RumDashboard/PageLoadDistribution/PercentileAnnotations.tsx index 407ec42f03ff5..7e81dc011bdb5 100644 --- a/x-pack/plugins/apm/public/components/app/RumDashboard/PageLoadDistribution/PercentileAnnotations.tsx +++ b/x-pack/plugins/apm/public/components/app/RumDashboard/PageLoadDistribution/PercentileAnnotations.tsx @@ -16,11 +16,11 @@ import styled from 'styled-components'; import { EuiToolTip } from '@elastic/eui'; interface Props { - percentiles?: Record; + percentiles?: Record; } function generateAnnotationData( - values?: Record + values?: Record ): LineAnnotationDatum[] { return Object.entries(values ?? 
{}).map((value) => ({ dataValue: value[1], diff --git a/x-pack/plugins/apm/server/lib/rum_client/__snapshots__/queries.test.ts.snap b/x-pack/plugins/apm/server/lib/rum_client/__snapshots__/queries.test.ts.snap index c5264373ea495..22b8c226e9026 100644 --- a/x-pack/plugins/apm/server/lib/rum_client/__snapshots__/queries.test.ts.snap +++ b/x-pack/plugins/apm/server/lib/rum_client/__snapshots__/queries.test.ts.snap @@ -10,15 +10,25 @@ Object { "body": Object { "aggs": Object { "backEnd": Object { - "avg": Object { + "percentiles": Object { "field": "transaction.marks.agent.timeToFirstByte", - "missing": 0, + "hdr": Object { + "number_of_significant_value_digits": 3, + }, + "percents": Array [ + 50, + ], }, }, "domInteractive": Object { - "avg": Object { + "percentiles": Object { "field": "transaction.marks.agent.domInteractive", - "missing": 0, + "hdr": Object { + "number_of_significant_value_digits": 3, + }, + "percents": Array [ + 50, + ], }, }, "pageViews": Object { diff --git a/x-pack/plugins/apm/server/lib/rum_client/get_client_metrics.ts b/x-pack/plugins/apm/server/lib/rum_client/get_client_metrics.ts index 194c136e2b3d0..e0dec183f06d6 100644 --- a/x-pack/plugins/apm/server/lib/rum_client/get_client_metrics.ts +++ b/x-pack/plugins/apm/server/lib/rum_client/get_client_metrics.ts @@ -11,6 +11,10 @@ import { SetupTimeRange, SetupUIFilters, } from '../helpers/setup_request'; +import { + TRANSACTION_DOM_INTERACTIVE, + TRANSACTION_TIME_TO_FIRST_BYTE, +} from '../../../common/elasticsearch_fieldnames'; export async function getClientMetrics({ setup, @@ -30,15 +34,21 @@ export async function getClientMetrics({ aggs: { pageViews: { value_count: { field: 'transaction.type' } }, backEnd: { - avg: { - field: 'transaction.marks.agent.timeToFirstByte', - missing: 0, + percentiles: { + field: TRANSACTION_TIME_TO_FIRST_BYTE, + percents: [50], + hdr: { + number_of_significant_value_digits: 3, + }, }, }, domInteractive: { - avg: { - field: 'transaction.marks.agent.domInteractive', - missing: 0, + percentiles: { + field: TRANSACTION_DOM_INTERACTIVE, + percents: [50], + hdr: { + number_of_significant_value_digits: 3, + }, }, }, }, @@ -53,9 +63,11 @@ export async function getClientMetrics({ // Divide by 1000 to convert ms into seconds return { pageViews, - backEnd: { value: (backEnd.value || 0) / 1000 }, + backEnd: { value: (backEnd.values['50.0'] || 0) / 1000 }, frontEnd: { - value: ((domInteractive.value || 0) - (backEnd.value || 0)) / 1000, + value: + ((domInteractive.values['50.0'] || 0) - (backEnd.values['50.0'] || 0)) / + 1000, }, }; } diff --git a/x-pack/plugins/apm/server/lib/transaction_groups/fetcher.ts b/x-pack/plugins/apm/server/lib/transaction_groups/fetcher.ts index 2a1a581c79574..d0ba31f42c536 100644 --- a/x-pack/plugins/apm/server/lib/transaction_groups/fetcher.ts +++ b/x-pack/plugins/apm/server/lib/transaction_groups/fetcher.ts @@ -65,7 +65,7 @@ function getItemsWithRelativeImpact( key: string | Record; avg?: number | null; count?: number | null; - p95?: number; + p95?: number | null; sample?: Transaction; }> ) { @@ -188,7 +188,7 @@ export interface TransactionGroup { key: Record | string; averageResponseTime: number | null | undefined; transactionsPerMinute: number; - p95: number | undefined; + p95: number | null | undefined; impact: number; sample: Transaction; } diff --git a/x-pack/plugins/apm/typings/elasticsearch/aggregations.ts b/x-pack/plugins/apm/typings/elasticsearch/aggregations.ts index d25ec8709e3be..6a2a0e81e81cd 100644 --- 
a/x-pack/plugins/apm/typings/elasticsearch/aggregations.ts +++ b/x-pack/plugins/apm/typings/elasticsearch/aggregations.ts @@ -223,7 +223,7 @@ interface AggregationResponsePart< value: number; }; percentiles: { - values: Record; + values: Record; }; extended_stats: { count: number; From 5345af9281406368d8c4520742fbdd3ae08a61fd Mon Sep 17 00:00:00 2001 From: Brent Kimmel Date: Wed, 2 Sep 2020 09:48:33 -0400 Subject: [PATCH 05/21] [Security Solution][Resolver] Add events link to Process Detail Panel (#76195) * [Security_Solution][Resolver]Add events link to Process Detail Panel --- .../resolver/store/data/reducer.test.ts | 19 ++++++++++++++++++ .../public/resolver/store/data/selectors.ts | 20 +++++++++++++++++++ .../public/resolver/store/selectors.ts | 12 +++++++++++ .../public/resolver/view/panels/index.tsx | 12 +++++++---- .../resolver/view/panels/process_details.tsx | 18 +++++++++++++++++ 5 files changed, 77 insertions(+), 4 deletions(-) diff --git a/x-pack/plugins/security_solution/public/resolver/store/data/reducer.test.ts b/x-pack/plugins/security_solution/public/resolver/store/data/reducer.test.ts index e087db9f74685..21c4f92f8e502 100644 --- a/x-pack/plugins/security_solution/public/resolver/store/data/reducer.test.ts +++ b/x-pack/plugins/security_solution/public/resolver/store/data/reducer.test.ts @@ -59,6 +59,7 @@ describe('Resolver Data Middleware', () => { let firstChildNodeInTree: TreeNode; let eventStatsForFirstChildNode: { total: number; byCategory: Record }; let categoryToOverCount: string; + let aggregateCategoryTotalForFirstChildNode: number; let tree: ResolverTree; /** @@ -73,6 +74,7 @@ describe('Resolver Data Middleware', () => { firstChildNodeInTree, eventStatsForFirstChildNode, categoryToOverCount, + aggregateCategoryTotalForFirstChildNode, } = mockedTree()); if (tree) { dispatchTree(tree); @@ -138,6 +140,13 @@ describe('Resolver Data Middleware', () => { expect(notDisplayed(typeCounted)).toBe(0); } }); + it('should return an overall correct count for the number of related events', () => { + const aggregateTotalByEntityId = selectors.relatedEventAggregateTotalByEntityId( + store.getState() + ); + const countForId = aggregateTotalByEntityId(firstChildNodeInTree.id); + expect(countForId).toBe(aggregateCategoryTotalForFirstChildNode); + }); }); describe('when data was received and stats show more related events than the API can provide', () => { beforeEach(() => { @@ -262,6 +271,7 @@ function mockedTree() { tree: tree!, firstChildNodeInTree, eventStatsForFirstChildNode: statsResults.eventStats, + aggregateCategoryTotalForFirstChildNode: statsResults.aggregateCategoryTotal, categoryToOverCount: statsResults.firstCategory, }; } @@ -288,6 +298,7 @@ function compileStatsForChild( }; /** The category of the first event. */ firstCategory: string; + aggregateCategoryTotal: number; } { const totalRelatedEvents = node.relatedEvents.length; // For the purposes of testing, we pick one category to fake an extra event for @@ -295,6 +306,12 @@ function compileStatsForChild( let firstCategory: string | undefined; + // This is the "aggregate total" which is displayed to users as the total count + // of related events for the node. It is tallied by incrementing for every discrete + // event.category in an event.category array (or just 1 for a plain string). E.g. two events + // categories 'file' and ['dns','network'] would have an `aggregate total` of 3. 
+ let aggregateCategoryTotal: number = 0; + const compiledStats = node.relatedEvents.reduce( (counts: Record, relatedEvent) => { // `relatedEvent.event.category` is `string | string[]`. @@ -310,6 +327,7 @@ function compileStatsForChild( // Increment the count of events with this category counts[category] = counts[category] ? counts[category] + 1 : 1; + aggregateCategoryTotal++; } return counts; }, @@ -327,5 +345,6 @@ function compileStatsForChild( byCategory: compiledStats, }, firstCategory, + aggregateCategoryTotal, }; } diff --git a/x-pack/plugins/security_solution/public/resolver/store/data/selectors.ts b/x-pack/plugins/security_solution/public/resolver/store/data/selectors.ts index 965547f1e309a..eaa80b46471fa 100644 --- a/x-pack/plugins/security_solution/public/resolver/store/data/selectors.ts +++ b/x-pack/plugins/security_solution/public/resolver/store/data/selectors.ts @@ -170,6 +170,26 @@ export const relatedEventsStats: ( } ); +/** + * This returns the "aggregate total" for related events, tallied as the sum + * of their individual `event.category`s. E.g. a [DNS, Network] would count as two + * towards the aggregate total. + */ +export const relatedEventAggregateTotalByEntityId: ( + state: DataState +) => (entityId: string) => number = createSelector(relatedEventsStats, (relatedStats) => { + return (entityId) => { + const statsForEntity = relatedStats(entityId); + if (statsForEntity === undefined) { + return 0; + } + return Object.values(statsForEntity?.events?.byCategory || {}).reduce( + (sum, val) => sum + val, + 0 + ); + }; +}); + /** * returns a map of entity_ids to related event data. */ diff --git a/x-pack/plugins/security_solution/public/resolver/store/selectors.ts b/x-pack/plugins/security_solution/public/resolver/store/selectors.ts index 909a907626f30..bdea08df3d7f5 100644 --- a/x-pack/plugins/security_solution/public/resolver/store/selectors.ts +++ b/x-pack/plugins/security_solution/public/resolver/store/selectors.ts @@ -114,6 +114,18 @@ export const relatedEventsStats: ( dataSelectors.relatedEventsStats ); +/** + * This returns the "aggregate total" for related events, tallied as the sum + * of their individual `event.category`s. E.g. a [DNS, Network] would count as two + * towards the aggregate total. + */ +export const relatedEventAggregateTotalByEntityId: ( + state: ResolverState +) => (nodeID: string) => number = composeSelectors( + dataStateSelector, + dataSelectors.relatedEventAggregateTotalByEntityId +); + /** * Map of related events... by entity id */ diff --git a/x-pack/plugins/security_solution/public/resolver/view/panels/index.tsx b/x-pack/plugins/security_solution/public/resolver/view/panels/index.tsx index 98b737de8fa59..133dcd21e7f56 100644 --- a/x-pack/plugins/security_solution/public/resolver/view/panels/index.tsx +++ b/x-pack/plugins/security_solution/public/resolver/view/panels/index.tsx @@ -17,6 +17,7 @@ import { EventCountsForProcess } from './event_counts_for_process'; import { ProcessDetails } from './process_details'; import { ProcessListWithCounts } from './process_list_with_counts'; import { RelatedEventDetail } from './related_event_detail'; +import { ResolverState } from '../../types'; /** * The team decided to use this table to determine which breadcrumbs/view to display: @@ -102,6 +103,12 @@ const PanelContent = memo(function PanelContent() { ? 
relatedEventStats(idFromParams) : undefined; + const parentCount = useSelector((state: ResolverState) => { + if (idFromParams === '') { + return 0; + } + return selectors.relatedEventAggregateTotalByEntityId(state)(idFromParams); + }); /** * Determine which set of breadcrumbs to display based on the query parameters * for the table & breadcrumb nav. @@ -186,9 +193,6 @@ const PanelContent = memo(function PanelContent() { } if (panelToShow === 'relatedEventDetail') { - const parentCount: number = Object.values( - relatedStatsForIdFromParams?.events.byCategory || {} - ).reduce((sum, val) => sum + val, 0); return ( ; - }, [uiSelectedEvent, crumbEvent, crumbId, relatedStatsForIdFromParams, panelToShow]); + }, [uiSelectedEvent, crumbEvent, crumbId, relatedStatsForIdFromParams, panelToShow, parentCount]); return <>{panelInstance}; }); diff --git a/x-pack/plugins/security_solution/public/resolver/view/panels/process_details.tsx b/x-pack/plugins/security_solution/public/resolver/view/panels/process_details.tsx index 01fa912caa866..4fcc557742643 100644 --- a/x-pack/plugins/security_solution/public/resolver/view/panels/process_details.tsx +++ b/x-pack/plugins/security_solution/public/resolver/view/panels/process_details.tsx @@ -13,6 +13,7 @@ import { EuiText, EuiTextColor, EuiDescriptionList, + EuiLink, } from '@elastic/eui'; import styled from 'styled-components'; import { FormattedMessage } from 'react-intl'; @@ -58,6 +59,9 @@ export const ProcessDetails = memo(function ProcessDetails({ const isProcessTerminated = useSelector((state: ResolverState) => selectors.isProcessTerminated(state)(entityId) ); + const relatedEventTotal = useSelector((state: ResolverState) => { + return selectors.relatedEventAggregateTotalByEntityId(state)(entityId); + }); const processInfoEntry: EuiDescriptionListProps['listItems'] = useMemo(() => { const eventTime = event.eventTimestamp(processEvent); const dateTime = eventTime === undefined ? 
null : formatDate(eventTime); @@ -164,6 +168,12 @@ export const ProcessDetails = memo(function ProcessDetails({ return cubeAssetsForNode(isProcessTerminated, false); }, [processEvent, cubeAssetsForNode, isProcessTerminated]); + const handleEventsLinkClick = useMemo(() => { + return () => { + pushToQueryParams({ crumbId: entityId, crumbEvent: 'all' }); + }; + }, [entityId, pushToQueryParams]); + const titleID = useMemo(() => htmlIdGenerator('resolverTable')(), []); return ( <> @@ -185,6 +195,14 @@ export const ProcessDetails = memo(function ProcessDetails({ {descriptionText} + + + + Date: Wed, 2 Sep 2020 16:00:38 +0200 Subject: [PATCH 06/21] [uiActions] notify action usage (#76294) Notify feature usage when dynamic actions with specified license requirements are executed Co-authored-by: Elastic Machine --- .../dashboard_to_url_drilldown/index.tsx | 1 + .../services/feature_usage_service.mock.ts | 4 + .../action_wizard/action_wizard.test.tsx | 8 +- .../action_wizard/action_wizard.tsx | 10 +-- .../components/action_wizard/test_data.tsx | 16 ++-- .../connected_flyout_manage_drilldowns.tsx | 2 +- .../form_drilldown_wizard.tsx | 2 +- .../public/drilldowns/drilldown_definition.ts | 9 +- .../dynamic_actions/action_factory.test.ts | 87 ++++++++++++++++--- .../public/dynamic_actions/action_factory.ts | 45 ++++++++-- .../action_factory_definition.ts | 11 ++- .../dynamic_action_manager.test.ts | 6 +- .../ui_actions_enhanced/public/mocks.ts | 4 +- .../ui_actions_enhanced/public/plugin.ts | 36 +++++--- .../ui_actions_service_enhancements.test.ts | 43 +++++++-- .../ui_actions_service_enhancements.ts | 31 +++++-- 16 files changed, 247 insertions(+), 68 deletions(-) diff --git a/x-pack/examples/ui_actions_enhanced_examples/public/dashboard_to_url_drilldown/index.tsx b/x-pack/examples/ui_actions_enhanced_examples/public/dashboard_to_url_drilldown/index.tsx index 7d915ea23c66f..58916f26121d4 100644 --- a/x-pack/examples/ui_actions_enhanced_examples/public/dashboard_to_url_drilldown/index.tsx +++ b/x-pack/examples/ui_actions_enhanced_examples/public/dashboard_to_url_drilldown/index.tsx @@ -44,6 +44,7 @@ export class DashboardToUrlDrilldown implements Drilldown { public readonly order = 8; readonly minimalLicense = 'gold'; // example of minimal license support + readonly licenseFeatureName = 'Sample URL Drilldown'; public readonly getDisplayName = () => 'Go to URL (example)'; diff --git a/x-pack/plugins/licensing/public/services/feature_usage_service.mock.ts b/x-pack/plugins/licensing/public/services/feature_usage_service.mock.ts index fc9d4f9381151..b2390ea35c140 100644 --- a/x-pack/plugins/licensing/public/services/feature_usage_service.mock.ts +++ b/x-pack/plugins/licensing/public/services/feature_usage_service.mock.ts @@ -15,6 +15,8 @@ const createSetupMock = (): jest.Mocked => { register: jest.fn(), }; + mock.register.mockImplementation(() => Promise.resolve()); + return mock; }; @@ -23,6 +25,8 @@ const createStartMock = (): jest.Mocked => { notifyUsage: jest.fn(), }; + mock.notifyUsage.mockImplementation(() => Promise.resolve()); + return mock; }; diff --git a/x-pack/plugins/ui_actions_enhanced/public/components/action_wizard/action_wizard.test.tsx b/x-pack/plugins/ui_actions_enhanced/public/components/action_wizard/action_wizard.test.tsx index 78252dccd20d2..9cc64defc1795 100644 --- a/x-pack/plugins/ui_actions_enhanced/public/components/action_wizard/action_wizard.test.tsx +++ b/x-pack/plugins/ui_actions_enhanced/public/components/action_wizard/action_wizard.test.tsx @@ -15,7 +15,7 @@ import { 
urlDrilldownActionFactory, } from './test_data'; import { ActionFactory } from '../../dynamic_actions'; -import { licenseMock } from '../../../../licensing/common/licensing.mock'; +import { licensingMock } from '../../../../licensing/public/mocks'; // TODO: afterEach is not available for it globally during setup // https://github.com/elastic/kibana/issues/59469 @@ -68,8 +68,12 @@ test('If not enough license, button is disabled', () => { { ...urlDrilldownActionFactory, minimalLicense: 'gold', + licenseFeatureName: 'Url Drilldown', }, - () => licenseMock.createLicense() + { + getLicense: () => licensingMock.createLicense(), + getFeatureUsageStart: () => licensingMock.createStart().featureUsage, + } ); const screen = render(); diff --git a/x-pack/plugins/ui_actions_enhanced/public/components/action_wizard/action_wizard.tsx b/x-pack/plugins/ui_actions_enhanced/public/components/action_wizard/action_wizard.tsx index 7e4fe1de8be8d..a49251811239f 100644 --- a/x-pack/plugins/ui_actions_enhanced/public/components/action_wizard/action_wizard.tsx +++ b/x-pack/plugins/ui_actions_enhanced/public/components/action_wizard/action_wizard.tsx @@ -93,7 +93,7 @@ export const ActionWizard: React.FC = ({ if ( !currentActionFactory && actionFactories.length === 1 && - actionFactories[0].isCompatibleLicence() + actionFactories[0].isCompatibleLicense() ) { onActionFactoryChange(actionFactories[0]); } @@ -314,8 +314,8 @@ const ActionFactorySelector: React.FC = ({ * make sure not compatible factories are in the end */ const ensureOrder = (factories: ActionFactory[]) => { - const compatibleLicense = factories.filter((f) => f.isCompatibleLicence()); - const notCompatibleLicense = factories.filter((f) => !f.isCompatibleLicence()); + const compatibleLicense = factories.filter((f) => f.isCompatibleLicense()); + const notCompatibleLicense = factories.filter((f) => !f.isCompatibleLicense()); return [ ...compatibleLicense.sort((f1, f2) => f2.order - f1.order), ...notCompatibleLicense.sort((f1, f2) => f2.order - f1.order), @@ -328,7 +328,7 @@ const ActionFactorySelector: React.FC = ({ = ({ label={actionFactory.getDisplayName(context)} data-test-subj={`${TEST_SUBJ_ACTION_FACTORY_ITEM}-${actionFactory.id}`} onClick={() => onActionFactorySelected(actionFactory)} - disabled={!actionFactory.isCompatibleLicence()} + disabled={!actionFactory.isCompatibleLicense()} > {actionFactory.getIconType(context) && ( diff --git a/x-pack/plugins/ui_actions_enhanced/public/components/action_wizard/test_data.tsx b/x-pack/plugins/ui_actions_enhanced/public/components/action_wizard/test_data.tsx index d48cb13b1a470..71286e9a59c06 100644 --- a/x-pack/plugins/ui_actions_enhanced/public/components/action_wizard/test_data.tsx +++ b/x-pack/plugins/ui_actions_enhanced/public/components/action_wizard/test_data.tsx @@ -10,7 +10,7 @@ import { reactToUiComponent } from '../../../../../../src/plugins/kibana_react/p import { ActionWizard } from './action_wizard'; import { ActionFactory, ActionFactoryDefinition } from '../../dynamic_actions'; import { CollectConfigProps } from '../../../../../../src/plugins/kibana_utils/public'; -import { licenseMock } from '../../../../licensing/common/licensing.mock'; +import { licensingMock } from '../../../../licensing/public/mocks'; import { APPLY_FILTER_TRIGGER, SELECT_RANGE_TRIGGER, @@ -116,9 +116,10 @@ export const dashboardDrilldownActionFactory: ActionFactoryDefinition< }, }; -export const dashboardFactory = new ActionFactory(dashboardDrilldownActionFactory, () => - licenseMock.createLicense() -); +export 
const dashboardFactory = new ActionFactory(dashboardDrilldownActionFactory, { + getLicense: () => licensingMock.createLicense(), + getFeatureUsageStart: () => licensingMock.createStart().featureUsage, +}); interface UrlDrilldownConfig { url: string; @@ -176,9 +177,10 @@ export const urlDrilldownActionFactory: ActionFactoryDefinition - licenseMock.createLicense() -); +export const urlFactory = new ActionFactory(urlDrilldownActionFactory, { + getLicense: () => licensingMock.createLicense(), + getFeatureUsageStart: () => licensingMock.createStart().featureUsage, +}); export const mockSupportedTriggers: TriggerId[] = [ VALUE_CLICK_TRIGGER, diff --git a/x-pack/plugins/ui_actions_enhanced/public/drilldowns/components/connected_flyout_manage_drilldowns/connected_flyout_manage_drilldowns.tsx b/x-pack/plugins/ui_actions_enhanced/public/drilldowns/components/connected_flyout_manage_drilldowns/connected_flyout_manage_drilldowns.tsx index 9fca785ec9072..b708bbc57375d 100644 --- a/x-pack/plugins/ui_actions_enhanced/public/drilldowns/components/connected_flyout_manage_drilldowns/connected_flyout_manage_drilldowns.tsx +++ b/x-pack/plugins/ui_actions_enhanced/public/drilldowns/components/connected_flyout_manage_drilldowns/connected_flyout_manage_drilldowns.tsx @@ -148,7 +148,7 @@ export function createFlyoutManageDrilldowns({ icon: actionFactory?.getIconType(drilldownFactoryContext), error: !actionFactory ? invalidDrilldownType(drilldown.action.factoryId) // this shouldn't happen for the end user, but useful during development - : !actionFactory.isCompatibleLicence() + : !actionFactory.isCompatibleLicense() ? insufficientLicenseLevel : undefined, triggers: drilldown.triggers.map((trigger) => getTrigger(trigger as TriggerId)), diff --git a/x-pack/plugins/ui_actions_enhanced/public/drilldowns/components/form_drilldown_wizard/form_drilldown_wizard.tsx b/x-pack/plugins/ui_actions_enhanced/public/drilldowns/components/form_drilldown_wizard/form_drilldown_wizard.tsx index bb3eb89d8f199..d7f94a52088b7 100644 --- a/x-pack/plugins/ui_actions_enhanced/public/drilldowns/components/form_drilldown_wizard/form_drilldown_wizard.tsx +++ b/x-pack/plugins/ui_actions_enhanced/public/drilldowns/components/form_drilldown_wizard/form_drilldown_wizard.tsx @@ -75,7 +75,7 @@ export const FormDrilldownWizard: React.FC = ({ ); const hasNotCompatibleLicenseFactory = () => - actionFactories?.some((f) => !f.isCompatibleLicence()); + actionFactories?.some((f) => !f.isCompatibleLicense()); const renderGetMoreActionsLink = () => ( diff --git a/x-pack/plugins/ui_actions_enhanced/public/drilldowns/drilldown_definition.ts b/x-pack/plugins/ui_actions_enhanced/public/drilldowns/drilldown_definition.ts index ff455c6ae45b6..8faccc088a327 100644 --- a/x-pack/plugins/ui_actions_enhanced/public/drilldowns/drilldown_definition.ts +++ b/x-pack/plugins/ui_actions_enhanced/public/drilldowns/drilldown_definition.ts @@ -37,11 +37,18 @@ export interface DrilldownDefinition< id: string; /** - * Minimal licence level + * Minimal license level * Empty means no restrictions */ minimalLicense?: LicenseType; + /** + * Required when `minimalLicense` is used. + * Is a user-facing string. Has to be unique. Doesn't need i18n. + * The feature's name will be displayed to Cloud end-users when they're billed based on their feature usage. + */ + licenseFeatureName?: string; + /** * Determines the display order of the drilldowns in the flyout picker. * Higher numbers are displayed first. 
diff --git a/x-pack/plugins/ui_actions_enhanced/public/dynamic_actions/action_factory.test.ts b/x-pack/plugins/ui_actions_enhanced/public/dynamic_actions/action_factory.test.ts index a07fed8486438..032a4a63fe2e9 100644 --- a/x-pack/plugins/ui_actions_enhanced/public/dynamic_actions/action_factory.test.ts +++ b/x-pack/plugins/ui_actions_enhanced/public/dynamic_actions/action_factory.test.ts @@ -7,6 +7,7 @@ import { ActionFactory } from './action_factory'; import { ActionFactoryDefinition } from './action_factory_definition'; import { licensingMock } from '../../../licensing/public/mocks'; +import { PublicLicense } from '../../../licensing/public'; const def: ActionFactoryDefinition = { id: 'ACTION_FACTORY_1', @@ -22,34 +23,94 @@ const def: ActionFactoryDefinition = { supportedTriggers: () => [], }; +const featureUsage = licensingMock.createStart().featureUsage; + +const createActionFactory = ( + defOverride: Partial = {}, + license?: Partial +) => { + return new ActionFactory( + { ...def, ...defOverride }, + { + getLicense: () => licensingMock.createLicense({ license }), + getFeatureUsageStart: () => featureUsage, + } + ); +}; + describe('License & ActionFactory', () => { test('no license requirements', async () => { - const factory = new ActionFactory(def, () => licensingMock.createLicense()); + const factory = createActionFactory(); expect(await factory.isCompatible({ triggers: [] })).toBe(true); - expect(factory.isCompatibleLicence()).toBe(true); + expect(factory.isCompatibleLicense()).toBe(true); }); test('not enough license level', async () => { - const factory = new ActionFactory({ ...def, minimalLicense: 'gold' }, () => - licensingMock.createLicense() - ); + const factory = createActionFactory({ minimalLicense: 'gold', licenseFeatureName: 'Feature' }); expect(await factory.isCompatible({ triggers: [] })).toBe(true); - expect(factory.isCompatibleLicence()).toBe(false); + expect(factory.isCompatibleLicense()).toBe(false); }); - test('licence has expired', async () => { - const factory = new ActionFactory({ ...def, minimalLicense: 'gold' }, () => - licensingMock.createLicense({ license: { type: 'gold', status: 'expired' } }) + test('license has expired', async () => { + const factory = createActionFactory( + { minimalLicense: 'gold', licenseFeatureName: 'Feature' }, + { type: 'gold', status: 'expired' } ); expect(await factory.isCompatible({ triggers: [] })).toBe(true); - expect(factory.isCompatibleLicence()).toBe(false); + expect(factory.isCompatibleLicense()).toBe(false); }); test('enough license level', async () => { - const factory = new ActionFactory({ ...def, minimalLicense: 'gold' }, () => - licensingMock.createLicense({ license: { type: 'gold' } }) + const factory = createActionFactory( + { minimalLicense: 'gold', licenseFeatureName: 'Feature' }, + { type: 'gold' } ); + expect(await factory.isCompatible({ triggers: [] })).toBe(true); - expect(factory.isCompatibleLicence()).toBe(true); + expect(factory.isCompatibleLicense()).toBe(true); + }); + + describe('licenseFeatureName', () => { + test('licenseFeatureName is required, if minimalLicense is provided', () => { + expect(() => { + createActionFactory(); + }).not.toThrow(); + + expect(() => { + createActionFactory({ minimalLicense: 'gold', licenseFeatureName: 'feature' }); + }).not.toThrow(); + + expect(() => { + createActionFactory({ minimalLicense: 'gold' }); + }).toThrow(); + }); + + test('"licenseFeatureName"', () => { + expect( + createActionFactory({ minimalLicense: 'gold', licenseFeatureName: 'feature' }) + 
.licenseFeatureName + ).toBe('feature'); + expect(createActionFactory().licenseFeatureName).toBeUndefined(); + }); + }); + + describe('notifyFeatureUsage', () => { + const spy = jest.spyOn(featureUsage, 'notifyUsage'); + beforeEach(() => { + spy.mockClear(); + }); + test('is not called if no license requirements', async () => { + const action = createActionFactory().create({ name: 'fake', config: {} }); + await action.execute({}); + expect(spy).not.toBeCalled(); + }); + test('is called if has license requirements', async () => { + const action = createActionFactory({ + minimalLicense: 'gold', + licenseFeatureName: 'feature', + }).create({ name: 'fake', config: {} }); + await action.execute({}); + expect(spy).toBeCalledWith('feature'); + }); }); }); diff --git a/x-pack/plugins/ui_actions_enhanced/public/dynamic_actions/action_factory.ts b/x-pack/plugins/ui_actions_enhanced/public/dynamic_actions/action_factory.ts index 35e06ab036fc9..35a82adf9896d 100644 --- a/x-pack/plugins/ui_actions_enhanced/public/dynamic_actions/action_factory.ts +++ b/x-pack/plugins/ui_actions_enhanced/public/dynamic_actions/action_factory.ts @@ -13,9 +13,14 @@ import { import { ActionFactoryDefinition } from './action_factory_definition'; import { Configurable } from '../../../../../src/plugins/kibana_utils/public'; import { BaseActionFactoryContext, SerializedAction } from './types'; -import { ILicense } from '../../../licensing/public'; +import { ILicense, LicensingPluginStart } from '../../../licensing/public'; import { UiActionsActionDefinition as ActionDefinition } from '../../../../../src/plugins/ui_actions/public'; +export interface ActionFactoryDeps { + readonly getLicense: () => ILicense; + readonly getFeatureUsageStart: () => LicensingPluginStart['featureUsage']; +} + export class ActionFactory< Config extends object = object, SupportedTriggers extends TriggerId = TriggerId, @@ -31,11 +36,18 @@ export class ActionFactory< FactoryContext, ActionContext >, - protected readonly getLicence: () => ILicense - ) {} + protected readonly deps: ActionFactoryDeps + ) { + if (def.minimalLicense && !def.licenseFeatureName) { + throw new Error( + `ActionFactory [actionFactory.id = ${def.id}] "licenseFeatureName" is required, if "minimalLicense" is provided` + ); + } + } public readonly id = this.def.id; public readonly minimalLicense = this.def.minimalLicense; + public readonly licenseFeatureName = this.def.licenseFeatureName; public readonly order = this.def.order || 0; public readonly MenuItem? = this.def.MenuItem; public readonly ReactMenuItem? = this.MenuItem ? uiToReactComponent(this.MenuItem) : undefined; @@ -65,13 +77,13 @@ export class ActionFactory< } /** - * Does this action factory licence requirements + * Does this action factory license requirements * compatible with current license? 
   */
-  public isCompatibleLicence() {
+  public isCompatibleLicense() {
     if (!this.minimalLicense) return true;
-    const licence = this.getLicence();
-    return licence.isAvailable && licence.isActive && licence.hasAtLeast(this.minimalLicense);
+    const license = this.deps.getLicense();
+    return license.isAvailable && license.isActive && license.hasAtLeast(this.minimalLicense);
   }
 
   public create(
@@ -81,14 +93,31 @@
     return {
       ...action,
       isCompatible: async (context: ActionContext): Promise => {
-        if (!this.isCompatibleLicence()) return false;
+        if (!this.isCompatibleLicense()) return false;
         if (!action.isCompatible) return true;
         return action.isCompatible(context);
       },
+      execute: async (context: ActionContext): Promise => {
+        this.notifyFeatureUsage();
+        return action.execute(context);
+      },
     };
   }
 
   public supportedTriggers(): SupportedTriggers[] {
     return this.def.supportedTriggers();
   }
+
+  private notifyFeatureUsage(): void {
+    if (!this.minimalLicense || !this.licenseFeatureName) return;
+    this.deps
+      .getFeatureUsageStart()
+      .notifyUsage(this.licenseFeatureName)
+      .catch(() => {
+        // eslint-disable-next-line no-console
+        console.warn(
+          `ActionFactory [actionFactory.id = ${this.def.id}] failed to notify feature usage.`
+        );
+      });
+  }
 }
diff --git a/x-pack/plugins/ui_actions_enhanced/public/dynamic_actions/action_factory_definition.ts b/x-pack/plugins/ui_actions_enhanced/public/dynamic_actions/action_factory_definition.ts
index d79614e47ccd4..91b8c8ec1e5ef 100644
--- a/x-pack/plugins/ui_actions_enhanced/public/dynamic_actions/action_factory_definition.ts
+++ b/x-pack/plugins/ui_actions_enhanced/public/dynamic_actions/action_factory_definition.ts
@@ -34,11 +34,18 @@ export interface ActionFactoryDefinition<
   id: string;
 
   /**
-   * Minimal licence level
-   * Empty means no licence restrictions
+   * Minimal license level
+   * Empty means no license restrictions
    */
   readonly minimalLicense?: LicenseType;
 
+  /**
+   * Required when `minimalLicense` is used.
+   * Is a user-facing string. Has to be unique. Doesn't need i18n.
+   * The feature's name will be displayed to Cloud end-users when they're billed based on their feature usage.
+   */
+  licenseFeatureName?: string;
+
   /**
    * This method should return a definition of a new action, normally used to
    * register it in `ui_actions` registry.
diff --git a/x-pack/plugins/ui_actions_enhanced/public/dynamic_actions/dynamic_action_manager.test.ts b/x-pack/plugins/ui_actions_enhanced/public/dynamic_actions/dynamic_action_manager.test.ts index 0b0cd39e35e25..39d9dfeca2fd6 100644 --- a/x-pack/plugins/ui_actions_enhanced/public/dynamic_actions/dynamic_action_manager.test.ts +++ b/x-pack/plugins/ui_actions_enhanced/public/dynamic_actions/dynamic_action_manager.test.ts @@ -87,7 +87,9 @@ const setup = ( actions, }); const uiActionsEnhancements = new UiActionsServiceEnhancements({ - getLicenseInfo, + getLicense: getLicenseInfo, + featureUsageSetup: licensingMock.createSetup().featureUsage, + getFeatureUsageStart: () => licensingMock.createStart().featureUsage, }); const manager = new DynamicActionManager({ isCompatible, @@ -671,11 +673,13 @@ describe('DynamicActionManager', () => { const basicActionFactory: ActionFactoryDefinition = { ...actionFactoryDefinition1, minimalLicense: 'basic', + licenseFeatureName: 'Feature 1', }; const goldActionFactory: ActionFactoryDefinition = { ...actionFactoryDefinition2, minimalLicense: 'gold', + licenseFeatureName: 'Feature 2', }; uiActions.registerActionFactory(basicActionFactory); diff --git a/x-pack/plugins/ui_actions_enhanced/public/mocks.ts b/x-pack/plugins/ui_actions_enhanced/public/mocks.ts index ff07d6e74a9c0..17a6fc1b955df 100644 --- a/x-pack/plugins/ui_actions_enhanced/public/mocks.ts +++ b/x-pack/plugins/ui_actions_enhanced/public/mocks.ts @@ -11,6 +11,7 @@ import { embeddablePluginMock } from '../../../../src/plugins/embeddable/public/ import { AdvancedUiActionsSetup, AdvancedUiActionsStart } from '.'; import { plugin as pluginInitializer } from '.'; import { licensingMock } from '../../licensing/public/mocks'; +import { StartDependencies } from './plugin'; export type Setup = jest.Mocked; export type Start = jest.Mocked; @@ -35,7 +36,7 @@ const createStartContract = (): Start => { }; const createPlugin = ( - coreSetup: CoreSetup = coreMock.createSetup(), + coreSetup: CoreSetup = coreMock.createSetup(), coreStart: CoreStart = coreMock.createStart() ) => { const pluginInitializerContext = coreMock.createPluginInitializerContext(); @@ -47,6 +48,7 @@ const createPlugin = ( const setup = plugin.setup(coreSetup, { uiActions: uiActions.setup, embeddable: embeddable.setup, + licensing: licensingMock.createSetup(), }); return { diff --git a/x-pack/plugins/ui_actions_enhanced/public/plugin.ts b/x-pack/plugins/ui_actions_enhanced/public/plugin.ts index 5069b485b198d..015531aab9743 100644 --- a/x-pack/plugins/ui_actions_enhanced/public/plugin.ts +++ b/x-pack/plugins/ui_actions_enhanced/public/plugin.ts @@ -36,16 +36,17 @@ import { } from './custom_time_range_badge'; import { CommonlyUsedRange } from './types'; import { UiActionsServiceEnhancements } from './services'; -import { ILicense, LicensingPluginStart } from '../../licensing/public'; +import { ILicense, LicensingPluginSetup, LicensingPluginStart } from '../../licensing/public'; import { createFlyoutManageDrilldowns } from './drilldowns'; -import { Storage } from '../../../../src/plugins/kibana_utils/public'; +import { createStartServicesGetter, Storage } from '../../../../src/plugins/kibana_utils/public'; interface SetupDependencies { embeddable: EmbeddableSetup; // Embeddable are needed because they register basic triggers/actions. 
uiActions: UiActionsSetup; + licensing: LicensingPluginSetup; } -interface StartDependencies { +export interface StartDependencies { embeddable: EmbeddableStart; uiActions: UiActionsStart; licensing: LicensingPluginStart; @@ -70,23 +71,30 @@ declare module '../../../../src/plugins/ui_actions/public' { export class AdvancedUiActionsPublicPlugin implements Plugin { - readonly licenceInfo = new BehaviorSubject(undefined); + readonly licenseInfo = new BehaviorSubject(undefined); private getLicenseInfo(): ILicense { - if (!this.licenceInfo.getValue()) { + if (!this.licenseInfo.getValue()) { throw new Error( - 'AdvancedUiActionsPublicPlugin: Licence is not ready! Licence becomes available only after setup.' + 'AdvancedUiActionsPublicPlugin: License is not ready! License becomes available only after setup.' ); } - return this.licenceInfo.getValue()!; + return this.licenseInfo.getValue()!; } - private readonly enhancements = new UiActionsServiceEnhancements({ - getLicenseInfo: () => this.getLicenseInfo(), - }); + private enhancements?: UiActionsServiceEnhancements; private subs: Subscription[] = []; constructor(initializerContext: PluginInitializerContext) {} - public setup(core: CoreSetup, { uiActions }: SetupDependencies): SetupContract { + public setup( + core: CoreSetup, + { uiActions, licensing }: SetupDependencies + ): SetupContract { + const startServices = createStartServicesGetter(core.getStartServices); + this.enhancements = new UiActionsServiceEnhancements({ + getLicense: () => this.getLicenseInfo(), + featureUsageSetup: licensing.featureUsage, + getFeatureUsageStart: () => startServices().plugins.licensing.featureUsage, + }); return { ...uiActions, ...this.enhancements, @@ -94,7 +102,7 @@ export class AdvancedUiActionsPublicPlugin } public start(core: CoreStart, { uiActions, licensing }: StartDependencies): StartContract { - this.subs.push(licensing.license$.subscribe(this.licenceInfo)); + this.subs.push(licensing.license$.subscribe(this.licenseInfo)); const dateFormat = core.uiSettings.get('dateFormat') as string; const commonlyUsedRanges = core.uiSettings.get( @@ -117,9 +125,9 @@ export class AdvancedUiActionsPublicPlugin return { ...uiActions, - ...this.enhancements, + ...this.enhancements!, FlyoutManageDrilldowns: createFlyoutManageDrilldowns({ - actionFactories: this.enhancements.getActionFactories(), + actionFactories: this.enhancements!.getActionFactories(), getTrigger: (triggerId: TriggerId) => uiActions.getTrigger(triggerId), storage: new Storage(window?.localStorage), toastService: core.notifications.toasts, diff --git a/x-pack/plugins/ui_actions_enhanced/public/services/ui_actions_service_enhancements.test.ts b/x-pack/plugins/ui_actions_enhanced/public/services/ui_actions_service_enhancements.test.ts index 08823833b9af2..3a0b65d2ed844 100644 --- a/x-pack/plugins/ui_actions_enhanced/public/services/ui_actions_service_enhancements.test.ts +++ b/x-pack/plugins/ui_actions_enhanced/public/services/ui_actions_service_enhancements.test.ts @@ -4,11 +4,18 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { UiActionsServiceEnhancements } from './ui_actions_service_enhancements'; +import { + UiActionsServiceEnhancements, + UiActionsServiceEnhancementsParams, +} from './ui_actions_service_enhancements'; import { ActionFactoryDefinition, ActionFactory } from '../dynamic_actions'; import { licensingMock } from '../../../licensing/public/mocks'; -const getLicenseInfo = () => licensingMock.createLicense(); +const deps: UiActionsServiceEnhancementsParams = { + getLicense: () => licensingMock.createLicense(), + featureUsageSetup: licensingMock.createSetup().featureUsage, + getFeatureUsageStart: () => licensingMock.createStart().featureUsage, +}; describe('UiActionsService', () => { describe('action factories', () => { @@ -34,7 +41,7 @@ describe('UiActionsService', () => { }; test('.getActionFactories() returns empty array if no action factories registered', () => { - const service = new UiActionsServiceEnhancements({ getLicenseInfo }); + const service = new UiActionsServiceEnhancements(deps); const factories = service.getActionFactories(); @@ -42,7 +49,7 @@ describe('UiActionsService', () => { }); test('can register and retrieve an action factory', () => { - const service = new UiActionsServiceEnhancements({ getLicenseInfo }); + const service = new UiActionsServiceEnhancements(deps); service.registerActionFactory(factoryDefinition1); @@ -53,7 +60,7 @@ describe('UiActionsService', () => { }); test('can retrieve all action factories', () => { - const service = new UiActionsServiceEnhancements({ getLicenseInfo }); + const service = new UiActionsServiceEnhancements(deps); service.registerActionFactory(factoryDefinition1); service.registerActionFactory(factoryDefinition2); @@ -67,7 +74,7 @@ describe('UiActionsService', () => { }); test('throws when retrieving action factory that does not exist', () => { - const service = new UiActionsServiceEnhancements({ getLicenseInfo }); + const service = new UiActionsServiceEnhancements(deps); service.registerActionFactory(factoryDefinition1); @@ -77,7 +84,7 @@ describe('UiActionsService', () => { }); test('isCompatible from definition is used on registered factory', async () => { - const service = new UiActionsServiceEnhancements({ getLicenseInfo }); + const service = new UiActionsServiceEnhancements(deps); service.registerActionFactory({ ...factoryDefinition1, @@ -88,5 +95,27 @@ describe('UiActionsService', () => { service.getActionFactory(factoryDefinition1.id).isCompatible({ triggers: [] }) ).resolves.toBe(false); }); + + describe('registerFeature for licensing', () => { + const spy = jest.spyOn(deps.featureUsageSetup, 'register'); + beforeEach(() => { + spy.mockClear(); + }); + test('registerFeature is not called if no license requirements', () => { + const service = new UiActionsServiceEnhancements(deps); + service.registerActionFactory(factoryDefinition1); + expect(spy).not.toBeCalled(); + }); + + test('registerFeature is called if has license requirements', () => { + const service = new UiActionsServiceEnhancements(deps); + service.registerActionFactory({ + ...factoryDefinition1, + minimalLicense: 'gold', + licenseFeatureName: 'a name', + }); + expect(spy).toBeCalledWith('a name', 'gold'); + }); + }); }); }); diff --git a/x-pack/plugins/ui_actions_enhanced/public/services/ui_actions_service_enhancements.ts b/x-pack/plugins/ui_actions_enhanced/public/services/ui_actions_service_enhancements.ts index 9575329514835..b8086c16f5e71 100644 --- a/x-pack/plugins/ui_actions_enhanced/public/services/ui_actions_service_enhancements.ts +++ 
b/x-pack/plugins/ui_actions_enhanced/public/services/ui_actions_service_enhancements.ts
@@ -13,19 +13,22 @@ import {
 import { DrilldownDefinition } from '../drilldowns';
 import { ILicense } from '../../../licensing/common/types';
 import { TriggerContextMapping, TriggerId } from '../../../../../src/plugins/ui_actions/public';
+import { LicensingPluginSetup, LicensingPluginStart } from '../../../licensing/public';
 
 export interface UiActionsServiceEnhancementsParams {
   readonly actionFactories?: ActionFactoryRegistry;
-  readonly getLicenseInfo: () => ILicense;
+  readonly getLicense: () => ILicense;
+  readonly featureUsageSetup: LicensingPluginSetup['featureUsage'];
+  readonly getFeatureUsageStart: () => LicensingPluginStart['featureUsage'];
 }
 
 export class UiActionsServiceEnhancements {
   protected readonly actionFactories: ActionFactoryRegistry;
-  protected readonly getLicenseInfo: () => ILicense;
+  protected readonly deps: Omit;
 
-  constructor({ actionFactories = new Map(), getLicenseInfo }: UiActionsServiceEnhancementsParams) {
+  constructor({ actionFactories = new Map(), ...deps }: UiActionsServiceEnhancementsParams) {
     this.actionFactories = actionFactories;
-    this.getLicenseInfo = getLicenseInfo;
+    this.deps = deps;
   }
 
   /**
@@ -51,9 +54,10 @@
       SupportedTriggers,
       FactoryContext,
       ActionContext
-    >(definition, this.getLicenseInfo);
+    >(definition, this.deps);
 
     this.actionFactories.set(actionFactory.id, actionFactory as ActionFactory);
+    this.registerFeatureUsage(definition);
   };
 
   public readonly getActionFactory = (actionFactoryId: string): ActionFactory => {
@@ -94,6 +98,7 @@
       execute,
       getHref,
       minimalLicense,
+      licenseFeatureName,
       supportedTriggers,
       isCompatible,
     }: DrilldownDefinition): void => {
@@ -105,6 +110,7 @@
     > = {
       id: factoryId,
       minimalLicense,
+      licenseFeatureName,
       order,
       CollectConfig,
       createConfig,
@@ -128,4 +134,19 @@
     this.registerActionFactory(actionFactory);
   };
+
+  private registerFeatureUsage = (definition: ActionFactoryDefinition): void => {
+    if (!definition.minimalLicense || !definition.licenseFeatureName) return;
+
+    // Intentionally don't wait for the response because this
+    // happens in the setup phase and has to be sync
+    this.deps.featureUsageSetup
+      .register(definition.licenseFeatureName, definition.minimalLicense)
+      .catch(() => {
+        // eslint-disable-next-line no-console
+        console.warn(
+          `ActionFactory [actionFactory.id = ${definition.id}] failed to register feature for featureUsage.`
+        );
+      });
+  };
 }
From 3e077973e8a5b73c03ab149d74c13ae2b9a700f3 Mon Sep 17 00:00:00 2001
From: Dario Gieselaar
Date: Wed, 2 Sep 2020 16:11:12 +0200
Subject: [PATCH 07/21] [APM] Immediately return terms for unbound queries (#74543)

Co-authored-by: Elastic Machine
---
 .../apm/server/lib/environments/get_all_environments.ts    | 5 +++++
 .../lib/helpers/create_es_client/call_client_with_debug.ts | 2 +-
 .../agent_configuration/__snapshots__/queries.test.ts.snap | 2 ++
 .../lib/settings/agent_configuration/get_service_names.ts  | 2 ++
 4 files changed, 10 insertions(+), 1 deletion(-)

diff --git a/x-pack/plugins/apm/server/lib/environments/get_all_environments.ts b/x-pack/plugins/apm/server/lib/environments/get_all_environments.ts
index 29aaa98169fa5..8060bf10da99c 100644
--- a/x-pack/plugins/apm/server/lib/environments/get_all_environments.ts
+++ b/x-pack/plugins/apm/server/lib/environments/get_all_environments.ts
@@ -37,6 +37,9 @@ export async
function getAllEnvironments({ ], }, body: { + // use timeout + min_doc_count to return as early as possible + // if filter is not defined to prevent timeouts + ...(!serviceName ? { timeout: '1ms' } : {}), size: 0, query: { bool: { @@ -48,6 +51,7 @@ export async function getAllEnvironments({ terms: { field: SERVICE_ENVIRONMENT, size: 100, + ...(!serviceName ? { min_doc_count: 0 } : {}), missing: includeMissing ? ENVIRONMENT_NOT_DEFINED.value : undefined, }, }, @@ -56,6 +60,7 @@ export async function getAllEnvironments({ }; const resp = await apmEventClient.search(params); + const environments = resp.aggregations?.environments.buckets.map( (bucket) => bucket.key as string diff --git a/x-pack/plugins/apm/server/lib/helpers/create_es_client/call_client_with_debug.ts b/x-pack/plugins/apm/server/lib/helpers/create_es_client/call_client_with_debug.ts index c475640595227..9f7aaafbefb8c 100644 --- a/x-pack/plugins/apm/server/lib/helpers/create_es_client/call_client_with_debug.ts +++ b/x-pack/plugins/apm/server/lib/helpers/create_es_client/call_client_with_debug.ts @@ -34,7 +34,7 @@ export async function callClientWithDebug({ let res: any; let esError = null; try { - res = apiCaller(operationName, params); + res = await apiCaller(operationName, params); } catch (e) { // catch error and throw after outputting debug info esError = e; diff --git a/x-pack/plugins/apm/server/lib/settings/agent_configuration/__snapshots__/queries.test.ts.snap b/x-pack/plugins/apm/server/lib/settings/agent_configuration/__snapshots__/queries.test.ts.snap index 2b465a0f87475..c01e5c87eeea2 100644 --- a/x-pack/plugins/apm/server/lib/settings/agent_configuration/__snapshots__/queries.test.ts.snap +++ b/x-pack/plugins/apm/server/lib/settings/agent_configuration/__snapshots__/queries.test.ts.snap @@ -127,11 +127,13 @@ Object { "services": Object { "terms": Object { "field": "service.name", + "min_doc_count": 0, "size": 50, }, }, }, "size": 0, + "timeout": "1ms", }, } `; diff --git a/x-pack/plugins/apm/server/lib/settings/agent_configuration/get_service_names.ts b/x-pack/plugins/apm/server/lib/settings/agent_configuration/get_service_names.ts index 8b6c1d82beab0..91bdfeef003f1 100644 --- a/x-pack/plugins/apm/server/lib/settings/agent_configuration/get_service_names.ts +++ b/x-pack/plugins/apm/server/lib/settings/agent_configuration/get_service_names.ts @@ -25,12 +25,14 @@ export async function getServiceNames({ setup }: { setup: Setup }) { ], }, body: { + timeout: '1ms', size: 0, aggs: { services: { terms: { field: SERVICE_NAME, size: 50, + min_doc_count: 0, }, }, }, From 01e442081f666322a99266f443b2651228306cf4 Mon Sep 17 00:00:00 2001 From: Wylie Conlon Date: Wed, 2 Sep 2020 10:34:37 -0400 Subject: [PATCH 08/21] [Visualize] Remove alias icon for Lens and Maps (#76418) * [Visualize] Remove alias icon for Lens and Maps * Fix snapshots * Fix translations --- .../__snapshots__/new_vis_modal.test.tsx.snap | 140 +----------------- .../wizard/type_selection/type_selection.tsx | 42 ++---- .../translations/translations/ja-JP.json | 2 - .../translations/translations/zh-CN.json | 2 - 4 files changed, 16 insertions(+), 170 deletions(-) diff --git a/src/plugins/visualizations/public/wizard/__snapshots__/new_vis_modal.test.tsx.snap b/src/plugins/visualizations/public/wizard/__snapshots__/new_vis_modal.test.tsx.snap index 6aed16e937713..3c4c983efa9fa 100644 --- a/src/plugins/visualizations/public/wizard/__snapshots__/new_vis_modal.test.tsx.snap +++ b/src/plugins/visualizations/public/wizard/__snapshots__/new_vis_modal.test.tsx.snap @@ -248,7 
+248,7 @@ exports[`NewVisModal filter for visualization types should render as expected 1`