(settings, 'server.xsrf.whitelist').length > 0
+ ) {
+ log(
+ 'It is not recommended to disable xsrf protections for API endpoints via [server.xsrf.whitelist]. ' +
+ 'It will be removed in 8.0 release. Instead, supply the "kbn-xsrf" header.'
+ );
+ }
+ return settings;
+};
+
const rewriteBasePathDeprecation: ConfigDeprecation = (settings, fromPath, log) => {
if (has(settings, 'server.basePath') && !has(settings, 'server.rewriteBasePath')) {
log(
@@ -177,4 +190,5 @@ export const coreDeprecationProvider: ConfigDeprecationProvider = ({
rewriteBasePathDeprecation,
cspRulesDeprecation,
mapManifestServiceUrlDeprecation,
+ xsrfDeprecation,
];
diff --git a/src/core/server/http/http_server.mocks.ts b/src/core/server/http/http_server.mocks.ts
index 0a9541393284..741c723ca936 100644
--- a/src/core/server/http/http_server.mocks.ts
+++ b/src/core/server/http/http_server.mocks.ts
@@ -29,6 +29,7 @@ import {
RouteMethod,
KibanaResponseFactory,
RouteValidationSpec,
+ KibanaRouteState,
} from './router';
import { OnPreResponseToolkit } from './lifecycle/on_pre_response';
import { OnPostAuthToolkit } from './lifecycle/on_post_auth';
@@ -43,6 +44,7 @@ interface RequestFixtureOptions {
method?: RouteMethod;
socket?: Socket;
routeTags?: string[];
+ kibanaRouteState?: KibanaRouteState;
routeAuthRequired?: false;
validation?: {
params?: RouteValidationSpec;
@@ -62,6 +64,7 @@ function createKibanaRequestMock({
routeTags,
routeAuthRequired,
validation = {},
+ kibanaRouteState = { xsrfRequired: true },
}: RequestFixtureOptions = {}) {
const queryString = stringify(query, { sort: false });
@@ -80,7 +83,7 @@ function createKibanaRequestMock({
search: queryString ? `?${queryString}` : queryString,
},
route: {
- settings: { tags: routeTags, auth: routeAuthRequired },
+ settings: { tags: routeTags, auth: routeAuthRequired, app: kibanaRouteState },
},
raw: {
req: { socket },
@@ -109,6 +112,7 @@ function createRawRequestMock(customization: DeepPartial<Request> = {}) {
return merge(
{},
{
+ app: { xsrfRequired: true } as any,
headers: {},
path: '/',
route: { settings: {} },
diff --git a/src/core/server/http/http_server.test.ts b/src/core/server/http/http_server.test.ts
index a9fc80c86d87..27db79bb94d2 100644
--- a/src/core/server/http/http_server.test.ts
+++ b/src/core/server/http/http_server.test.ts
@@ -811,6 +811,7 @@ test('exposes route details of incoming request to a route handler', async () =>
path: '/',
options: {
authRequired: true,
+ xsrfRequired: false,
tags: [],
},
});
@@ -923,6 +924,7 @@ test('exposes route details of incoming request to a route handler (POST + paylo
path: '/',
options: {
authRequired: true,
+ xsrfRequired: true,
tags: [],
body: {
parse: true, // hapi populates the default
diff --git a/src/core/server/http/http_server.ts b/src/core/server/http/http_server.ts
index 025ab2bf56ac..cffdffab0d0c 100644
--- a/src/core/server/http/http_server.ts
+++ b/src/core/server/http/http_server.ts
@@ -27,7 +27,7 @@ import { adoptToHapiOnPostAuthFormat, OnPostAuthHandler } from './lifecycle/on_p
import { adoptToHapiOnPreAuthFormat, OnPreAuthHandler } from './lifecycle/on_pre_auth';
import { adoptToHapiOnPreResponseFormat, OnPreResponseHandler } from './lifecycle/on_pre_response';
-import { IRouter } from './router';
+import { IRouter, KibanaRouteState, isSafeMethod } from './router';
import {
SessionStorageCookieOptions,
createCookieSessionStorageFactory,
@@ -147,9 +147,14 @@ export class HttpServer {
for (const route of router.getRoutes()) {
this.log.debug(`registering route handler for [${route.path}]`);
// Hapi does not allow payload validation to be specified for 'head' or 'get' requests
- const validate = ['head', 'get'].includes(route.method) ? undefined : { payload: true };
+ const validate = isSafeMethod(route.method) ? undefined : { payload: true };
const { authRequired = true, tags, body = {} } = route.options;
const { accepts: allow, maxBytes, output, parse } = body;
+
+ const kibanaRouteState: KibanaRouteState = {
+ xsrfRequired: route.options.xsrfRequired ?? !isSafeMethod(route.method),
+ };
+
this.server.route({
handler: route.handler,
method: route.method,
@@ -157,6 +162,7 @@ export class HttpServer {
options: {
// Enforcing the comparison with true because plugins could overwrite the auth strategy by doing `options: { authRequired: authStrategy as any }`
auth: authRequired === true ? undefined : false,
+ app: kibanaRouteState,
tags: tags ? Array.from(tags) : undefined,
// TODO: This 'validate' section can be removed once the legacy platform is completely removed.
// We are telling Hapi that NP routes can accept any payload, so that it can bypass the default
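For readers skimming the hunk above: the route-level default is now derived from the HTTP method, so only state-changing methods require the `kbn-xsrf` header unless the route opts out explicitly. A minimal standalone sketch of that defaulting rule, using local copies of the types rather than the actual `HttpServer` code:

```ts
type SafeRouteMethod = 'get' | 'options';
type DestructiveRouteMethod = 'post' | 'put' | 'delete' | 'patch';
type RouteMethod = SafeRouteMethod | DestructiveRouteMethod;

const isSafeMethod = (method: RouteMethod): method is SafeRouteMethod =>
  method === 'get' || method === 'options';

// Explicit route option wins; otherwise safe methods skip the xsrf check
// and destructive methods require it.
const resolveXsrfRequired = (method: RouteMethod, xsrfRequired?: boolean): boolean =>
  xsrfRequired ?? !isSafeMethod(method);

console.log(resolveXsrfRequired('get')); // false
console.log(resolveXsrfRequired('post')); // true
console.log(resolveXsrfRequired('post', false)); // false (the route opted out)
```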
diff --git a/src/core/server/http/index.ts b/src/core/server/http/index.ts
index d31afe1670e4..8f4c02680f8a 100644
--- a/src/core/server/http/index.ts
+++ b/src/core/server/http/index.ts
@@ -58,6 +58,8 @@ export {
RouteValidationError,
RouteValidatorFullConfig,
RouteValidationResultFactory,
+ DestructiveRouteMethod,
+ SafeRouteMethod,
} from './router';
export { BasePathProxyServer } from './base_path_proxy_server';
export { OnPreAuthHandler, OnPreAuthToolkit } from './lifecycle/on_pre_auth';
diff --git a/src/core/server/http/integration_tests/lifecycle_handlers.test.ts b/src/core/server/http/integration_tests/lifecycle_handlers.test.ts
index f4c5f16870c7..b5364c616f17 100644
--- a/src/core/server/http/integration_tests/lifecycle_handlers.test.ts
+++ b/src/core/server/http/integration_tests/lifecycle_handlers.test.ts
@@ -36,6 +36,7 @@ const versionHeader = 'kbn-version';
const xsrfHeader = 'kbn-xsrf';
const nameHeader = 'kbn-name';
const whitelistedTestPath = '/xsrf/test/route/whitelisted';
+const xsrfDisabledTestPath = '/xsrf/test/route/disabled';
const kibanaName = 'my-kibana-name';
const setupDeps = {
context: contextServiceMock.createSetupContract(),
@@ -188,6 +189,12 @@ describe('core lifecycle handlers', () => {
return res.ok({ body: 'ok' });
}
);
+ ((router as any)[method.toLowerCase()] as RouteRegistrar)(
+ { path: xsrfDisabledTestPath, validate: false, options: { xsrfRequired: false } },
+ (context, req, res) => {
+ return res.ok({ body: 'ok' });
+ }
+ );
});
await server.start();
@@ -235,6 +242,10 @@ describe('core lifecycle handlers', () => {
it('accepts whitelisted requests without either an xsrf or version header', async () => {
await getSupertest(method.toLowerCase(), whitelistedTestPath).expect(200, 'ok');
});
+
+ it('accepts requests on a route with disabled xsrf protection', async () => {
+ await getSupertest(method.toLowerCase(), xsrfDisabledTestPath).expect(200, 'ok');
+ });
});
});
});
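As an illustration of the new option from a consumer's perspective, here is a hedged sketch of how a plugin could register an endpoint that accepts requests without the `kbn-xsrf` header. The plugin wiring, path, and handler are hypothetical; only `options: { xsrfRequired: false }` is what the tests above exercise.

```ts
import { CoreSetup } from 'src/core/server';

export const registerWebhookRoute = (core: CoreSetup) => {
  const router = core.http.createRouter();

  // Hypothetical endpoint that must accept POSTs from clients that cannot
  // send the kbn-xsrf header (for example a third-party webhook).
  router.post(
    { path: '/api/example/webhook', validate: false, options: { xsrfRequired: false } },
    async (context, request, response) => {
      return response.ok({ body: 'received' });
    }
  );
};
```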
diff --git a/src/core/server/http/lifecycle_handlers.test.ts b/src/core/server/http/lifecycle_handlers.test.ts
index 48a6973b741b..a80e432e0d4c 100644
--- a/src/core/server/http/lifecycle_handlers.test.ts
+++ b/src/core/server/http/lifecycle_handlers.test.ts
@@ -24,7 +24,7 @@ import {
} from './lifecycle_handlers';
import { httpServerMock } from './http_server.mocks';
import { HttpConfig } from './http_config';
-import { KibanaRequest, RouteMethod } from './router';
+import { KibanaRequest, RouteMethod, KibanaRouteState } from './router';
const createConfig = (partial: Partial<HttpConfig>): HttpConfig => partial as HttpConfig;
@@ -32,12 +32,14 @@ const forgeRequest = ({
headers = {},
path = '/',
method = 'get',
+ kibanaRouteState,
}: Partial<{
headers: Record<string, string>;
path: string;
method: RouteMethod;
+ kibanaRouteState: KibanaRouteState;
}>): KibanaRequest => {
- return httpServerMock.createKibanaRequest({ headers, path, method });
+ return httpServerMock.createKibanaRequest({ headers, path, method, kibanaRouteState });
};
describe('xsrf post-auth handler', () => {
@@ -142,6 +144,29 @@ describe('xsrf post-auth handler', () => {
expect(toolkit.next).toHaveBeenCalledTimes(1);
expect(result).toEqual('next');
});
+
+ it('accepts requests if xsrf protection on a route is disabled', () => {
+ const config = createConfig({
+ xsrf: { whitelist: [], disableProtection: false },
+ });
+ const handler = createXsrfPostAuthHandler(config);
+ const request = forgeRequest({
+ method: 'post',
+ headers: {},
+ path: '/some-path',
+ kibanaRouteState: {
+ xsrfRequired: false,
+ },
+ });
+
+ toolkit.next.mockReturnValue('next' as any);
+
+ const result = handler(request, responseFactory, toolkit);
+
+ expect(responseFactory.badRequest).not.toHaveBeenCalled();
+ expect(toolkit.next).toHaveBeenCalledTimes(1);
+ expect(result).toEqual('next');
+ });
});
});
diff --git a/src/core/server/http/lifecycle_handlers.ts b/src/core/server/http/lifecycle_handlers.ts
index ee877ee031a2..7ef7e8632603 100644
--- a/src/core/server/http/lifecycle_handlers.ts
+++ b/src/core/server/http/lifecycle_handlers.ts
@@ -20,6 +20,7 @@
import { OnPostAuthHandler } from './lifecycle/on_post_auth';
import { OnPreResponseHandler } from './lifecycle/on_pre_response';
import { HttpConfig } from './http_config';
+import { isSafeMethod } from './router';
import { Env } from '../config';
import { LifecycleRegistrar } from './http_server';
@@ -31,15 +32,18 @@ export const createXsrfPostAuthHandler = (config: HttpConfig): OnPostAuthHandler
const { whitelist, disableProtection } = config.xsrf;
return (request, response, toolkit) => {
- if (disableProtection || whitelist.includes(request.route.path)) {
+ if (
+ disableProtection ||
+ whitelist.includes(request.route.path) ||
+ request.route.options.xsrfRequired === false
+ ) {
return toolkit.next();
}
- const isSafeMethod = request.route.method === 'get' || request.route.method === 'head';
const hasVersionHeader = VERSION_HEADER in request.headers;
const hasXsrfHeader = XSRF_HEADER in request.headers;
- if (!isSafeMethod && !hasVersionHeader && !hasXsrfHeader) {
+ if (!isSafeMethod(request.route.method) && !hasVersionHeader && !hasXsrfHeader) {
return response.badRequest({ body: `Request must contain a ${XSRF_HEADER} header.` });
}
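The rewritten handler short-circuits in a fixed order: global `disableProtection`, then the path whitelist, then the new per-route `xsrfRequired: false`, and only then the header check for unsafe methods. A condensed, dependency-free sketch of that decision order (simplified types, not the real `OnPostAuthHandler`):

```ts
interface XsrfCheckInput {
  disableProtection: boolean; // config: server.xsrf.disableProtection
  whitelist: string[]; // config: server.xsrf.whitelist
  path: string; // request.route.path
  method: 'get' | 'options' | 'post' | 'put' | 'delete' | 'patch';
  xsrfRequired: boolean; // route-level flag resolved at registration time
  headers: Record<string, string>;
}

// Returns an error message when the request should be rejected, otherwise undefined.
const checkXsrf = (input: XsrfCheckInput): string | undefined => {
  if (input.disableProtection || input.whitelist.includes(input.path) || !input.xsrfRequired) {
    return undefined; // protection skipped entirely
  }
  const isSafe = input.method === 'get' || input.method === 'options';
  const hasHeader = 'kbn-version' in input.headers || 'kbn-xsrf' in input.headers;
  return !isSafe && !hasHeader ? 'Request must contain a kbn-xsrf header.' : undefined;
};

console.log(
  checkXsrf({
    disableProtection: false,
    whitelist: [],
    path: '/api/thing',
    method: 'post',
    xsrfRequired: true,
    headers: {},
  })
); // 'Request must contain a kbn-xsrf header.'
```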
diff --git a/src/core/server/http/router/index.ts b/src/core/server/http/router/index.ts
index 32663d1513f3..d254f391ca5e 100644
--- a/src/core/server/http/router/index.ts
+++ b/src/core/server/http/router/index.ts
@@ -24,16 +24,20 @@ export {
KibanaRequestEvents,
KibanaRequestRoute,
KibanaRequestRouteOptions,
+ KibanaRouteState,
isRealRequest,
LegacyRequest,
ensureRawRequest,
} from './request';
export {
+ DestructiveRouteMethod,
+ isSafeMethod,
RouteMethod,
RouteConfig,
RouteConfigOptions,
RouteContentType,
RouteConfigOptionsBody,
+ SafeRouteMethod,
validBodyOutput,
} from './route';
export { HapiResponseAdapter } from './response_adapter';
diff --git a/src/core/server/http/router/request.ts b/src/core/server/http/router/request.ts
index 703571ba53c0..bb2db6367f70 100644
--- a/src/core/server/http/router/request.ts
+++ b/src/core/server/http/router/request.ts
@@ -18,18 +18,24 @@
*/
import { Url } from 'url';
-import { Request } from 'hapi';
+import { Request, ApplicationState } from 'hapi';
import { Observable, fromEvent, merge } from 'rxjs';
import { shareReplay, first, takeUntil } from 'rxjs/operators';
import { deepFreeze, RecursiveReadonly } from '../../../utils';
import { Headers } from './headers';
-import { RouteMethod, RouteConfigOptions, validBodyOutput } from './route';
+import { RouteMethod, RouteConfigOptions, validBodyOutput, isSafeMethod } from './route';
import { KibanaSocket, IKibanaSocket } from './socket';
import { RouteValidator, RouteValidatorFullConfig } from './validator';
const requestSymbol = Symbol('request');
+/**
+ * @internal
+ */
+export interface KibanaRouteState extends ApplicationState {
+ xsrfRequired: boolean;
+}
/**
* Route options: If 'GET' or 'OPTIONS' method, body options won't be returned.
* @public
@@ -184,8 +190,10 @@ export class KibanaRequest<
const options = ({
authRequired: request.route.settings.auth !== false,
+ // Some places in the legacy platform call KibanaRequest.from(request) manually; remove the fallback to true before v8.
+ xsrfRequired: (request.route.settings.app as KibanaRouteState)?.xsrfRequired ?? true,
tags: request.route.settings.tags || [],
- body: ['get', 'options'].includes(method)
+ body: isSafeMethod(method)
? undefined
: {
parse,
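The fallback above works because hapi keeps per-route `app` state: whatever is passed as `options.app` at registration time comes back on `request.route.settings.app` for every request to that route. A hedged sketch of that mechanism in isolation, with a hypothetical route and the same `hapi` package the core server already depends on:

```ts
import { Server } from 'hapi';

const server = new Server({ port: 0 });

server.route({
  method: 'POST',
  path: '/example',
  // Arbitrary route-scoped state; KibanaRouteState piggybacks on this slot.
  options: { app: { xsrfRequired: false } },
  handler: request => {
    const state = request.route.settings.app as { xsrfRequired?: boolean } | undefined;
    // Mirrors the nullish fallback in KibanaRequest above.
    return { xsrfRequired: state?.xsrfRequired ?? true };
  },
});
```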
diff --git a/src/core/server/http/router/route.ts b/src/core/server/http/router/route.ts
index 4439a80b1eac..d1458ef4ad06 100644
--- a/src/core/server/http/router/route.ts
+++ b/src/core/server/http/router/route.ts
@@ -19,11 +19,27 @@
import { RouteValidatorFullConfig } from './validator';
+export function isSafeMethod(method: RouteMethod): method is SafeRouteMethod {
+ return method === 'get' || method === 'options';
+}
+
+/**
+ * Set of HTTP methods changing the state of the server.
+ * @public
+ */
+export type DestructiveRouteMethod = 'post' | 'put' | 'delete' | 'patch';
+
+/**
+ * Set of HTTP methods not changing the state of the server.
+ * @public
+ */
+export type SafeRouteMethod = 'get' | 'options';
+
/**
* The set of common HTTP methods supported by Kibana routing.
* @public
*/
-export type RouteMethod = 'get' | 'post' | 'put' | 'delete' | 'patch' | 'options';
+export type RouteMethod = SafeRouteMethod | DestructiveRouteMethod;
/**
* The set of valid body.output
@@ -108,6 +124,15 @@ export interface RouteConfigOptions<Method extends RouteMethod> {
*/
authRequired?: boolean;
+ /**
+ * Defines xsrf protection requirements for a route:
+ * - true. Requires an incoming POST/PUT/DELETE request to contain `kbn-xsrf` header.
+ * - false. Disables xsrf protection.
+ *
+ * Set to true by default
+ */
+ xsrfRequired?: Method extends 'get' ? never : boolean;
+
/**
* Additional metadata tag strings to attach to the route.
*/
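The conditional type on `xsrfRequired` makes the flag unavailable for GET routes at compile time while leaving it a plain boolean everywhere else. A small sketch of the effect, using simplified local copies of the types:

```ts
type RouteMethod = 'get' | 'options' | 'post' | 'put' | 'delete' | 'patch';

// Reduced version of the options interface above, kept only to show the
// behaviour of the conditional property type.
interface RouteConfigOptions<Method extends RouteMethod> {
  xsrfRequired?: Method extends 'get' ? never : boolean;
}

const postOptions: RouteConfigOptions<'post'> = { xsrfRequired: false }; // allowed
const getOptions: RouteConfigOptions<'get'> = {}; // fine; the flag is simply unavailable
// `RouteConfigOptions<'get'> = { xsrfRequired: false }` would not compile, because
// the property type collapses to `never` for GET routes.

console.log(postOptions, getOptions);
```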
diff --git a/src/core/server/index.ts b/src/core/server/index.ts
index e45d4f28edcc..7d856ae10117 100644
--- a/src/core/server/index.ts
+++ b/src/core/server/index.ts
@@ -159,6 +159,8 @@ export {
SessionStorageCookieOptions,
SessionCookieValidationResult,
SessionStorageFactory,
+ DestructiveRouteMethod,
+ SafeRouteMethod,
} from './http';
export { RenderingServiceSetup, IRenderOptions } from './rendering';
export { Logger, LoggerFactory, LogMeta, LogRecord, LogLevel } from './logging';
@@ -229,6 +231,9 @@ export {
SavedObjectsType,
SavedObjectMigrationMap,
SavedObjectMigrationFn,
+ exportSavedObjectsToStream,
+ importSavedObjectsFromStream,
+ resolveSavedObjectsImportErrors,
} from './saved_objects';
export {
@@ -245,6 +250,14 @@ export {
StringValidationRegexString,
} from './ui_settings';
+export {
+ OpsMetrics,
+ OpsOsMetrics,
+ OpsServerMetrics,
+ OpsProcessMetrics,
+ MetricsServiceSetup,
+} from './metrics';
+
export { RecursiveReadonly } from '../utils';
export {
diff --git a/src/core/server/internal_types.ts b/src/core/server/internal_types.ts
index ff68d1544d11..37d1061dc618 100644
--- a/src/core/server/internal_types.ts
+++ b/src/core/server/internal_types.ts
@@ -30,6 +30,7 @@ import {
} from './saved_objects';
import { InternalUiSettingsServiceSetup, InternalUiSettingsServiceStart } from './ui_settings';
import { UuidServiceSetup } from './uuid';
+import { InternalMetricsServiceSetup } from './metrics';
/** @internal */
export interface InternalCoreSetup {
@@ -40,6 +41,7 @@ export interface InternalCoreSetup {
uiSettings: InternalUiSettingsServiceSetup;
savedObjects: InternalSavedObjectsServiceSetup;
uuid: UuidServiceSetup;
+ metrics: InternalMetricsServiceSetup;
}
/**
diff --git a/src/core/server/legacy/legacy_service.test.ts b/src/core/server/legacy/legacy_service.test.ts
index 46436461505c..50468db8a504 100644
--- a/src/core/server/legacy/legacy_service.test.ts
+++ b/src/core/server/legacy/legacy_service.test.ts
@@ -43,6 +43,7 @@ import { savedObjectsServiceMock } from '../saved_objects/saved_objects_service.
import { capabilitiesServiceMock } from '../capabilities/capabilities_service.mock';
import { setupMock as renderingServiceMock } from '../rendering/__mocks__/rendering_service';
import { uuidServiceMock } from '../uuid/uuid_service.mock';
+import { metricsServiceMock } from '../metrics/metrics_service.mock';
import { findLegacyPluginSpecs } from './plugins';
import { LegacyVars, LegacyServiceSetupDeps, LegacyServiceStartDeps } from './types';
import { LegacyService } from './legacy_service';
@@ -93,6 +94,7 @@ beforeEach(() => {
},
},
rendering: renderingServiceMock,
+ metrics: metricsServiceMock.createInternalSetupContract(),
uuid: uuidSetup,
},
plugins: { 'plugin-id': 'plugin-value' },
diff --git a/src/core/server/legacy/legacy_service.ts b/src/core/server/legacy/legacy_service.ts
index 44f77b5ad215..ca83a287c57e 100644
--- a/src/core/server/legacy/legacy_service.ts
+++ b/src/core/server/legacy/legacy_service.ts
@@ -300,6 +300,7 @@ export class LegacyService implements CoreService {
setClientFactoryProvider: setupDeps.core.savedObjects.setClientFactoryProvider,
addClientWrapper: setupDeps.core.savedObjects.addClientWrapper,
registerType: setupDeps.core.savedObjects.registerType,
+ getImportExportObjectLimit: setupDeps.core.savedObjects.getImportExportObjectLimit,
},
uiSettings: {
register: setupDeps.core.uiSettings.register,
diff --git a/src/legacy/core_plugins/visualizations/public/legacy_imports.ts b/src/core/server/metrics/collectors/index.ts
similarity index 76%
rename from src/legacy/core_plugins/visualizations/public/legacy_imports.ts
rename to src/core/server/metrics/collectors/index.ts
index 0a3b1938436c..f58ab02e6388 100644
--- a/src/legacy/core_plugins/visualizations/public/legacy_imports.ts
+++ b/src/core/server/metrics/collectors/index.ts
@@ -17,11 +17,7 @@
* under the License.
*/
-export {
- IAggConfig,
- IAggConfigs,
- isDateHistogramBucketAggConfig,
- setBounds,
-} from '../../data/public';
-export { createAggConfigs } from 'ui/agg_types';
-export { createSavedSearchesLoader } from '../../../../plugins/discover/public';
+export { OpsProcessMetrics, OpsOsMetrics, OpsServerMetrics, MetricsCollector } from './types';
+export { OsMetricsCollector } from './os';
+export { ProcessMetricsCollector } from './process';
+export { ServerMetricsCollector } from './server';
diff --git a/src/core/server/metrics/collectors/os.test.ts b/src/core/server/metrics/collectors/os.test.ts
new file mode 100644
index 000000000000..7d5a6da90b7d
--- /dev/null
+++ b/src/core/server/metrics/collectors/os.test.ts
@@ -0,0 +1,99 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+jest.mock('getos', () => (cb: Function) => cb(null, { dist: 'distrib', release: 'release' }));
+
+import os from 'os';
+import { OsMetricsCollector } from './os';
+
+describe('OsMetricsCollector', () => {
+ let collector: OsMetricsCollector;
+
+ beforeEach(() => {
+ collector = new OsMetricsCollector();
+ });
+
+ afterEach(() => {
+ jest.restoreAllMocks();
+ });
+
+ it('collects platform info from the os package', async () => {
+ const platform = 'darwin';
+ const release = '10.14.1';
+
+ jest.spyOn(os, 'platform').mockImplementation(() => platform);
+ jest.spyOn(os, 'release').mockImplementation(() => release);
+
+ const metrics = await collector.collect();
+
+ expect(metrics.platform).toBe(platform);
+ expect(metrics.platformRelease).toBe(`${platform}-${release}`);
+ });
+
+ it('collects distribution info when platform is linux', async () => {
+ const platform = 'linux';
+
+ jest.spyOn(os, 'platform').mockImplementation(() => platform);
+
+ const metrics = await collector.collect();
+
+ expect(metrics.distro).toBe('distrib');
+ expect(metrics.distroRelease).toBe('distrib-release');
+ });
+
+ it('collects memory info from the os package', async () => {
+ const totalMemory = 1457886;
+ const freeMemory = 456786;
+
+ jest.spyOn(os, 'totalmem').mockImplementation(() => totalMemory);
+ jest.spyOn(os, 'freemem').mockImplementation(() => freeMemory);
+
+ const metrics = await collector.collect();
+
+ expect(metrics.memory.total_in_bytes).toBe(totalMemory);
+ expect(metrics.memory.free_in_bytes).toBe(freeMemory);
+ expect(metrics.memory.used_in_bytes).toBe(totalMemory - freeMemory);
+ });
+
+ it('collects uptime info from the os package', async () => {
+ const uptime = 325;
+
+ jest.spyOn(os, 'uptime').mockImplementation(() => uptime);
+
+ const metrics = await collector.collect();
+
+ expect(metrics.uptime_in_millis).toBe(uptime * 1000);
+ });
+
+ it('collects load info from the os package', async () => {
+ const oneMinLoad = 1;
+ const fiveMinLoad = 2;
+ const fifteenMinLoad = 3;
+
+ jest.spyOn(os, 'loadavg').mockImplementation(() => [oneMinLoad, fiveMinLoad, fifteenMinLoad]);
+
+ const metrics = await collector.collect();
+
+ expect(metrics.load).toEqual({
+ '1m': oneMinLoad,
+ '5m': fiveMinLoad,
+ '15m': fifteenMinLoad,
+ });
+ });
+});
diff --git a/src/core/server/metrics/collectors/os.ts b/src/core/server/metrics/collectors/os.ts
new file mode 100644
index 000000000000..d3d9bb0be86f
--- /dev/null
+++ b/src/core/server/metrics/collectors/os.ts
@@ -0,0 +1,60 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import os from 'os';
+import getosAsync, { LinuxOs } from 'getos';
+import { promisify } from 'util';
+import { OpsOsMetrics, MetricsCollector } from './types';
+
+const getos = promisify(getosAsync);
+
+export class OsMetricsCollector implements MetricsCollector<OpsOsMetrics> {
+ public async collect(): Promise<OpsOsMetrics> {
+ const platform = os.platform();
+ const load = os.loadavg();
+
+ const metrics: OpsOsMetrics = {
+ platform,
+ platformRelease: `${platform}-${os.release()}`,
+ load: {
+ '1m': load[0],
+ '5m': load[1],
+ '15m': load[2],
+ },
+ memory: {
+ total_in_bytes: os.totalmem(),
+ free_in_bytes: os.freemem(),
+ used_in_bytes: os.totalmem() - os.freemem(),
+ },
+ uptime_in_millis: os.uptime() * 1000,
+ };
+
+ if (platform === 'linux') {
+ try {
+ const distro = (await getos()) as LinuxOs;
+ metrics.distro = distro.dist;
+ metrics.distroRelease = `${distro.dist}-${distro.release}`;
+ } catch (e) {
+ // ignore errors
+ }
+ }
+
+ return metrics;
+ }
+}
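A hedged usage sketch for the collector above: it is stateless, so a single instance can be reused and polled on whatever interval the ops config dictates.

```ts
import { OsMetricsCollector } from './os';

const collector = new OsMetricsCollector();

const logOsMetrics = async () => {
  const metrics = await collector.collect();
  // e.g. 'linux', 0.42, 1457886 (values depend on the host)
  console.log(metrics.platform, metrics.load['1m'], metrics.memory.free_in_bytes);
};

void logOsMetrics();
```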
diff --git a/src/core/server/metrics/collectors/process.test.ts b/src/core/server/metrics/collectors/process.test.ts
new file mode 100644
index 000000000000..a437d799371f
--- /dev/null
+++ b/src/core/server/metrics/collectors/process.test.ts
@@ -0,0 +1,81 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import v8, { HeapInfo } from 'v8';
+import { ProcessMetricsCollector } from './process';
+
+describe('ProcessMetricsCollector', () => {
+ let collector: ProcessMetricsCollector;
+
+ beforeEach(() => {
+ collector = new ProcessMetricsCollector();
+ });
+
+ afterEach(() => {
+ jest.restoreAllMocks();
+ });
+
+ it('collects pid from the process', async () => {
+ const metrics = await collector.collect();
+
+ expect(metrics.pid).toEqual(process.pid);
+ });
+
+ it('collects event loop delay', async () => {
+ const metrics = await collector.collect();
+
+ expect(metrics.event_loop_delay).toBeGreaterThan(0);
+ });
+
+ it('collects uptime info from the process', async () => {
+ const uptime = 58986;
+ jest.spyOn(process, 'uptime').mockImplementation(() => uptime);
+
+ const metrics = await collector.collect();
+
+ expect(metrics.uptime_in_millis).toEqual(uptime * 1000);
+ });
+
+ it('collects memory info from the process', async () => {
+ const heapTotal = 58986;
+ const heapUsed = 4688;
+ const heapSizeLimit = 5788;
+ const rss = 5865;
+ jest.spyOn(process, 'memoryUsage').mockImplementation(() => ({
+ rss,
+ heapTotal,
+ heapUsed,
+ external: 0,
+ }));
+
+ jest.spyOn(v8, 'getHeapStatistics').mockImplementation(
+ () =>
+ ({
+ heap_size_limit: heapSizeLimit,
+ } as HeapInfo)
+ );
+
+ const metrics = await collector.collect();
+
+ expect(metrics.memory.heap.total_in_bytes).toEqual(heapTotal);
+ expect(metrics.memory.heap.used_in_bytes).toEqual(heapUsed);
+ expect(metrics.memory.heap.size_limit).toEqual(heapSizeLimit);
+ expect(metrics.memory.resident_set_size_in_bytes).toEqual(rss);
+ });
+});
diff --git a/src/core/server/metrics/collectors/process.ts b/src/core/server/metrics/collectors/process.ts
new file mode 100644
index 000000000000..aa68abaf74e4
--- /dev/null
+++ b/src/core/server/metrics/collectors/process.ts
@@ -0,0 +1,52 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import v8 from 'v8';
+import { Bench } from 'hoek';
+import { OpsProcessMetrics, MetricsCollector } from './types';
+
+export class ProcessMetricsCollector implements MetricsCollector<OpsProcessMetrics> {
+ public async collect(): Promise<OpsProcessMetrics> {
+ const heapStats = v8.getHeapStatistics();
+ const memoryUsage = process.memoryUsage();
+ const [eventLoopDelay] = await Promise.all([getEventLoopDelay()]);
+ return {
+ memory: {
+ heap: {
+ total_in_bytes: memoryUsage.heapTotal,
+ used_in_bytes: memoryUsage.heapUsed,
+ size_limit: heapStats.heap_size_limit,
+ },
+ resident_set_size_in_bytes: memoryUsage.rss,
+ },
+ pid: process.pid,
+ event_loop_delay: eventLoopDelay,
+ uptime_in_millis: process.uptime() * 1000,
+ };
+ }
+}
+
+const getEventLoopDelay = (): Promise<number> => {
+ const bench = new Bench();
+ return new Promise(resolve => {
+ setImmediate(() => {
+ return resolve(bench.elapsed());
+ });
+ });
+};
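The event-loop delay above is obtained by starting a Hoek `Bench` timer and resolving with the elapsed milliseconds once a `setImmediate` callback actually runs. A dependency-free equivalent, shown only for illustration:

```ts
// Record a high-resolution timestamp, schedule an immediate callback, and
// report how long the event loop took to get to it (in milliseconds, like
// Bench#elapsed()).
const measureEventLoopDelay = (): Promise<number> =>
  new Promise(resolve => {
    const start = process.hrtime.bigint();
    setImmediate(() => {
      const deltaNs = process.hrtime.bigint() - start;
      resolve(Number(deltaNs) / 1e6);
    });
  });

measureEventLoopDelay().then(delayMs => console.log(`event loop delay: ${delayMs} ms`));
```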
diff --git a/src/core/server/metrics/collectors/server.ts b/src/core/server/metrics/collectors/server.ts
new file mode 100644
index 000000000000..e46ac2f653df
--- /dev/null
+++ b/src/core/server/metrics/collectors/server.ts
@@ -0,0 +1,80 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+import { ResponseObject, Server as HapiServer } from 'hapi';
+import { OpsServerMetrics, MetricsCollector } from './types';
+
+interface ServerResponseTime {
+ count: number;
+ total: number;
+ max: number;
+}
+
+export class ServerMetricsCollector implements MetricsCollector<OpsServerMetrics> {
+ private readonly requests: OpsServerMetrics['requests'] = {
+ disconnects: 0,
+ total: 0,
+ statusCodes: {},
+ };
+ private readonly responseTimes: ServerResponseTime = {
+ count: 0,
+ total: 0,
+ max: 0,
+ };
+
+ constructor(private readonly server: HapiServer) {
+ this.server.ext('onRequest', (request, h) => {
+ this.requests.total++;
+ request.events.once('disconnect', () => {
+ this.requests.disconnects++;
+ });
+ return h.continue;
+ });
+ this.server.events.on('response', request => {
+ const statusCode = (request.response as ResponseObject)?.statusCode;
+ if (statusCode) {
+ if (!this.requests.statusCodes[statusCode]) {
+ this.requests.statusCodes[statusCode] = 0;
+ }
+ this.requests.statusCodes[statusCode]++;
+ }
+
+ const duration = Date.now() - request.info.received;
+ this.responseTimes.count++;
+ this.responseTimes.total += duration;
+ this.responseTimes.max = Math.max(this.responseTimes.max, duration);
+ });
+ }
+
+ public async collect(): Promise<OpsServerMetrics> {
+ const connections = await new Promise<number>(resolve => {
+ this.server.listener.getConnections((_, count) => {
+ resolve(count);
+ });
+ });
+
+ return {
+ requests: this.requests,
+ response_times: {
+ avg_in_millis: this.responseTimes.total / Math.max(this.responseTimes.count, 1),
+ max_in_millis: this.responseTimes.max,
+ },
+ concurrent_connections: connections,
+ };
+ }
+}
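A hedged wiring sketch for the server collector: it has to be constructed before the server starts so its `onRequest` extension and `response` listener see every request. The server and route below are hypothetical:

```ts
import { Server } from 'hapi';
import { ServerMetricsCollector } from './server';

const run = async () => {
  const server = new Server({ port: 0 });
  const collector = new ServerMetricsCollector(server);

  server.route({ method: 'GET', path: '/ping', handler: () => 'pong' });
  await server.start();

  const metrics = await collector.collect();
  // e.g. { requests: { total: 0, disconnects: 0, statusCodes: {} },
  //        response_times: { avg_in_millis: 0, max_in_millis: 0 },
  //        concurrent_connections: 0 }
  console.log(metrics);

  await server.stop();
};

void run();
```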
diff --git a/src/core/server/metrics/collectors/types.ts b/src/core/server/metrics/collectors/types.ts
new file mode 100644
index 000000000000..5a83bc70af3c
--- /dev/null
+++ b/src/core/server/metrics/collectors/types.ts
@@ -0,0 +1,110 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/** Base interface for all metrics gatherers */
+export interface MetricsCollector<T> {
+ collect(): Promise<T>;
+}
+
+/**
+ * Process related metrics
+ * @public
+ */
+export interface OpsProcessMetrics {
+ /** process memory usage */
+ memory: {
+ /** heap memory usage */
+ heap: {
+ /** total heap available */
+ total_in_bytes: number;
+ /** used heap */
+ used_in_bytes: number;
+ /** v8 heap size limit */
+ size_limit: number;
+ };
+ /** node rss */
+ resident_set_size_in_bytes: number;
+ };
+ /** node event loop delay */
+ event_loop_delay: number;
+ /** pid of the kibana process */
+ pid: number;
+ /** uptime of the kibana process */
+ uptime_in_millis: number;
+}
+
+/**
+ * OS related metrics
+ * @public
+ */
+export interface OpsOsMetrics {
+ /** The os platform */
+ platform: NodeJS.Platform;
+ /** The os platform release, prefixed by the platform name */
+ platformRelease: string;
+ /** The os distrib. Only present for linux platforms */
+ distro?: string;
+ /** The os distrib release, prefixed by the os distrib. Only present for linux platforms */
+ distroRelease?: string;
+ /** cpu load metrics */
+ load: {
+ /** load for last minute */
+ '1m': number;
+ /** load for last 5 minutes */
+ '5m': number;
+ /** load for last 15 minutes */
+ '15m': number;
+ };
+ /** system memory usage metrics */
+ memory: {
+ /** total memory available */
+ total_in_bytes: number;
+ /** current free memory */
+ free_in_bytes: number;
+ /** current used memory */
+ used_in_bytes: number;
+ };
+ /** the OS uptime */
+ uptime_in_millis: number;
+}
+
+/**
+ * server related metrics
+ * @public
+ */
+export interface OpsServerMetrics {
+ /** server response time stats */
+ response_times: {
+ /** average response time */
+ avg_in_millis: number;
+ /** maximum response time */
+ max_in_millis: number;
+ };
+ /** server requests stats */
+ requests: {
+ /** number of disconnected requests since startup */
+ disconnects: number;
+ /** total number of requests handled since startup */
+ total: number;
+ /** number of request handled per response status code */
+ statusCodes: Record<number, number>;
+ };
+ /** number of current concurrent connections to the server */
+ concurrent_connections: number;
+}
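To illustrate the base contract, a hedged sketch of a trivial gatherer implementing `MetricsCollector`; the `UptimeMetrics` shape and `UptimeCollector` class are hypothetical and exist only for the example:

```ts
import { MetricsCollector } from './types';

interface UptimeMetrics {
  uptime_in_millis: number;
}

// Any gatherer only needs an async collect() returning its metrics shape.
class UptimeCollector implements MetricsCollector<UptimeMetrics> {
  public async collect(): Promise<UptimeMetrics> {
    return { uptime_in_millis: process.uptime() * 1000 };
  }
}

new UptimeCollector().collect().then(({ uptime_in_millis }) => console.log(uptime_in_millis));
```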
diff --git a/src/core/server/metrics/index.ts b/src/core/server/metrics/index.ts
new file mode 100644
index 000000000000..fdcf637c0cd7
--- /dev/null
+++ b/src/core/server/metrics/index.ts
@@ -0,0 +1,29 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+export {
+ InternalMetricsServiceStart,
+ InternalMetricsServiceSetup,
+ MetricsServiceSetup,
+ MetricsServiceStart,
+ OpsMetrics,
+} from './types';
+export { OpsProcessMetrics, OpsServerMetrics, OpsOsMetrics } from './collectors';
+export { MetricsService } from './metrics_service';
+export { opsConfig } from './ops_config';
diff --git a/src/core/server/metrics/integration_tests/server_collector.test.ts b/src/core/server/metrics/integration_tests/server_collector.test.ts
new file mode 100644
index 000000000000..6baf95894b9b
--- /dev/null
+++ b/src/core/server/metrics/integration_tests/server_collector.test.ts
@@ -0,0 +1,203 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import { BehaviorSubject, Subject } from 'rxjs';
+import { take, filter } from 'rxjs/operators';
+import supertest from 'supertest';
+import { Server as HapiServer } from 'hapi';
+import { createHttpServer } from '../../http/test_utils';
+import { HttpService, IRouter } from '../../http';
+import { contextServiceMock } from '../../context/context_service.mock';
+import { ServerMetricsCollector } from '../collectors/server';
+
+const requestWaitDelay = 25;
+
+describe('ServerMetricsCollector', () => {
+ let server: HttpService;
+ let collector: ServerMetricsCollector;
+ let hapiServer: HapiServer;
+ let router: IRouter;
+
+ const delay = (ms: number) => new Promise(resolve => setTimeout(resolve, ms));
+ const sendGet = (path: string) => supertest(hapiServer.listener).get(path);
+
+ beforeEach(async () => {
+ server = createHttpServer();
+ const contextSetup = contextServiceMock.createSetupContract();
+ const httpSetup = await server.setup({ context: contextSetup });
+ hapiServer = httpSetup.server;
+ router = httpSetup.createRouter('/');
+ collector = new ServerMetricsCollector(hapiServer);
+ });
+
+ afterEach(async () => {
+ await server.stop();
+ });
+
+ it('collect requests infos', async () => {
+ router.get({ path: '/', validate: false }, async (ctx, req, res) => {
+ return res.ok({ body: '' });
+ });
+ await server.start();
+
+ let metrics = await collector.collect();
+
+ expect(metrics.requests).toEqual({
+ total: 0,
+ disconnects: 0,
+ statusCodes: {},
+ });
+
+ await sendGet('/');
+ await sendGet('/');
+ await sendGet('/not-found');
+
+ metrics = await collector.collect();
+
+ expect(metrics.requests).toEqual({
+ total: 3,
+ disconnects: 0,
+ statusCodes: {
+ '200': 2,
+ '404': 1,
+ },
+ });
+ });
+
+ it('collect disconnects requests infos', async () => {
+ const never = new Promise(resolve => undefined);
+ const hitSubject = new BehaviorSubject(0);
+
+ router.get({ path: '/', validate: false }, async (ctx, req, res) => {
+ return res.ok({ body: '' });
+ });
+ router.get({ path: '/disconnect', validate: false }, async (ctx, req, res) => {
+ hitSubject.next(hitSubject.value + 1);
+ await never;
+ return res.ok({ body: '' });
+ });
+ await server.start();
+
+ await sendGet('/');
+ const discoReq1 = sendGet('/disconnect').end();
+ const discoReq2 = sendGet('/disconnect').end();
+
+ await hitSubject
+ .pipe(
+ filter(count => count >= 2),
+ take(1)
+ )
+ .toPromise();
+
+ let metrics = await collector.collect();
+ expect(metrics.requests).toEqual(
+ expect.objectContaining({
+ total: 3,
+ disconnects: 0,
+ })
+ );
+
+ discoReq1.abort();
+ await delay(requestWaitDelay);
+
+ metrics = await collector.collect();
+ expect(metrics.requests).toEqual(
+ expect.objectContaining({
+ total: 3,
+ disconnects: 1,
+ })
+ );
+
+ discoReq2.abort();
+ await delay(requestWaitDelay);
+
+ metrics = await collector.collect();
+ expect(metrics.requests).toEqual(
+ expect.objectContaining({
+ total: 3,
+ disconnects: 2,
+ })
+ );
+ });
+
+ it('collect response times', async () => {
+ router.get({ path: '/no-delay', validate: false }, async (ctx, req, res) => {
+ return res.ok({ body: '' });
+ });
+ router.get({ path: '/500-ms', validate: false }, async (ctx, req, res) => {
+ await delay(500);
+ return res.ok({ body: '' });
+ });
+ router.get({ path: '/250-ms', validate: false }, async (ctx, req, res) => {
+ await delay(250);
+ return res.ok({ body: '' });
+ });
+ await server.start();
+
+ await Promise.all([sendGet('/no-delay'), sendGet('/250-ms')]);
+ let metrics = await collector.collect();
+
+ expect(metrics.response_times.avg_in_millis).toBeGreaterThanOrEqual(125);
+ expect(metrics.response_times.max_in_millis).toBeGreaterThanOrEqual(250);
+
+ await Promise.all([sendGet('/500-ms'), sendGet('/500-ms')]);
+ metrics = await collector.collect();
+
+ expect(metrics.response_times.avg_in_millis).toBeGreaterThanOrEqual(250);
+ expect(metrics.response_times.max_in_millis).toBeGreaterThanOrEqual(500);
+ });
+
+ it('collect connection count', async () => {
+ const waitSubject = new Subject();
+ const hitSubject = new BehaviorSubject(0);
+
+ router.get({ path: '/', validate: false }, async (ctx, req, res) => {
+ hitSubject.next(hitSubject.value + 1);
+ await waitSubject.pipe(take(1)).toPromise();
+ return res.ok({ body: '' });
+ });
+ await server.start();
+
+ const waitForHits = (hits: number) =>
+ hitSubject
+ .pipe(
+ filter(count => count >= hits),
+ take(1)
+ )
+ .toPromise();
+
+ let metrics = await collector.collect();
+ expect(metrics.concurrent_connections).toEqual(0);
+
+ sendGet('/').end(() => null);
+ await waitForHits(1);
+ metrics = await collector.collect();
+ expect(metrics.concurrent_connections).toEqual(1);
+
+ sendGet('/').end(() => null);
+ await waitForHits(2);
+ metrics = await collector.collect();
+ expect(metrics.concurrent_connections).toEqual(2);
+
+ waitSubject.next('go');
+ await delay(requestWaitDelay);
+ metrics = await collector.collect();
+ expect(metrics.concurrent_connections).toEqual(0);
+ });
+});
diff --git a/src/core/server/metrics/metrics_service.mock.ts b/src/core/server/metrics/metrics_service.mock.ts
new file mode 100644
index 000000000000..cc53a4e27d57
--- /dev/null
+++ b/src/core/server/metrics/metrics_service.mock.ts
@@ -0,0 +1,67 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import { MetricsService } from './metrics_service';
+import {
+ InternalMetricsServiceSetup,
+ InternalMetricsServiceStart,
+ MetricsServiceSetup,
+ MetricsServiceStart,
+} from './types';
+
+const createSetupContractMock = () => {
+ const setupContract: jest.Mocked<MetricsServiceSetup> = {
+ getOpsMetrics$: jest.fn(),
+ };
+ return setupContract;
+};
+
+const createInternalSetupContractMock = () => {
+ const setupContract: jest.Mocked<InternalMetricsServiceSetup> = createSetupContractMock();
+ return setupContract;
+};
+
+const createStartContractMock = () => {
+ const startContract: jest.Mocked<MetricsServiceStart> = {};
+ return startContract;
+};
+
+const createInternalStartContractMock = () => {
+ const startContract: jest.Mocked<InternalMetricsServiceStart> = createStartContractMock();
+ return startContract;
+};
+
+type MetricsServiceContract = PublicMethodsOf<MetricsService>;
+
+const createMock = () => {
+ const mocked: jest.Mocked<MetricsServiceContract> = {
+ setup: jest.fn().mockReturnValue(createInternalSetupContractMock()),
+ start: jest.fn().mockReturnValue(createInternalStartContractMock()),
+ stop: jest.fn(),
+ };
+ return mocked;
+};
+
+export const metricsServiceMock = {
+ create: createMock,
+ createSetupContract: createSetupContractMock,
+ createStartContract: createStartContractMock,
+ createInternalSetupContract: createInternalSetupContractMock,
+ createInternalStartContract: createInternalStartContractMock,
+};
diff --git a/src/core/server/metrics/metrics_service.test.mocks.ts b/src/core/server/metrics/metrics_service.test.mocks.ts
new file mode 100644
index 000000000000..8e9177528304
--- /dev/null
+++ b/src/core/server/metrics/metrics_service.test.mocks.ts
@@ -0,0 +1,25 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+export const mockOpsCollector = {
+ collect: jest.fn(),
+};
+jest.doMock('./ops_metrics_collector', () => ({
+ OpsMetricsCollector: jest.fn().mockImplementation(() => mockOpsCollector),
+}));
diff --git a/src/core/server/metrics/metrics_service.test.ts b/src/core/server/metrics/metrics_service.test.ts
new file mode 100644
index 000000000000..10d6761adbe7
--- /dev/null
+++ b/src/core/server/metrics/metrics_service.test.ts
@@ -0,0 +1,134 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import moment from 'moment';
+import { mockOpsCollector } from './metrics_service.test.mocks';
+import { MetricsService } from './metrics_service';
+import { mockCoreContext } from '../core_context.mock';
+import { configServiceMock } from '../config/config_service.mock';
+import { httpServiceMock } from '../http/http_service.mock';
+import { take } from 'rxjs/operators';
+
+const testInterval = 100;
+
+const dummyMetrics = { metricA: 'value', metricB: 'otherValue' };
+
+describe('MetricsService', () => {
+ const httpMock = httpServiceMock.createSetupContract();
+ let metricsService: MetricsService;
+
+ beforeEach(() => {
+ jest.useFakeTimers();
+
+ const configService = configServiceMock.create({
+ atPath: { interval: moment.duration(testInterval) },
+ });
+ const coreContext = mockCoreContext.create({ configService });
+ metricsService = new MetricsService(coreContext);
+ });
+
+ afterEach(() => {
+ jest.clearAllMocks();
+ jest.clearAllTimers();
+ });
+
+ describe('#start', () => {
+ it('invokes setInterval with the configured interval', async () => {
+ await metricsService.setup({ http: httpMock });
+ await metricsService.start();
+
+ expect(setInterval).toHaveBeenCalledTimes(1);
+ expect(setInterval).toHaveBeenCalledWith(expect.any(Function), testInterval);
+ });
+
+ it('emits the metrics at start', async () => {
+ mockOpsCollector.collect.mockResolvedValue(dummyMetrics);
+
+ const { getOpsMetrics$ } = await metricsService.setup({
+ http: httpMock,
+ });
+
+ await metricsService.start();
+
+ expect(mockOpsCollector.collect).toHaveBeenCalledTimes(1);
+ expect(
+ await getOpsMetrics$()
+ .pipe(take(1))
+ .toPromise()
+ ).toEqual(dummyMetrics);
+ });
+
+ it('collects the metrics at every interval', async () => {
+ mockOpsCollector.collect.mockResolvedValue(dummyMetrics);
+
+ await metricsService.setup({ http: httpMock });
+
+ await metricsService.start();
+
+ expect(mockOpsCollector.collect).toHaveBeenCalledTimes(1);
+
+ jest.advanceTimersByTime(testInterval);
+ expect(mockOpsCollector.collect).toHaveBeenCalledTimes(2);
+
+ jest.advanceTimersByTime(testInterval);
+ expect(mockOpsCollector.collect).toHaveBeenCalledTimes(3);
+ });
+
+ it('throws when called before setup', async () => {
+ await expect(metricsService.start()).rejects.toThrowErrorMatchingInlineSnapshot(
+ `"#setup() needs to be run first"`
+ );
+ });
+ });
+
+ describe('#stop', () => {
+ it('stops the metrics interval', async () => {
+ const { getOpsMetrics$ } = await metricsService.setup({ http: httpMock });
+ await metricsService.start();
+
+ expect(mockOpsCollector.collect).toHaveBeenCalledTimes(1);
+
+ jest.advanceTimersByTime(testInterval);
+ expect(mockOpsCollector.collect).toHaveBeenCalledTimes(2);
+
+ await metricsService.stop();
+ jest.advanceTimersByTime(10 * testInterval);
+ expect(mockOpsCollector.collect).toHaveBeenCalledTimes(2);
+
+ getOpsMetrics$().subscribe({ complete: () => {} });
+ });
+
+ it('completes the metrics observable', async () => {
+ const { getOpsMetrics$ } = await metricsService.setup({ http: httpMock });
+ await metricsService.start();
+
+ let completed = false;
+
+ getOpsMetrics$().subscribe({
+ complete: () => {
+ completed = true;
+ },
+ });
+
+ await metricsService.stop();
+
+ expect(completed).toEqual(true);
+ });
+ });
+});
diff --git a/src/core/server/metrics/metrics_service.ts b/src/core/server/metrics/metrics_service.ts
new file mode 100644
index 000000000000..1aed89a4aad6
--- /dev/null
+++ b/src/core/server/metrics/metrics_service.ts
@@ -0,0 +1,86 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import { ReplaySubject } from 'rxjs';
+import { first, shareReplay } from 'rxjs/operators';
+import { CoreService } from '../../types';
+import { CoreContext } from '../core_context';
+import { Logger } from '../logging';
+import { InternalHttpServiceSetup } from '../http';
+import { InternalMetricsServiceSetup, InternalMetricsServiceStart, OpsMetrics } from './types';
+import { OpsMetricsCollector } from './ops_metrics_collector';
+import { opsConfig, OpsConfigType } from './ops_config';
+
+interface MetricsServiceSetupDeps {
+ http: InternalHttpServiceSetup;
+}
+
+/** @internal */
+export class MetricsService
+ implements CoreService<InternalMetricsServiceSetup, InternalMetricsServiceStart> {
+ private readonly logger: Logger;
+ private metricsCollector?: OpsMetricsCollector;
+ private collectInterval?: NodeJS.Timeout;
+ private metrics$ = new ReplaySubject<OpsMetrics>(1);
+
+ constructor(private readonly coreContext: CoreContext) {
+ this.logger = coreContext.logger.get('metrics');
+ }
+
+ public async setup({ http }: MetricsServiceSetupDeps): Promise<InternalMetricsServiceSetup> {
+ this.metricsCollector = new OpsMetricsCollector(http.server);
+
+ const metricsObservable = this.metrics$.pipe(shareReplay(1));
+
+ return {
+ getOpsMetrics$: () => metricsObservable,
+ };
+ }
+
+ public async start(): Promise<InternalMetricsServiceStart> {
+ if (!this.metricsCollector) {
+ throw new Error('#setup() needs to be run first');
+ }
+ const config = await this.coreContext.configService
+ .atPath(opsConfig.path)
+ .pipe(first())
+ .toPromise();
+
+ await this.refreshMetrics();
+
+ this.collectInterval = setInterval(() => {
+ this.refreshMetrics();
+ }, config.interval.asMilliseconds());
+
+ return {};
+ }
+
+ private async refreshMetrics() {
+ this.logger.debug('Refreshing metrics');
+ const metrics = await this.metricsCollector!.collect();
+ this.metrics$.next(metrics);
+ }
+
+ public async stop() {
+ if (this.collectInterval) {
+ clearInterval(this.collectInterval);
+ }
+ this.metrics$.complete();
+ }
+}
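A hedged consumer sketch for the setup contract returned above: the underlying `ReplaySubject` replays the latest sample, so a one-off read can simply take the first emission.

```ts
import { first } from 'rxjs/operators';
import { MetricsServiceSetup } from './types';

export const logCurrentMetrics = async (metrics: MetricsServiceSetup) => {
  const ops = await metrics
    .getOpsMetrics$()
    .pipe(first())
    .toPromise();

  console.log(`event loop delay: ${ops.process.event_loop_delay} ms`);
  console.log(`requests handled: ${ops.requests.total}`);
};
```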
diff --git a/src/legacy/core_plugins/kibana/public/home/index.ts b/src/core/server/metrics/ops_config.ts
similarity index 69%
rename from src/legacy/core_plugins/kibana/public/home/index.ts
rename to src/core/server/metrics/ops_config.ts
index 74b6da33c654..bd6ae5cc5474 100644
--- a/src/legacy/core_plugins/kibana/public/home/index.ts
+++ b/src/core/server/metrics/ops_config.ts
@@ -17,13 +17,13 @@
* under the License.
*/
-import { PluginInitializerContext } from 'kibana/public';
-import { npSetup, npStart } from 'ui/new_platform';
-import { HomePlugin } from './plugin';
+import { schema, TypeOf } from '@kbn/config-schema';
-const instance = new HomePlugin({
- env: npSetup.plugins.kibanaLegacy.env,
-} as PluginInitializerContext);
-instance.setup(npSetup.core, npSetup.plugins);
+export const opsConfig = {
+ path: 'ops',
+ schema: schema.object({
+ interval: schema.duration({ defaultValue: '5s' }),
+ }),
+};
-instance.start(npStart.core, npStart.plugins);
+export type OpsConfigType = TypeOf<typeof opsConfig.schema>;
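Assuming the usual `@kbn/config-schema` semantics (defaults applied by `validate`, duration strings parsed into `moment.Duration` values), the schema above behaves roughly like this sketch:

```ts
import { opsConfig, OpsConfigType } from './ops_config';

const defaults: OpsConfigType = opsConfig.schema.validate({});
console.log(defaults.interval.asMilliseconds()); // 5000 (the 5s default)

const custom: OpsConfigType = opsConfig.schema.validate({ interval: '30s' });
console.log(custom.interval.asSeconds()); // 30
```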
diff --git a/src/core/server/metrics/ops_metrics_collector.test.mocks.ts b/src/core/server/metrics/ops_metrics_collector.test.mocks.ts
new file mode 100644
index 000000000000..8265796d5797
--- /dev/null
+++ b/src/core/server/metrics/ops_metrics_collector.test.mocks.ts
@@ -0,0 +1,39 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+export const mockOsCollector = {
+ collect: jest.fn(),
+};
+jest.doMock('./collectors/os', () => ({
+ OsMetricsCollector: jest.fn().mockImplementation(() => mockOsCollector),
+}));
+
+export const mockProcessCollector = {
+ collect: jest.fn(),
+};
+jest.doMock('./collectors/process', () => ({
+ ProcessMetricsCollector: jest.fn().mockImplementation(() => mockProcessCollector),
+}));
+
+export const mockServerCollector = {
+ collect: jest.fn(),
+};
+jest.doMock('./collectors/server', () => ({
+ ServerMetricsCollector: jest.fn().mockImplementation(() => mockServerCollector),
+}));
diff --git a/src/core/server/metrics/ops_metrics_collector.test.ts b/src/core/server/metrics/ops_metrics_collector.test.ts
new file mode 100644
index 000000000000..04302a195fb6
--- /dev/null
+++ b/src/core/server/metrics/ops_metrics_collector.test.ts
@@ -0,0 +1,59 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import {
+ mockOsCollector,
+ mockProcessCollector,
+ mockServerCollector,
+} from './ops_metrics_collector.test.mocks';
+import { httpServiceMock } from '../http/http_service.mock';
+import { OpsMetricsCollector } from './ops_metrics_collector';
+
+describe('OpsMetricsCollector', () => {
+ let collector: OpsMetricsCollector;
+
+ beforeEach(() => {
+ const hapiServer = httpServiceMock.createSetupContract().server;
+ collector = new OpsMetricsCollector(hapiServer);
+
+ mockOsCollector.collect.mockResolvedValue('osMetrics');
+ });
+
+ it('gathers metrics from the underlying collectors', async () => {
+ mockOsCollector.collect.mockResolvedValue('osMetrics');
+ mockProcessCollector.collect.mockResolvedValue('processMetrics');
+ mockServerCollector.collect.mockResolvedValue({
+ requests: 'serverRequestsMetrics',
+ response_times: 'serverTimingMetrics',
+ });
+
+ const metrics = await collector.collect();
+
+ expect(mockOsCollector.collect).toHaveBeenCalledTimes(1);
+ expect(mockProcessCollector.collect).toHaveBeenCalledTimes(1);
+ expect(mockServerCollector.collect).toHaveBeenCalledTimes(1);
+
+ expect(metrics).toEqual({
+ process: 'processMetrics',
+ os: 'osMetrics',
+ requests: 'serverRequestsMetrics',
+ response_times: 'serverTimingMetrics',
+ });
+ });
+});
diff --git a/src/core/server/metrics/ops_metrics_collector.ts b/src/core/server/metrics/ops_metrics_collector.ts
new file mode 100644
index 000000000000..04344f21f57f
--- /dev/null
+++ b/src/core/server/metrics/ops_metrics_collector.ts
@@ -0,0 +1,52 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import { Server as HapiServer } from 'hapi';
+import {
+ ProcessMetricsCollector,
+ OsMetricsCollector,
+ ServerMetricsCollector,
+ MetricsCollector,
+} from './collectors';
+import { OpsMetrics } from './types';
+
+export class OpsMetricsCollector implements MetricsCollector<OpsMetrics> {
+ private readonly processCollector: ProcessMetricsCollector;
+ private readonly osCollector: OsMetricsCollector;
+ private readonly serverCollector: ServerMetricsCollector;
+
+ constructor(server: HapiServer) {
+ this.processCollector = new ProcessMetricsCollector();
+ this.osCollector = new OsMetricsCollector();
+ this.serverCollector = new ServerMetricsCollector(server);
+ }
+
+ public async collect(): Promise<OpsMetrics> {
+ const [process, os, server] = await Promise.all([
+ this.processCollector.collect(),
+ this.osCollector.collect(),
+ this.serverCollector.collect(),
+ ]);
+ return {
+ process,
+ os,
+ ...server,
+ };
+ }
+}
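For orientation, the unit test above exercises the collector the same way the metrics service does: construct it with the underlying hapi server and await `collect()`. A minimal standalone sketch (the hapi `Server` instance here is illustrative; in core the http service owns it):

```ts
import { Server } from 'hapi';
import { OpsMetricsCollector } from './ops_metrics_collector';

async function logOpsMetrics() {
  // Illustrative hapi server; core passes in the server owned by the http service.
  const hapiServer = new Server({ port: 5601 });
  const collector = new OpsMetricsCollector(hapiServer);

  // The three collectors run in parallel; the server metrics (requests,
  // response_times, concurrent_connections) are spread onto the top level.
  const metrics = await collector.collect();
  // eslint-disable-next-line no-console
  console.log(metrics.process.pid, metrics.os.uptime_in_millis, metrics.requests.total);
}
```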
diff --git a/src/core/server/metrics/types.ts b/src/core/server/metrics/types.ts
new file mode 100644
index 000000000000..5c8f18fff380
--- /dev/null
+++ b/src/core/server/metrics/types.ts
@@ -0,0 +1,66 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import { Observable } from 'rxjs';
+import { OpsProcessMetrics, OpsOsMetrics, OpsServerMetrics } from './collectors';
+
+/**
+ * APIs to retrieve metrics gathered and exposed by the core platform.
+ *
+ * @public
+ */
+export interface MetricsServiceSetup {
+ /**
+ * Retrieve an observable emitting the {@link OpsMetrics} gathered.
+ * The observable will emit an initial value during core's `start` phase, and a new value every fixed interval of time,
+ * based on the `ops.interval` configuration property.
+ *
+ * @example
+ * ```ts
+ * core.metrics.getOpsMetrics$().subscribe(metrics => {
+ * // do something with the metrics
+ * })
+ * ```
+ */
+ getOpsMetrics$: () => Observable<OpsMetrics>;
+}
+// eslint-disable-next-line @typescript-eslint/no-empty-interface
+export interface MetricsServiceStart {}
+
+export type InternalMetricsServiceSetup = MetricsServiceSetup;
+export type InternalMetricsServiceStart = MetricsServiceStart;
+
+/**
+ * Regroups metrics gathered by all the collectors.
+ * This contains metrics about the os/runtime, the kibana process and the http server.
+ *
+ * @public
+ */
+export interface OpsMetrics {
+ /** Process related metrics */
+ process: OpsProcessMetrics;
+ /** OS related metrics */
+ os: OpsOsMetrics;
+ /** server response time stats */
+ response_times: OpsServerMetrics['response_times'];
+ /** server requests stats */
+ requests: OpsServerMetrics['requests'];
+ /** number of current concurrent connections to the server */
+ concurrent_connections: OpsServerMetrics['concurrent_connections'];
+}
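As a consumer-side sketch of the `MetricsServiceSetup` contract above (in this changeset the contract is only wired into `InternalCoreSetup`, so direct plugin access is an assumption):

```ts
import { first } from 'rxjs/operators';
import { MetricsServiceSetup, OpsMetrics } from './types';

// Wait for the next emission; an initial value is emitted during core's `start`
// phase, then a new value on every ops interval (see the doc comment above).
export async function logEventLoopDelay(metrics: MetricsServiceSetup): Promise<void> {
  const ops: OpsMetrics = await metrics
    .getOpsMetrics$()
    .pipe(first())
    .toPromise();
  // eslint-disable-next-line no-console
  console.log(`event loop delay: ${ops.process.event_loop_delay}ms`);
}
```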
diff --git a/src/core/server/mocks.ts b/src/core/server/mocks.ts
index 96b28ab5827e..037f3bbed67e 100644
--- a/src/core/server/mocks.ts
+++ b/src/core/server/mocks.ts
@@ -30,6 +30,8 @@ import { uiSettingsServiceMock } from './ui_settings/ui_settings_service.mock';
import { SharedGlobalConfig } from './plugins';
import { InternalCoreSetup, InternalCoreStart } from './internal_types';
import { capabilitiesServiceMock } from './capabilities/capabilities_service.mock';
+import { metricsServiceMock } from './metrics/metrics_service.mock';
+import { uuidServiceMock } from './uuid/uuid_service.mock';
export { httpServerMock } from './http/http_server.mocks';
export { sessionStorageMock } from './http/cookie_session_storage.mocks';
@@ -40,7 +42,7 @@ export { loggingServiceMock } from './logging/logging_service.mock';
export { savedObjectsRepositoryMock } from './saved_objects/service/lib/repository.mock';
export { typeRegistryMock as savedObjectsTypeRegistryMock } from './saved_objects/saved_objects_type_registry.mock';
export { uiSettingsServiceMock } from './ui_settings/ui_settings_service.mock';
-import { uuidServiceMock } from './uuid/uuid_service.mock';
+export { metricsServiceMock } from './metrics/metrics_service.mock';
export function pluginInitializerContextConfigMock<T>(config: T) {
const globalConfig: SharedGlobalConfig = {
@@ -153,6 +155,7 @@ function createInternalCoreSetupMock() {
uiSettings: uiSettingsServiceMock.createSetupContract(),
savedObjects: savedObjectsServiceMock.createInternalSetupContract(),
uuid: uuidServiceMock.createSetupContract(),
+ metrics: metricsServiceMock.createInternalSetupContract(),
};
return setupDeps;
}
diff --git a/src/core/server/plugins/plugin_context.ts b/src/core/server/plugins/plugin_context.ts
index a8a16713f69a..f2a44e9f78d4 100644
--- a/src/core/server/plugins/plugin_context.ts
+++ b/src/core/server/plugins/plugin_context.ts
@@ -170,6 +170,7 @@ export function createPluginSetupContext(
setClientFactoryProvider: deps.savedObjects.setClientFactoryProvider,
addClientWrapper: deps.savedObjects.addClientWrapper,
registerType: deps.savedObjects.registerType,
+ getImportExportObjectLimit: deps.savedObjects.getImportExportObjectLimit,
},
uiSettings: {
register: deps.uiSettings.register,
diff --git a/src/core/server/saved_objects/export/get_sorted_objects_for_export.test.ts b/src/core/server/saved_objects/export/get_sorted_objects_for_export.test.ts
index 1088478add13..32485f461f59 100644
--- a/src/core/server/saved_objects/export/get_sorted_objects_for_export.test.ts
+++ b/src/core/server/saved_objects/export/get_sorted_objects_for_export.test.ts
@@ -17,7 +17,7 @@
* under the License.
*/
-import { getSortedObjectsForExport } from './get_sorted_objects_for_export';
+import { exportSavedObjectsToStream } from './get_sorted_objects_for_export';
import { savedObjectsClientMock } from '../service/saved_objects_client.mock';
import { Readable } from 'stream';
import { createPromiseFromStreams, createConcatStream } from '../../../../legacy/utils/streams';
@@ -65,7 +65,7 @@ describe('getSortedObjectsForExport()', () => {
per_page: 1,
page: 0,
});
- const exportStream = await getSortedObjectsForExport({
+ const exportStream = await exportSavedObjectsToStream({
savedObjectsClient,
exportSizeLimit: 500,
types: ['index-pattern', 'search'],
@@ -151,7 +151,7 @@ describe('getSortedObjectsForExport()', () => {
per_page: 1,
page: 0,
});
- const exportStream = await getSortedObjectsForExport({
+ const exportStream = await exportSavedObjectsToStream({
savedObjectsClient,
exportSizeLimit: 500,
types: ['index-pattern', 'search'],
@@ -210,7 +210,7 @@ describe('getSortedObjectsForExport()', () => {
per_page: 1,
page: 0,
});
- const exportStream = await getSortedObjectsForExport({
+ const exportStream = await exportSavedObjectsToStream({
savedObjectsClient,
exportSizeLimit: 500,
types: ['index-pattern', 'search'],
@@ -297,7 +297,7 @@ describe('getSortedObjectsForExport()', () => {
per_page: 1,
page: 0,
});
- const exportStream = await getSortedObjectsForExport({
+ const exportStream = await exportSavedObjectsToStream({
savedObjectsClient,
exportSizeLimit: 500,
types: ['index-pattern', 'search'],
@@ -385,7 +385,7 @@ describe('getSortedObjectsForExport()', () => {
page: 0,
});
await expect(
- getSortedObjectsForExport({
+ exportSavedObjectsToStream({
savedObjectsClient,
exportSizeLimit: 1,
types: ['index-pattern', 'search'],
@@ -425,7 +425,7 @@ describe('getSortedObjectsForExport()', () => {
},
],
});
- const exportStream = await getSortedObjectsForExport({
+ const exportStream = await exportSavedObjectsToStream({
exportSizeLimit: 10000,
savedObjectsClient,
types: ['index-pattern'],
@@ -489,7 +489,7 @@ describe('getSortedObjectsForExport()', () => {
},
],
});
- const exportStream = await getSortedObjectsForExport({
+ const exportStream = await exportSavedObjectsToStream({
exportSizeLimit: 10000,
savedObjectsClient,
objects: [
@@ -587,7 +587,7 @@ describe('getSortedObjectsForExport()', () => {
},
],
});
- const exportStream = await getSortedObjectsForExport({
+ const exportStream = await exportSavedObjectsToStream({
exportSizeLimit: 10000,
savedObjectsClient,
objects: [
@@ -681,7 +681,7 @@ describe('getSortedObjectsForExport()', () => {
},
],
};
- await expect(getSortedObjectsForExport(exportOpts)).rejects.toThrowErrorMatchingInlineSnapshot(
+ await expect(exportSavedObjectsToStream(exportOpts)).rejects.toThrowErrorMatchingInlineSnapshot(
`"Can't export more than 1 objects"`
);
});
@@ -694,7 +694,7 @@ describe('getSortedObjectsForExport()', () => {
objects: undefined,
};
- expect(getSortedObjectsForExport(exportOpts)).rejects.toThrowErrorMatchingInlineSnapshot(
+ expect(exportSavedObjectsToStream(exportOpts)).rejects.toThrowErrorMatchingInlineSnapshot(
`"Either \`type\` or \`objects\` are required."`
);
});
@@ -707,7 +707,7 @@ describe('getSortedObjectsForExport()', () => {
search: 'foo',
};
- expect(getSortedObjectsForExport(exportOpts)).rejects.toThrowErrorMatchingInlineSnapshot(
+ expect(exportSavedObjectsToStream(exportOpts)).rejects.toThrowErrorMatchingInlineSnapshot(
`"Can't specify both \\"search\\" and \\"objects\\" properties when exporting"`
);
});
diff --git a/src/core/server/saved_objects/export/get_sorted_objects_for_export.ts b/src/core/server/saved_objects/export/get_sorted_objects_for_export.ts
index 4b4cf1146aca..a703c9f9fbd9 100644
--- a/src/core/server/saved_objects/export/get_sorted_objects_for_export.ts
+++ b/src/core/server/saved_objects/export/get_sorted_objects_for_export.ts
@@ -124,7 +124,13 @@ async function fetchObjectsToExport({
}
}
-export async function getSortedObjectsForExport({
+/**
+ * Generates a sorted saved object stream to be used for export.
+ * See the {@link SavedObjectsExportOptions | options} for more detailed information.
+ *
+ * @public
+ */
+export async function exportSavedObjectsToStream({
types,
objects,
search,
diff --git a/src/core/server/saved_objects/export/index.ts b/src/core/server/saved_objects/export/index.ts
index 7533b8e50003..37824cceb18c 100644
--- a/src/core/server/saved_objects/export/index.ts
+++ b/src/core/server/saved_objects/export/index.ts
@@ -18,7 +18,7 @@
*/
export {
- getSortedObjectsForExport,
+ exportSavedObjectsToStream,
SavedObjectsExportOptions,
SavedObjectsExportResultDetails,
} from './get_sorted_objects_for_export';
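A hedged usage sketch of the renamed export API, draining the resulting stream with the same legacy stream helpers the export test earlier in this diff imports (call options mirror the test fixtures; import paths are illustrative):

```ts
import { SavedObjectsClientContract } from '../types';
import { exportSavedObjectsToStream } from './get_sorted_objects_for_export';
import {
  createPromiseFromStreams,
  createConcatStream,
} from '../../../../legacy/utils/streams';

export async function exportIndexPatterns(savedObjectsClient: SavedObjectsClientContract) {
  const exportStream = await exportSavedObjectsToStream({
    savedObjectsClient,
    exportSizeLimit: 500,
    types: ['index-pattern'],
  });
  // Collect the ordered saved objects (plus the trailing export details entry,
  // unless excludeExportDetails is set) into an array.
  return createPromiseFromStreams([exportStream, createConcatStream([])]);
}
```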
diff --git a/src/core/server/saved_objects/import/import_saved_objects.test.ts b/src/core/server/saved_objects/import/import_saved_objects.test.ts
index f0719cbf4c82..b43e5063c13e 100644
--- a/src/core/server/saved_objects/import/import_saved_objects.test.ts
+++ b/src/core/server/saved_objects/import/import_saved_objects.test.ts
@@ -19,7 +19,7 @@
import { Readable } from 'stream';
import { SavedObject } from '../types';
-import { importSavedObjects } from './import_saved_objects';
+import { importSavedObjectsFromStream } from './import_saved_objects';
import { savedObjectsClientMock } from '../../mocks';
const emptyResponse = {
@@ -76,7 +76,7 @@ describe('importSavedObjects()', () => {
this.push(null);
},
});
- const result = await importSavedObjects({
+ const result = await importSavedObjectsFromStream({
readStream,
objectLimit: 1,
overwrite: false,
@@ -103,7 +103,7 @@ describe('importSavedObjects()', () => {
savedObjectsClient.bulkCreate.mockResolvedValue({
saved_objects: savedObjects,
});
- const result = await importSavedObjects({
+ const result = await importSavedObjectsFromStream({
readStream,
objectLimit: 4,
overwrite: false,
@@ -186,7 +186,7 @@ describe('importSavedObjects()', () => {
savedObjectsClient.bulkCreate.mockResolvedValue({
saved_objects: savedObjects,
});
- const result = await importSavedObjects({
+ const result = await importSavedObjectsFromStream({
readStream,
objectLimit: 4,
overwrite: false,
@@ -270,7 +270,7 @@ describe('importSavedObjects()', () => {
savedObjectsClient.bulkCreate.mockResolvedValue({
saved_objects: savedObjects,
});
- const result = await importSavedObjects({
+ const result = await importSavedObjectsFromStream({
readStream,
objectLimit: 4,
overwrite: true,
@@ -362,7 +362,7 @@ describe('importSavedObjects()', () => {
references: [],
})),
});
- const result = await importSavedObjects({
+ const result = await importSavedObjectsFromStream({
readStream,
objectLimit: 4,
overwrite: false,
@@ -460,7 +460,7 @@ describe('importSavedObjects()', () => {
},
],
});
- const result = await importSavedObjects({
+ const result = await importSavedObjectsFromStream({
readStream,
objectLimit: 4,
overwrite: false,
@@ -536,7 +536,7 @@ describe('importSavedObjects()', () => {
savedObjectsClient.bulkCreate.mockResolvedValue({
saved_objects: savedObjects,
});
- const result = await importSavedObjects({
+ const result = await importSavedObjectsFromStream({
readStream,
objectLimit: 5,
overwrite: false,
diff --git a/src/core/server/saved_objects/import/import_saved_objects.ts b/src/core/server/saved_objects/import/import_saved_objects.ts
index ef3b4a214c2c..cb1d70e5c8dc 100644
--- a/src/core/server/saved_objects/import/import_saved_objects.ts
+++ b/src/core/server/saved_objects/import/import_saved_objects.ts
@@ -26,7 +26,13 @@ import {
} from './types';
import { validateReferences } from './validate_references';
-export async function importSavedObjects({
+/**
+ * Import saved objects from the given stream. See the {@link SavedObjectsImportOptions | options} for more
+ * detailed information.
+ *
+ * @public
+ */
+export async function importSavedObjectsFromStream({
readStream,
objectLimit,
overwrite,
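A small caller sketch for the renamed import API; the options mirror the test fixtures above, and the response fields (`success`, `successCount`, `errors`) are the `SavedObjectsImportResponse` shape assumed here:

```ts
import { Readable } from 'stream';
import { SavedObjectsClientContract } from '../types';
import { importSavedObjectsFromStream } from './import_saved_objects';

export async function importFromNdjsonStream(
  savedObjectsClient: SavedObjectsClientContract,
  readStream: Readable
) {
  const result = await importSavedObjectsFromStream({
    readStream,
    objectLimit: 10000,
    overwrite: false,
    savedObjectsClient,
    supportedTypes: ['index-pattern', 'search', 'dashboard', 'visualization'],
  });
  // `success` is false when any object failed; failures are listed in `errors`.
  return result.success ? result.successCount : result.errors;
}
```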
diff --git a/src/core/server/saved_objects/import/index.ts b/src/core/server/saved_objects/import/index.ts
index 95fa8aa192f3..e268e970b94a 100644
--- a/src/core/server/saved_objects/import/index.ts
+++ b/src/core/server/saved_objects/import/index.ts
@@ -17,8 +17,8 @@
* under the License.
*/
-export { importSavedObjects } from './import_saved_objects';
-export { resolveImportErrors } from './resolve_import_errors';
+export { importSavedObjectsFromStream } from './import_saved_objects';
+export { resolveSavedObjectsImportErrors } from './resolve_import_errors';
export {
SavedObjectsImportResponse,
SavedObjectsImportError,
diff --git a/src/core/server/saved_objects/import/resolve_import_errors.test.ts b/src/core/server/saved_objects/import/resolve_import_errors.test.ts
index c522d76f1ff0..2c6d89e0a0a4 100644
--- a/src/core/server/saved_objects/import/resolve_import_errors.test.ts
+++ b/src/core/server/saved_objects/import/resolve_import_errors.test.ts
@@ -19,7 +19,7 @@
import { Readable } from 'stream';
import { SavedObject } from '../types';
-import { resolveImportErrors } from './resolve_import_errors';
+import { resolveSavedObjectsImportErrors } from './resolve_import_errors';
import { savedObjectsClientMock } from '../../mocks';
describe('resolveImportErrors()', () => {
@@ -80,7 +80,7 @@ describe('resolveImportErrors()', () => {
savedObjectsClient.bulkCreate.mockResolvedValue({
saved_objects: [],
});
- const result = await resolveImportErrors({
+ const result = await resolveSavedObjectsImportErrors({
readStream,
objectLimit: 4,
retries: [],
@@ -107,7 +107,7 @@ describe('resolveImportErrors()', () => {
savedObjectsClient.bulkCreate.mockResolvedValueOnce({
saved_objects: savedObjects.filter(obj => obj.type === 'visualization' && obj.id === '3'),
});
- const result = await resolveImportErrors({
+ const result = await resolveSavedObjectsImportErrors({
readStream,
objectLimit: 4,
retries: [
@@ -168,7 +168,7 @@ describe('resolveImportErrors()', () => {
savedObjectsClient.bulkCreate.mockResolvedValue({
saved_objects: savedObjects.filter(obj => obj.type === 'index-pattern' && obj.id === '1'),
});
- const result = await resolveImportErrors({
+ const result = await resolveSavedObjectsImportErrors({
readStream,
objectLimit: 4,
retries: [
@@ -230,7 +230,7 @@ describe('resolveImportErrors()', () => {
savedObjectsClient.bulkCreate.mockResolvedValue({
saved_objects: savedObjects.filter(obj => obj.type === 'dashboard' && obj.id === '4'),
});
- const result = await resolveImportErrors({
+ const result = await resolveSavedObjectsImportErrors({
readStream,
objectLimit: 4,
retries: [
@@ -312,7 +312,7 @@ describe('resolveImportErrors()', () => {
references: [],
})),
});
- const result = await resolveImportErrors({
+ const result = await resolveSavedObjectsImportErrors({
readStream,
objectLimit: 4,
retries: savedObjects.map(obj => ({
@@ -415,7 +415,7 @@ describe('resolveImportErrors()', () => {
},
],
});
- const result = await resolveImportErrors({
+ const result = await resolveSavedObjectsImportErrors({
readStream,
objectLimit: 2,
retries: [
@@ -503,7 +503,7 @@ describe('resolveImportErrors()', () => {
savedObjectsClient.bulkCreate.mockResolvedValue({
saved_objects: [],
});
- const result = await resolveImportErrors({
+ const result = await resolveSavedObjectsImportErrors({
readStream,
objectLimit: 5,
retries: [
@@ -547,7 +547,7 @@ describe('resolveImportErrors()', () => {
savedObjectsClient.bulkCreate.mockResolvedValue({
saved_objects: savedObjects.filter(obj => obj.type === 'index-pattern' && obj.id === '1'),
});
- const result = await resolveImportErrors({
+ const result = await resolveSavedObjectsImportErrors({
readStream,
objectLimit: 4,
retries: [
diff --git a/src/core/server/saved_objects/import/resolve_import_errors.ts b/src/core/server/saved_objects/import/resolve_import_errors.ts
index 6f56f283b4ae..d9ac56788257 100644
--- a/src/core/server/saved_objects/import/resolve_import_errors.ts
+++ b/src/core/server/saved_objects/import/resolve_import_errors.ts
@@ -27,7 +27,13 @@ import {
} from './types';
import { validateReferences } from './validate_references';
-export async function resolveImportErrors({
+/**
+ * Resolve and return saved object import errors.
+ * See the {@link SavedObjectsResolveImportErrorsOptions | options} for more detailed information.
+ *
+ * @public
+ */
+export async function resolveSavedObjectsImportErrors({
readStream,
objectLimit,
retries,
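And the companion retry path; the retry entry shape (`type`, `id`, `overwrite`, `replaceReferences`) is assumed from `SavedObjectsImportRetry` as used in the tests above:

```ts
import { Readable } from 'stream';
import { SavedObjectsClientContract } from '../types';
import { resolveSavedObjectsImportErrors } from './resolve_import_errors';

export async function retryConflictingIndexPattern(
  savedObjectsClient: SavedObjectsClientContract,
  readStream: Readable
) {
  // Re-attempt a single object that previously failed with a conflict,
  // this time overwriting the existing document.
  return resolveSavedObjectsImportErrors({
    readStream,
    objectLimit: 10000,
    savedObjectsClient,
    supportedTypes: ['index-pattern'],
    retries: [{ type: 'index-pattern', id: '1', overwrite: true, replaceReferences: [] }],
  });
}
```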
diff --git a/src/core/server/saved_objects/import/types.ts b/src/core/server/saved_objects/import/types.ts
index 44046378a7b9..067579f54eda 100644
--- a/src/core/server/saved_objects/import/types.ts
+++ b/src/core/server/saved_objects/import/types.ts
@@ -107,11 +107,17 @@ export interface SavedObjectsImportResponse {
* @public
*/
export interface SavedObjectsImportOptions {
+ /** The stream of {@link SavedObject | saved objects} to import */
readStream: Readable;
+ /** The maximum number of objects to import */
objectLimit: number;
+ /** if true, will overwrite existing objects if present */
overwrite: boolean;
+ /** {@link SavedObjectsClientContract | client} to use to perform the import operation */
savedObjectsClient: SavedObjectsClientContract;
+ /** the list of allowed types to import */
supportedTypes: string[];
+ /** if specified, will import into the given namespace, else will import as a global object */
namespace?: string;
}
@@ -120,10 +126,16 @@ export interface SavedObjectsImportOptions {
* @public
*/
export interface SavedObjectsResolveImportErrorsOptions {
+ /** The stream of {@link SavedObject | saved objects} to resolve errors from */
readStream: Readable;
+ /** The maximum number of objects to import */
objectLimit: number;
+ /** client to use to perform the import operation */
savedObjectsClient: SavedObjectsClientContract;
+ /** saved object import references to retry */
retries: SavedObjectsImportRetry[];
+ /** the list of allowed types to import */
supportedTypes: string[];
+ /** if specified, will import into the given namespace */
namespace?: string;
}
diff --git a/src/core/server/saved_objects/index.ts b/src/core/server/saved_objects/index.ts
index 9bfe65802825..661c6cbb79e5 100644
--- a/src/core/server/saved_objects/index.ts
+++ b/src/core/server/saved_objects/index.ts
@@ -26,7 +26,7 @@ export { SavedObjectsManagement } from './management';
export * from './import';
export {
- getSortedObjectsForExport,
+ exportSavedObjectsToStream,
SavedObjectsExportOptions,
SavedObjectsExportResultDetails,
} from './export';
diff --git a/src/core/server/saved_objects/routes/export.ts b/src/core/server/saved_objects/routes/export.ts
index ab287332d8a6..04d310681aec 100644
--- a/src/core/server/saved_objects/routes/export.ts
+++ b/src/core/server/saved_objects/routes/export.ts
@@ -26,7 +26,7 @@ import {
} from '../../../../legacy/utils/streams';
import { IRouter } from '../../http';
import { SavedObjectConfig } from '../saved_objects_config';
-import { getSortedObjectsForExport } from '../export';
+import { exportSavedObjectsToStream } from '../export';
export const registerExportRoute = (
router: IRouter,
@@ -67,7 +67,7 @@ export const registerExportRoute = (
router.handleLegacyErrors(async (context, req, res) => {
const savedObjectsClient = context.core.savedObjects.client;
const { type, objects, search, excludeExportDetails, includeReferencesDeep } = req.body;
- const exportStream = await getSortedObjectsForExport({
+ const exportStream = await exportSavedObjectsToStream({
savedObjectsClient,
types: typeof type === 'string' ? [type] : type,
search,
diff --git a/src/core/server/saved_objects/routes/import.ts b/src/core/server/saved_objects/routes/import.ts
index e3f249dca05f..313e84c0b301 100644
--- a/src/core/server/saved_objects/routes/import.ts
+++ b/src/core/server/saved_objects/routes/import.ts
@@ -21,7 +21,7 @@ import { Readable } from 'stream';
import { extname } from 'path';
import { schema } from '@kbn/config-schema';
import { IRouter } from '../../http';
-import { importSavedObjects } from '../import';
+import { importSavedObjectsFromStream } from '../import';
import { SavedObjectConfig } from '../saved_objects_config';
import { createSavedObjectsStreamFromNdJson } from './utils';
@@ -65,7 +65,7 @@ export const registerImportRoute = (
return res.badRequest({ body: `Invalid file extension ${fileExtension}` });
}
- const result = await importSavedObjects({
+ const result = await importSavedObjectsFromStream({
supportedTypes,
savedObjectsClient: context.core.savedObjects.client,
readStream: createSavedObjectsStreamFromNdJson(file),
diff --git a/src/core/server/saved_objects/routes/integration_tests/export.test.ts b/src/core/server/saved_objects/routes/integration_tests/export.test.ts
index b52a8957176c..a81079b6825d 100644
--- a/src/core/server/saved_objects/routes/integration_tests/export.test.ts
+++ b/src/core/server/saved_objects/routes/integration_tests/export.test.ts
@@ -18,7 +18,7 @@
*/
jest.mock('../../export', () => ({
- getSortedObjectsForExport: jest.fn(),
+ exportSavedObjectsToStream: jest.fn(),
}));
import * as exportMock from '../../export';
@@ -30,7 +30,7 @@ import { registerExportRoute } from '../export';
import { setupServer } from './test_utils';
type setupServerReturn = UnwrapPromise<ReturnType<typeof setupServer>>;
-const getSortedObjectsForExport = exportMock.getSortedObjectsForExport as jest.Mock;
+const exportSavedObjectsToStream = exportMock.exportSavedObjectsToStream as jest.Mock;
const allowedTypes = ['index-pattern', 'search'];
const config = {
maxImportPayloadBytes: 10485760,
@@ -76,7 +76,7 @@ describe('POST /api/saved_objects/_export', () => {
],
},
];
- getSortedObjectsForExport.mockResolvedValueOnce(createListStream(sortedObjects));
+ exportSavedObjectsToStream.mockResolvedValueOnce(createListStream(sortedObjects));
const result = await supertest(httpSetup.server.listener)
.post('/api/saved_objects/_export')
@@ -96,7 +96,7 @@ describe('POST /api/saved_objects/_export', () => {
const objects = (result.text as string).split('\n').map(row => JSON.parse(row));
expect(objects).toEqual(sortedObjects);
- expect(getSortedObjectsForExport.mock.calls[0][0]).toEqual(
+ expect(exportSavedObjectsToStream.mock.calls[0][0]).toEqual(
expect.objectContaining({
excludeExportDetails: false,
exportSizeLimit: 10000,
diff --git a/src/core/server/saved_objects/routes/resolve_import_errors.ts b/src/core/server/saved_objects/routes/resolve_import_errors.ts
index efa7add7951b..a10a19ba1d8f 100644
--- a/src/core/server/saved_objects/routes/resolve_import_errors.ts
+++ b/src/core/server/saved_objects/routes/resolve_import_errors.ts
@@ -21,7 +21,7 @@ import { extname } from 'path';
import { Readable } from 'stream';
import { schema } from '@kbn/config-schema';
import { IRouter } from '../../http';
-import { resolveImportErrors } from '../import';
+import { resolveSavedObjectsImportErrors } from '../import';
import { SavedObjectConfig } from '../saved_objects_config';
import { createSavedObjectsStreamFromNdJson } from './utils';
@@ -75,7 +75,7 @@ export const registerResolveImportErrorsRoute = (
if (fileExtension !== '.ndjson') {
return res.badRequest({ body: `Invalid file extension ${fileExtension}` });
}
- const result = await resolveImportErrors({
+ const result = await resolveSavedObjectsImportErrors({
supportedTypes,
savedObjectsClient: context.core.savedObjects.client,
readStream: createSavedObjectsStreamFromNdJson(file),
diff --git a/src/core/server/saved_objects/saved_objects_service.mock.ts b/src/core/server/saved_objects/saved_objects_service.mock.ts
index cbdff1632453..9fe32b14e645 100644
--- a/src/core/server/saved_objects/saved_objects_service.mock.ts
+++ b/src/core/server/saved_objects/saved_objects_service.mock.ts
@@ -64,8 +64,11 @@ const createSetupContractMock = () => {
setClientFactoryProvider: jest.fn(),
addClientWrapper: jest.fn(),
registerType: jest.fn(),
+ getImportExportObjectLimit: jest.fn(),
};
+ setupContract.getImportExportObjectLimit.mockReturnValue(100);
+
return setupContract;
};
diff --git a/src/core/server/saved_objects/saved_objects_service.ts b/src/core/server/saved_objects/saved_objects_service.ts
index 62e25ad5fb45..89f7990c771c 100644
--- a/src/core/server/saved_objects/saved_objects_service.ts
+++ b/src/core/server/saved_objects/saved_objects_service.ts
@@ -154,6 +154,11 @@ export interface SavedObjectsServiceSetup {
* This API is the single entry point to register saved object types in the new platform.
*/
registerType: (type: SavedObjectsType) => void;
+
+ /**
+ * Returns the maximum number of objects allowed for import or export operations.
+ */
+ getImportExportObjectLimit: () => number;
}
/**
@@ -344,6 +349,7 @@ export class SavedObjectsService
}
this.typeRegistry.registerType(type);
},
+ getImportExportObjectLimit: () => this.config!.maxImportExportSize,
};
}
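Since `createPluginSetupContext` (earlier in this diff) forwards `getImportExportObjectLimit` to plugins, a plugin can read the configured limit at setup time. A sketch, with the plugin itself being hypothetical:

```ts
import { CoreSetup, Plugin } from 'src/core/server';

export class MyExportPlugin implements Plugin {
  public setup(core: CoreSetup) {
    // Same value the core import/export routes use, i.e. the
    // savedObjects.maxImportExportSize setting.
    const objectLimit = core.savedObjects.getImportExportObjectLimit();
    // eslint-disable-next-line no-console
    console.log(`saved objects import/export limit: ${objectLimit}`);
  }

  public start() {}
}
```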
diff --git a/src/core/server/saved_objects/types.ts b/src/core/server/saved_objects/types.ts
index 495d896ad12c..c9c672d0f8b1 100644
--- a/src/core/server/saved_objects/types.ts
+++ b/src/core/server/saved_objects/types.ts
@@ -62,7 +62,6 @@ export interface SavedObjectsMigrationVersion {
}
/**
- *
* @public
*/
export interface SavedObject {
diff --git a/src/core/server/server.api.md b/src/core/server/server.api.md
index 42bc1ce214b1..6b0d962aedcd 100644
--- a/src/core/server/server.api.md
+++ b/src/core/server/server.api.md
@@ -685,6 +685,9 @@ export interface DeprecationSettings {
message: string;
}
+// @public
+export type DestructiveRouteMethod = 'post' | 'put' | 'delete' | 'patch';
+
// @public
export interface DiscoveredPlugin {
readonly configPath: ConfigPath;
@@ -763,6 +766,9 @@ export interface ErrorHttpResponseOptions {
headers?: ResponseHeaders;
}
+// @public
+export function exportSavedObjectsToStream({ types, objects, search, savedObjectsClient, exportSizeLimit, includeReferencesDeep, excludeExportDetails, namespace, }: SavedObjectsExportOptions): Promise<Readable>;
+
// @public
export interface FakeRequest {
headers: Headers;
@@ -891,6 +897,9 @@ export interface ImageValidation {
};
}
+// @public
+export function importSavedObjectsFromStream({ readStream, objectLimit, overwrite, savedObjectsClient, supportedTypes, namespace, }: SavedObjectsImportOptions): Promise<SavedObjectsImportResponse>;
+
// @public (undocumented)
export interface IndexSettingsDeprecationInfo {
// (undocumented)
@@ -1176,6 +1185,11 @@ export interface LogRecord {
timestamp: Date;
}
+// @public
+export interface MetricsServiceSetup {
+ getOpsMetrics$: () => Observable<OpsMetrics>;
+}
+
// @public (undocumented)
export type MIGRATION_ASSISTANCE_INDEX_ACTION = 'upgrade' | 'reindex';
@@ -1227,6 +1241,63 @@ export interface OnPreResponseToolkit {
next: (responseExtensions?: OnPreResponseExtensions) => OnPreResponseResult;
}
+// @public
+export interface OpsMetrics {
+ concurrent_connections: OpsServerMetrics['concurrent_connections'];
+ os: OpsOsMetrics;
+ process: OpsProcessMetrics;
+ requests: OpsServerMetrics['requests'];
+ response_times: OpsServerMetrics['response_times'];
+}
+
+// @public
+export interface OpsOsMetrics {
+ distro?: string;
+ distroRelease?: string;
+ load: {
+ '1m': number;
+ '5m': number;
+ '15m': number;
+ };
+ memory: {
+ total_in_bytes: number;
+ free_in_bytes: number;
+ used_in_bytes: number;
+ };
+ platform: NodeJS.Platform;
+ platformRelease: string;
+ uptime_in_millis: number;
+}
+
+// @public
+export interface OpsProcessMetrics {
+ event_loop_delay: number;
+ memory: {
+ heap: {
+ total_in_bytes: number;
+ used_in_bytes: number;
+ size_limit: number;
+ };
+ resident_set_size_in_bytes: number;
+ };
+ pid: number;
+ uptime_in_millis: number;
+}
+
+// @public
+export interface OpsServerMetrics {
+ concurrent_connections: number;
+ requests: {
+ disconnects: number;
+ total: number;
+ statusCodes: Record<number, number>;
+ };
+ response_times: {
+ avg_in_millis: number;
+ max_in_millis: number;
+ };
+}
+
// @public (undocumented)
export interface PackageInfo {
// (undocumented)
@@ -1369,6 +1440,9 @@ export type RequestHandlerContextContainer = IContextContainer<RequestHandler<any, any, any>>;
// @public
export type RequestHandlerContextProvider<TContextName extends keyof RequestHandlerContext> = IContextProvider<RequestHandler<any, any, any>, TContextName>;
+// @public
+export function resolveSavedObjectsImportErrors({ readStream, objectLimit, retries, savedObjectsClient, supportedTypes, namespace, }: SavedObjectsResolveImportErrorsOptions): Promise<SavedObjectsImportResponse>;
+
// @public
export type ResponseError = string | Error | {
message: string | Error;
@@ -1397,6 +1471,7 @@ export interface RouteConfigOptions<Method extends RouteMethod> {
authRequired?: boolean;
body?: Method extends 'get' | 'options' ? undefined : RouteConfigOptionsBody;
tags?: readonly string[];
+ xsrfRequired?: Method extends 'get' ? never : boolean;
}
// @public
@@ -1411,7 +1486,7 @@ export interface RouteConfigOptionsBody {
export type RouteContentType = 'application/json' | 'application/*+json' | 'application/octet-stream' | 'application/x-www-form-urlencoded' | 'multipart/form-data' | 'text/*';
// @public
-export type RouteMethod = 'get' | 'post' | 'put' | 'delete' | 'patch' | 'options';
+export type RouteMethod = SafeRouteMethod | DestructiveRouteMethod;
// @public
export type RouteRegistrar<Method extends RouteMethod> = <P, Q, B>(route: RouteConfig<P, Q, B, Method>, handler: RequestHandler<P, Q, B, Method>) => void;
@@ -1464,6 +1539,9 @@ export interface RouteValidatorOptions {
};
}
+// @public
+export type SafeRouteMethod = 'get' | 'options';
+
// @public (undocumented)
export interface SavedObject {
attributes: T;
@@ -1827,17 +1905,11 @@ export interface SavedObjectsImportMissingReferencesError {
// @public
export interface SavedObjectsImportOptions {
- // (undocumented)
namespace?: string;
- // (undocumented)
objectLimit: number;
- // (undocumented)
overwrite: boolean;
- // (undocumented)
readStream: Readable;
- // (undocumented)
savedObjectsClient: SavedObjectsClientContract;
- // (undocumented)
supportedTypes: string[];
}
@@ -1991,17 +2063,11 @@ export interface SavedObjectsRepositoryFactory {
// @public
export interface SavedObjectsResolveImportErrorsOptions {
- // (undocumented)
namespace?: string;
- // (undocumented)
objectLimit: number;
- // (undocumented)
readStream: Readable;
- // (undocumented)
retries: SavedObjectsImportRetry[];
- // (undocumented)
savedObjectsClient: SavedObjectsClientContract;
- // (undocumented)
supportedTypes: string[];
}
@@ -2032,6 +2098,7 @@ export class SavedObjectsSerializer {
// @public
export interface SavedObjectsServiceSetup {
addClientWrapper: (priority: number, id: string, factory: SavedObjectsClientWrapperFactory) => void;
+ getImportExportObjectLimit: () => number;
registerType: (type: SavedObjectsType) => void;
setClientFactoryProvider: (clientFactoryProvider: SavedObjectsClientFactoryProvider) => void;
}
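The new `xsrfRequired` route option in the API report above is what plugins set when a destructive endpoint must accept requests without the `kbn-xsrf` header; a sketch (route path and validation are illustrative):

```ts
import { schema } from '@kbn/config-schema';
import { IRouter } from 'src/core/server';

export function registerWebhookRoute(router: IRouter) {
  router.post(
    {
      path: '/api/my_plugin/webhook',
      validate: { body: schema.object({ payload: schema.string() }) },
      // xsrfRequired defaults to true for destructive methods and cannot be
      // set on 'get' routes (the type is `never` there).
      options: { xsrfRequired: false },
    },
    async (context, request, response) => response.ok({ body: 'received' })
  );
}
```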
diff --git a/src/core/server/server.test.mocks.ts b/src/core/server/server.test.mocks.ts
index 038c4651ff5a..53d1b742a649 100644
--- a/src/core/server/server.test.mocks.ts
+++ b/src/core/server/server.test.mocks.ts
@@ -79,3 +79,9 @@ export const mockUuidService = uuidServiceMock.create();
jest.doMock('./uuid/uuid_service', () => ({
UuidService: jest.fn(() => mockUuidService),
}));
+
+import { metricsServiceMock } from './metrics/metrics_service.mock';
+export const mockMetricsService = metricsServiceMock.create();
+jest.doMock('./metrics/metrics_service', () => ({
+ MetricsService: jest.fn(() => mockMetricsService),
+}));
diff --git a/src/core/server/server.test.ts b/src/core/server/server.test.ts
index 161dd3759a21..a4b5a9d81df2 100644
--- a/src/core/server/server.test.ts
+++ b/src/core/server/server.test.ts
@@ -28,6 +28,7 @@ import {
mockEnsureValidConfiguration,
mockUiSettingsService,
mockRenderingService,
+ mockMetricsService,
} from './server.test.mocks';
import { BehaviorSubject } from 'rxjs';
@@ -61,6 +62,7 @@ test('sets up services on "setup"', async () => {
expect(mockSavedObjectsService.setup).not.toHaveBeenCalled();
expect(mockUiSettingsService.setup).not.toHaveBeenCalled();
expect(mockRenderingService.setup).not.toHaveBeenCalled();
+ expect(mockMetricsService.setup).not.toHaveBeenCalled();
await server.setup();
@@ -71,6 +73,7 @@ test('sets up services on "setup"', async () => {
expect(mockSavedObjectsService.setup).toHaveBeenCalledTimes(1);
expect(mockUiSettingsService.setup).toHaveBeenCalledTimes(1);
expect(mockRenderingService.setup).toHaveBeenCalledTimes(1);
+ expect(mockMetricsService.setup).toHaveBeenCalledTimes(1);
});
test('injects legacy dependency to context#setup()', async () => {
@@ -107,6 +110,7 @@ test('runs services on "start"', async () => {
expect(mockLegacyService.start).not.toHaveBeenCalled();
expect(mockSavedObjectsService.start).not.toHaveBeenCalled();
expect(mockUiSettingsService.start).not.toHaveBeenCalled();
+ expect(mockMetricsService.start).not.toHaveBeenCalled();
await server.start();
@@ -114,6 +118,7 @@ test('runs services on "start"', async () => {
expect(mockLegacyService.start).toHaveBeenCalledTimes(1);
expect(mockSavedObjectsService.start).toHaveBeenCalledTimes(1);
expect(mockUiSettingsService.start).toHaveBeenCalledTimes(1);
+ expect(mockMetricsService.start).toHaveBeenCalledTimes(1);
});
test('does not fail on "setup" if there are unused paths detected', async () => {
@@ -135,6 +140,7 @@ test('stops services on "stop"', async () => {
expect(mockLegacyService.stop).not.toHaveBeenCalled();
expect(mockSavedObjectsService.stop).not.toHaveBeenCalled();
expect(mockUiSettingsService.stop).not.toHaveBeenCalled();
+ expect(mockMetricsService.stop).not.toHaveBeenCalled();
await server.stop();
@@ -144,6 +150,7 @@ test('stops services on "stop"', async () => {
expect(mockLegacyService.stop).toHaveBeenCalledTimes(1);
expect(mockSavedObjectsService.stop).toHaveBeenCalledTimes(1);
expect(mockUiSettingsService.stop).toHaveBeenCalledTimes(1);
+ expect(mockMetricsService.stop).toHaveBeenCalledTimes(1);
});
test(`doesn't setup core services if config validation fails`, async () => {
@@ -159,6 +166,7 @@ test(`doesn't setup core services if config validation fails`, async () => {
expect(mockLegacyService.setup).not.toHaveBeenCalled();
expect(mockUiSettingsService.setup).not.toHaveBeenCalled();
expect(mockRenderingService.setup).not.toHaveBeenCalled();
+ expect(mockMetricsService.setup).not.toHaveBeenCalled();
});
test(`doesn't setup core services if legacy config validation fails`, async () => {
@@ -178,4 +186,5 @@ test(`doesn't setup core services if legacy config validation fails`, async () =
expect(mockLegacyService.setup).not.toHaveBeenCalled();
expect(mockSavedObjectsService.stop).not.toHaveBeenCalled();
expect(mockUiSettingsService.setup).not.toHaveBeenCalled();
+ expect(mockMetricsService.setup).not.toHaveBeenCalled();
});
diff --git a/src/core/server/server.ts b/src/core/server/server.ts
index db2493b38d6e..8603f5fba1da 100644
--- a/src/core/server/server.ts
+++ b/src/core/server/server.ts
@@ -34,6 +34,7 @@ import { Logger, LoggerFactory } from './logging';
import { UiSettingsService } from './ui_settings';
import { PluginsService, config as pluginsConfig } from './plugins';
import { SavedObjectsService } from '../server/saved_objects';
+import { MetricsService, opsConfig } from './metrics';
import { config as cspConfig } from './csp';
import { config as elasticsearchConfig } from './elasticsearch';
@@ -67,6 +68,7 @@ export class Server {
private readonly savedObjects: SavedObjectsService;
private readonly uiSettings: UiSettingsService;
private readonly uuid: UuidService;
+ private readonly metrics: MetricsService;
private coreStart?: InternalCoreStart;
@@ -89,6 +91,7 @@ export class Server {
this.uiSettings = new UiSettingsService(core);
this.capabilities = new CapabilitiesService(core);
this.uuid = new UuidService(core);
+ this.metrics = new MetricsService(core);
}
public async setup() {
@@ -137,6 +140,8 @@ export class Server {
legacyPlugins,
});
+ const metricsSetup = await this.metrics.setup({ http: httpSetup });
+
const coreSetup: InternalCoreSetup = {
capabilities: capabilitiesSetup,
context: contextServiceSetup,
@@ -145,6 +150,7 @@ export class Server {
uiSettings: uiSettingsSetup,
savedObjects: savedObjectsSetup,
uuid: uuidSetup,
+ metrics: metricsSetup,
};
const pluginsSetup = await this.plugins.setup(coreSetup);
@@ -193,6 +199,7 @@ export class Server {
await this.http.start();
await this.rendering.start();
+ await this.metrics.start();
return this.coreStart;
}
@@ -207,6 +214,7 @@ export class Server {
await this.http.stop();
await this.uiSettings.stop();
await this.rendering.stop();
+ await this.metrics.stop();
}
private registerDefaultRoute(httpSetup: InternalHttpServiceSetup) {
@@ -260,6 +268,7 @@ export class Server {
[savedObjectsConfig.path, savedObjectsConfig.schema],
[savedObjectsMigrationConfig.path, savedObjectsMigrationConfig.schema],
[uiSettingsConfig.path, uiSettingsConfig.schema],
+ [opsConfig.path, opsConfig.schema],
];
this.configService.addDeprecationProvider(rootConfigPath, coreDeprecationProvider);
diff --git a/src/dev/storybook/aliases.ts b/src/dev/storybook/aliases.ts
index fb91b865097f..35ac4e27f9c8 100644
--- a/src/dev/storybook/aliases.ts
+++ b/src/dev/storybook/aliases.ts
@@ -20,6 +20,7 @@
export const storybookAliases = {
apm: 'x-pack/legacy/plugins/apm/scripts/storybook.js',
canvas: 'x-pack/legacy/plugins/canvas/scripts/storybook_new.js',
+ codeeditor: 'src/plugins/kibana_react/public/code_editor/scripts/storybook.ts',
drilldowns: 'x-pack/plugins/drilldowns/scripts/storybook.js',
embeddable: 'src/plugins/embeddable/scripts/storybook.js',
infra: 'x-pack/legacy/plugins/infra/scripts/storybook.js',
diff --git a/src/legacy/core_plugins/data/public/actions/filters/create_filters_from_event.test.ts b/src/legacy/core_plugins/data/public/actions/filters/create_filters_from_event.test.ts
new file mode 100644
index 000000000000..bfba4d7f4c8d
--- /dev/null
+++ b/src/legacy/core_plugins/data/public/actions/filters/create_filters_from_event.test.ts
@@ -0,0 +1,119 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import {
+ fieldFormats,
+ FieldFormatsGetConfigFn,
+ esFilters,
+ IndexPatternsContract,
+} from '../../../../../../plugins/data/public';
+// eslint-disable-next-line @kbn/eslint/no-restricted-paths
+import { setIndexPatterns } from '../../../../../../plugins/data/public/services';
+import { dataPluginMock } from '../../../../../../plugins/data/public/mocks';
+import { createFiltersFromEvent, EventData } from './create_filters_from_event';
+import { mockDataServices } from '../../search/aggs/test_helpers';
+
+jest.mock('ui/new_platform');
+
+const mockField = {
+ name: 'bytes',
+ indexPattern: {
+ id: 'logstash-*',
+ },
+ filterable: true,
+ format: new fieldFormats.BytesFormat({}, (() => {}) as FieldFormatsGetConfigFn),
+};
+
+describe('createFiltersFromEvent', () => {
+ let dataPoints: EventData[];
+
+ beforeEach(() => {
+ dataPoints = [
+ {
+ table: {
+ columns: [
+ {
+ name: 'test',
+ id: '1-1',
+ meta: {
+ type: 'histogram',
+ indexPatternId: 'logstash-*',
+ aggConfigParams: {
+ field: 'bytes',
+ interval: 30,
+ otherBucket: true,
+ },
+ },
+ },
+ ],
+ rows: [
+ {
+ '1-1': '2048',
+ },
+ ],
+ },
+ column: 0,
+ row: 0,
+ value: 'test',
+ },
+ ];
+
+ mockDataServices();
+ setIndexPatterns(({
+ ...dataPluginMock.createStartContract().indexPatterns,
+ get: async () => ({
+ id: 'logstash-*',
+ fields: {
+ getByName: () => mockField,
+ filter: () => [mockField],
+ },
+ }),
+ } as unknown) as IndexPatternsContract);
+ });
+
+ test('ignores event when value for rows is not provided', async () => {
+ dataPoints[0].table.rows[0]['1-1'] = null;
+ const filters = await createFiltersFromEvent(dataPoints);
+
+ expect(filters.length).toEqual(0);
+ });
+
+ test('handles an event when aggregations type is a terms', async () => {
+ if (dataPoints[0].table.columns[0].meta) {
+ dataPoints[0].table.columns[0].meta.type = 'terms';
+ }
+ const filters = await createFiltersFromEvent(dataPoints);
+
+ expect(filters.length).toEqual(1);
+ expect(filters[0].query.match_phrase.bytes).toEqual('2048');
+ });
+
+ test('handles an event when aggregations type is not terms', async () => {
+ const filters = await createFiltersFromEvent(dataPoints);
+
+ expect(filters.length).toEqual(1);
+
+ const [rangeFilter] = filters;
+
+ if (esFilters.isRangeFilter(rangeFilter)) {
+ expect(rangeFilter.range.bytes.gte).toEqual(2048);
+ expect(rangeFilter.range.bytes.lt).toEqual(2078);
+ }
+ });
+});
diff --git a/src/legacy/core_plugins/data/public/actions/filters/create_filters_from_event.js b/src/legacy/core_plugins/data/public/actions/filters/create_filters_from_event.ts
similarity index 70%
rename from src/legacy/core_plugins/data/public/actions/filters/create_filters_from_event.js
rename to src/legacy/core_plugins/data/public/actions/filters/create_filters_from_event.ts
index 1037c718d000..3713c781b095 100644
--- a/src/legacy/core_plugins/data/public/actions/filters/create_filters_from_event.js
+++ b/src/legacy/core_plugins/data/public/actions/filters/create_filters_from_event.ts
@@ -17,21 +17,33 @@
* under the License.
*/
-import { esFilters } from '../../../../../../plugins/data/public';
+import { KibanaDatatable } from '../../../../../../plugins/expressions/public';
+import { esFilters, Filter } from '../../../../../../plugins/data/public';
import { deserializeAggConfig } from '../../search/expressions/utils';
// eslint-disable-next-line @kbn/eslint/no-restricted-paths
import { getIndexPatterns } from '../../../../../../plugins/data/public/services';
+export interface EventData {
+ table: Pick<KibanaDatatable, 'rows' | 'columns'>;
+ column: number;
+ row: number;
+ value: any;
+}
+
/**
* For terms aggregations on `__other__` buckets, this assembles a list of applicable filter
* terms based on a specific cell in the tabified data.
*
- * @param {object} table - tabified table data
+ * @param {EventData['table']} table - tabified table data
* @param {number} columnIndex - current column index
* @param {number} rowIndex - current row index
* @return {array} - array of terms to filter against
*/
-const getOtherBucketFilterTerms = (table, columnIndex, rowIndex) => {
+const getOtherBucketFilterTerms = (
+ table: EventData['table'],
+ columnIndex: number,
+ rowIndex: number
+) => {
if (rowIndex === -1) {
return [];
}
@@ -42,7 +54,7 @@ const getOtherBucketFilterTerms = (table, columnIndex, rowIndex) => {
return row[column.id] === table.rows[rowIndex][column.id] || i >= columnIndex;
});
});
- const terms = rows.map(row => row[table.columns[columnIndex].id]);
+ const terms: any[] = rows.map(row => row[table.columns[columnIndex].id]);
return [
...new Set(
@@ -59,22 +71,27 @@ const getOtherBucketFilterTerms = (table, columnIndex, rowIndex) => {
* Assembles the filters needed to apply filtering against a specific cell value, while accounting
* for cases like if the value is a terms agg in an `__other__` or `__missing__` bucket.
*
- * @param {object} table - tabified table data
+ * @param {EventData['table']} table - tabified table data
* @param {number} columnIndex - current column index
* @param {number} rowIndex - current row index
* @param {string} cellValue - value of the current cell
- * @return {array|string} - filter or list of filters to provide to queryFilter.addFilters()
+ * @return {Filter[]|undefined} - list of filters to provide to queryFilter.addFilters()
*/
-const createFilter = async (table, columnIndex, rowIndex) => {
- if (!table || !table.columns || !table.columns[columnIndex]) return;
+const createFilter = async (table: EventData['table'], columnIndex: number, rowIndex: number) => {
+ if (!table || !table.columns || !table.columns[columnIndex]) {
+ return;
+ }
const column = table.columns[columnIndex];
+ if (!column.meta || !column.meta.indexPatternId) {
+ return;
+ }
const aggConfig = deserializeAggConfig({
type: column.meta.type,
- aggConfigParams: column.meta.aggConfigParams,
+ aggConfigParams: column.meta.aggConfigParams ? column.meta.aggConfigParams : {},
indexPattern: await getIndexPatterns().get(column.meta.indexPatternId),
});
- let filter = [];
- const value = rowIndex > -1 ? table.rows[rowIndex][column.id] : null;
+ let filter: Filter[] = [];
+ const value: any = rowIndex > -1 ? table.rows[rowIndex][column.id] : null;
if (value === null || value === undefined || !aggConfig.isFilterable()) {
return;
}
@@ -85,6 +102,10 @@ const createFilter = async (table, columnIndex, rowIndex) => {
filter = aggConfig.createFilter(value);
}
+ if (!filter) {
+ return;
+ }
+
if (!Array.isArray(filter)) {
filter = [filter];
}
@@ -92,19 +113,18 @@ const createFilter = async (table, columnIndex, rowIndex) => {
return filter;
};
-const createFiltersFromEvent = async event => {
- const filters = [];
- const dataPoints = event.data || [event];
+const createFiltersFromEvent = async (dataPoints: EventData[], negate?: boolean) => {
+ const filters: Filter[] = [];
await Promise.all(
dataPoints
.filter(point => point)
.map(async val => {
const { table, column, row } = val;
- const filter = await createFilter(table, column, row);
+ const filter: Filter[] = (await createFilter(table, column, row)) || [];
if (filter) {
filter.forEach(f => {
- if (event.negate) {
+ if (negate) {
f = esFilters.toggleFilterNegated(f);
}
filters.push(f);
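The TypeScript conversion also changes the call shape: callers now pass the `dataPoints` array and an optional `negate` flag directly (the value-click action below adapts its event payload accordingly). A small sketch:

```ts
import { createFiltersFromEvent, EventData } from './create_filters_from_event';

export async function filtersForClick(dataPoints: EventData[]) {
  // Filters for the clicked cells.
  const filters = await createFiltersFromEvent(dataPoints);
  // Same cells, but every resulting filter is toggled to its negated form.
  const negatedFilters = await createFiltersFromEvent(dataPoints, true);
  return { filters, negatedFilters };
}
```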
diff --git a/src/legacy/core_plugins/data/public/actions/select_range_action.ts b/src/legacy/core_plugins/data/public/actions/select_range_action.ts
index 7f1c5d78ab80..21046f8bb834 100644
--- a/src/legacy/core_plugins/data/public/actions/select_range_action.ts
+++ b/src/legacy/core_plugins/data/public/actions/select_range_action.ts
@@ -19,21 +19,21 @@
import { i18n } from '@kbn/i18n';
import {
- Action,
createAction,
IncompatibleActionError,
+ ActionByType,
} from '../../../../../plugins/ui_actions/public';
import { onBrushEvent } from './filters/brush_event';
import { FilterManager, TimefilterContract, esFilters } from '../../../../../plugins/data/public';
-export const SELECT_RANGE_ACTION = 'SELECT_RANGE_ACTION';
+export const ACTION_SELECT_RANGE = 'ACTION_SELECT_RANGE';
-interface ActionContext {
+export interface SelectRangeActionContext {
data: any;
timeFieldName: string;
}
-async function isCompatible(context: ActionContext) {
+async function isCompatible(context: SelectRangeActionContext) {
try {
return Boolean(await onBrushEvent(context.data));
} catch {
@@ -44,17 +44,17 @@ async function isCompatible(context: ActionContext) {
export function selectRangeAction(
filterManager: FilterManager,
timeFilter: TimefilterContract
-): Action {
- return createAction({
- type: SELECT_RANGE_ACTION,
- id: SELECT_RANGE_ACTION,
+): ActionByType<typeof ACTION_SELECT_RANGE> {
+ return createAction<typeof ACTION_SELECT_RANGE>({
+ type: ACTION_SELECT_RANGE,
+ id: ACTION_SELECT_RANGE,
getDisplayName: () => {
return i18n.translate('data.filter.applyFilterActionTitle', {
defaultMessage: 'Apply filter to current view',
});
},
isCompatible,
- execute: async ({ timeFieldName, data }: ActionContext) => {
+ execute: async ({ timeFieldName, data }: SelectRangeActionContext) => {
if (!(await isCompatible({ timeFieldName, data }))) {
throw new IncompatibleActionError();
}
diff --git a/src/legacy/core_plugins/data/public/actions/value_click_action.ts b/src/legacy/core_plugins/data/public/actions/value_click_action.ts
index 260b401e6d65..4c69bc826292 100644
--- a/src/legacy/core_plugins/data/public/actions/value_click_action.ts
+++ b/src/legacy/core_plugins/data/public/actions/value_click_action.ts
@@ -20,7 +20,7 @@
import { i18n } from '@kbn/i18n';
import { toMountPoint } from '../../../../../plugins/kibana_react/public';
import {
- Action,
+ ActionByType,
createAction,
IncompatibleActionError,
} from '../../../../../plugins/ui_actions/public';
@@ -37,16 +37,18 @@ import {
esFilters,
} from '../../../../../plugins/data/public';
-export const VALUE_CLICK_ACTION = 'VALUE_CLICK_ACTION';
+export const ACTION_VALUE_CLICK = 'ACTION_VALUE_CLICK';
-interface ActionContext {
+export interface ValueClickActionContext {
data: any;
timeFieldName: string;
}
-async function isCompatible(context: ActionContext) {
+async function isCompatible(context: ValueClickActionContext) {
try {
- const filters: Filter[] = (await createFiltersFromEvent(context.data)) || [];
+ const filters: Filter[] =
+ (await createFiltersFromEvent(context.data.data || [context.data], context.data.negate)) ||
+ [];
return filters.length > 0;
} catch {
return false;
@@ -56,22 +58,23 @@ async function isCompatible(context: ActionContext) {
export function valueClickAction(
filterManager: FilterManager,
timeFilter: TimefilterContract
-): Action {
- return createAction({
- type: VALUE_CLICK_ACTION,
- id: VALUE_CLICK_ACTION,
+): ActionByType<typeof ACTION_VALUE_CLICK> {
+ return createAction<typeof ACTION_VALUE_CLICK>({
+ type: ACTION_VALUE_CLICK,
+ id: ACTION_VALUE_CLICK,
getDisplayName: () => {
return i18n.translate('data.filter.applyFilterActionTitle', {
defaultMessage: 'Apply filter to current view',
});
},
isCompatible,
- execute: async ({ timeFieldName, data }: ActionContext) => {
+ execute: async ({ timeFieldName, data }: ValueClickActionContext) => {
if (!(await isCompatible({ timeFieldName, data }))) {
throw new IncompatibleActionError();
}
- const filters: Filter[] = (await createFiltersFromEvent(data)) || [];
+ const filters: Filter[] =
+ (await createFiltersFromEvent(data.data || [data], data.negate)) || [];
let selectedFilters: Filter[] = esFilters.mapAndFlattenFilters(filters);
diff --git a/src/legacy/core_plugins/data/public/plugin.ts b/src/legacy/core_plugins/data/public/plugin.ts
index e2b8ca5dda78..18230646ab41 100644
--- a/src/legacy/core_plugins/data/public/plugin.ts
+++ b/src/legacy/core_plugins/data/public/plugin.ts
@@ -37,8 +37,16 @@ import {
// eslint-disable-next-line @kbn/eslint/no-restricted-paths
} from '../../../../plugins/data/public/services';
import { setSearchServiceShim } from './services';
-import { SELECT_RANGE_ACTION, selectRangeAction } from './actions/select_range_action';
-import { VALUE_CLICK_ACTION, valueClickAction } from './actions/value_click_action';
+import {
+ selectRangeAction,
+ SelectRangeActionContext,
+ ACTION_SELECT_RANGE,
+} from './actions/select_range_action';
+import {
+ valueClickAction,
+ ACTION_VALUE_CLICK,
+ ValueClickActionContext,
+} from './actions/value_click_action';
import {
SELECT_RANGE_TRIGGER,
VALUE_CLICK_TRIGGER,
@@ -76,6 +84,12 @@ export interface DataSetup {
export interface DataStart {
search: SearchStart;
}
+declare module '../../../../plugins/ui_actions/public' {
+ export interface ActionContextMapping {
+ [ACTION_SELECT_RANGE]: SelectRangeActionContext;
+ [ACTION_VALUE_CLICK]: ValueClickActionContext;
+ }
+}
/**
* Data Plugin - public
@@ -100,10 +114,13 @@ export class DataPlugin
// This is to be deprecated once we switch to the new search service fully
addSearchStrategy(defaultSearchStrategy);
- uiActions.registerAction(
+ uiActions.attachAction(
+ SELECT_RANGE_TRIGGER,
selectRangeAction(data.query.filterManager, data.query.timefilter.timefilter)
);
- uiActions.registerAction(
+
+ uiActions.attachAction(
+ VALUE_CLICK_TRIGGER,
valueClickAction(data.query.filterManager, data.query.timefilter.timefilter)
);
@@ -123,9 +140,6 @@ export class DataPlugin
setSearchService(data.search);
setOverlays(core.overlays);
- uiActions.attachAction(SELECT_RANGE_TRIGGER, SELECT_RANGE_ACTION);
- uiActions.attachAction(VALUE_CLICK_TRIGGER, VALUE_CLICK_ACTION);
-
return {
search,
};
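With `registerAction` plus `attachAction(trigger, actionId)` collapsed into a single `attachAction(trigger, action)` call, other plugins can follow the same pattern: declare the action id in `ActionContextMapping` and attach the typed action during `setup`. A hedged sketch (the action id, its context, and the `UiActionsSetup` type name are assumptions for illustration):

```ts
import { createAction, UiActionsSetup } from '../../../../plugins/ui_actions/public';

export const ACTION_LOG_CLICK = 'ACTION_LOG_CLICK';
export interface LogClickActionContext {
  data: any;
}

declare module '../../../../plugins/ui_actions/public' {
  export interface ActionContextMapping {
    [ACTION_LOG_CLICK]: LogClickActionContext;
  }
}

export function registerLogClickAction(uiActions: UiActionsSetup, triggerId: string) {
  uiActions.attachAction(
    triggerId,
    createAction<typeof ACTION_LOG_CLICK>({
      type: ACTION_LOG_CLICK,
      id: ACTION_LOG_CLICK,
      getDisplayName: () => 'Log click',
      execute: async ({ data }: LogClickActionContext) => {
        // eslint-disable-next-line no-console
        console.log(data);
      },
    })
  );
}
```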
diff --git a/src/legacy/core_plugins/data/public/search/expressions/build_tabular_inspector_data.ts b/src/legacy/core_plugins/data/public/search/expressions/build_tabular_inspector_data.ts
index e85e9deff6dd..bd05fa21bfd5 100644
--- a/src/legacy/core_plugins/data/public/search/expressions/build_tabular_inspector_data.ts
+++ b/src/legacy/core_plugins/data/public/search/expressions/build_tabular_inspector_data.ts
@@ -20,7 +20,7 @@
import { set } from 'lodash';
// @ts-ignore
import { FormattedData } from '../../../../../../plugins/inspector/public';
-// @ts-ignore
+
import { createFilter } from './create_filter';
import { TabbedTable } from '../tabify';
@@ -66,7 +66,10 @@ export async function buildTabularInspectorData(
row => row[`col-${colIndex}-${col.aggConfig.id}`].raw === value.raw
);
const filter = createFilter(aggConfigs, table, colIndex, rowIndex, value.raw);
- queryFilter.addFilters(filter);
+
+ if (filter) {
+ queryFilter.addFilters(filter);
+ }
}),
filterOut:
isCellContentFilterable &&
@@ -75,14 +78,17 @@ export async function buildTabularInspectorData(
row => row[`col-${colIndex}-${col.aggConfig.id}`].raw === value.raw
);
const filter = createFilter(aggConfigs, table, colIndex, rowIndex, value.raw);
- const notOther = value.raw !== '__other__';
- const notMissing = value.raw !== '__missing__';
- if (Array.isArray(filter)) {
- filter.forEach(f => set(f, 'meta.negate', notOther && notMissing));
- } else {
- set(filter, 'meta.negate', notOther && notMissing);
+
+ if (filter) {
+ const notOther = value.raw !== '__other__';
+ const notMissing = value.raw !== '__missing__';
+ if (Array.isArray(filter)) {
+ filter.forEach(f => set(f, 'meta.negate', notOther && notMissing));
+ } else {
+ set(filter, 'meta.negate', notOther && notMissing);
+ }
+ queryFilter.addFilters(filter);
}
- queryFilter.addFilters(filter);
}),
};
});
diff --git a/src/legacy/core_plugins/data/public/search/expressions/create_filter.test.ts b/src/legacy/core_plugins/data/public/search/expressions/create_filter.test.ts
new file mode 100644
index 000000000000..890ec81778d4
--- /dev/null
+++ b/src/legacy/core_plugins/data/public/search/expressions/create_filter.test.ts
@@ -0,0 +1,130 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import {
+ fieldFormats,
+ FieldFormatsGetConfigFn,
+ esFilters,
+} from '../../../../../../plugins/data/public';
+import { createFilter } from './create_filter';
+import { TabbedTable } from '../tabify';
+import { AggConfigs } from '../aggs/agg_configs';
+import { IAggConfig } from '../aggs/agg_config';
+import { mockDataServices, mockAggTypesRegistry } from '../aggs/test_helpers';
+
+describe('createFilter', () => {
+ let table: TabbedTable;
+ let aggConfig: IAggConfig;
+
+ const typesRegistry = mockAggTypesRegistry();
+
+ const getAggConfigs = (type: string, params: any) => {
+ const field = {
+ name: 'bytes',
+ filterable: true,
+ indexPattern: {
+ id: '1234',
+ },
+ format: new fieldFormats.BytesFormat({}, (() => {}) as FieldFormatsGetConfigFn),
+ };
+
+ const indexPattern = {
+ id: '1234',
+ title: 'logstash-*',
+ fields: {
+ getByName: () => field,
+ filter: () => [field],
+ },
+ } as any;
+
+ return new AggConfigs(
+ indexPattern,
+ [
+ {
+ id: type,
+ type,
+ schema: 'buckets',
+ params,
+ },
+ ],
+ { typesRegistry }
+ );
+ };
+
+  const aggConfigParams: Record<string, any> = {
+ field: 'bytes',
+ interval: 30,
+ otherBucket: true,
+ };
+
+ beforeEach(() => {
+ table = {
+ columns: [
+ {
+ id: '1-1',
+ name: 'test',
+ aggConfig,
+ },
+ ],
+ rows: [
+ {
+ '1-1': '2048',
+ },
+ ],
+ };
+ mockDataServices();
+ });
+
+ test('ignores event when cell value is not provided', async () => {
+ aggConfig = getAggConfigs('histogram', aggConfigParams).aggs[0];
+ const filters = await createFilter([aggConfig], table, 0, -1, null);
+
+ expect(filters).not.toBeDefined();
+ });
+
+ test('handles an event when aggregations type is a terms', async () => {
+ aggConfig = getAggConfigs('terms', aggConfigParams).aggs[0];
+ const filters = await createFilter([aggConfig], table, 0, 0, 'test');
+
+ expect(filters).toBeDefined();
+
+ if (filters) {
+ expect(filters.length).toEqual(1);
+ expect(filters[0].query.match_phrase.bytes).toEqual('2048');
+ }
+ });
+
+ test('handles an event when aggregations type is not terms', async () => {
+ aggConfig = getAggConfigs('histogram', aggConfigParams).aggs[0];
+ const filters = await createFilter([aggConfig], table, 0, 0, 'test');
+
+ expect(filters).toBeDefined();
+
+ if (filters) {
+ expect(filters.length).toEqual(1);
+
+ const [rangeFilter] = filters;
+
+ if (esFilters.isRangeFilter(rangeFilter)) {
+ expect(rangeFilter.range.bytes.gte).toEqual(2048);
+ expect(rangeFilter.range.bytes.lt).toEqual(2078);
+ }
+ }
+ });
+});
diff --git a/src/legacy/core_plugins/data/public/search/expressions/create_filter.js b/src/legacy/core_plugins/data/public/search/expressions/create_filter.ts
similarity index 78%
rename from src/legacy/core_plugins/data/public/search/expressions/create_filter.js
rename to src/legacy/core_plugins/data/public/search/expressions/create_filter.ts
index 3f4028a9b552..77e011932195 100644
--- a/src/legacy/core_plugins/data/public/search/expressions/create_filter.js
+++ b/src/legacy/core_plugins/data/public/search/expressions/create_filter.ts
@@ -17,7 +17,11 @@
* under the License.
*/
-const getOtherBucketFilterTerms = (table, columnIndex, rowIndex) => {
+import { IAggConfig } from 'ui/agg_types';
+import { Filter } from '../../../../../../plugins/data/public';
+import { TabbedTable } from '../tabify';
+
+const getOtherBucketFilterTerms = (table: TabbedTable, columnIndex: number, rowIndex: number) => {
if (rowIndex === -1) {
return [];
}
@@ -41,11 +45,17 @@ const getOtherBucketFilterTerms = (table, columnIndex, rowIndex) => {
];
};
-const createFilter = (aggConfigs, table, columnIndex, rowIndex, cellValue) => {
+const createFilter = (
+ aggConfigs: IAggConfig[],
+ table: TabbedTable,
+ columnIndex: number,
+ rowIndex: number,
+ cellValue: any
+) => {
const column = table.columns[columnIndex];
const aggConfig = aggConfigs[columnIndex];
- let filter = [];
- const value = rowIndex > -1 ? table.rows[rowIndex][column.id] : cellValue;
+ let filter: Filter[] = [];
+ const value: any = rowIndex > -1 ? table.rows[rowIndex][column.id] : cellValue;
if (value === null || value === undefined || !aggConfig.isFilterable()) {
return;
}
@@ -56,6 +66,10 @@ const createFilter = (aggConfigs, table, columnIndex, rowIndex, cellValue) => {
filter = aggConfig.createFilter(value);
}
+ if (!filter) {
+ return;
+ }
+
if (!Array.isArray(filter)) {
filter = [filter];
}
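
Note: createFilter now returns undefined when the cell value is empty or the aggregation is not filterable, so callers such as buildTabularInspectorData guard before calling addFilters. A dependency-free sketch of that guard pattern follows; the Filter shape and helper name are illustrative, not the real data-plugin types.

interface FakeFilter {
  meta: { negate: boolean };
}

// Stand-in for createFilter(): returns undefined when there is nothing to filter on.
function buildFilters(cellValue: unknown): FakeFilter[] | undefined {
  if (cellValue === null || cellValue === undefined) {
    return;
  }
  return [{ meta: { negate: false } }];
}

const filters = buildFilters('2048');
if (filters) {
  // Only negate/add filters once the factory has actually produced them.
  filters.forEach(f => (f.meta.negate = true));
  // queryFilter.addFilters(filters) would go here in the real caller.
}
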
diff --git a/src/legacy/core_plugins/kibana/public/.eslintrc.js b/src/legacy/core_plugins/kibana/public/.eslintrc.js
index b3ee0a8fa7b0..e7171a5291d2 100644
--- a/src/legacy/core_plugins/kibana/public/.eslintrc.js
+++ b/src/legacy/core_plugins/kibana/public/.eslintrc.js
@@ -77,7 +77,7 @@ module.exports = {
{
basePath: path.resolve(__dirname, '../../../../../'),
zones: topLevelRestricedZones.concat(
- buildRestrictedPaths(['visualize', 'discover', 'dashboard', 'devTools', 'home'])
+ buildRestrictedPaths(['visualize', 'discover', 'dashboard', 'devTools'])
),
},
],
diff --git a/src/legacy/core_plugins/kibana/public/dashboard/np_ready/top_nav/__snapshots__/save_modal.test.js.snap b/src/legacy/core_plugins/kibana/public/dashboard/np_ready/top_nav/__snapshots__/save_modal.test.js.snap
index aa9eaf09c7e0..7ac2e2d9dd31 100644
--- a/src/legacy/core_plugins/kibana/public/dashboard/np_ready/top_nav/__snapshots__/save_modal.test.js.snap
+++ b/src/legacy/core_plugins/kibana/public/dashboard/np_ready/top_nav/__snapshots__/save_modal.test.js.snap
@@ -59,6 +59,7 @@ exports[`renders DashboardSaveModal 1`] = `
}
showCopyOnSave={true}
+ showDescription={false}
title="dash title"
/>
`;
diff --git a/src/legacy/core_plugins/kibana/public/dashboard/np_ready/top_nav/save_modal.tsx b/src/legacy/core_plugins/kibana/public/dashboard/np_ready/top_nav/save_modal.tsx
index 026784fcae06..4a4fcb7e1adc 100644
--- a/src/legacy/core_plugins/kibana/public/dashboard/np_ready/top_nav/save_modal.tsx
+++ b/src/legacy/core_plugins/kibana/public/dashboard/np_ready/top_nav/save_modal.tsx
@@ -147,6 +147,7 @@ export class DashboardSaveModal extends React.Component {
showCopyOnSave={this.props.showCopyOnSave}
objectType="dashboard"
options={this.renderDashboardSaveOptions()}
+ showDescription={false}
/>
);
}
diff --git a/src/legacy/core_plugins/kibana/public/discover/build_services.ts b/src/legacy/core_plugins/kibana/public/discover/build_services.ts
index 6b0d2368cc1a..c58307adaf38 100644
--- a/src/legacy/core_plugins/kibana/public/discover/build_services.ts
+++ b/src/legacy/core_plugins/kibana/public/discover/build_services.ts
@@ -33,11 +33,10 @@ import {
import { DiscoverStartPlugins } from './plugin';
import { SharePluginStart } from '../../../../../plugins/share/public';
-import { SavedSearch } from './np_ready/types';
import { DocViewsRegistry } from './np_ready/doc_views/doc_views_registry';
import { ChartsPluginStart } from '../../../../../plugins/charts/public';
import { VisualizationsStart } from '../../../visualizations/public';
-import { createSavedSearchesLoader } from '../../../../../plugins/discover/public';
+import { createSavedSearchesLoader, SavedSearch } from '../../../../../plugins/discover/public';
export interface DiscoverServices {
addBasePath: (path: string) => string;
diff --git a/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/discover.js b/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/discover.js
index 1ac54ad5dabe..bb693ab86022 100644
--- a/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/discover.js
+++ b/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/discover.js
@@ -305,6 +305,7 @@ function discoverController(
defaultMessage:
'Save your Discover search so you can use it in visualizations and dashboards',
})}
+ showDescription={false}
/>
);
showSaveModal(saveModal, core.i18n.Context);
diff --git a/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/doc_table/components/pager/tool_bar_pager_buttons.tsx b/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/doc_table/components/pager/tool_bar_pager_buttons.tsx
index 75a03dfa2a84..6f1cf81e2c54 100644
--- a/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/doc_table/components/pager/tool_bar_pager_buttons.tsx
+++ b/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/doc_table/components/pager/tool_bar_pager_buttons.tsx
@@ -49,7 +49,7 @@ export function ToolBarPagerButtons(props: Props) {
disabled={!props.hasNextPage}
data-test-subj="btnNextPage"
aria-label={i18n.translate(
- 'kbn.ddiscover.docTable.pager.toolbarPagerButtons.nextButtonAriaLabel',
+ 'kbn.discover.docTable.pager.toolbarPagerButtons.nextButtonAriaLabel',
{
defaultMessage: 'Next page in table',
}
diff --git a/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/get_painless_error.ts b/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/get_painless_error.ts
index 2bbeea9d675c..100d9cdac133 100644
--- a/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/get_painless_error.ts
+++ b/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/get_painless_error.ts
@@ -23,9 +23,9 @@ import { get } from 'lodash';
export function getPainlessError(error: Error) {
const rootCause: Array<{ lang: string; script: string }> | undefined = get(
error,
- 'resp.error.root_cause'
+ 'body.attributes.error.root_cause'
);
- const message: string = get(error, 'message');
+ const message: string = get(error, 'body.message');
if (!rootCause) {
return;
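
Note: the lookup paths in getPainlessError change because the search error now arrives wrapped in a response body rather than a raw Elasticsearch response. A hedged TypeScript sketch of the shape the new paths expect follows; only the two paths come from the change itself, while the surrounding wrapper object is an assumption for illustration.

import { get } from 'lodash';

const assumedSearchError = {
  body: {
    message: 'search_phase_execution_exception',
    attributes: {
      error: {
        root_cause: [{ lang: 'painless', script: "doc['bytes'].value" }],
      },
    },
  },
};

// The updated paths resolve against this assumed shape...
const rootCause = get(assumedSearchError, 'body.attributes.error.root_cause');
const message = get(assumedSearchError, 'body.message');
// ...whereas the old 'resp.error.root_cause' and 'message' lookups would now come back undefined.
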
diff --git a/src/legacy/core_plugins/kibana/public/discover/np_ready/embeddable/search_embeddable.ts b/src/legacy/core_plugins/kibana/public/discover/np_ready/embeddable/search_embeddable.ts
index 738a74d93449..0aaf3e7f156c 100644
--- a/src/legacy/core_plugins/kibana/public/discover/np_ready/embeddable/search_embeddable.ts
+++ b/src/legacy/core_plugins/kibana/public/discover/np_ready/embeddable/search_embeddable.ts
@@ -37,7 +37,6 @@ import {
Embeddable,
} from '../../../../../embeddable_api/public/np_ready/public';
import * as columnActions from '../angular/doc_table/actions/columns';
-import { SavedSearch } from '../types';
import searchTemplate from './search_template.html';
import { ISearchEmbeddable, SearchInput, SearchOutput } from './types';
import { SortOrder } from '../angular/doc_table/components/table_header/helpers';
@@ -51,6 +50,7 @@ import {
ISearchSource,
} from '../../kibana_services';
import { SEARCH_EMBEDDABLE_TYPE } from './constants';
+import { SavedSearch } from '../../../../../../../plugins/discover/public';
interface SearchScope extends ng.IScope {
columns?: string[];
diff --git a/src/legacy/core_plugins/kibana/public/discover/np_ready/embeddable/types.ts b/src/legacy/core_plugins/kibana/public/discover/np_ready/embeddable/types.ts
index e7aa390cda85..b20e9b2faf7c 100644
--- a/src/legacy/core_plugins/kibana/public/discover/np_ready/embeddable/types.ts
+++ b/src/legacy/core_plugins/kibana/public/discover/np_ready/embeddable/types.ts
@@ -18,9 +18,9 @@
*/
import { EmbeddableInput, EmbeddableOutput, IEmbeddable } from 'src/plugins/embeddable/public';
-import { SavedSearch } from '../types';
import { SortOrder } from '../angular/doc_table/components/table_header/helpers';
import { Filter, IIndexPattern, TimeRange, Query } from '../../../../../../../plugins/data/public';
+import { SavedSearch } from '../../../../../../../plugins/discover/public';
export interface SearchInput extends EmbeddableInput {
timeRange: TimeRange;
diff --git a/src/legacy/core_plugins/kibana/public/home/_index.scss b/src/legacy/core_plugins/kibana/public/home/_index.scss
deleted file mode 100644
index f42254c1096c..000000000000
--- a/src/legacy/core_plugins/kibana/public/home/_index.scss
+++ /dev/null
@@ -1 +0,0 @@
-@import 'np_ready/components/index';
diff --git a/src/legacy/core_plugins/kibana/public/home/plugin.ts b/src/legacy/core_plugins/kibana/public/home/plugin.ts
deleted file mode 100644
index f8c750cc8028..000000000000
--- a/src/legacy/core_plugins/kibana/public/home/plugin.ts
+++ /dev/null
@@ -1,105 +0,0 @@
-/*
- * Licensed to Elasticsearch B.V. under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch B.V. licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import {
- AppMountParameters,
- CoreSetup,
- CoreStart,
- Plugin,
- PluginInitializerContext,
-} from 'kibana/public';
-
-import { DataPublicPluginStart } from 'src/plugins/data/public';
-import { TelemetryPluginStart } from 'src/plugins/telemetry/public';
-import { setServices } from './kibana_services';
-import { KibanaLegacySetup } from '../../../../../plugins/kibana_legacy/public';
-import { UsageCollectionSetup } from '../../../../../plugins/usage_collection/public';
-import {
- Environment,
- HomePublicPluginStart,
- HomePublicPluginSetup,
-} from '../../../../../plugins/home/public';
-
-export interface HomePluginStartDependencies {
- data: DataPublicPluginStart;
- home: HomePublicPluginStart;
- telemetry?: TelemetryPluginStart;
-}
-
-export interface HomePluginSetupDependencies {
- usageCollection: UsageCollectionSetup;
- kibanaLegacy: KibanaLegacySetup;
- home: HomePublicPluginSetup;
-}
-
-export class HomePlugin implements Plugin {
- private dataStart: DataPublicPluginStart | null = null;
- private savedObjectsClient: any = null;
- private environment: Environment | null = null;
- private featureCatalogue: HomePublicPluginStart['featureCatalogue'] | null = null;
- private telemetry?: TelemetryPluginStart;
-
- constructor(private initializerContext: PluginInitializerContext) {}
-
- setup(
- core: CoreSetup,
- { home, kibanaLegacy, usageCollection }: HomePluginSetupDependencies
- ) {
- kibanaLegacy.registerLegacyApp({
- id: 'home',
- title: 'Home',
- mount: async (params: AppMountParameters) => {
- const trackUiMetric = usageCollection.reportUiStats.bind(usageCollection, 'Kibana_home');
- const [coreStart, { home: homeStart }] = await core.getStartServices();
- setServices({
- trackUiMetric,
- kibanaVersion: this.initializerContext.env.packageInfo.version,
- http: coreStart.http,
- toastNotifications: core.notifications.toasts,
- banners: coreStart.overlays.banners,
- docLinks: coreStart.docLinks,
- savedObjectsClient: this.savedObjectsClient!,
- chrome: coreStart.chrome,
- telemetry: this.telemetry,
- uiSettings: core.uiSettings,
- addBasePath: core.http.basePath.prepend,
- getBasePath: core.http.basePath.get,
- indexPatternService: this.dataStart!.indexPatterns,
- environment: this.environment!,
- config: kibanaLegacy.config,
- homeConfig: home.config,
- tutorialVariables: homeStart.tutorials.get,
- featureCatalogue: this.featureCatalogue!,
- });
- const { renderApp } = await import('./np_ready/application');
- return await renderApp(params.element);
- },
- });
- }
-
- start(core: CoreStart, { data, home, telemetry }: HomePluginStartDependencies) {
- this.environment = home.environment.get();
- this.featureCatalogue = home.featureCatalogue;
- this.dataStart = data;
- this.telemetry = telemetry;
- this.savedObjectsClient = core.savedObjects.client;
- }
-
- stop() {}
-}
diff --git a/src/legacy/core_plugins/kibana/public/home/tutorial_resources/redisenterprise_metrics/screenshot.png b/src/legacy/core_plugins/kibana/public/home/tutorial_resources/redisenterprise_metrics/screenshot.png
new file mode 100644
index 000000000000..cc6ef0ce509e
Binary files /dev/null and b/src/legacy/core_plugins/kibana/public/home/tutorial_resources/redisenterprise_metrics/screenshot.png differ
diff --git a/src/legacy/core_plugins/kibana/public/index.scss b/src/legacy/core_plugins/kibana/public/index.scss
index 3eef84c32db7..547f44652cf2 100644
--- a/src/legacy/core_plugins/kibana/public/index.scss
+++ b/src/legacy/core_plugins/kibana/public/index.scss
@@ -13,15 +13,15 @@
// Discover styles
@import 'discover/index';
-// Home styles
-@import './home/index';
-
// Visualize styles
@import './visualize/index';
// Has to come after visualize because of some
// bad cascading in the Editor layout
@import 'src/legacy/ui/public/vis/index';
+// Home styles
+@import '../../../../plugins/home/public/application/index';
+
// Management styles
@import './management/index';
diff --git a/src/legacy/core_plugins/kibana/public/kibana.js b/src/legacy/core_plugins/kibana/public/kibana.js
index a9f32949628e..04eaf2cbe267 100644
--- a/src/legacy/core_plugins/kibana/public/kibana.js
+++ b/src/legacy/core_plugins/kibana/public/kibana.js
@@ -42,7 +42,6 @@ import 'uiExports/shareContextMenuExtensions';
import 'uiExports/interpreter';
import 'ui/autoload/all';
-import './home';
import './discover/legacy';
import './visualize/legacy';
import './dashboard/legacy';
diff --git a/src/legacy/core_plugins/kibana/public/visualize/np_ready/editor/_editor.scss b/src/legacy/core_plugins/kibana/public/visualize/np_ready/editor/_editor.scss
index 2f48ecc322fe..3a542cacc44b 100644
--- a/src/legacy/core_plugins/kibana/public/visualize/np_ready/editor/_editor.scss
+++ b/src/legacy/core_plugins/kibana/public/visualize/np_ready/editor/_editor.scss
@@ -22,10 +22,6 @@ a tilemap in an iframe: https://github.com/elastic/kibana/issues/16457 */
}
}
-.visEditor__linkedMessage {
- padding: $euiSizeS;
-}
-
.visEditor__content {
@include flex-parent();
width: 100%;
diff --git a/src/legacy/core_plugins/kibana/public/visualize/np_ready/editor/editor.html b/src/legacy/core_plugins/kibana/public/visualize/np_ready/editor/editor.html
index 4979d9dc89a0..9dbb05ea95b4 100644
--- a/src/legacy/core_plugins/kibana/public/visualize/np_ready/editor/editor.html
+++ b/src/legacy/core_plugins/kibana/public/visualize/np_ready/editor/editor.html
@@ -1,28 +1,4 @@
-
-