diff --git a/src/legacy/core_plugins/telemetry/server/collection_manager.ts b/src/legacy/core_plugins/telemetry/server/collection_manager.ts index 933c249cd7279..0394dea343adf 100644 --- a/src/legacy/core_plugins/telemetry/server/collection_manager.ts +++ b/src/legacy/core_plugins/telemetry/server/collection_manager.ts @@ -41,8 +41,8 @@ export interface StatsCollectionConfig { usageCollection: UsageCollectionSetup; callCluster: CallCluster; server: any; - start: string; - end: string; + start: string | number; + end: string | number; } export type StatsGetterConfig = UnencryptedStatsGetterConfig | EncryptedStatsGetterConfig; @@ -193,7 +193,7 @@ export class TelemetryCollectionManager { } } catch (err) { statsCollectionConfig.server.log( - ['debu', 'telemetry', 'collection'], + ['debug', 'telemetry', 'collection'], `Failed to collect any usage with registered collections.` ); // swallow error to try next collection; diff --git a/src/plugins/usage_collection/server/collector/collector.ts b/src/plugins/usage_collection/server/collector/collector.ts index e102dc2a64ee8..91951aa2f3edf 100644 --- a/src/plugins/usage_collection/server/collector/collector.ts +++ b/src/plugins/usage_collection/server/collector/collector.ts @@ -85,7 +85,7 @@ export class Collector { protected defaultFormatterForBulkUpload(result: T) { return { type: this.type, - payload: result, + payload: (result as unknown) as U, }; } } diff --git a/src/plugins/usage_collection/server/collector/usage_collector.ts b/src/plugins/usage_collection/server/collector/usage_collector.ts index 05c701bd3abf4..bf861a94fccff 100644 --- a/src/plugins/usage_collection/server/collector/usage_collector.ts +++ b/src/plugins/usage_collection/server/collector/usage_collector.ts @@ -24,14 +24,14 @@ export class UsageCollector ex T, U > { - protected defaultUsageFormatterForBulkUpload(result: T) { + protected defaultFormatterForBulkUpload(result: T) { return { type: KIBANA_STATS_TYPE, - payload: { + payload: ({ usage: { [this.type]: result, }, - }, + } as unknown) as U, }; } } diff --git a/x-pack/legacy/plugins/monitoring/server/telemetry_collection/__tests__/fixtures/beats_stats_results.json b/x-pack/legacy/plugins/monitoring/server/telemetry_collection/__mocks__/fixtures/beats_stats_results.json similarity index 100% rename from x-pack/legacy/plugins/monitoring/server/telemetry_collection/__tests__/fixtures/beats_stats_results.json rename to x-pack/legacy/plugins/monitoring/server/telemetry_collection/__mocks__/fixtures/beats_stats_results.json diff --git a/x-pack/legacy/plugins/monitoring/server/telemetry_collection/__tests__/create_query.js b/x-pack/legacy/plugins/monitoring/server/telemetry_collection/create_query.test.ts similarity index 89% rename from x-pack/legacy/plugins/monitoring/server/telemetry_collection/__tests__/create_query.js rename to x-pack/legacy/plugins/monitoring/server/telemetry_collection/create_query.test.ts index 63c779ab4b520..a85d084f83d83 100644 --- a/x-pack/legacy/plugins/monitoring/server/telemetry_collection/__tests__/create_query.js +++ b/x-pack/legacy/plugins/monitoring/server/telemetry_collection/create_query.test.ts @@ -4,14 +4,13 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import expect from '@kbn/expect'; import { set } from 'lodash'; -import { createTypeFilter, createQuery } from '../create_query.js'; +import { createTypeFilter, createQuery } from './create_query'; describe('Create Type Filter', () => { it('Builds a type filter syntax', () => { const typeFilter = createTypeFilter('my_type'); - expect(typeFilter).to.eql({ + expect(typeFilter).toStrictEqual({ bool: { should: [{ term: { _type: 'my_type' } }, { term: { type: 'my_type' } }] }, }); }); @@ -36,7 +35,7 @@ describe('Create Query', () => { ], }, }; - expect(result).to.be.eql(expected); + expect(result).toStrictEqual(expected); }); it('Uses `type` option to add type filter with minimal fields', () => { @@ -47,7 +46,7 @@ describe('Create Query', () => { { term: { _type: 'test-type-yay' } }, { term: { type: 'test-type-yay' } }, ]); - expect(result).to.be.eql(expected); + expect(result).toStrictEqual(expected); }); it('Uses `type` option to add type filter with all other option fields', () => { @@ -77,6 +76,6 @@ describe('Create Query', () => { ], }, }; - expect(result).to.be.eql(expected); + expect(result).toStrictEqual(expected); }); }); diff --git a/x-pack/legacy/plugins/monitoring/server/telemetry_collection/create_query.js b/x-pack/legacy/plugins/monitoring/server/telemetry_collection/create_query.ts similarity index 80% rename from x-pack/legacy/plugins/monitoring/server/telemetry_collection/create_query.js rename to x-pack/legacy/plugins/monitoring/server/telemetry_collection/create_query.ts index 6fcbb677b307d..9a801094458bd 100644 --- a/x-pack/legacy/plugins/monitoring/server/telemetry_collection/create_query.js +++ b/x-pack/legacy/plugins/monitoring/server/telemetry_collection/create_query.ts @@ -4,7 +4,6 @@ * you may not use this file except in compliance with the Elastic License. */ -import { defaults } from 'lodash'; import moment from 'moment'; /* @@ -14,7 +13,7 @@ import moment from 'moment'; * TODO: this backwards compatibility helper will only be supported for 5.x-6. This * function should be removed in 7.0 */ -export const createTypeFilter = type => { +export const createTypeFilter = (type: string) => { return { bool: { should: [{ term: { _type: type } }, { term: { type } }], @@ -22,6 +21,18 @@ export const createTypeFilter = type => { }; }; +export interface QueryOptions { + type?: string; + filters?: object[]; + clusterUuid?: string; + start?: string | number; + end?: string | number; +} + +interface RangeFilter { + range: { [key: string]: { format?: string; gte?: string | number; lte?: string | number } }; +} + /* * Creates the boilerplace for querying monitoring data, including filling in * start time and end time, and injecting additional filters. 
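As an aside on the pattern this module follows, here is a minimal, self-contained sketch of assembling such a monitoring query against the new QueryOptions shape. The helper name buildQuery, the timestamp field, and the epoch_millis format are illustrative assumptions only, not the actual createQuery implementation (which also folds in the legacy type filter).

interface QueryOptions {
  type?: string;
  filters?: object[];
  clusterUuid?: string;
  start?: string | number;
  end?: string | number;
}

// Hypothetical helper; shows how `filters = []` in the destructuring replaces the old
// lodash `defaults(options, { filters: [] })` call.
function buildQuery({ clusterUuid, start, end, filters = [] }: QueryOptions) {
  const clusterUuidFilter = clusterUuid ? { term: { cluster_uuid: clusterUuid } } : undefined;
  const timeRangeFilter =
    start || end
      ? {
          range: {
            // field name and format are assumptions for this sketch
            timestamp: {
              format: 'epoch_millis',
              ...(start ? { gte: start } : {}),
              ...(end ? { lte: end } : {}),
            },
          },
        }
      : undefined;
  return {
    bool: { filter: [clusterUuidFilter, timeRangeFilter, ...filters].filter(Boolean) },
  };
}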
@@ -36,9 +47,8 @@ export const createTypeFilter = type => { * @param {Date} options.start - numeric timestamp (optional) * @param {Date} options.end - numeric timestamp (optional) */ -export function createQuery(options) { - options = defaults(options, { filters: [] }); - const { type, clusterUuid, start, end, filters } = options; +export function createQuery(options: QueryOptions) { + const { type, clusterUuid, start, end, filters = [] } = options; let typeFilter; if (type) { @@ -50,7 +60,7 @@ export function createQuery(options) { clusterUuidFilter = { term: { cluster_uuid: clusterUuid } }; } - let timeRangeFilter; + let timeRangeFilter: RangeFilter | undefined; if (start || end) { timeRangeFilter = { range: { diff --git a/x-pack/legacy/plugins/monitoring/server/telemetry_collection/__tests__/get_all_stats.js b/x-pack/legacy/plugins/monitoring/server/telemetry_collection/get_all_stats.test.ts similarity index 78% rename from x-pack/legacy/plugins/monitoring/server/telemetry_collection/__tests__/get_all_stats.js rename to x-pack/legacy/plugins/monitoring/server/telemetry_collection/get_all_stats.test.ts index f27fde50242f4..470642f9dd8a3 100644 --- a/x-pack/legacy/plugins/monitoring/server/telemetry_collection/__tests__/get_all_stats.js +++ b/x-pack/legacy/plugins/monitoring/server/telemetry_collection/get_all_stats.test.ts @@ -4,12 +4,13 @@ * you may not use this file except in compliance with the Elastic License. */ -import expect from '@kbn/expect'; import sinon from 'sinon'; -import { addStackStats, getAllStats, handleAllStats } from '../get_all_stats'; +import { addStackStats, getAllStats, handleAllStats } from './get_all_stats'; +import { ESClusterStats } from './get_es_stats'; +import { KibanaStats } from './get_kibana_stats'; +import { ClustersHighLevelStats } from './get_high_level_stats'; -// FAILING: https://github.com/elastic/kibana/issues/51371 -describe.skip('get_all_stats', () => { +describe('get_all_stats', () => { const size = 123; const start = 0; const end = 1; @@ -100,9 +101,6 @@ describe.skip('get_all_stats', () => { describe('getAllStats', () => { it('returns clusters', async () => { - const clusterUuidsResponse = { - aggregations: { cluster_uuids: { buckets: [{ key: 'a' }] } }, - }; const esStatsResponse = { hits: { hits: [{ _id: 'a', _source: { cluster_uuid: 'a' } }], @@ -177,15 +175,25 @@ describe.skip('get_all_stats', () => { callCluster .withArgs('search') .onCall(0) - .returns(Promise.resolve(clusterUuidsResponse)) - .onCall(1) .returns(Promise.resolve(esStatsResponse)) - .onCall(2) + .onCall(1) .returns(Promise.resolve(kibanaStatsResponse)) + .onCall(2) + .returns(Promise.resolve(logstashStatsResponse)) .onCall(3) - .returns(Promise.resolve(logstashStatsResponse)); + .returns(Promise.resolve({})) // Beats stats + .onCall(4) + .returns(Promise.resolve({})); // Beats state - expect(await getAllStats({ callCluster, server, start, end })).to.eql(allClusters); + expect( + await getAllStats([{ clusterUuid: 'a' }], { + callCluster: callCluster as any, + usageCollection: {} as any, + server, + start, + end, + }) + ).toStrictEqual(allClusters); }); it('returns empty clusters', async () => { @@ -195,21 +203,33 @@ describe.skip('get_all_stats', () => { callCluster.withArgs('search').returns(Promise.resolve(clusterUuidsResponse)); - expect(await getAllStats({ callCluster, server, start, end })).to.eql([]); + expect( + await getAllStats([], { + callCluster: callCluster as any, + usageCollection: {} as any, + server, + start, + end, + }) + ).toStrictEqual([]); }); 
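The getAllStats tests in this file drive the stubbed callCluster through a fixed sequence of search responses; a minimal, runnable illustration of that sinon pattern follows (the canned responses are placeholders, not the PR's fixtures):

import sinon from 'sinon';

const callCluster = sinon.stub();
callCluster
  .withArgs('search')
  .onCall(0)
  .returns(Promise.resolve({ hits: { hits: [{ _id: 'a', _source: { cluster_uuid: 'a' } }] } })) // first search: ES stats
  .onCall(1)
  .returns(Promise.resolve({ hits: { hits: [] } })); // second search: Kibana stats

async function demo() {
  const esStats = await callCluster('search', {});
  const kibanaStats = await callCluster('search', {});
  console.log(esStats.hits.hits.length, kibanaStats.hits.hits.length); // 1 0
}
demo();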
}); describe('handleAllStats', () => { it('handles response', () => { - const clusters = handleAllStats(esClusters, { kibana: kibanaStats, logstash: logstashStats }); + const clusters = handleAllStats(esClusters as ESClusterStats[], { + kibana: (kibanaStats as unknown) as KibanaStats, + logstash: (logstashStats as unknown) as ClustersHighLevelStats, + beats: {}, + }); - expect(clusters).to.eql(expectedClusters); + expect(clusters).toStrictEqual(expectedClusters); }); it('handles no clusters response', () => { - const clusters = handleAllStats([], {}); + const clusters = handleAllStats([], {} as any); - expect(clusters).to.have.length(0); + expect(clusters).toHaveLength(0); }); }); @@ -230,9 +250,9 @@ describe.skip('get_all_stats', () => { }, }; - addStackStats(cluster, stats, 'xyz'); + addStackStats(cluster as ESClusterStats, stats, 'xyz'); - expect(cluster.stack_stats.xyz).to.be(stats.a); + expect((cluster as any).stack_stats.xyz).toStrictEqual(stats.a); }); }); }); diff --git a/x-pack/legacy/plugins/monitoring/server/telemetry_collection/get_all_stats.js b/x-pack/legacy/plugins/monitoring/server/telemetry_collection/get_all_stats.ts similarity index 66% rename from x-pack/legacy/plugins/monitoring/server/telemetry_collection/get_all_stats.js rename to x-pack/legacy/plugins/monitoring/server/telemetry_collection/get_all_stats.ts index 87281a19141ae..aa5e937387daf 100644 --- a/x-pack/legacy/plugins/monitoring/server/telemetry_collection/get_all_stats.js +++ b/x-pack/legacy/plugins/monitoring/server/telemetry_collection/get_all_stats.ts @@ -6,22 +6,26 @@ import { get, set, merge } from 'lodash'; +import { StatsGetter } from 'src/legacy/core_plugins/telemetry/server/collection_manager'; import { LOGSTASH_SYSTEM_ID, KIBANA_SYSTEM_ID, BEATS_SYSTEM_ID } from '../../common/constants'; -import { getElasticsearchStats } from './get_es_stats'; -import { getKibanaStats } from './get_kibana_stats'; +import { getElasticsearchStats, ESClusterStats } from './get_es_stats'; +import { getKibanaStats, KibanaStats } from './get_kibana_stats'; import { getBeatsStats } from './get_beats_stats'; import { getHighLevelStats } from './get_high_level_stats'; +type PromiseReturnType any> = ReturnType extends Promise + ? R + : T; + /** * Get statistics for all products joined by Elasticsearch cluster. + * Returns the array of clusters joined with the Kibana and Logstash instances. * - * @param {Object} server The Kibana server instance used to call ES as the internal user - * @param {function} callCluster The callWithRequest or callWithInternalUser handler - * @param {Date} start The starting range to request data - * @param {Date} end The ending range to request data - * @return {Promise} The array of clusters joined with the Kibana and Logstash instances. 
*/ -export async function getAllStats(clustersDetails, { server, callCluster, start, end }) { +export const getAllStats: StatsGetter = async ( + clustersDetails, + { server, callCluster, start, end } +) => { const clusterUuids = clustersDetails.map(clusterDetails => clusterDetails.clusterUuid); const [esClusters, kibana, logstash, beats] = await Promise.all([ @@ -32,7 +36,7 @@ export async function getAllStats(clustersDetails, { server, callCluster, start, ]); return handleAllStats(esClusters, { kibana, logstash, beats }); -} +}; /** * Combine the statistics from the stack to create "cluster" stats that associate all products together based on the cluster @@ -41,9 +45,21 @@ export async function getAllStats(clustersDetails, { server, callCluster, start, * @param {Array} clusters The Elasticsearch clusters * @param {Object} kibana The Kibana instances keyed by Cluster UUID * @param {Object} logstash The Logstash nodes keyed by Cluster UUID - * @return {Array} The clusters joined with the Kibana and Logstash instances under each cluster's {@code stack_stats}. + * + * Returns the clusters joined with the Kibana and Logstash instances under each cluster's {@code stack_stats}. */ -export function handleAllStats(clusters, { kibana, logstash, beats }) { +export function handleAllStats( + clusters: ESClusterStats[], + { + kibana, + logstash, + beats, + }: { + kibana: KibanaStats; + logstash: PromiseReturnType; + beats: PromiseReturnType; + } +) { return clusters.map(cluster => { // if they are using Kibana or Logstash, then add it to the cluster details under cluster.stack_stats addStackStats(cluster, kibana, KIBANA_SYSTEM_ID); @@ -62,8 +78,12 @@ export function handleAllStats(clusters, { kibana, logstash, beats }) { * @param {Object} allProductStats Product stats, keyed by Cluster UUID * @param {String} product The product name being added (e.g., 'kibana' or 'logstash') */ -export function addStackStats(cluster, allProductStats, product) { - const productStats = get(allProductStats, cluster.cluster_uuid); +export function addStackStats( + cluster: ESClusterStats & { stack_stats?: { [product: string]: K } }, + allProductStats: T, + product: string +) { + const productStats = allProductStats[cluster.cluster_uuid]; // Don't add it if they're not using (or configured to report stats) this product for this cluster if (productStats) { @@ -75,12 +95,20 @@ export function addStackStats(cluster, allProductStats, product) { } } -export function mergeXPackStats(cluster, allProductStats, path, product) { +export function mergeXPackStats( + cluster: ESClusterStats & { stack_stats?: { xpack?: { [product: string]: unknown } } }, + allProductStats: T, + path: string, + product: string +) { const productStats = get(allProductStats, cluster.cluster_uuid + '.' 
+ path); if (productStats || productStats === 0) { - if (!get(cluster, 'stack_stats.xpack')) { - set(cluster, 'stack_stats.xpack', {}); + if (!cluster.stack_stats) { + cluster.stack_stats = {}; + } + if (!cluster.stack_stats.xpack) { + cluster.stack_stats.xpack = {}; } const mergeStats = {}; diff --git a/x-pack/legacy/plugins/monitoring/server/telemetry_collection/__tests__/get_beats_stats.js b/x-pack/legacy/plugins/monitoring/server/telemetry_collection/get_beats_stats.test.ts similarity index 75% rename from x-pack/legacy/plugins/monitoring/server/telemetry_collection/__tests__/get_beats_stats.js rename to x-pack/legacy/plugins/monitoring/server/telemetry_collection/get_beats_stats.test.ts index 522be71555fba..30888e1af3f53 100644 --- a/x-pack/legacy/plugins/monitoring/server/telemetry_collection/__tests__/get_beats_stats.js +++ b/x-pack/legacy/plugins/monitoring/server/telemetry_collection/get_beats_stats.test.ts @@ -4,10 +4,10 @@ * you may not use this file except in compliance with the Elastic License. */ -import { fetchBeatsStats, processResults } from '../get_beats_stats'; +import { fetchBeatsStats, processResults } from './get_beats_stats'; import sinon from 'sinon'; -import expect from '@kbn/expect'; -import beatsStatsResultSet from './fixtures/beats_stats_results'; +// eslint-disable-next-line @typescript-eslint/no-var-requires +const beatsStatsResultSet = require('./__mocks__/fixtures/beats_stats_results'); const getBaseOptions = () => ({ clusters: {}, @@ -22,8 +22,8 @@ describe('Get Beats Stats', () => { const clusterUuids = ['aCluster', 'bCluster', 'cCluster']; const start = 100; const end = 200; - let server; - let callCluster; + let server = { config: () => ({ get: sinon.stub() }) }; + let callCluster = sinon.stub(); beforeEach(() => { const getStub = { get: sinon.stub() }; @@ -32,34 +32,34 @@ describe('Get Beats Stats', () => { callCluster = sinon.stub(); }); - it('should set `from: 0, to: 10000` in the query', () => { - fetchBeatsStats(server, callCluster, clusterUuids, start, end); + it('should set `from: 0, to: 10000` in the query', async () => { + await fetchBeatsStats(server, callCluster, clusterUuids, start, end, {} as any); const { args } = callCluster.firstCall; const [api, { body }] = args; - expect(api).to.be('search'); - expect(body.from).to.be(0); - expect(body.size).to.be(10000); + expect(api).toEqual('search'); + expect(body.from).toEqual(0); + expect(body.size).toEqual(10000); }); - it('should set `from: 10000, from: 10000` in the query', () => { - fetchBeatsStats(server, callCluster, clusterUuids, start, end, { page: 1 }); + it('should set `from: 10000, from: 10000` in the query', async () => { + await fetchBeatsStats(server, callCluster, clusterUuids, start, end, { page: 1 } as any); const { args } = callCluster.firstCall; const [api, { body }] = args; - expect(api).to.be('search'); - expect(body.from).to.be(10000); - expect(body.size).to.be(10000); + expect(api).toEqual('search'); + expect(body.from).toEqual(10000); + expect(body.size).toEqual(10000); }); - it('should set `from: 20000, from: 10000` in the query', () => { - fetchBeatsStats(server, callCluster, clusterUuids, start, end, { page: 2 }); + it('should set `from: 20000, from: 10000` in the query', async () => { + await fetchBeatsStats(server, callCluster, clusterUuids, start, end, { page: 2 } as any); const { args } = callCluster.firstCall; const [api, { body }] = args; - expect(api).to.be('search'); - expect(body.from).to.be(20000); - expect(body.size).to.be(10000); + 
expect(api).toEqual('search'); + expect(body.from).toEqual(20000); + expect(body.size).toEqual(10000); }); }); @@ -68,9 +68,9 @@ describe('Get Beats Stats', () => { const resultsEmpty = undefined; const options = getBaseOptions(); - processResults(resultsEmpty, options); + processResults(resultsEmpty as any, options); - expect(options.clusters).to.eql({}); + expect(options.clusters).toStrictEqual({}); }); it('should summarize single result with some missing fields', () => { @@ -92,9 +92,9 @@ describe('Get Beats Stats', () => { }; const options = getBaseOptions(); - processResults(results, options); + processResults(results as any, options); - expect(options.clusters).to.eql({ + expect(options.clusters).toStrictEqual({ FlV4ckTxQ0a78hmBkzzc9A: { count: 1, versions: {}, @@ -122,11 +122,11 @@ describe('Get Beats Stats', () => { const options = getBaseOptions(); // beatsStatsResultSet is an array of many small query results - beatsStatsResultSet.forEach(results => { + beatsStatsResultSet.forEach((results: any) => { processResults(results, options); }); - expect(options.clusters).to.eql({ + expect(options.clusters).toStrictEqual({ W7hppdX7R229Oy3KQbZrTw: { count: 5, versions: { '7.0.0-alpha1': 5 }, diff --git a/x-pack/legacy/plugins/monitoring/server/telemetry_collection/get_beats_stats.js b/x-pack/legacy/plugins/monitoring/server/telemetry_collection/get_beats_stats.ts similarity index 60% rename from x-pack/legacy/plugins/monitoring/server/telemetry_collection/get_beats_stats.js rename to x-pack/legacy/plugins/monitoring/server/telemetry_collection/get_beats_stats.ts index 5722228b60207..975a3bfee6333 100644 --- a/x-pack/legacy/plugins/monitoring/server/telemetry_collection/get_beats_stats.js +++ b/x-pack/legacy/plugins/monitoring/server/telemetry_collection/get_beats_stats.ts @@ -5,6 +5,8 @@ */ import { get } from 'lodash'; +import { StatsCollectionConfig } from 'src/legacy/core_plugins/telemetry/server/collection_manager'; +import { SearchResponse } from 'elasticsearch'; import { createQuery } from './create_query'; import { INDEX_PATTERN_BEATS } from '../../common/constants'; @@ -33,6 +35,107 @@ const getBaseStats = () => ({ }, }); +export interface BeatsStats { + cluster_uuid: string; + beats_stats?: { + beat?: { + version?: string; + type?: string; + host?: string; + }; + metrics?: { + libbeat?: { + output?: { + type?: string; + }; + pipeline?: { + events?: { + published?: number; + }; + }; + }; + }; + }; + beats_state?: { + beat?: { + type?: string; + }; + state?: { + input?: { + names: string[]; + count: number; + }; + module?: { + names: string[]; + count: number; + }; + heartbeat?: HeartbeatBase; + functionbeat?: { + functions?: { + count?: number; + }; + }; + host?: { + architecture: string; + os: { platform: string }; + }; + }; + }; +} + +interface HeartbeatBase { + monitors: number; + endpoints: number; + // I have to add the '| number' bit because otherwise TS complains about 'monitors' and 'endpoints' not being of type HeartbeatBase + [key: string]: HeartbeatBase | number | undefined; +} + +export interface BeatsBaseStats { + // stats + versions: { [version: string]: number }; + types: { [type: string]: number }; + outputs: { [outputType: string]: number }; + count: number; + eventsPublished: number; + hosts: number; + // state + input: { + count: number; + names: string[]; + }; + module: { + count: number; + names: string[]; + }; + architecture: { + count: number; + architectures: BeatsArchitecture[]; + }; + heartbeat?: HeartbeatBase; + functionbeat?: { + functions: { + count: 
number; + }; + }; +} + +export interface BeatsProcessOptions { + clusters: { [clusterUuid: string]: BeatsBaseStats }; // the result object to be built up + clusterHostSets: { [clusterUuid: string]: Set }; // passed to processResults for tracking state in the results generation + clusterInputSets: { [clusterUuid: string]: Set }; // passed to processResults for tracking state in the results generation + clusterModuleSets: { [clusterUuid: string]: Set }; // passed to processResults for tracking state in the results generation + clusterArchitectureMaps: { + // passed to processResults for tracking state in the results generation + [clusterUuid: string]: Map; + }; +} + +export interface BeatsArchitecture { + name: string; + architecture: string; + count: number; +} + /* * Update a clusters object with processed beat stats * @param {Array} results - array of Beats docs from ES @@ -41,12 +144,18 @@ const getBaseStats = () => ({ * @param {Object} clusterModuleSets - the object keyed by cluster UUIDs to count the unique modules */ export function processResults( - results = [], - { clusters, clusterHostSets, clusterInputSets, clusterModuleSets, clusterArchitectureMaps } + results: SearchResponse, + { + clusters, + clusterHostSets, + clusterInputSets, + clusterModuleSets, + clusterArchitectureMaps, + }: BeatsProcessOptions ) { - const currHits = get(results, 'hits.hits', []); + const currHits = results?.hits?.hits || []; currHits.forEach(hit => { - const clusterUuid = get(hit, '_source.cluster_uuid'); + const clusterUuid = hit._source.cluster_uuid; if (clusters[clusterUuid] === undefined) { clusters[clusterUuid] = getBaseStats(); clusterHostSets[clusterUuid] = new Set(); @@ -57,30 +166,30 @@ export function processResults( const processBeatsStatsResults = () => { const { versions, types, outputs } = clusters[clusterUuid]; - const thisVersion = get(hit, '_source.beats_stats.beat.version'); + const thisVersion = hit._source.beats_stats?.beat?.version; if (thisVersion !== undefined) { const thisVersionAccum = versions[thisVersion] || 0; versions[thisVersion] = thisVersionAccum + 1; } - const thisType = get(hit, '_source.beats_stats.beat.type'); + const thisType = hit._source.beats_stats?.beat?.type; if (thisType !== undefined) { const thisTypeAccum = types[thisType] || 0; types[thisType] = thisTypeAccum + 1; } - const thisOutput = get(hit, '_source.beats_stats.metrics.libbeat.output.type'); + const thisOutput = hit._source.beats_stats?.metrics?.libbeat?.output?.type; if (thisOutput !== undefined) { const thisOutputAccum = outputs[thisOutput] || 0; outputs[thisOutput] = thisOutputAccum + 1; } - const thisEvents = get(hit, '_source.beats_stats.metrics.libbeat.pipeline.events.published'); + const thisEvents = hit._source.beats_stats?.metrics?.libbeat?.pipeline?.events?.published; if (thisEvents !== undefined) { clusters[clusterUuid].eventsPublished += thisEvents; } - const thisHost = get(hit, '_source.beats_stats.beat.host'); + const thisHost = hit._source.beats_stats?.beat?.host; if (thisHost !== undefined) { const hostsMap = clusterHostSets[clusterUuid]; hostsMap.add(thisHost); @@ -89,7 +198,7 @@ export function processResults( }; const processBeatsStateResults = () => { - const stateInput = get(hit, '_source.beats_state.state.input'); + const stateInput = hit._source.beats_state?.state?.input; if (stateInput !== undefined) { const inputSet = clusterInputSets[clusterUuid]; stateInput.names.forEach(name => inputSet.add(name)); @@ -97,8 +206,8 @@ export function processResults( 
clusters[clusterUuid].input.count += stateInput.count; } - const stateModule = get(hit, '_source.beats_state.state.module'); - const statsType = get(hit, '_source.beats_state.beat.type'); + const stateModule = hit._source.beats_state?.state?.module; + const statsType = hit._source.beats_state?.beat?.type; if (stateModule !== undefined) { const moduleSet = clusterModuleSets[clusterUuid]; stateModule.names.forEach(name => moduleSet.add(statsType + '.' + name)); @@ -106,7 +215,7 @@ export function processResults( clusters[clusterUuid].module.count += stateModule.count; } - const heartbeatState = get(hit, '_source.beats_state.state.heartbeat'); + const heartbeatState = hit._source.beats_state?.state?.heartbeat; if (heartbeatState !== undefined) { if (!clusters[clusterUuid].hasOwnProperty('heartbeat')) { clusters[clusterUuid].heartbeat = { @@ -114,7 +223,7 @@ export function processResults( endpoints: 0, }; } - const clusterHb = clusters[clusterUuid].heartbeat; + const clusterHb = clusters[clusterUuid].heartbeat!; clusterHb.monitors += heartbeatState.monitors; clusterHb.endpoints += heartbeatState.endpoints; @@ -133,12 +242,12 @@ export function processResults( endpoints: 0, }; } - clusterHb[proto].monitors += val.monitors; - clusterHb[proto].endpoints += val.endpoints; + (clusterHb[proto] as HeartbeatBase).monitors += val.monitors; + (clusterHb[proto] as HeartbeatBase).endpoints += val.endpoints; } } - const functionbeatState = get(hit, '_source.beats_state.state.functionbeat'); + const functionbeatState = hit._source.beats_state?.state?.functionbeat; if (functionbeatState !== undefined) { if (!clusters[clusterUuid].hasOwnProperty('functionbeat')) { clusters[clusterUuid].functionbeat = { @@ -148,14 +257,11 @@ export function processResults( }; } - clusters[clusterUuid].functionbeat.functions.count += get( - functionbeatState, - 'functions.count', - 0 - ); + clusters[clusterUuid].functionbeat!.functions.count += + functionbeatState.functions?.count || 0; } - const stateHost = get(hit, '_source.beats_state.state.host'); + const stateHost = hit._source.beats_state?.state?.host; if (stateHost !== undefined) { const hostMap = clusterArchitectureMaps[clusterUuid]; const hostKey = `${stateHost.architecture}/${stateHost.os.platform}`; @@ -198,14 +304,14 @@ export function processResults( * @return {Promise} */ async function fetchBeatsByType( - server, - callCluster, - clusterUuids, - start, - end, - { page = 0, ...options } = {}, - type -) { + server: StatsCollectionConfig['server'], + callCluster: StatsCollectionConfig['callCluster'], + clusterUuids: string[], + start: StatsCollectionConfig['start'], + end: StatsCollectionConfig['end'], + { page = 0, ...options }: { page?: number } & BeatsProcessOptions, + type: string +): Promise { const params = { index: INDEX_PATTERN_BEATS, ignoreUnavailable: true, @@ -232,7 +338,7 @@ async function fetchBeatsByType( { bool: { must_not: { term: { [`${type}.beat.type`]: 'apm-server' } }, - must: { term: { type: type } }, + must: { term: { type } }, }, }, ], @@ -244,8 +350,8 @@ async function fetchBeatsByType( }, }; - const results = await callCluster('search', params); - const hitsLength = get(results, 'hits.hits.length', 0); + const results = await callCluster>('search', params); + const hitsLength = results?.hits?.hits.length || 0; if (hitsLength > 0) { // further augment the clusters object with more stats processResults(results, options); @@ -265,20 +371,40 @@ async function fetchBeatsByType( return Promise.resolve(); } -export async function 
fetchBeatsStats(...args) { - return fetchBeatsByType(...args, 'beats_stats'); +export async function fetchBeatsStats( + server: StatsCollectionConfig['server'], + callCluster: StatsCollectionConfig['callCluster'], + clusterUuids: string[], + start: StatsCollectionConfig['start'], + end: StatsCollectionConfig['end'], + options: { page?: number } & BeatsProcessOptions +) { + return fetchBeatsByType(server, callCluster, clusterUuids, start, end, options, 'beats_stats'); } -export async function fetchBeatsStates(...args) { - return fetchBeatsByType(...args, 'beats_state'); +export async function fetchBeatsStates( + server: StatsCollectionConfig['server'], + callCluster: StatsCollectionConfig['callCluster'], + clusterUuids: string[], + start: StatsCollectionConfig['start'], + end: StatsCollectionConfig['end'], + options: { page?: number } & BeatsProcessOptions +) { + return fetchBeatsByType(server, callCluster, clusterUuids, start, end, options, 'beats_state'); } /* * Call the function for fetching and summarizing beats stats * @return {Object} - Beats stats in an object keyed by the cluster UUIDs */ -export async function getBeatsStats(server, callCluster, clusterUuids, start, end) { - const options = { +export async function getBeatsStats( + server: StatsCollectionConfig['server'], + callCluster: StatsCollectionConfig['callCluster'], + clusterUuids: string[], + start: StatsCollectionConfig['start'], + end: StatsCollectionConfig['end'] +) { + const options: BeatsProcessOptions = { clusters: {}, // the result object to be built up clusterHostSets: {}, // passed to processResults for tracking state in the results generation clusterInputSets: {}, // passed to processResults for tracking state in the results generation diff --git a/x-pack/legacy/plugins/monitoring/server/telemetry_collection/__tests__/get_cluster_uuids.js b/x-pack/legacy/plugins/monitoring/server/telemetry_collection/get_cluster_uuids.test.ts similarity index 77% rename from x-pack/legacy/plugins/monitoring/server/telemetry_collection/__tests__/get_cluster_uuids.js rename to x-pack/legacy/plugins/monitoring/server/telemetry_collection/get_cluster_uuids.test.ts index 1f62c677dbb21..4f952b9dec6da 100644 --- a/x-pack/legacy/plugins/monitoring/server/telemetry_collection/__tests__/get_cluster_uuids.js +++ b/x-pack/legacy/plugins/monitoring/server/telemetry_collection/get_cluster_uuids.test.ts @@ -4,13 +4,12 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import expect from '@kbn/expect'; import sinon from 'sinon'; import { getClusterUuids, fetchClusterUuids, handleClusterUuidsResponse, -} from '../get_cluster_uuids'; +} from './get_cluster_uuids'; describe('get_cluster_uuids', () => { const callCluster = sinon.stub(); @@ -35,20 +34,24 @@ describe('get_cluster_uuids', () => { const expectedUuids = response.aggregations.cluster_uuids.buckets .map(bucket => bucket.key) .map(expectedUuid => ({ clusterUuid: expectedUuid })); - const start = new Date(); - const end = new Date(); + const start = new Date().toISOString(); + const end = new Date().toISOString(); describe('getClusterUuids', () => { it('returns cluster UUIDs', async () => { callCluster.withArgs('search').returns(Promise.resolve(response)); - expect(await getClusterUuids({ server, callCluster, start, end })).to.eql(expectedUuids); + expect( + await getClusterUuids({ server, callCluster, start, end, usageCollection: {} as any }) + ).toStrictEqual(expectedUuids); }); }); describe('fetchClusterUuids', () => { it('searches for clusters', async () => { callCluster.returns(Promise.resolve(response)); - expect(await fetchClusterUuids({ server, callCluster, start, end })).to.be(response); + expect( + await fetchClusterUuids({ server, callCluster, start, end, usageCollection: {} as any }) + ).toStrictEqual(response); }); }); @@ -56,12 +59,12 @@ describe('get_cluster_uuids', () => { // filterPath makes it easy to ignore anything unexpected because it will come back empty it('handles unexpected response', () => { const clusterUuids = handleClusterUuidsResponse({}); - expect(clusterUuids.length).to.be(0); + expect(clusterUuids.length).toStrictEqual(0); }); it('handles valid response', () => { const clusterUuids = handleClusterUuidsResponse(response); - expect(clusterUuids).to.eql(expectedUuids); + expect(clusterUuids).toStrictEqual(expectedUuids); }); it('handles no buckets response', () => { @@ -73,7 +76,7 @@ describe('get_cluster_uuids', () => { }, }); - expect(clusterUuids.length).to.be(0); + expect(clusterUuids.length).toStrictEqual(0); }); }); }); diff --git a/x-pack/legacy/plugins/monitoring/server/telemetry_collection/__tests__/get_es_stats.js b/x-pack/legacy/plugins/monitoring/server/telemetry_collection/get_es_stats.test.ts similarity index 82% rename from x-pack/legacy/plugins/monitoring/server/telemetry_collection/__tests__/get_es_stats.js rename to x-pack/legacy/plugins/monitoring/server/telemetry_collection/get_es_stats.test.ts index 536e831640fad..70ed2240b47d4 100644 --- a/x-pack/legacy/plugins/monitoring/server/telemetry_collection/__tests__/get_es_stats.js +++ b/x-pack/legacy/plugins/monitoring/server/telemetry_collection/get_es_stats.test.ts @@ -4,13 +4,12 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import expect from '@kbn/expect'; import sinon from 'sinon'; import { fetchElasticsearchStats, getElasticsearchStats, handleElasticsearchStats, -} from '../get_es_stats'; +} from './get_es_stats'; describe('get_es_stats', () => { const callWith = sinon.stub(); @@ -41,7 +40,9 @@ describe('get_es_stats', () => { it('returns clusters', async () => { callWith.withArgs('search').returns(Promise.resolve(response)); - expect(await getElasticsearchStats(server, callWith, clusterUuids)).to.eql(expectedClusters); + expect(await getElasticsearchStats(server, callWith, clusterUuids)).toStrictEqual( + expectedClusters + ); }); }); @@ -49,28 +50,28 @@ describe('get_es_stats', () => { it('searches for clusters', async () => { callWith.returns(response); - expect(await fetchElasticsearchStats(server, callWith, clusterUuids)).to.be(response); + expect(await fetchElasticsearchStats(server, callWith, clusterUuids)).toStrictEqual(response); }); }); describe('handleElasticsearchStats', () => { // filterPath makes it easy to ignore anything unexpected because it will come back empty it('handles unexpected response', () => { - const clusters = handleElasticsearchStats({}); + const clusters = handleElasticsearchStats({} as any); - expect(clusters.length).to.be(0); + expect(clusters.length).toStrictEqual(0); }); it('handles valid response', () => { - const clusters = handleElasticsearchStats(response); + const clusters = handleElasticsearchStats(response as any); - expect(clusters).to.eql(expectedClusters); + expect(clusters).toStrictEqual(expectedClusters); }); it('handles no hits response', () => { - const clusters = handleElasticsearchStats({ hits: { hits: [] } }); + const clusters = handleElasticsearchStats({ hits: { hits: [] } } as any); - expect(clusters.length).to.be(0); + expect(clusters.length).toStrictEqual(0); }); }); }); diff --git a/x-pack/legacy/plugins/monitoring/server/telemetry_collection/get_es_stats.js b/x-pack/legacy/plugins/monitoring/server/telemetry_collection/get_es_stats.ts similarity index 71% rename from x-pack/legacy/plugins/monitoring/server/telemetry_collection/get_es_stats.js rename to x-pack/legacy/plugins/monitoring/server/telemetry_collection/get_es_stats.ts index 52d34258b5fa4..f0ae1163d3f52 100644 --- a/x-pack/legacy/plugins/monitoring/server/telemetry_collection/get_es_stats.js +++ b/x-pack/legacy/plugins/monitoring/server/telemetry_collection/get_es_stats.ts @@ -4,7 +4,8 @@ * you may not use this file except in compliance with the Elastic License. */ -import { get } from 'lodash'; +import { StatsCollectionConfig } from 'src/legacy/core_plugins/telemetry/server/collection_manager'; +import { SearchResponse } from 'elasticsearch'; import { INDEX_PATTERN_ELASTICSEARCH } from '../../common/constants'; /** @@ -13,10 +14,14 @@ import { INDEX_PATTERN_ELASTICSEARCH } from '../../common/constants'; * @param {Object} server The server instance * @param {function} callCluster The callWithRequest or callWithInternalUser handler * @param {Array} clusterUuids The string Cluster UUIDs to fetch details for - * @return {Promise} Array of the Elasticsearch clusters. 
*/ -export function getElasticsearchStats(server, callCluster, clusterUuids) { - return fetchElasticsearchStats(server, callCluster, clusterUuids).then(handleElasticsearchStats); +export async function getElasticsearchStats( + server: StatsCollectionConfig['server'], + callCluster: StatsCollectionConfig['callCluster'], + clusterUuids: string[] +) { + const response = await fetchElasticsearchStats(server, callCluster, clusterUuids); + return handleElasticsearchStats(response); } /** @@ -25,9 +30,14 @@ export function getElasticsearchStats(server, callCluster, clusterUuids) { * @param {Object} server The server instance * @param {function} callCluster The callWithRequest or callWithInternalUser handler * @param {Array} clusterUuids Cluster UUIDs to limit the request against - * @return {Promise} Response for the aggregations to fetch details for the product. + * + * Returns the response for the aggregations to fetch details for the product. */ -export function fetchElasticsearchStats(server, callCluster, clusterUuids) { +export function fetchElasticsearchStats( + server: StatsCollectionConfig['server'], + callCluster: StatsCollectionConfig['callCluster'], + clusterUuids: string[] +) { const config = server.config(); const params = { index: INDEX_PATTERN_ELASTICSEARCH, @@ -67,13 +77,16 @@ export function fetchElasticsearchStats(server, callCluster, clusterUuids) { return callCluster('search', params); } +export interface ESClusterStats { + cluster_uuid: string; + type: 'cluster_stats'; +} + /** * Extract the cluster stats for each cluster. - * - * @return {Array} The Elasticsearch clusters. */ -export function handleElasticsearchStats(response) { - const clusters = get(response, 'hits.hits', []); +export function handleElasticsearchStats(response: SearchResponse) { + const clusters = response.hits?.hits || []; return clusters.map(cluster => cluster._source); } diff --git a/x-pack/legacy/plugins/monitoring/server/telemetry_collection/__tests__/get_high_level_stats.js b/x-pack/legacy/plugins/monitoring/server/telemetry_collection/get_high_level_stats.test.ts similarity index 91% rename from x-pack/legacy/plugins/monitoring/server/telemetry_collection/__tests__/get_high_level_stats.js rename to x-pack/legacy/plugins/monitoring/server/telemetry_collection/get_high_level_stats.test.ts index 1c1f8dc888d01..76c80e2eb3d37 100644 --- a/x-pack/legacy/plugins/monitoring/server/telemetry_collection/__tests__/get_high_level_stats.js +++ b/x-pack/legacy/plugins/monitoring/server/telemetry_collection/get_high_level_stats.test.ts @@ -4,13 +4,12 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import expect from '@kbn/expect'; import sinon from 'sinon'; import { fetchHighLevelStats, getHighLevelStats, handleHighLevelStatsResponse, -} from '../get_high_level_stats'; +} from './get_high_level_stats'; describe('get_high_level_stats', () => { const callWith = sinon.stub(); @@ -244,9 +243,9 @@ describe('get_high_level_stats', () => { it('returns clusters', async () => { callWith.withArgs('search').returns(Promise.resolve(response)); - expect(await getHighLevelStats(server, callWith, clusterUuids, start, end, product)).to.eql( - expectedClusters - ); + expect( + await getHighLevelStats(server, callWith, clusterUuids, start, end, product) + ).toStrictEqual(expectedClusters); }); }); @@ -254,30 +253,30 @@ describe('get_high_level_stats', () => { it('searches for clusters', async () => { callWith.returns(Promise.resolve(response)); - expect(await fetchHighLevelStats(server, callWith, clusterUuids, start, end, product)).to.be( - response - ); + expect( + await fetchHighLevelStats(server, callWith, clusterUuids, start, end, product) + ).toStrictEqual(response); }); }); describe('handleHighLevelStatsResponse', () => { // filterPath makes it easy to ignore anything unexpected because it will come back empty it('handles unexpected response', () => { - const clusters = handleHighLevelStatsResponse({}, product); + const clusters = handleHighLevelStatsResponse({} as any, product); - expect(clusters).to.eql({}); + expect(clusters).toStrictEqual({}); }); it('handles valid response', () => { - const clusters = handleHighLevelStatsResponse(response, product); + const clusters = handleHighLevelStatsResponse(response as any, product); - expect(clusters).to.eql(expectedClusters); + expect(clusters).toStrictEqual(expectedClusters); }); it('handles no hits response', () => { - const clusters = handleHighLevelStatsResponse({ hits: { hits: [] } }, product); + const clusters = handleHighLevelStatsResponse({ hits: { hits: [] } } as any, product); - expect(clusters).to.eql({}); + expect(clusters).toStrictEqual({}); }); }); }); diff --git a/x-pack/legacy/plugins/monitoring/server/telemetry_collection/get_high_level_stats.js b/x-pack/legacy/plugins/monitoring/server/telemetry_collection/get_high_level_stats.ts similarity index 66% rename from x-pack/legacy/plugins/monitoring/server/telemetry_collection/get_high_level_stats.js rename to x-pack/legacy/plugins/monitoring/server/telemetry_collection/get_high_level_stats.ts index b87f632308e4d..f67f80940d9f4 100644 --- a/x-pack/legacy/plugins/monitoring/server/telemetry_collection/get_high_level_stats.js +++ b/x-pack/legacy/plugins/monitoring/server/telemetry_collection/get_high_level_stats.ts @@ -5,6 +5,8 @@ */ import { get } from 'lodash'; +import { StatsCollectionConfig } from 'src/legacy/core_plugins/telemetry/server/collection_manager'; +import { SearchResponse } from 'elasticsearch'; import { createQuery } from './create_query'; import { INDEX_PATTERN_KIBANA, @@ -17,13 +19,40 @@ import { TELEMETRY_QUERY_SOURCE, } from '../../common/constants'; +export interface ClusterCloudStats { + name: string; + count: number; + vms: number; + regions: Array<{ region: string; count: number }>; + vm_types: Array<{ vm_type: string; count: number }>; + zones: Array<{ zone: string; count: number }>; +} + +export interface ClusterHighLevelStats { + count: number; + versions: Array<{ version: string; count: number }>; + os: { + platforms: Array<{ platform: string; count: number }>; + platformReleases: Array<{ platformRelease: string; count: number }>; + distros: Array<{ 
distro: string; count: number }>; + distroReleases: Array<{ distroRelease: string; count: number }>; + }; + cloud: ClusterCloudStats[] | undefined; +} + +export interface ClustersHighLevelStats { + [clusterUuid: string]: ClusterHighLevelStats; +} + +type Counter = Map; + /** * Update a counter associated with the {@code key}. * * @param {Map} map Map to update the counter for the {@code key}. * @param {String} key The key to increment a counter for. */ -function incrementByKey(map, key) { +function incrementByKey(map: Counter, key?: string) { if (!key) { return; } @@ -37,13 +66,29 @@ function incrementByKey(map, key) { map.set(key, count + 1); } +interface InternalCloudMap { + count: number; + unique: Set; + vm_type: Counter; + region: Counter; + zone: Counter; +} + +interface CloudEntry { + id: string; + name: string; + vm_type: string; + region: string; + zone: string; +} + /** * Help to reduce Cloud metrics into unidentifiable metrics (e.g., count IDs so that they can be dropped). * * @param {Map} clouds Existing cloud data by cloud name. * @param {Object} cloud Cloud object loaded from Elasticsearch data. */ -function reduceCloudForCluster(cloudMap, cloud) { +function reduceCloudForCluster(cloudMap: Map, cloud?: CloudEntry) { if (!cloud) { return; } @@ -74,22 +119,48 @@ function reduceCloudForCluster(cloudMap, cloud) { incrementByKey(cloudByName.zone, cloud.zone); } +interface InternalClusterMap { + count: number; + versions: Counter; + cloudMap: Map; + os: { + platforms: Counter; + platformReleases: Counter; + distros: Counter; + distroReleases: Counter; + }; +} + +interface OSData { + platform?: string; + platformRelease?: string; + distro?: string; + distroRelease?: string; +} + /** * Group the instances (hits) by clusters. * * @param {Array} instances Array of hits from the request containing the cluster UUID and version. 
* @param {String} product The product to limit too ('kibana', 'logstash', 'beats') - * @return {Map} A map of the Cluster UUID to an {@link Object} containing the {@code count} and {@code versions} {@link Map} + * + * Returns a map of the Cluster UUID to an {@link Object} containing the {@code count} and {@code versions} {@link Map} */ -function groupInstancesByCluster(instances, product) { - const clusterMap = new Map(); +function groupInstancesByCluster( + instances: Array<{ _source: T }>, + product: string +) { + const clusterMap = new Map(); // hits are sorted arbitrarily by product UUID instances.map(instance => { - const clusterUuid = get(instance, '_source.cluster_uuid'); - const version = get(instance, `_source.${product}_stats.${product}.version`); - const cloud = get(instance, `_source.${product}_stats.cloud`); - const os = get(instance, `_source.${product}_stats.os`); + const clusterUuid = instance._source.cluster_uuid; + const version: string | undefined = get( + instance, + `_source.${product}_stats.${product}.version` + ); + const cloud: CloudEntry | undefined = get(instance, `_source.${product}_stats.cloud`); + const os: OSData | undefined = get(instance, `_source.${product}_stats.os`); if (clusterUuid) { let cluster = clusterMap.get(clusterUuid); @@ -134,16 +205,12 @@ function groupInstancesByCluster(instances, product) { * { [keyName]: key1, count: value1 }, * { [keyName]: key2, count: value2 } * ] - * - * @param {Map} map [description] - * @param {String} keyName [description] - * @return {Array} [description] */ -function mapToList(map, keyName) { - const list = []; +function mapToList(map: Map, keyName: string): T[] { + const list: T[] = []; for (const [key, count] of map) { - list.push({ [keyName]: key, count }); + list.push(({ [keyName]: key, count } as unknown) as T); } return list; @@ -154,7 +221,7 @@ function mapToList(map, keyName) { * * @param {*} product The product id, which should be in the constants file */ -function getIndexPatternForStackProduct(product) { +function getIndexPatternForStackProduct(product: string) { switch (product) { case KIBANA_SYSTEM_ID: return INDEX_PATTERN_KIBANA; @@ -176,23 +243,41 @@ function getIndexPatternForStackProduct(product) { * @param {Date} start Start time to limit the stats * @param {Date} end End time to limit the stats * @param {String} product The product to limit too ('kibana', 'logstash', 'beats') - * @return {Promise} Object keyed by the cluster UUIDs to make grouping easier. + * + * Returns an object keyed by the cluster UUIDs to make grouping easier. 
*/ -export function getHighLevelStats(server, callCluster, clusterUuids, start, end, product) { - return fetchHighLevelStats( +export async function getHighLevelStats( + server: StatsCollectionConfig['server'], + callCluster: StatsCollectionConfig['callCluster'], + clusterUuids: string[], + start: StatsCollectionConfig['start'], + end: StatsCollectionConfig['end'], + product: string +) { + const response = await fetchHighLevelStats( server, callCluster, clusterUuids, start, end, product - ).then(response => handleHighLevelStatsResponse(response, product)); + ); + return handleHighLevelStatsResponse(response, product); } -export async function fetchHighLevelStats(server, callCluster, clusterUuids, start, end, product) { +export async function fetchHighLevelStats< + T extends { cluster_uuid?: string } = { cluster_uuid?: string } +>( + server: StatsCollectionConfig['server'], + callCluster: StatsCollectionConfig['callCluster'], + clusterUuids: string[], + start: StatsCollectionConfig['start'] | undefined, + end: StatsCollectionConfig['end'] | undefined, + product: string +): Promise> { const config = server.config(); const isKibanaIndex = product === KIBANA_SYSTEM_ID; - const filters = [{ terms: { cluster_uuid: clusterUuids } }]; + const filters: object[] = [{ terms: { cluster_uuid: clusterUuids } }]; // we should supply this from a parameter in the future so that this remains generic if (isKibanaIndex) { @@ -257,13 +342,17 @@ export async function fetchHighLevelStats(server, callCluster, clusterUuids, sta * * @param {Object} response The response from the aggregation * @param {String} product The product to limit too ('kibana', 'logstash', 'beats') - * @return {Object} Object keyed by the cluster UUIDs to make grouping easier. + * + * Returns an object keyed by the cluster UUIDs to make grouping easier. 
*/ -export function handleHighLevelStatsResponse(response, product) { - const instances = get(response, 'hits.hits', []); +export function handleHighLevelStatsResponse( + response: SearchResponse<{ cluster_uuid?: string }>, + product: string +) { + const instances = response.hits?.hits || []; const clusterMap = groupInstancesByCluster(instances, product); - const clusters = {}; + const clusters: ClustersHighLevelStats = {}; for (const [clusterUuid, cluster] of clusterMap) { // it's unlikely this will be an array of more than one, but it is one just incase @@ -271,14 +360,15 @@ export function handleHighLevelStatsResponse(response, product) { // remap the clouds (most likely singular or empty) for (const [name, cloud] of cluster.cloudMap) { - clouds.push({ + const cloudStats: ClusterCloudStats = { name, count: cloud.count, vms: cloud.unique.size, regions: mapToList(cloud.region, 'region'), vm_types: mapToList(cloud.vm_type, 'vm_type'), zones: mapToList(cloud.zone, 'zone'), - }); + }; + clouds.push(cloudStats); } // map stats for product by cluster so that it can be joined with ES cluster stats diff --git a/x-pack/legacy/plugins/monitoring/server/telemetry_collection/__tests__/get_kibana_stats.js b/x-pack/legacy/plugins/monitoring/server/telemetry_collection/get_kibana_stats.test.ts similarity index 79% rename from x-pack/legacy/plugins/monitoring/server/telemetry_collection/__tests__/get_kibana_stats.js rename to x-pack/legacy/plugins/monitoring/server/telemetry_collection/get_kibana_stats.test.ts index 98e0afa28fba3..0092e848c827b 100644 --- a/x-pack/legacy/plugins/monitoring/server/telemetry_collection/__tests__/get_kibana_stats.js +++ b/x-pack/legacy/plugins/monitoring/server/telemetry_collection/get_kibana_stats.test.ts @@ -4,19 +4,25 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { getUsageStats, combineStats, rollUpTotals, ensureTimeSpan } from '../get_kibana_stats'; -import expect from '@kbn/expect'; +import { + getUsageStats, + combineStats, + rollUpTotals, + ensureTimeSpan, + KibanaUsageStats, +} from './get_kibana_stats'; +import { SearchResponse } from 'elasticsearch'; describe('Get Kibana Stats', () => { describe('Make a map of usage stats for each cluster', () => { - it('passes through if there are no kibana instances', () => { - const rawStats = {}; - expect(getUsageStats(rawStats)).to.eql({}); + test('passes through if there are no kibana instances', () => { + const rawStats = {} as SearchResponse; + expect(getUsageStats(rawStats)).toStrictEqual({}); }); describe('with single cluster', () => { describe('single index', () => { - it('for a single unused instance', () => { + test('for a single unused instance', () => { const rawStats = { hits: { hits: [ @@ -39,7 +45,7 @@ describe('Get Kibana Stats', () => { }, ], }, - }; + } as any; const expected = { clusterone: { dashboard: { total: 0 }, @@ -53,10 +59,10 @@ describe('Get Kibana Stats', () => { }, }; - expect(getUsageStats(rawStats)).to.eql(expected); + expect(getUsageStats(rawStats)).toStrictEqual(expected); }); - it('for a single instance of active usage', () => { + test('for a single instance of active usage', () => { const rawStats = { hits: { hits: [ @@ -79,7 +85,7 @@ describe('Get Kibana Stats', () => { }, ], }, - }; + } as any; const expected = { clusterone: { dashboard: { total: 1 }, @@ -92,11 +98,49 @@ describe('Get Kibana Stats', () => { plugins: {}, }, }; + expect(getUsageStats(rawStats)).toStrictEqual(expected); + }); - expect(getUsageStats(rawStats)).to.eql(expected); + test('it merges the plugin stats and kibana', () => { + const rawStats = { + hits: { + hits: [ + { + _source: { + cluster_uuid: 'clusterone', + kibana_stats: { + kibana: { version: '7.0.0-alpha1-test02' }, + usage: { + dashboard: { total: 1 }, + visualization: { total: 3 }, + search: { total: 1 }, + index_pattern: { total: 1 }, + graph_workspace: { total: 1 }, + timelion_sheet: { total: 1 }, + index: '.kibana-test-01', + }, + }, + }, + }, + ], + }, + } as any; + const expected = { + clusterone: { + dashboard: { total: 1 }, + visualization: { total: 3 }, + search: { total: 1 }, + index_pattern: { total: 1 }, + graph_workspace: { total: 1 }, + timelion_sheet: { total: 1 }, + indices: 1, + plugins: {}, + }, + }; + expect(getUsageStats(rawStats)).toStrictEqual(expected); }); - it('flattens x-pack stats', () => { + test('flattens x-pack stats', () => { const rawStats = { hits: { hits: [ @@ -126,8 +170,9 @@ describe('Get Kibana Stats', () => { }, ], }, - }; - expect(getUsageStats(rawStats)).to.eql({ + } as any; + + expect(getUsageStats(rawStats)).toStrictEqual({ clusterone: { dashboard: { total: 1 }, visualization: { total: 3 }, @@ -143,7 +188,7 @@ describe('Get Kibana Stats', () => { }); describe('separate indices', () => { - it('with one unused instance', () => { + test('with one unused instance', () => { const rawStats = { hits: { hits: [ @@ -200,7 +245,7 @@ describe('Get Kibana Stats', () => { }, ], }, - }; + } as any; const expected = { clusterone: { dashboard: { total: 1 }, @@ -213,11 +258,10 @@ describe('Get Kibana Stats', () => { plugins: {}, }, }; - - expect(getUsageStats(rawStats)).to.eql(expected); + expect(getUsageStats(rawStats)).toStrictEqual(expected); }); - it('with all actively used instances', () => { + test('with all actively used instances', () => { const rawStats = { hits: { hits: [ @@ -274,7 +318,7 @@ 
describe('Get Kibana Stats', () => { }, ], }, - }; + } as any; const expected = { clusterone: { dashboard: { total: 4 }, @@ -287,15 +331,14 @@ describe('Get Kibana Stats', () => { plugins: {}, }, }; - - expect(getUsageStats(rawStats)).to.eql(expected); + expect(getUsageStats(rawStats)).toStrictEqual(expected); }); }); }); describe('with multiple clusters', () => { describe('separate indices', () => { - it('with all actively used instances', () => { + test('with all actively used instances', () => { const rawStats = { hits: { hits: [ @@ -369,7 +412,7 @@ describe('Get Kibana Stats', () => { }, ], }, - }; + } as any; const expected = { clusterone: { dashboard: { total: 4 }, @@ -392,29 +435,28 @@ describe('Get Kibana Stats', () => { plugins: {}, }, }; - - expect(getUsageStats(rawStats)).to.eql(expected); + expect(getUsageStats(rawStats)).toStrictEqual(expected); }); }); }); }); describe('Combines usage stats with high-level stats', () => { - it('passes through if there are no kibana instances', () => { + test('passes through if there are no kibana instances', () => { const highLevelStats = {}; const usageStats = {}; - expect(combineStats(highLevelStats, usageStats)).to.eql({}); + expect(combineStats(highLevelStats, usageStats)).toStrictEqual({}); }); describe('adds usage stats to high-level stats', () => { - it('for a single cluster', () => { + test('for a single cluster', () => { const highLevelStats = { clusterone: { count: 2, versions: [{ count: 2, version: '7.0.0-alpha1-test12' }], }, - }; + } as any; const usageStats = { clusterone: { dashboard: { total: 1 }, @@ -428,7 +470,7 @@ describe('Get Kibana Stats', () => { }, }; - expect(combineStats(highLevelStats, usageStats)).to.eql({ + expect(combineStats(highLevelStats, usageStats)).toStrictEqual({ clusterone: { count: 2, dashboard: { total: 1 }, @@ -444,7 +486,7 @@ describe('Get Kibana Stats', () => { }); }); - it('for multiple single clusters', () => { + test('for multiple single clusters', () => { const highLevelStats = { clusterone: { count: 2, @@ -454,7 +496,7 @@ describe('Get Kibana Stats', () => { count: 1, versions: [{ count: 1, version: '7.0.0-alpha1-test14' }], }, - }; + } as any; const usageStats = { clusterone: { dashboard: { total: 1 }, @@ -478,7 +520,7 @@ describe('Get Kibana Stats', () => { }, }; - expect(combineStats(highLevelStats, usageStats)).to.eql({ + expect(combineStats(highLevelStats, usageStats)).toStrictEqual({ clusterone: { count: 2, dashboard: { total: 1 }, @@ -508,16 +550,16 @@ describe('Get Kibana Stats', () => { }); describe('if usage stats are empty', () => { - it('returns just high-level stats', () => { + test('returns just high-level stats', () => { const highLevelStats = { clusterone: { count: 2, versions: [{ count: 2, version: '7.0.0-alpha1-test12' }], }, - }; + } as any; const usageStats = undefined; - expect(combineStats(highLevelStats, usageStats)).to.eql({ + expect(combineStats(highLevelStats, usageStats)).toStrictEqual({ clusterone: { count: 2, versions: [{ count: 2, version: '7.0.0-alpha1-test12' }], @@ -528,64 +570,64 @@ describe('Get Kibana Stats', () => { }); describe('Rolls up stats when there are multiple Kibana indices for a cluster', () => { - it('by combining the `total` fields where previous was 0', () => { - const rollUp = { my_field: { total: 0 } }; + test('by combining the `total` fields where previous was 0', () => { + const rollUp = { my_field: { total: 0 } } as any; const addOn = { my_field: { total: 1 } }; - expect(rollUpTotals(rollUp, addOn, 'my_field')).to.eql({ total: 1 }); + 
+      expect(rollUpTotals(rollUp, addOn, 'my_field' as any)).toStrictEqual({ total: 1 });
     });

-    it('by combining the `total` fields with > 1 for previous and addOn', () => {
-      const rollUp = { my_field: { total: 1 } };
+    test('by combining the `total` fields with > 1 for previous and addOn', () => {
+      const rollUp = { my_field: { total: 1 } } as any;
       const addOn = { my_field: { total: 3 } };
-      expect(rollUpTotals(rollUp, addOn, 'my_field')).to.eql({ total: 4 });
+      expect(rollUpTotals(rollUp, addOn, 'my_field' as any)).toStrictEqual({ total: 4 });
     });
   });

   describe('Ensure minimum time difference', () => {
-    it('should return start and end as is when none are provided', () => {
+    test('should return start and end as is when none are provided', () => {
       const { start, end } = ensureTimeSpan(undefined, undefined);
-      expect(start).to.be.undefined;
-      expect(end).to.be.undefined;
+      expect(start).toBe(undefined);
+      expect(end).toBe(undefined);
     });

-    it('should return start and end as is when only end is provided', () => {
+    test('should return start and end as is when only end is provided', () => {
       const initialEnd = '2020-01-01T00:00:00Z';
       const { start, end } = ensureTimeSpan(undefined, initialEnd);
-      expect(start).to.be.undefined;
-      expect(end).to.be.equal(initialEnd);
+      expect(start).toBe(undefined);
+      expect(end).toEqual(initialEnd);
     });

-    it('should return start and end as is because they are already 24h away', () => {
+    test('should return start and end as is because they are already 24h away', () => {
       const initialStart = '2019-12-31T00:00:00Z';
       const initialEnd = '2020-01-01T00:00:00Z';
       const { start, end } = ensureTimeSpan(initialStart, initialEnd);
-      expect(start).to.be.equal(initialStart);
-      expect(end).to.be.equal(initialEnd);
+      expect(start).toEqual(initialStart);
+      expect(end).toEqual(initialEnd);
     });

-    it('should return start and end as is because they are already 24h+ away', () => {
+    test('should return start and end as is because they are already 24h+ away', () => {
       const initialStart = '2019-12-31T00:00:00Z';
       const initialEnd = '2020-01-01T01:00:00Z';
       const { start, end } = ensureTimeSpan(initialStart, initialEnd);
-      expect(start).to.be.equal(initialStart);
-      expect(end).to.be.equal(initialEnd);
+      expect(start).toEqual(initialStart);
+      expect(end).toEqual(initialEnd);
     });

-    it('should modify start to a date 24h before end', () => {
+    test('should modify start to a date 24h before end', () => {
       const initialStart = '2020-01-01T00:00:00.000Z';
       const initialEnd = '2020-01-01T01:00:00.000Z';
       const { start, end } = ensureTimeSpan(initialStart, initialEnd);
-      expect(start).to.be.equal('2019-12-31T01:00:00.000Z');
-      expect(end).to.be.equal(initialEnd);
+      expect(start).toEqual('2019-12-31T01:00:00.000Z');
+      expect(end).toEqual(initialEnd);
     });

-    it('should modify start to a date 24h before now', () => {
+    test('should modify start to a date 24h before now', () => {
       const initialStart = new Date().toISOString();
       const { start, end } = ensureTimeSpan(initialStart, undefined);
-      expect(start).to.not.be.equal(initialStart);
-      expect(end).to.be.undefined;
+      expect(start).not.toBe(initialStart);
+      expect(end).toBe(undefined);
     });
   });
 });
diff --git a/x-pack/legacy/plugins/monitoring/server/telemetry_collection/get_kibana_stats.js b/x-pack/legacy/plugins/monitoring/server/telemetry_collection/get_kibana_stats.ts
similarity index 58%
rename from x-pack/legacy/plugins/monitoring/server/telemetry_collection/get_kibana_stats.js
rename to x-pack/legacy/plugins/monitoring/server/telemetry_collection/get_kibana_stats.ts
index 1e22507c5baf4..e2ad64ce04c6b 100644
--- a/x-pack/legacy/plugins/monitoring/server/telemetry_collection/get_kibana_stats.js
+++ b/x-pack/legacy/plugins/monitoring/server/telemetry_collection/get_kibana_stats.ts
@@ -5,30 +5,78 @@
  */

 import moment from 'moment';
-import { get, isEmpty, omit } from 'lodash';
+import { isEmpty } from 'lodash';
+import { StatsCollectionConfig } from 'src/legacy/core_plugins/telemetry/server/collection_manager';
+import { SearchResponse } from 'elasticsearch';
 import { KIBANA_SYSTEM_ID, TELEMETRY_COLLECTION_INTERVAL } from '../../common/constants';
-import { fetchHighLevelStats, handleHighLevelStatsResponse } from './get_high_level_stats';
+import {
+  fetchHighLevelStats,
+  handleHighLevelStatsResponse,
+  ClustersHighLevelStats,
+  ClusterHighLevelStats,
+} from './get_high_level_stats';

-export function rollUpTotals(rolledUp, addOn, field) {
-  const rolledUpTotal = get(rolledUp, [field, 'total'], 0);
-  const addOnTotal = get(addOn, [field, 'total'], 0);
+export function rollUpTotals(
+  rolledUp: ClusterUsageStats,
+  addOn: { [key: string]: { total?: number } | undefined },
+  field: Exclude<keyof ClusterUsageStats, 'indices' | 'plugins'>
+) {
+  const rolledUpTotal = rolledUp[field]?.total || 0;
+  const addOnTotal = addOn[field]?.total || 0;
   return { total: rolledUpTotal + addOnTotal };
 }

-export function rollUpIndices(rolledUp) {
+export function rollUpIndices(rolledUp: ClusterUsageStats) {
   return rolledUp.indices + 1;
 }

+export interface KibanaUsageStats {
+  cluster_uuid: string;
+  kibana_stats?: {
+    usage?: {
+      index?: string;
+    } & {
+      [plugin: string]: {
+        total: number;
+      };
+    };
+  };
+}
+
+export interface ClusterUsageStats {
+  dashboard?: { total: number };
+  visualization?: { total: number };
+  search?: { total: number };
+  index_pattern?: { total: number };
+  graph_workspace?: { total: number };
+  timelion_sheet?: { total: number };
+  indices: number;
+  plugins?: {
+    xpack?: unknown;
+    [plugin: string]: unknown;
+  };
+}
+
+export interface ClustersUsageStats {
+  [clusterUuid: string]: ClusterUsageStats | undefined;
+}
+
+export interface KibanaClusterStat extends Partial<ClusterUsageStats>, ClusterHighLevelStats {}
+
+export interface KibanaStats {
+  [clusterUuid: string]: KibanaClusterStat;
+}
+
 /*
  * @param {Object} rawStats
  */
-export function getUsageStats(rawStats) {
+export function getUsageStats(rawStats: SearchResponse<KibanaUsageStats>) {
   const clusterIndexCache = new Set();
-  const rawStatsHits = get(rawStats, 'hits.hits', []);
+  const rawStatsHits = rawStats.hits?.hits || [];

   // get usage stats per cluster / .kibana index
   return rawStatsHits.reduce((accum, currInstance) => {
-    const clusterUuid = get(currInstance, '_source.cluster_uuid');
-    const currUsage = get(currInstance, '_source.kibana_stats.usage', {});
+    const clusterUuid = currInstance._source.cluster_uuid;
+    const currUsage = currInstance._source.kibana_stats?.usage || {};
     const clusterIndexCombination = clusterUuid + currUsage.index;

     // return early if usage data is empty or if this cluster/index has already been processed
@@ -39,7 +87,7 @@ export function getUsageStats(rawStats) {

     // Get the stats that were read from any number of different .kibana indices in the cluster,
     // roll them up into cluster-wide totals
-    const rolledUpStats = get(accum, clusterUuid, { indices: 0 });
+    const rolledUpStats = accum[clusterUuid] || { indices: 0 };
     const stats = {
       dashboard: rollUpTotals(rolledUpStats, currUsage, 'dashboard'),
       visualization: rollUpTotals(rolledUpStats, currUsage, 'visualization'),
@@ -51,21 +99,22 @@ export function getUsageStats(rawStats) {
     };

     // Get the stats provided by telemetry collectors.
-    const pluginsNested = omit(currUsage, [
-      'index',
-      'dashboard',
-      'visualization',
-      'search',
-      'index_pattern',
-      'graph_workspace',
-      'timelion_sheet',
-    ]);
+    const {
+      index,
+      dashboard,
+      visualization,
+      search,
+      index_pattern,
+      graph_workspace,
+      timelion_sheet,
+      xpack,
+      ...pluginsTop
+    } = currUsage;

     // Stats filtered by telemetry collectors need to be flattened since they're pulled in a generic way.
     // A plugin might not provide flat stats if it implements formatForBulkUpload in its collector.
     // e.g: we want `xpack.reporting` to just be `reporting`
-    const top = omit(pluginsNested, 'xpack');
-    const plugins = { ...top, ...pluginsNested.xpack };
+    const plugins = { ...pluginsTop, ...xpack };

     return {
       ...accum,
@@ -74,10 +123,13 @@ export function getUsageStats(rawStats) {
         plugins,
       },
     };
-  }, {});
+  }, {} as ClustersUsageStats);
 }

-export function combineStats(highLevelStats, usageStats = {}) {
+export function combineStats(
+  highLevelStats: ClustersHighLevelStats,
+  usageStats: ClustersUsageStats = {}
+) {
   return Object.keys(highLevelStats).reduce((accum, currClusterUuid) => {
     return {
       ...accum,
@@ -86,7 +138,7 @@ export function combineStats(highLevelStats, usageStats = {}) {
         ...usageStats[currClusterUuid],
       },
     };
-  }, {});
+  }, {} as KibanaStats);
 }

 /**
@@ -96,7 +148,10 @@ export function combineStats(highLevelStats, usageStats = {}) {
  * @param {date} [start] The start time from which to get the telemetry data
  * @param {date} [end] The end time from which to get the telemetry data
  */
-export function ensureTimeSpan(start, end) {
+export function ensureTimeSpan(
+  start?: StatsCollectionConfig['start'],
+  end?: StatsCollectionConfig['end']
+) {
   // We only care if we have a start date, because that's the limit that might make us lose the document
   if (start) {
     const duration = moment.duration(TELEMETRY_COLLECTION_INTERVAL, 'milliseconds');
@@ -117,9 +172,15 @@ export function ensureTimeSpan(start, end) {
  * Monkey-patch the modules from get_high_level_stats and add in the
  * specialized usage data that comes with kibana stats (kibana_stats.usage).
  */
-export async function getKibanaStats(server, callCluster, clusterUuids, start, end) {
+export async function getKibanaStats(
+  server: StatsCollectionConfig['server'],
+  callCluster: StatsCollectionConfig['callCluster'],
+  clusterUuids: string[],
+  start: StatsCollectionConfig['start'],
+  end: StatsCollectionConfig['end']
+) {
   const { start: safeStart, end: safeEnd } = ensureTimeSpan(start, end);
-  const rawStats = await fetchHighLevelStats(
+  const rawStats = await fetchHighLevelStats<KibanaUsageStats>(
     server,
     callCluster,
     clusterUuids,
diff --git a/x-pack/legacy/plugins/monitoring/server/telemetry_collection/register_monitoring_collection.ts b/x-pack/legacy/plugins/monitoring/server/telemetry_collection/register_monitoring_collection.ts
index 49a925d1dad0b..f0fda5229cb5c 100644
--- a/x-pack/legacy/plugins/monitoring/server/telemetry_collection/register_monitoring_collection.ts
+++ b/x-pack/legacy/plugins/monitoring/server/telemetry_collection/register_monitoring_collection.ts
@@ -5,7 +5,6 @@
  */

 import { telemetryCollectionManager } from '../../../../../../src/legacy/core_plugins/telemetry/server';
-// @ts-ignore
 import { getAllStats } from './get_all_stats';
 import { getClusterUuids } from './get_cluster_uuids';