[Metrics UI] Add integration tests for Metric Threshold Rule and refactor to fire correctly #109971

Merged
9 commits merged on Sep 2, 2021
@@ -5,7 +5,7 @@
* 2.0.
*/

import { mapValues, first, last, isNaN } from 'lodash';
import { mapValues, first, last, isNaN, isNumber, isObject, has } from 'lodash';
import moment from 'moment';
import { ElasticsearchClient } from 'kibana/server';
import {
@@ -23,7 +23,11 @@ import { UNGROUPED_FACTORY_KEY } from '../../common/utils';
import { MetricExpressionParams, Comparator, Aggregators } from '../types';
import { getElasticsearchMetricQuery } from './metric_query';

interface Aggregation {
interface AggregationWithoutIntervals {
aggregatedValue: { value: number; values?: Array<{ key: number; value: number }> };
}

interface AggregationWithIntervals {
aggregatedIntervals: {
buckets: Array<{
aggregatedValue: { value: number; values?: Array<{ key: number; value: number }> };
@@ -35,6 +39,14 @@ interface Aggregation {
};
}

type Aggregation = AggregationWithIntervals | AggregationWithoutIntervals;

function isAggregationWithIntervals(
subject: Aggregation | undefined
): subject is AggregationWithIntervals {
return isObject(subject) && has(subject, 'aggregatedIntervals');
}

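The new Aggregation union distinguishes the two response shapes the rule can now receive: AggregationWithIntervals (date-bucketed results under aggregatedIntervals.buckets) and AggregationWithoutIntervals (a single flat aggregatedValue). The isAggregationWithIntervals type guard lets the evaluation code branch safely between them. A minimal sketch of how the guard narrows the union, assuming the types and guard defined above (the sample value is illustrative, not taken from the PR):

// Illustrative only: the guard narrows Aggregation to the shape that carries buckets.
const sample: Aggregation = { aggregatedValue: { value: 0.5 } };
if (isAggregationWithIntervals(sample)) {
  // Narrowed to AggregationWithIntervals: per-interval buckets are available.
  sample.aggregatedIntervals.buckets.forEach((b) => b.aggregatedValue.value);
} else {
  // Narrowed to AggregationWithoutIntervals: only a single flat value.
  const flatValue = sample.aggregatedValue.value;
}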
interface CompositeAggregationsResponse {
groupings: {
buckets: Aggregation[];
@@ -52,7 +64,7 @@ export const evaluateAlert = <Params extends EvaluatedAlertParams = EvaluatedAle
esClient: ElasticsearchClient,
params: Params,
config: InfraSource['configuration'],
timeframe?: { start: number; end: number }
timeframe?: { start?: number; end: number }
) => {
const { criteria, groupBy, filterQuery, shouldDropPartialBuckets } = params;
return Promise.all(
@@ -105,7 +117,7 @@ const getMetric: (
timefield: string,
groupBy: string | undefined | string[],
filterQuery: string | undefined,
timeframe?: { start: number; end: number },
timeframe?: { start?: number; end: number },
shouldDropPartialBuckets?: boolean
) => Promise<Record<string, number[]>> = async function (
esClient,
@@ -124,18 +136,15 @@
const intervalAsSeconds = getIntervalInSeconds(interval);
const intervalAsMS = intervalAsSeconds * 1000;

const to = moment(timeframe ? timeframe.end : Date.now())
.add(1, timeUnit)
.startOf(timeUnit)
.valueOf();
const to = moment(timeframe ? timeframe.end : Date.now()).valueOf();

// Rate aggregations need 5 buckets worth of data
const minimumBuckets = aggType === Aggregators.RATE ? 5 : 1;

const minimumFrom = to - intervalAsMS * minimumBuckets;

const from = roundTimestamp(
timeframe && timeframe.start <= minimumFrom ? timeframe.start : minimumFrom,
timeframe && timeframe.start && timeframe.start <= minimumFrom ? timeframe.start : minimumFrom,
timeUnit
);

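With timeframe.start now optional, the window is derived as follows: to is the supplied end (or Date.now(), no longer rounded to the start of the time unit), rate aggregations require at least five buckets of history while everything else needs one, and from uses the caller's start only when it reaches back at least that far, otherwise the computed minimumFrom. A standalone sketch of the same arithmetic with illustrative values, roundTimestamp omitted:

// Illustrative sketch of the evaluation-window math above (values are examples, not from the PR).
const intervalAsMS = 60 * 1000;                            // 1m interval
const to = 1609462800000;                                  // timeframe.end, or Date.now()
const minimumBuckets = 5;                                  // Aggregators.RATE needs 5 buckets; others need 1
const minimumFrom = to - intervalAsMS * minimumBuckets;    // 1609462500000
const requestedStart: number | undefined = 1609459200000;  // timeframe.start (may now be undefined)
const from =
  requestedStart && requestedStart <= minimumFrom ? requestedStart : minimumFrom; // 1609459200000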
@@ -172,16 +181,26 @@ const getMetric: (
searchBody,
bucketSelector,
afterKeyHandler
)) as Array<Aggregation & { key: Record<string, string> }>;
return compositeBuckets.reduce(
)) as Array<Aggregation & { key: Record<string, string>; doc_count: number }>;
const groupedResults = compositeBuckets.reduce(
(result, bucket) => ({
...result,
[Object.values(bucket.key)
.map((value) => value)
.join(', ')]: getValuesFromAggregations(bucket, aggType, dropPartialBucketsOptions),
.join(', ')]: getValuesFromAggregations(
bucket,
aggType,
dropPartialBucketsOptions,
{
start: from,
end: to,
},
bucket.doc_count
),
}),
{}
);
return groupedResults;
}
const { body: result } = await esClient.search({
body: searchBody,
@@ -192,7 +211,9 @@ const getMetric: (
[UNGROUPED_FACTORY_KEY]: getValuesFromAggregations(
(result.aggregations! as unknown) as Aggregation,
aggType,
dropPartialBucketsOptions
dropPartialBucketsOptions,
{ start: from, end: to },
isNumber(result.hits.total) ? result.hits.total : result.hits.total.value
),
};
} catch (e) {
@@ -221,7 +242,7 @@ interface DropPartialBucketOptions {
const dropPartialBuckets = ({ from, to, bucketSizeInMillis }: DropPartialBucketOptions) => (
row: {
key: string;
value: number;
value: number | null;
} | null
) => {
if (row == null) return null;
@@ -230,20 +251,45 @@ const dropPartialBuckets = ({ from, to, bucketSizeInMillis }: DropPartialBucketO
};

const getValuesFromAggregations = (
aggregations: Aggregation,
aggregations: Aggregation | undefined,
aggType: MetricExpressionParams['aggType'],
dropPartialBucketsOptions: DropPartialBucketOptions | null
dropPartialBucketsOptions: DropPartialBucketOptions | null,
timeFrame: { start: number; end: number },
docCount?: number
) => {
try {
const { buckets } = aggregations.aggregatedIntervals;
let buckets;
if (aggType === Aggregators.COUNT) {
buckets = [
{
doc_count: docCount,
to_as_string: moment(timeFrame.end).toISOString(),
from_as_string: moment(timeFrame.start).toISOString(),
key_as_string: moment(timeFrame.start).toISOString(),
},
];
} else if (isAggregationWithIntervals(aggregations)) {
buckets = aggregations.aggregatedIntervals.buckets;
} else {
buckets = [
{
...aggregations,
doc_count: docCount,
to_as_string: moment(timeFrame.end).toISOString(),
from_as_string: moment(timeFrame.start).toISOString(),
key_as_string: moment(timeFrame.start).toISOString(),
},
];
}

if (!buckets.length) return null; // No Data state

let mappedBuckets;
let mappedBuckets: Array<{ key: string; value: number | null } | null>;

if (aggType === Aggregators.COUNT) {
mappedBuckets = buckets.map((bucket) => ({
key: bucket.from_as_string,
value: bucket.doc_count,
value: bucket.doc_count || null,
}));
} else if (aggType === Aggregators.P95 || aggType === Aggregators.P99) {
mappedBuckets = buckets.map((bucket) => {
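With the branches above, a COUNT rule no longer needs any aggregation in the response at all: the document count and the evaluation timeframe are enough to synthesize a single bucket, which the existing mapping logic then turns into one key/value row. A sketch of that intermediate bucket for a one-hour window with 42 matching documents (timestamps are illustrative):

// Illustrative: the synthetic bucket built for a COUNT rule when docCount = 42.
const syntheticCountBucket = {
  doc_count: 42,
  to_as_string: '2021-01-01T01:00:00.000Z',   // moment(timeFrame.end).toISOString()
  from_as_string: '2021-01-01T00:00:00.000Z', // moment(timeFrame.start).toISOString()
  key_as_string: '2021-01-01T00:00:00.000Z',  // moment(timeFrame.start).toISOString()
};
// The COUNT mapping above would then yield [{ key: '2021-01-01T00:00:00.000Z', value: 42 }].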
@@ -64,30 +64,4 @@ describe("The Metric Threshold Alert's getElasticsearchMetricQuery", () => {
);
});
});

describe('when passed a timeframe of 1 hour', () => {
const testTimeframe = {
start: moment().subtract(1, 'hour').valueOf(),
end: moment().valueOf(),
};
const searchBodyWithoutGroupBy = getElasticsearchMetricQuery(
expressionParams,
timefield,
testTimeframe
);
const searchBodyWithGroupBy = getElasticsearchMetricQuery(
expressionParams,
timefield,
testTimeframe,
groupBy
);
test("generates 1 hour's worth of buckets", () => {
// @ts-ignore
expect(searchBodyWithoutGroupBy.aggs.aggregatedIntervals.date_range.ranges.length).toBe(60);
expect(
// @ts-ignore
searchBodyWithGroupBy.aggs.groupings.aggs.aggregatedIntervals.date_range.ranges.length
).toBe(60);
});
});
});
@@ -7,7 +7,6 @@

import { networkTraffic } from '../../../../../common/inventory_models/shared/metrics/snapshot/network_traffic';
import { MetricExpressionParams, Aggregators } from '../types';
import { getIntervalInSeconds } from '../../../../utils/get_interval_in_seconds';
import { createPercentileAggregation } from './create_percentile_aggregation';
import { calculateDateHistogramOffset } from '../../../metrics/lib/calculate_date_histogram_offset';

@@ -34,13 +33,9 @@ export const getElasticsearchMetricQuery = (
throw new Error('Can only aggregate without a metric if using the document count aggregator');
}
const interval = `${timeSize}${timeUnit}`;
const intervalAsSeconds = getIntervalInSeconds(interval);
const intervalAsMS = intervalAsSeconds * 1000;
const to = timeframe.end;
const from = timeframe.start;

const deliveryDelay = 60 * 1000; // INFO: This allows us to account for any delay ES has in indexing the most recent data.

const aggregations =
aggType === Aggregators.COUNT
? {}
@@ -72,21 +67,7 @@
aggregations,
},
}
: {
aggregatedIntervals: {
date_range: {
field: timefield,
// Generate an array of buckets, starting at `from` and ending at `to`
// This is usually only necessary for alert previews or rate aggs. Most alert evaluations
// will generate only one bucket from this logic.
ranges: Array.from(Array(Math.floor((to - from) / intervalAsMS)), (_, i) => ({
from: from + intervalAsMS * i - deliveryDelay,
to: from + intervalAsMS * (i + 1) - deliveryDelay,
})),
},
aggregations,
},
};
: aggregations;

const aggs = groupBy
? {
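The removed block is the date_range wrapper that previously generated one synthetic bucket per interval, offset by a fixed 60-second delivery delay. With it gone, a non-grouped, non-rate query places the metric aggregation directly at the top level of the request, and the evaluation code reconstructs the single bucket from hits.total and the timeframe instead. A rough sketch of the resulting aggregation shape for an average metric (the field name is illustrative, not from the PR):

// Illustrative: the metric agg now sits at the top level of the search body.
const aggsAfter = {
  aggregatedValue: { avg: { field: 'system.cpu.user.pct' } },
};
// Previously the same agg was nested under generated date ranges, roughly:
// { aggregatedIntervals: { date_range: { field: timefield, ranges: [...] }, aggregations: aggsAfter } }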
@@ -515,7 +515,7 @@ services.scopedClusterClient.asCurrentUser.search.mockImplementation((params?: a
}
if (metric === 'test.metric.2') {
return elasticsearchClientMock.createSuccessTransportRequestPromise(
mocks.alternateMetricResponse(from)
mocks.alternateMetricResponse()
);
} else if (metric === 'test.metric.3') {
return elasticsearchClientMock.createSuccessTransportRequestPromise(
@@ -524,9 +524,7 @@
: mocks.emptyMetricResponse
);
}
return elasticsearchClientMock.createSuccessTransportRequestPromise(
mocks.basicMetricResponse(from)
);
return elasticsearchClientMock.createSuccessTransportRequestPromise(mocks.basicMetricResponse());
});
services.savedObjectsClient.get.mockImplementation(async (type: string, sourceId: string) => {
if (sourceId === 'alternate')
@@ -103,20 +103,26 @@ const bucketsC = (from: number) => [
},
];

export const basicMetricResponse = (from: number) => ({
aggregations: {
aggregatedIntervals: {
buckets: bucketsA(from),
export const basicMetricResponse = () => ({
hits: {
total: {
value: 1,
},
},
aggregations: {
aggregatedValue: { value: 1.0, values: [{ key: 95.0, value: 1.0 }] },
},
});

export const alternateMetricResponse = (from: number) => ({
aggregations: {
aggregatedIntervals: {
buckets: bucketsB(from),
export const alternateMetricResponse = () => ({
hits: {
total: {
value: 1,
},
},
aggregations: {
aggregatedValue: { value: 3, values: [{ key: 99.0, value: 3 }] },
},
});
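The executor mocks now mirror the flat response shape the rule reads after this refactor: a top-level aggregatedValue plus hits.total for the document count, instead of pre-built aggregatedIntervals buckets. Elasticsearch can report hits.total either as a bare number or as an object, which is why the executor guards with isNumber(result.hits.total). A standalone sketch of that normalization (the types here are assumptions, not copied from the PR):

// Illustrative: hits.total may be a number or an object of the form { value, relation }.
type HitsTotal = number | { value: number; relation?: 'eq' | 'gte' };
const normalizeTotal = (total: HitsTotal): number =>
  typeof total === 'number' ? total : total.value;
normalizeTotal(1);                            // 1
normalizeTotal({ value: 1, relation: 'eq' }); // 1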

export const emptyMetricResponse = {
10 changes: 10 additions & 0 deletions x-pack/test/api_integration/apis/metrics_ui/constants.ts
@@ -28,4 +28,14 @@ export const DATES = {
max: 1564083493080,
},
},
'alert-test-data': {
gauge: {
min: 1609459200000, // '2021-01-01T00:00:00Z'
max: 1609462800000, // '2021-01-01T01:00:00Z'
},
rate: {
min: 1609545600000, // '2021-01-02T00:00:00Z'
max: 1609545900000, // '2021-01-02T00:05:00Z'
},
},
};
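The epoch-millisecond bounds added above correspond to the ISO timestamps in the comments; a quick standalone check:

// Illustrative: converting the new gauge/rate bounds back to ISO strings.
new Date(1609459200000).toISOString(); // '2021-01-01T00:00:00.000Z'
new Date(1609462800000).toISOString(); // '2021-01-01T01:00:00.000Z'
new Date(1609545600000).toISOString(); // '2021-01-02T00:00:00.000Z'
new Date(1609545900000).toISOString(); // '2021-01-02T00:05:00.000Z'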
1 change: 1 addition & 0 deletions x-pack/test/api_integration/apis/metrics_ui/index.js
@@ -18,5 +18,6 @@ export default function ({ loadTestFile }) {
loadTestFile(require.resolve('./metrics_explorer'));
loadTestFile(require.resolve('./ip_to_hostname'));
loadTestFile(require.resolve('./http_source'));
loadTestFile(require.resolve('./metric_threshold_alert'));
});
}