Skip to content

Commit

Permalink
[Metrics UI] Replace date_histogram with date_range aggregation in threshold alert (#100004) (#100350)
Browse files Browse the repository at this point in the history

* [Metrics UI] Replace date_histogram with date_range aggregation in threshold alert

* Remove console.log

* Fix rate aggregation and offset
  • Loading branch information
Zacqary authored May 19, 2021
1 parent 005e433 commit 7189a07
Show file tree
Hide file tree
Showing 3 changed files with 40 additions and 19 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@ interface Aggregation {
buckets: Array<{
aggregatedValue: { value: number; values?: Array<{ key: number; value: number }> };
doc_count: number;
to_as_string: string;
key_as_string: string;
}>;
};
Expand Down Expand Up @@ -60,6 +61,7 @@ export const evaluateAlert = <Params extends EvaluatedAlertParams = EvaluatedAle
filterQuery,
timeframe
);

const { threshold, warningThreshold, comparator, warningComparator } = criterion;
const pointsEvaluator = (points: any[] | typeof NaN | null, t?: number[], c?: Comparator) => {
if (!t || !c) return [false];
Expand Down Expand Up @@ -179,18 +181,21 @@ const getValuesFromAggregations = (
const { buckets } = aggregations.aggregatedIntervals;
if (!buckets.length) return null; // No Data state
if (aggType === Aggregators.COUNT) {
return buckets.map((bucket) => ({ key: bucket.key_as_string, value: bucket.doc_count }));
return buckets.map((bucket) => ({
key: bucket.to_as_string,
value: bucket.doc_count,
}));
}
if (aggType === Aggregators.P95 || aggType === Aggregators.P99) {
return buckets.map((bucket) => {
const values = bucket.aggregatedValue?.values || [];
const firstValue = first(values);
if (!firstValue) return null;
return { key: bucket.key_as_string, value: firstValue.value };
return { key: bucket.to_as_string, value: firstValue.value };
});
}
return buckets.map((bucket) => ({
key: bucket.key_as_string,
key: bucket.key_as_string ?? bucket.to_as_string,
value: bucket.aggregatedValue?.value ?? null,
}));
} catch (e) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -37,18 +37,20 @@ export const getElasticsearchMetricQuery = (
}
const interval = `${timeSize}${timeUnit}`;
const intervalAsSeconds = getIntervalInSeconds(interval);
const intervalAsMS = intervalAsSeconds * 1000;

const to = roundTimestamp(timeframe ? timeframe.end : Date.now(), timeUnit);
// We need enough data for MINIMUM_BUCKETS worth of buckets. The
// interval is converted to milliseconds (intervalAsMS) above.
const minimumFrom = to - intervalAsSeconds * 1000 * MINIMUM_BUCKETS;
const minimumFrom = to - intervalAsMS * MINIMUM_BUCKETS;

const from = roundTimestamp(
timeframe && timeframe.start <= minimumFrom ? timeframe.start : minimumFrom,
timeUnit
);

const offset = calculateDateHistogramOffset({ from, to, interval, field: timefield });
const offsetInMS = parseInt(offset, 10) * 1000;

const aggregations =
aggType === Aggregators.COUNT
Expand All @@ -65,20 +67,34 @@ export const getElasticsearchMetricQuery = (
},
};

const baseAggs = {
aggregatedIntervals: {
date_histogram: {
field: timefield,
fixed_interval: interval,
offset,
extended_bounds: {
min: from,
max: to,
},
},
aggregations,
},
};
const baseAggs =
aggType === Aggregators.RATE
? {
aggregatedIntervals: {
date_histogram: {
field: timefield,
fixed_interval: interval,
offset,
extended_bounds: {
min: from,
max: to,
},
},
aggregations,
},
}
: {
aggregatedIntervals: {
date_range: {
field: timefield,
ranges: Array.from(Array(Math.floor((to - from) / intervalAsMS)), (_, i) => ({
from: from + intervalAsMS * i + offsetInMS,
to: from + intervalAsMS * (i + 1) + offsetInMS,
})),
},
aggregations,
},
};

const aggs = groupBy
? {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ const bucketsA = [
{
doc_count: 3,
aggregatedValue: { value: 1.0, values: [{ key: 95.0, value: 1.0 }] },
key_as_string: new Date(1577858400000).toISOString(),
to_as_string: new Date(1577858400000).toISOString(),
},
];

Expand Down

0 comments on commit 7189a07

Please sign in to comment.