Skip to content

Commit

Permalink
Add tests for search top anomaly results API (#325)
Browse files Browse the repository at this point in the history
Signed-off-by: Tyler Ohlsen <[email protected]>
  • Loading branch information
ohltyler authored and ylwu-amzn committed Jan 12, 2022
1 parent 042a36a commit 323dea1
Show file tree
Hide file tree
Showing 13 changed files with 1,481 additions and 25 deletions.
6 changes: 1 addition & 5 deletions build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -522,11 +522,7 @@ List<String> jacocoExclusions = [
// TODO: fix unstable code coverage caused by null NodeClient issue
// https://github.com/opensearch-project/anomaly-detection/issues/241
'org.opensearch.ad.task.ADBatchTaskRunner',

// TODO: add tests for multi-category API
'org.opensearch.ad.transport.SearchTopAnomalyResult*',
'org.opensearch.ad.rest.RestSearchTopAnomalyResultAction',
'org.opensearch.ad.model.AnomalyResultBucket',

//TODO: custom result index caused coverage drop
'org.opensearch.ad.indices.AnomalyDetectionIndices',
'org.opensearch.ad.transport.handler.AnomalyResultBulkIndexHandler'
Expand Down
32 changes: 32 additions & 0 deletions src/main/java/org/opensearch/ad/model/AnomalyResultBucket.java
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,8 @@
import java.io.IOException;
import java.util.Map;

import org.apache.commons.lang.builder.ToStringBuilder;
import org.opensearch.ad.annotation.Generated;
import org.opensearch.common.io.stream.StreamInput;
import org.opensearch.common.io.stream.StreamOutput;
import org.opensearch.common.io.stream.Writeable;
Expand All @@ -22,6 +24,8 @@
import org.opensearch.search.aggregations.bucket.composite.CompositeAggregation.Bucket;
import org.opensearch.search.aggregations.metrics.InternalMax;

import com.google.common.base.Objects;

/**
* Represents a single bucket when retrieving top anomaly results for HC detectors
*/
Expand Down Expand Up @@ -72,6 +76,34 @@ public void writeTo(StreamOutput out) throws IOException {
out.writeDouble(maxAnomalyGrade);
}

/**
 * Equality is based on the composite key, doc count, and max anomaly grade.
 * Consistent with {@link #hashCode()}, which hashes the same three fields.
 */
@Generated
// NOTE(review): @Generated added for consistency — hashCode() and toString() in this
// class carry it (excluding them from coverage), but equals() was missing it.
@Override
public boolean equals(Object o) {
    if (this == o)
        return true;
    if (o == null || getClass() != o.getClass())
        return false;
    AnomalyResultBucket that = (AnomalyResultBucket) o;
    return Objects.equal(getKey(), that.getKey())
        && Objects.equal(getDocCount(), that.getDocCount())
        && Objects.equal(getMaxAnomalyGrade(), that.getMaxAnomalyGrade());
}

/**
 * Hashes the same fields compared in {@code equals}, honoring the
 * equals/hashCode contract. Accesses fields directly, as toString() does.
 */
@Generated
@Override
public int hashCode() {
    return Objects.hashCode(key, docCount, maxAnomalyGrade);
}

/**
 * Human-readable representation of the bucket: key, doc count, and max grade.
 */
@Generated
@Override
public String toString() {
    ToStringBuilder builder = new ToStringBuilder(this);
    builder.append("key", key);
    builder.append("docCount", docCount);
    builder.append("maxAnomalyGrade", maxAnomalyGrade);
    return builder.toString();
}

public Map<String, Object> getKey() {
return key;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,7 @@ public class SearchTopAnomalyResultRequest extends ActionRequest {
private Instant endTime;

public SearchTopAnomalyResultRequest(StreamInput in) throws IOException {
super(in);
detectorId = in.readOptionalString();
taskId = in.readOptionalString();
historical = in.readBoolean();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -182,12 +182,12 @@ public class SearchTopAnomalyResultTransportAction extends
private static final String defaultIndex = ALL_AD_RESULTS_INDEX_PATTERN;
private static final String COUNT_FIELD = "_count";
private static final String BUCKET_SORT_FIELD = "bucket_sort";
private static final String MULTI_BUCKETS_FIELD = "multi_buckets";
public static final String MULTI_BUCKETS_FIELD = "multi_buckets";
private static final Logger logger = LogManager.getLogger(SearchTopAnomalyResultTransportAction.class);
private final Client client;
private Clock clock;

private enum OrderType {
public enum OrderType {
SEVERITY("severity"),
OCCURRENCE("occurrence");

Expand Down
20 changes: 20 additions & 0 deletions src/test/java/org/opensearch/ad/AnomalyDetectorRestTestCase.java
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
import java.io.InputStream;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Map;

import org.apache.http.HttpHeaders;
Expand Down Expand Up @@ -367,6 +368,25 @@ public Response getSearchDetectorMatch(String name) throws IOException {
);
}

/**
 * Calls the search top anomaly results API for the given detector.
 *
 * @param detectorId       id of the detector to query
 * @param historical       whether to query historical (task-based) results
 * @param bodyAsJsonString JSON request body for the search
 * @param client           REST client to issue the request with
 * @return the raw REST response
 * @throws IOException if the request fails
 */
public Response searchTopAnomalyResults(String detectorId, boolean historical, String bodyAsJsonString, RestClient client)
    throws IOException {
    // POST <detectors-base>/<detector-id>/<results>/<top-anomalies>?historical=<bool>
    String endpoint = String
        .join("/", TestHelpers.AD_BASE_DETECTORS_URI, detectorId, RestHandlerUtils.RESULTS, RestHandlerUtils.TOP_ANOMALIES);
    Map<String, String> queryParams = Collections.singletonMap("historical", String.valueOf(historical));
    return TestHelpers.makeRequest(client, "POST", endpoint, queryParams, TestHelpers.toHttpEntity(bodyAsJsonString), new ArrayList<>());
}

public Response createUser(String name, String password, ArrayList<String> backendRoles) throws IOException {
JsonArray backendRolesString = new JsonArray();
for (int i = 0; i < backendRoles.size(); i++) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -238,4 +238,18 @@ public ADTask startHistoricalAnalysis(Instant startTime, Instant endTime) throws
AnomalyDetectorJobResponse response = client().execute(AnomalyDetectorJobAction.INSTANCE, request).actionGet(10000);
return getADTask(response.getId());
}

/**
 * Starts a historical analysis for the given detector over [startTime, endTime]
 * and returns the resulting AD task.
 *
 * @param detectorId id of the detector to run
 * @param startTime  start of the detection date range
 * @param endTime    end of the detection date range
 * @return the AD task created for the historical run
 * @throws IOException if task retrieval fails
 */
public ADTask startHistoricalAnalysis(String detectorId, Instant startTime, Instant endTime) throws IOException {
    AnomalyDetectorJobRequest jobRequest = new AnomalyDetectorJobRequest(
        detectorId,
        new DetectionDateRange(startTime, endTime),
        true,
        UNASSIGNED_SEQ_NO,
        UNASSIGNED_PRIMARY_TERM,
        START_JOB
    );
    // Wait up to 10s for the job to start, then resolve the created task by id.
    AnomalyDetectorJobResponse jobResponse = client().execute(AnomalyDetectorJobAction.INSTANCE, jobRequest).actionGet(10000);
    return getADTask(jobResponse.getId());
}
}
126 changes: 108 additions & 18 deletions src/test/java/org/opensearch/ad/TestHelpers.java
Original file line number Diff line number Diff line change
Expand Up @@ -16,14 +16,7 @@
import static org.opensearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
import static org.opensearch.index.query.AbstractQueryBuilder.parseInnerQueryBuilder;
import static org.opensearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO;
import static org.opensearch.test.OpenSearchTestCase.buildNewFakeTransportAddress;
import static org.opensearch.test.OpenSearchTestCase.randomAlphaOfLength;
import static org.opensearch.test.OpenSearchTestCase.randomBoolean;
import static org.opensearch.test.OpenSearchTestCase.randomDouble;
import static org.opensearch.test.OpenSearchTestCase.randomDoubleBetween;
import static org.opensearch.test.OpenSearchTestCase.randomInt;
import static org.opensearch.test.OpenSearchTestCase.randomIntBetween;
import static org.opensearch.test.OpenSearchTestCase.randomLong;
import static org.opensearch.test.OpenSearchTestCase.*;
import static org.powermock.api.mockito.PowerMockito.mock;
import static org.powermock.api.mockito.PowerMockito.when;

Expand Down Expand Up @@ -66,6 +59,7 @@
import org.opensearch.ad.constant.CommonName;
import org.opensearch.ad.constant.CommonValue;
import org.opensearch.ad.feature.Features;
import org.opensearch.ad.indices.AnomalyDetectionIndices;
import org.opensearch.ad.ml.ThresholdingResult;
import org.opensearch.ad.mock.model.MockSimpleLog;
import org.opensearch.ad.model.ADTask;
Expand All @@ -75,6 +69,7 @@
import org.opensearch.ad.model.AnomalyDetectorExecutionInput;
import org.opensearch.ad.model.AnomalyDetectorJob;
import org.opensearch.ad.model.AnomalyResult;
import org.opensearch.ad.model.AnomalyResultBucket;
import org.opensearch.ad.model.DataByFeatureId;
import org.opensearch.ad.model.DetectionDateRange;
import org.opensearch.ad.model.DetectorInternalState;
Expand Down Expand Up @@ -371,13 +366,37 @@ public static DetectionDateRange randomDetectionDateRange() {

/**
 * Creates a random detector with the given id and category fields, using a
 * random time field and a single random lowercase index name.
 *
 * @param detectorId     detector id to assign
 * @param categoryFields high-cardinality category fields (may be empty)
 * @return a randomized detector with the supplied category fields
 * @throws IOException if building the random detector fails
 */
public static AnomalyDetector randomAnomalyDetectorUsingCategoryFields(String detectorId, List<String> categoryFields)
    throws IOException {
    String timeField = randomAlphaOfLength(5);
    List<String> indices = ImmutableList.of(randomAlphaOfLength(10).toLowerCase());
    return randomAnomalyDetectorUsingCategoryFields(detectorId, timeField, indices, categoryFields);
}

/**
 * Creates a random detector with the given id, time field, source indices, and
 * category fields; delegates with a null custom result index.
 *
 * @param detectorId     detector id to assign
 * @param timeField      name of the timestamp field
 * @param indices        source index names
 * @param categoryFields high-cardinality category fields (may be empty)
 * @return a randomized detector using the supplied identifiers
 * @throws IOException if building the random detector fails
 */
public static AnomalyDetector randomAnomalyDetectorUsingCategoryFields(
    String detectorId,
    String timeField,
    List<String> indices,
    List<String> categoryFields
) throws IOException {
    // null -> no custom result index; results go to the default AD result index
    return randomAnomalyDetectorUsingCategoryFields(detectorId, timeField, indices, categoryFields, null);
}

public static AnomalyDetector randomAnomalyDetectorUsingCategoryFields(
String detectorId,
String timeField,
List<String> indices,
List<String> categoryFields,
String resultIndex
) throws IOException {
return new AnomalyDetector(
detectorId,
randomLong(),
randomAlphaOfLength(20),
randomAlphaOfLength(30),
randomAlphaOfLength(5),
ImmutableList.of(randomAlphaOfLength(10).toLowerCase()),
timeField,
indices,
ImmutableList.of(randomFeature(true)),
randomQuery(),
randomIntervalTimeConfiguration(),
Expand All @@ -388,7 +407,7 @@ public static AnomalyDetector randomAnomalyDetectorUsingCategoryFields(String de
Instant.now(),
categoryFields,
randomUser(),
null
resultIndex
);
}

Expand Down Expand Up @@ -463,6 +482,12 @@ public static AnomalyDetector randomAnomalyDetectorWithInterval(TimeConfiguratio
);
}

/**
 * Creates a random result bucket: a composite key with one random
 * field -> value pair, a random doc count, and a random max anomaly grade.
 */
public static AnomalyResultBucket randomAnomalyResultBucket() {
    Map<String, Object> compositeKey = new HashMap<>();
    compositeKey.put(randomAlphaOfLength(5), randomAlphaOfLength(5));
    return new AnomalyResultBucket(compositeKey, randomInt(), randomDouble());
}

public static class AnomalyDetectorBuilder {
private String detectorId = randomAlphaOfLength(10);
private Long version = randomLong();
Expand Down Expand Up @@ -842,6 +867,31 @@ public static ResultWriteRequest randomResultWriteRequest(String detectorId, dou
}

/**
 * Creates a random HC anomaly result with the given score, grade, and error;
 * delegates with random detector/task ids and current timestamps.
 */
public static AnomalyResult randomHCADAnomalyDetectResult(double score, double grade, String error) {
    return randomHCADAnomalyDetectResult(null, null, score, grade, error, null, null);
}

/**
 * Creates a random HC anomaly result for the given detector/task and time range;
 * delegates with null entity attributes (a random single-attribute entity is used).
 *
 * @param detectorId           detector id, or null to use a random one
 * @param taskId               task id for historical results, or null
 * @param score                anomaly score
 * @param grade                anomaly grade
 * @param error                error message, or null
 * @param startTimeEpochMillis data/execution start time in epoch millis, or null for now
 * @param endTimeEpochMillis   data/execution end time in epoch millis, or null for now
 */
public static AnomalyResult randomHCADAnomalyDetectResult(
    String detectorId,
    String taskId,
    double score,
    double grade,
    String error,
    Long startTimeEpochMillis,
    Long endTimeEpochMillis
) {
    return randomHCADAnomalyDetectResult(detectorId, taskId, null, score, grade, error, startTimeEpochMillis, endTimeEpochMillis);
}

public static AnomalyResult randomHCADAnomalyDetectResult(
String detectorId,
String taskId,
Map<String, Object> entityAttrs,
double score,
double grade,
String error,
Long startTimeEpochMillis,
Long endTimeEpochMillis
) {
List<DataByFeatureId> relavantAttribution = new ArrayList<DataByFeatureId>();
relavantAttribution.add(new DataByFeatureId(randomAlphaOfLength(5), randomDoubleBetween(0, 1.0, true)));
relavantAttribution.add(new DataByFeatureId(randomAlphaOfLength(5), randomDoubleBetween(0, 1.0, true)));
Expand All @@ -857,18 +907,20 @@ public static AnomalyResult randomHCADAnomalyDetectResult(double score, double g
expectedValuesList.add(new ExpectedValueList(randomDoubleBetween(0, 1.0, true), expectedValues));

return new AnomalyResult(
randomAlphaOfLength(5),
null,
detectorId == null ? randomAlphaOfLength(5) : detectorId,
taskId,
score,
grade,
randomDouble(),
ImmutableList.of(randomFeatureData(), randomFeatureData()),
Instant.now().truncatedTo(ChronoUnit.SECONDS),
Instant.now().truncatedTo(ChronoUnit.SECONDS),
Instant.now().truncatedTo(ChronoUnit.SECONDS),
Instant.now().truncatedTo(ChronoUnit.SECONDS),
startTimeEpochMillis == null ? Instant.now().truncatedTo(ChronoUnit.SECONDS) : Instant.ofEpochMilli(startTimeEpochMillis),
endTimeEpochMillis == null ? Instant.now().truncatedTo(ChronoUnit.SECONDS) : Instant.ofEpochMilli(endTimeEpochMillis),
startTimeEpochMillis == null ? Instant.now().truncatedTo(ChronoUnit.SECONDS) : Instant.ofEpochMilli(startTimeEpochMillis),
endTimeEpochMillis == null ? Instant.now().truncatedTo(ChronoUnit.SECONDS) : Instant.ofEpochMilli(endTimeEpochMillis),
error,
Entity.createSingleAttributeEntity(randomAlphaOfLength(5), randomAlphaOfLength(5)),
entityAttrs == null
? Entity.createSingleAttributeEntity(randomAlphaOfLength(5), randomAlphaOfLength(5))
: Entity.createEntityByReordering(entityAttrs),
randomUser(),
CommonValue.NO_SCHEMA_VERSION,
null,
Expand Down Expand Up @@ -1001,6 +1053,44 @@ public static void createIndex(RestClient client, String indexName, HttpEntity d
);
}

/**
 * Creates an index whose mappings contain one property per category field plus
 * a date-typed "timestamp" field, then applies those mappings to it.
 *
 * @param client                 REST client to use
 * @param indexName              name of the index to create
 * @param categoryFieldsAndTypes map of category field name -> mapping type
 * @throws IOException if index creation or mapping fails
 */
public static void createIndexWithHCADFields(RestClient client, String indexName, Map<String, String> categoryFieldsAndTypes)
    throws IOException {
    StringBuilder mappings = new StringBuilder("{\"properties\":{");
    for (Map.Entry<String, String> field : categoryFieldsAndTypes.entrySet()) {
        // trailing comma is valid because the "timestamp" property always follows
        mappings.append("\"").append(field.getKey()).append("\":{\"type\":\"").append(field.getValue()).append("\"},");
    }
    mappings.append("\"timestamp\":{\"type\":\"date\"}}}");
    createEmptyIndex(client, indexName);
    createIndexMapping(client, indexName, TestHelpers.toHttpEntity(mappings.toString()));
}

/**
 * Creates the AD result index (by its alias name) and applies the plugin's
 * anomaly result mappings to it.
 */
public static void createEmptyAnomalyResultIndex(RestClient client) throws IOException {
    String resultIndex = CommonName.ANOMALY_RESULT_INDEX_ALIAS;
    createEmptyIndex(client, resultIndex);
    createIndexMapping(client, resultIndex, toHttpEntity(AnomalyDetectionIndices.getAnomalyResultMappings()));
}

/**
 * Creates an empty index with the given name (default settings, no mappings).
 */
public static void createEmptyIndex(RestClient client, String indexName) throws IOException {
    TestHelpers.makeRequest(client, "PUT", "/" + indexName, ImmutableMap.of(), "", null);
}

/**
 * Applies the given mappings to an existing index via the _mapping endpoint.
 */
public static void createIndexMapping(RestClient client, String indexName, HttpEntity mappings) throws IOException {
    TestHelpers.makeRequest(client, "POST", "/" + indexName + "/_mapping", ImmutableMap.of(), mappings, null);
}

/**
 * Indexes a single document (with a random id) into the given index.
 * Uses refresh=true so the document is immediately searchable.
 */
public static void ingestDataToIndex(RestClient client, String indexName, HttpEntity data) throws IOException {
    String endpoint = "/" + indexName + "/_doc/" + randomAlphaOfLength(5) + "?refresh=true";
    TestHelpers.makeRequest(client, "POST", endpoint, ImmutableMap.of(), data, null);
}

public static GetResponse createGetResponse(ToXContentObject o, String id, String indexName) throws IOException {
XContentBuilder content = o.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS);
return new GetResponse(
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
/*
* Copyright OpenSearch Contributors
* SPDX-License-Identifier: Apache-2.0
*/

package org.opensearch.ad.model;

import static org.opensearch.ad.model.AnomalyResultBucket.*;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import org.opensearch.ad.AbstractADTest;
import org.opensearch.ad.TestHelpers;
import org.opensearch.common.io.stream.BytesStreamOutput;
import org.opensearch.common.io.stream.StreamInput;
import org.opensearch.common.xcontent.ToXContent;
import org.opensearch.common.xcontent.XContentBuilder;
import org.opensearch.common.xcontent.XContentFactory;
import org.opensearch.common.xcontent.XContentParser;

/**
 * Unit tests for {@link AnomalyResultBucket} serialization and XContent output.
 */
public class AnomalyResultBucketTests extends AbstractADTest {

    /**
     * Round-trips a random bucket through stream (transport) serialization and
     * verifies the deserialized copy equals the original.
     */
    public void testSerializeAnomalyResultBucket() throws IOException {
        AnomalyResultBucket anomalyResultBucket = TestHelpers.randomAnomalyResultBucket();
        BytesStreamOutput output = new BytesStreamOutput();
        anomalyResultBucket.writeTo(output);
        StreamInput input = output.bytes().streamInput();
        AnomalyResultBucket parsedAnomalyResultBucket = new AnomalyResultBucket(input);
        // assertEquals reports both values on failure, unlike assertTrue(a.equals(b))
        assertEquals(anomalyResultBucket, parsedAnomalyResultBucket);
    }

    /**
     * Verifies toXContent emits the key, doc count, and max anomaly grade fields.
     */
    @SuppressWarnings("unchecked")
    public void testToXContent() throws IOException {
        Map<String, Object> key = new HashMap<String, Object>() {
            {
                put("test-field-1", "test-value-1");
            }
        };
        int docCount = 5;
        double maxAnomalyGrade = 0.5;
        AnomalyResultBucket testBucket = new AnomalyResultBucket(key, docCount, maxAnomalyGrade);
        XContentBuilder builder = XContentFactory.jsonBuilder();
        testBucket.toXContent(builder, ToXContent.EMPTY_PARAMS);
        XContentParser parser = createParser(builder);
        Map<String, Object> parsedMap = parser.map();

        assertEquals(testBucket.getKey().get("test-field-1"), ((Map<String, Object>) parsedMap.get(KEY_FIELD)).get("test-field-1"));
        assertEquals(testBucket.getDocCount(), parsedMap.get(DOC_COUNT_FIELD));
        assertEquals(maxAnomalyGrade, (Double) parsedMap.get(MAX_ANOMALY_GRADE_FIELD), 0.000001d);
    }
}
Loading

0 comments on commit 323dea1

Please sign in to comment.