diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/AlertingPlugin.kt b/alerting/src/main/kotlin/org/opensearch/alerting/AlertingPlugin.kt
index 7afe7c856..686bc94b0 100644
--- a/alerting/src/main/kotlin/org/opensearch/alerting/AlertingPlugin.kt
+++ b/alerting/src/main/kotlin/org/opensearch/alerting/AlertingPlugin.kt
@@ -41,6 +41,7 @@ import org.opensearch.alerting.core.settings.ScheduledJobSettings
 import org.opensearch.alerting.model.BucketLevelTrigger
 import org.opensearch.alerting.model.Monitor
 import org.opensearch.alerting.model.QueryLevelTrigger
+import org.opensearch.alerting.model.docLevelInput.DocLevelMonitorInput
 import org.opensearch.alerting.resthandler.RestAcknowledgeAlertAction
 import org.opensearch.alerting.resthandler.RestDeleteDestinationAction
 import org.opensearch.alerting.resthandler.RestDeleteEmailAccountAction
@@ -210,6 +211,7 @@ internal class AlertingPlugin : PainlessExtension, ActionPlugin, ScriptPlugin, R
         return listOf(
             Monitor.XCONTENT_REGISTRY,
             SearchInput.XCONTENT_REGISTRY,
+            DocLevelMonitorInput.XCONTENT_REGISTRY,
             QueryLevelTrigger.XCONTENT_REGISTRY,
             BucketLevelTrigger.XCONTENT_REGISTRY,
             ClusterMetricsInput.XCONTENT_REGISTRY
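Registering `DocLevelMonitorInput.XCONTENT_REGISTRY` above is what lets a stored monitor whose input is named `doc_level_input` be parsed back out of the config index. A minimal sketch of how such a registry can be assembled by hand, the way unit tests usually do it (only the relevant entries are shown; the variable name is illustrative, not part of this patch):

```kotlin
import org.opensearch.alerting.core.model.SearchInput
import org.opensearch.alerting.model.Monitor
import org.opensearch.alerting.model.docLevelInput.DocLevelMonitorInput
import org.opensearch.common.xcontent.NamedXContentRegistry

// Without the DocLevelMonitorInput entry, parsing a stored monitor that uses a
// "doc_level_input" block would fail with an unknown named-object error.
val docLevelAwareRegistry = NamedXContentRegistry(
    listOf(
        Monitor.XCONTENT_REGISTRY,
        SearchInput.XCONTENT_REGISTRY,
        DocLevelMonitorInput.XCONTENT_REGISTRY
    )
)
```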
diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/MonitorRunner.kt b/alerting/src/main/kotlin/org/opensearch/alerting/MonitorRunner.kt
index a109cce47..172020f72 100644
--- a/alerting/src/main/kotlin/org/opensearch/alerting/MonitorRunner.kt
+++ b/alerting/src/main/kotlin/org/opensearch/alerting/MonitorRunner.kt
@@ -13,18 +13,24 @@ import kotlinx.coroutines.launch
 import kotlinx.coroutines.withContext
 import org.apache.logging.log4j.LogManager
 import org.opensearch.action.bulk.BackoffPolicy
+import org.opensearch.action.index.IndexRequest
+import org.opensearch.action.search.SearchRequest
+import org.opensearch.action.search.SearchResponse
+import org.opensearch.action.support.WriteRequest
 import org.opensearch.alerting.alerts.AlertIndices
 import org.opensearch.alerting.alerts.moveAlerts
 import org.opensearch.alerting.core.JobRunner
 import org.opensearch.alerting.core.model.ScheduledJob
 import org.opensearch.alerting.elasticapi.InjectorContextElement
 import org.opensearch.alerting.elasticapi.retry
+import org.opensearch.alerting.elasticapi.string
 import org.opensearch.alerting.elasticapi.withClosableContext
 import org.opensearch.alerting.model.ActionRunResult
 import org.opensearch.alerting.model.Alert
 import org.opensearch.alerting.model.AlertingConfigAccessor
 import org.opensearch.alerting.model.BucketLevelTrigger
 import org.opensearch.alerting.model.BucketLevelTriggerRunResult
+import org.opensearch.alerting.model.Finding
 import org.opensearch.alerting.model.InputRunResults
 import org.opensearch.alerting.model.Monitor
 import org.opensearch.alerting.model.MonitorRunResult
@@ -39,6 +45,8 @@ import org.opensearch.alerting.model.action.AlertCategory
 import org.opensearch.alerting.model.action.PerAlertActionScope
 import org.opensearch.alerting.model.action.PerExecutionActionScope
 import org.opensearch.alerting.model.destination.DestinationContextFactory
+import org.opensearch.alerting.model.docLevelInput.DocLevelMonitorInput
+import org.opensearch.alerting.model.docLevelInput.DocLevelQuery
 import org.opensearch.alerting.script.BucketLevelTriggerExecutionContext
 import org.opensearch.alerting.script.QueryLevelTriggerExecutionContext
 import org.opensearch.alerting.script.TriggerExecutionContext
@@ -60,17 +68,32 @@ import org.opensearch.alerting.util.getCombinedTriggerRunResult
 import org.opensearch.alerting.util.isADMonitor
 import org.opensearch.alerting.util.isAllowed
 import org.opensearch.alerting.util.isBucketLevelMonitor
+import org.opensearch.alerting.util.isDocLevelMonitor
+import org.opensearch.alerting.util.updateMonitor
 import org.opensearch.client.Client
+import org.opensearch.cluster.routing.ShardRouting
 import org.opensearch.cluster.service.ClusterService
 import org.opensearch.common.Strings
 import org.opensearch.common.component.AbstractLifecycleComponent
 import org.opensearch.common.settings.Settings
 import org.opensearch.common.xcontent.NamedXContentRegistry
+import org.opensearch.common.xcontent.ToXContent
+import org.opensearch.common.xcontent.XContentBuilder
+import org.opensearch.common.xcontent.XContentType
+import org.opensearch.index.query.BoolQueryBuilder
+import org.opensearch.index.query.QueryBuilders
+import org.opensearch.rest.RestStatus
 import org.opensearch.script.Script
 import org.opensearch.script.ScriptService
 import org.opensearch.script.TemplateScript
+import org.opensearch.search.SearchHits
+import org.opensearch.search.builder.SearchSourceBuilder
+import org.opensearch.search.sort.SortOrder
 import org.opensearch.threadpool.ThreadPool
+import java.io.IOException
 import java.time.Instant
+import java.util.UUID
+import kotlin.collections.HashMap
 import kotlin.coroutines.CoroutineContext
 
 object MonitorRunner : JobRunner, CoroutineScope, AbstractLifecycleComponent() {
@@ -247,6 +270,8 @@ object MonitorRunner : JobRunner, CoroutineScope, AbstractLifecycleComponent() {
             launch {
                 if (job.isBucketLevelMonitor()) {
                     runBucketLevelMonitor(job, periodStart, periodEnd)
+                } else if (job.isDocLevelMonitor()) {
+                    runDocLevelMonitor(job, periodStart, periodEnd)
                 } else {
                     runQueryLevelMonitor(job, periodStart, periodEnd)
                 }
@@ -707,4 +732,182 @@ object MonitorRunner : JobRunner, CoroutineScope, AbstractLifecycleComponent() {
             .newInstance(template.params + mapOf("ctx" to ctx.asTemplateArg()))
             .execute()
     }
+
+    private suspend fun runDocLevelMonitor(monitor: Monitor, periodStart: Instant, periodEnd: Instant, dryrun: Boolean = false) {
+
+        logger.info("Document-level-monitor is running ...")
+        try {
+            validate(monitor)
+        } catch (e: Exception) {
+            logger.info("Failed to start Document-level-monitor. Error: ${e.message}")
+            return
+        }
+
+        val docLevelMonitorInput = monitor.inputs[0] as DocLevelMonitorInput
+        val index = docLevelMonitorInput.indices[0]
+        val queries: List<DocLevelQuery> = docLevelMonitorInput.queries
+
+        var lastRunContext = monitor.lastRunContext.toMutableMap()
+        try {
+            if (lastRunContext.isNullOrEmpty()) {
+                lastRunContext = createRunContext(index).toMutableMap()
+            }
+        } catch (e: Exception) {
+            logger.info("Failed to start Document-level-monitor $index. Error: ${e.message}")
+            return
+        }
+
+        for (query in queries) {
+            runForEachQuery(monitor, lastRunContext, index, query)
+        }
+    }
+
+    private suspend fun runForEachQuery(monitor: Monitor, lastRunContext: MutableMap<String, Any>, index: String, query: DocLevelQuery) {
+        val count: Int = lastRunContext["shards_count"] as Int
+        for (i: Int in 0 until count) {
+            val shard = i.toString()
+            try {
+                logger.info("Monitor execution for shard: $shard")
+
+                val maxSeqNo: Long = getMaxSeqNo(index, shard)
+                logger.info("MaxSeqNo of shard_$shard is $maxSeqNo")
+
+                // todo: scope to optimize this: if the prev seqNo and the current max seqNo are the same, don't search.
+                val hits: SearchHits = searchShard(index, shard, lastRunContext[shard].toString().toLongOrNull(), maxSeqNo, query.query)
+                logger.info("Search hits for shard_$shard is: ${hits.hits.size}")
+
+                if (hits.hits.isNotEmpty()) {
+                    createFindings(monitor, index, query, hits)
+                }
+
+                logger.info("Updating monitor: ${monitor.id}")
+                lastRunContext[shard] = maxSeqNo.toString()
+                val updatedMonitor = monitor.copy(lastRunContext = lastRunContext)
+                // note: the update has to be called serially for the shards of a given index.
+                updateMonitor(client, xContentRegistry, settings, updatedMonitor)
+            } catch (e: Exception) {
+                logger.info("Failed to run for shard $shard. Error: ${e.message}")
+                logger.debug("Failed to run for shard $shard", e)
+            }
+        }
+    }
+
+    // todo: add more validations.
+    private fun validate(monitor: Monitor) {
+        if (monitor.inputs.size > 1) {
+            throw IOException("Only one input is supported with document-level-monitor.")
+        }
+
+        if (monitor.inputs[0].name() != DocLevelMonitorInput.DOC_LEVEL_INPUT_FIELD) {
+            throw IOException("Invalid input with document-level-monitor.")
+        }
+
+        val docLevelMonitorInput = monitor.inputs[0] as DocLevelMonitorInput
+        if (docLevelMonitorInput.indices.size > 1) {
+            throw IOException("Only one index is supported with document-level-monitor.")
+        }
+    }
+
+    private fun getShardsCount(index: String): Int {
+        val allShards: List<ShardRouting> = clusterService.state().routingTable().allShards(index)
+        return allShards.size
+    }
+
+    private fun createRunContext(index: String): HashMap<String, Any> {
+        val lastRunContext = HashMap<String, Any>()
+        lastRunContext["index"] = index
+        val count = getShardsCount(index)
+        lastRunContext["shards_count"] = count
+
+        for (i: Int in 0 until count) {
+            val shard = i.toString()
+            val maxSeqNo: Long = getMaxSeqNo(index, shard)
+            lastRunContext[shard] = maxSeqNo
+        }
+        return lastRunContext
+    }
+
+    /**
+     * Get the current max seq number of the shard. We find it by searching the last document
+     * in the primary shard.
+     */
+    private fun getMaxSeqNo(index: String, shard: String): Long {
+        val request: SearchRequest = SearchRequest()
+            .indices(index)
+            .preference("_shards:$shard")
+            .source(
+                SearchSourceBuilder()
+                    .version(true)
+                    .sort("_seq_no", SortOrder.DESC)
+                    .seqNoAndPrimaryTerm(true)
+                    .query(QueryBuilders.matchAllQuery())
+                    .size(1)
+            )
+        val response: SearchResponse = client.search(request).actionGet()
+        if (response.status() !== RestStatus.OK) {
+            throw IOException("Failed to get max seq no for shard: $shard")
+        }
+        if (response.hits.hits.isEmpty())
+            return -1L
+
+        return response.hits.hits[0].seqNo
+    }
+
+    private fun searchShard(index: String, shard: String, prevSeqNo: Long?, maxSeqNo: Long, query: String): SearchHits {
+        val boolQueryBuilder = BoolQueryBuilder()
+        boolQueryBuilder.filter(QueryBuilders.rangeQuery("_seq_no").gt(prevSeqNo).lte(maxSeqNo))
+        boolQueryBuilder.must(QueryBuilders.queryStringQuery(query))
+
+        val request: SearchRequest = SearchRequest()
+            .indices(index)
+            .preference("_shards:$shard")
+            .source(
+                SearchSourceBuilder()
+                    .version(true)
+                    .query(boolQueryBuilder)
+                    .size(10000) // fixme: make this configurable.
+            )
+        logger.info("Request: $request")
+        val response: SearchResponse = client.search(request).actionGet()
+        if (response.status() !== RestStatus.OK) {
+            throw IOException("Failed to search shard: $shard")
+        }
+        return response.hits
+    }
+
+    private fun createFindings(monitor: Monitor, index: String, docLevelQuery: DocLevelQuery, hits: SearchHits) {
+        val finding = Finding(
+            id = UUID.randomUUID().toString(),
+            relatedDocId = getAllDocIds(hits),
+            monitorId = monitor.id,
+            monitorName = monitor.name,
+            index = index,
+            queryId = docLevelQuery.id,
+            queryTags = docLevelQuery.tags,
+            severity = docLevelQuery.severity,
+            timestamp = Instant.now(),
+            triggerId = null, // todo: add once integrated with actions/triggers
+            triggerName = null // todo: add once integrated with actions/triggers
+        )
+
+        val findingStr = finding.toXContent(XContentBuilder.builder(XContentType.JSON.xContent()), ToXContent.EMPTY_PARAMS).string()
+        // todo: change this to debug.
+        logger.info("Findings: $findingStr")
+
+        // todo: below is hardcoded, temporary code added only for testing. Replace this with proper Findings index lifecycle management.
+        val indexRequest = IndexRequest(".opensearch-alerting-findings")
+            .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
+            .source(findingStr, XContentType.JSON)
+
+        client.index(indexRequest).actionGet()
+    }
+
+    private fun getAllDocIds(hits: SearchHits): String {
+        var sb = StringBuilder()
+        for (hit in hits) {
+            sb.append(hit.id)
+            sb.append(",")
+        }
+        return sb.substring(0, sb.length - 1)
+    }
 }
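The per-shard search in `searchShard` above bounds every run by `_seq_no`: only documents written after the previously recorded sequence number, and up to the shard's current maximum, are evaluated against the query string. A minimal sketch of the request body this produces, built with the same OpenSearch query builders; the seq_no bounds and query string are made-up values for illustration:

```kotlin
import org.opensearch.index.query.QueryBuilders
import org.opensearch.search.builder.SearchSourceBuilder

fun main() {
    // Hypothetical values: the previous run stopped at seq_no 41, the shard is now at 97.
    val prevSeqNo = 41L
    val maxSeqNo = 97L
    val queryString = "error AND status_code:500" // stands in for DocLevelQuery.query

    val source = SearchSourceBuilder()
        .query(
            QueryBuilders.boolQuery()
                // only documents written since the last run, up to the snapshot taken at the start of this run
                .filter(QueryBuilders.rangeQuery("_seq_no").gt(prevSeqNo).lte(maxSeqNo))
                // the user-supplied per-document query
                .must(QueryBuilders.queryStringQuery(queryString))
        )
        .size(10000)

    // SearchSourceBuilder.toString() renders the request body as JSON,
    // which is handy for checking what actually reaches the shard.
    println(source)
}
```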
diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/model/Finding.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/Finding.kt
index e10b1a41c..ccd8689a3 100644
--- a/alerting/src/main/kotlin/org/opensearch/alerting/model/Finding.kt
+++ b/alerting/src/main/kotlin/org/opensearch/alerting/model/Finding.kt
@@ -21,37 +21,39 @@ import java.time.Instant
  */
 class Finding(
     val id: String = NO_ID,
-    val logEvent: Map<String, Any>,
+    val relatedDocId: String,
     val monitorId: String,
     val monitorName: String,
+    val index: String,
     val queryId: String = NO_ID,
     val queryTags: List<String>,
     val severity: String,
     val timestamp: Instant,
-    val triggerId: String,
-    val triggerName: String
+    val triggerId: String?,
+    val triggerName: String?
 ) : Writeable, ToXContent {
-
     @Throws(IOException::class)
     constructor(sin: StreamInput) : this(
         id = sin.readString(),
-        logEvent = suppressWarning(sin.readMap()),
+        relatedDocId = sin.readString(),
         monitorId = sin.readString(),
         monitorName = sin.readString(),
+        index = sin.readString(),
         queryId = sin.readString(),
         queryTags = sin.readStringList(),
         severity = sin.readString(),
         timestamp = sin.readInstant(),
-        triggerId = sin.readString(),
-        triggerName = sin.readString()
+        triggerId = sin.readOptionalString(),
+        triggerName = sin.readOptionalString()
     )
 
     fun asTemplateArg(): Map<String, Any> {
         return mapOf(
             FINDING_ID_FIELD to id,
-            LOG_EVENT_FIELD to logEvent,
+            RELATED_DOC_ID_FIELD to relatedDocId,
             MONITOR_ID_FIELD to monitorId,
             MONITOR_NAME_FIELD to monitorName,
+            INDEX_FIELD to index,
             QUERY_ID_FIELD to queryId,
             QUERY_TAGS_FIELD to queryTags,
             SEVERITY_FIELD to severity,
@@ -64,9 +66,10 @@ class Finding(
     override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder {
         builder.startObject()
             .field(FINDING_ID_FIELD, id)
-            .field(LOG_EVENT_FIELD, logEvent)
+            .field(RELATED_DOC_ID_FIELD, relatedDocId)
             .field(MONITOR_ID_FIELD, monitorId)
             .field(MONITOR_NAME_FIELD, monitorName)
+            .field(INDEX_FIELD, index)
             .field(QUERY_ID_FIELD, queryId)
             .field(QUERY_TAGS_FIELD, queryTags.toTypedArray())
             .field(SEVERITY_FIELD, severity)
@@ -80,22 +83,24 @@ class Finding(
     @Throws(IOException::class)
     override fun writeTo(out: StreamOutput) {
         out.writeString(id)
-        out.writeMap(logEvent)
+        out.writeString(relatedDocId)
         out.writeString(monitorId)
         out.writeString(monitorName)
+        out.writeString(index)
         out.writeString(queryId)
         out.writeStringCollection(queryTags)
         out.writeString(severity)
         out.writeInstant(timestamp)
-        out.writeString(triggerId)
-        out.writeString(triggerName)
+        out.writeOptionalString(triggerId)
+        out.writeOptionalString(triggerName)
     }
 
     companion object {
         const val FINDING_ID_FIELD = "id"
-        const val LOG_EVENT_FIELD = "log_event"
+        const val RELATED_DOC_ID_FIELD = "related_doc_id"
         const val MONITOR_ID_FIELD = "monitor_id"
        const val MONITOR_NAME_FIELD = "monitor_name"
+        const val INDEX_FIELD = "index"
         const val QUERY_ID_FIELD = "query_id"
         const val QUERY_TAGS_FIELD = "query_tags"
         const val SEVERITY_FIELD = "severity"
@@ -107,9 +112,10 @@ class Finding(
         @JvmStatic @JvmOverloads
         @Throws(IOException::class)
         fun parse(xcp: XContentParser, id: String = NO_ID): Finding {
-            var logEvent: Map<String, Any> = mapOf()
+            lateinit var relatedDocId: String
             lateinit var monitorId: String
             lateinit var monitorName: String
+            lateinit var index: String
             var queryId: String = NO_ID
             val queryTags: MutableList<String> = mutableListOf()
             lateinit var severity: String
@@ -123,9 +129,10 @@ class Finding(
                 xcp.nextToken()
 
                 when (fieldName) {
-                    LOG_EVENT_FIELD -> logEvent = xcp.map()
+                    RELATED_DOC_ID_FIELD -> relatedDocId = xcp.text()
                     MONITOR_ID_FIELD -> monitorId = xcp.text()
                     MONITOR_NAME_FIELD -> monitorName = xcp.text()
+                    INDEX_FIELD -> index = xcp.text()
                     QUERY_ID_FIELD -> queryId = xcp.text()
                     QUERY_TAGS_FIELD -> {
                         ensureExpectedToken(XContentParser.Token.START_ARRAY, xcp.currentToken(), xcp)
@@ -142,9 +149,10 @@ class Finding(
 
             return Finding(
                 id = id,
-                logEvent = logEvent,
+                relatedDocId = relatedDocId,
                 monitorId = monitorId,
                 monitorName = monitorName,
+                index = index,
                 queryId = queryId,
                 queryTags = queryTags,
                 severity = severity,
@@ -159,10 +167,5 @@ class Finding(
         fun readFrom(sin: StreamInput): Finding {
             return Finding(sin)
         }
-
-        @Suppress("UNCHECKED_CAST")
-        fun suppressWarning(map: MutableMap<String?, Any?>?): MutableMap<String, Any> {
-            return map as MutableMap<String, Any>
-        }
     }
 }
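For reference, the reworked `Finding` above records the matched document ids (`related_doc_id`) and the source `index` instead of embedding a whole `log_event` map. A small sketch, assuming the plugin classes above are on the classpath, that builds one and prints its JSON the same way `createFindings` does before indexing it; all ids and names are made-up values:

```kotlin
import org.opensearch.alerting.elasticapi.string
import org.opensearch.alerting.model.Finding
import org.opensearch.common.xcontent.ToXContent
import org.opensearch.common.xcontent.XContentBuilder
import org.opensearch.common.xcontent.XContentType
import java.time.Instant
import java.util.UUID

fun main() {
    val finding = Finding(
        id = UUID.randomUUID().toString(),
        relatedDocId = "docId1,docId2", // comma-joined ids, the format getAllDocIds() produces
        monitorId = "monitor-123",      // hypothetical ids/names for illustration
        monitorName = "error-log-monitor",
        index = "app-logs",
        queryId = "query-1",
        queryTags = listOf("sev1", "login"),
        severity = "1",
        timestamp = Instant.now(),
        triggerId = null,               // nullable until trigger integration lands
        triggerName = null
    )

    // Same serialization path MonitorRunner.createFindings() uses before indexing.
    val json = finding
        .toXContent(XContentBuilder.builder(XContentType.JSON.xContent()), ToXContent.EMPTY_PARAMS)
        .string()
    println(json)
}
```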
diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/model/Monitor.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/Monitor.kt
index cbeda00bc..3ea37bd11 100644
--- a/alerting/src/main/kotlin/org/opensearch/alerting/model/Monitor.kt
+++ b/alerting/src/main/kotlin/org/opensearch/alerting/model/Monitor.kt
@@ -120,7 +120,8 @@ data class Monitor(
     enum class MonitorType(val value: String) {
         QUERY_LEVEL_MONITOR("query_level_monitor"),
         BUCKET_LEVEL_MONITOR("bucket_level_monitor"),
-        CLUSTER_METRICS_MONITOR("cluster_metrics_monitor");
+        CLUSTER_METRICS_MONITOR("cluster_metrics_monitor"),
+        DOC_LEVEL_MONITOR("doc_level_monitor");
 
         override fun toString(): String {
             return value
diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/model/docLevelInput/DocLevelMonitorInput.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/docLevelInput/DocLevelMonitorInput.kt
index a1ac19a46..97d75a079 100644
--- a/alerting/src/main/kotlin/org/opensearch/alerting/model/docLevelInput/DocLevelMonitorInput.kt
+++ b/alerting/src/main/kotlin/org/opensearch/alerting/model/docLevelInput/DocLevelMonitorInput.kt
@@ -68,7 +68,10 @@ data class DocLevelMonitorInput(
 
         const val NO_DESCRIPTION = ""
 
-        val XCONTENT_REGISTRY = NamedXContentRegistry.Entry(Input::class.java, ParseField(DOC_LEVEL_INPUT_FIELD), CheckedFunction { parse(it) })
+        val XCONTENT_REGISTRY = NamedXContentRegistry.Entry(
+            Input::class.java,
+            ParseField(DOC_LEVEL_INPUT_FIELD), CheckedFunction { parse(it) }
+        )
 
         @JvmStatic @Throws(IOException::class)
         fun parse(xcp: XContentParser): DocLevelMonitorInput {
diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportIndexMonitorAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportIndexMonitorAction.kt
index 50fde1514..3a6f10a54 100644
--- a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportIndexMonitorAction.kt
+++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportIndexMonitorAction.kt
@@ -115,6 +115,7 @@ class TransportIndexMonitorAction @Inject constructor(
         user: User?
     ) {
         val indices = mutableListOf<String>()
+        // todo: for doc-level alerting, check that the index is present before the monitor is created.
         val searchInputs = request.monitor.inputs.filter { it.name() == SearchInput.SEARCH_FIELD }
         searchInputs.forEach {
             val searchInput = it as SearchInput
diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/util/AlertingUtils.kt b/alerting/src/main/kotlin/org/opensearch/alerting/util/AlertingUtils.kt
index 12fa8f3ed..8ef96eeee 100644
--- a/alerting/src/main/kotlin/org/opensearch/alerting/util/AlertingUtils.kt
+++ b/alerting/src/main/kotlin/org/opensearch/alerting/util/AlertingUtils.kt
@@ -13,7 +13,6 @@ import org.opensearch.alerting.core.model.ScheduledJob
 import org.opensearch.alerting.destination.message.BaseMessage
 import org.opensearch.alerting.elasticapi.suspendUntil
 import org.opensearch.alerting.model.AggregationResultBucket
-import org.opensearch.alerting.model.AlertingConfigAccessor
 import org.opensearch.alerting.model.BucketLevelTriggerRunResult
 import org.opensearch.alerting.model.Monitor
 import org.opensearch.alerting.model.action.Action
@@ -64,6 +63,8 @@ fun BaseMessage.isHostInDenylist(networks: List<String>): Boolean {
 
 fun Monitor.isBucketLevelMonitor(): Boolean = this.monitorType == Monitor.MonitorType.BUCKET_LEVEL_MONITOR
 
+fun Monitor.isDocLevelMonitor(): Boolean = this.monitorType == Monitor.MonitorType.DOC_LEVEL_MONITOR
+
 /**
  * Since buckets can have multi-value keys, this converts the bucket key values to a string that can be used
  * as the key for a HashMap to easily retrieve [AggregationResultBucket] based on the bucket key values.
@@ -101,17 +102,17 @@ fun BucketLevelTriggerRunResult.getCombinedTriggerRunResult(
 
 // TODO: Check if this can be more generic such that TransportIndexMonitorAction class can use this. Also see if this should be refactored
 // to another class. Include tests for this as well.
 suspend fun updateMonitor(client: Client, xContentRegistry: NamedXContentRegistry, settings: Settings, monitor: Monitor): IndexResponse {
-    val currentMonitor = AlertingConfigAccessor.getMonitorInfo(client, xContentRegistry, monitor.id)
+    /*val currentMonitor = AlertingConfigAccessor.getMonitorInfo(client, xContentRegistry, monitor.id)
 
     var updateMonitor = monitor
     // If both are enabled, use the current existing monitor enabled time, otherwise the next execution will be
     // incorrect.
     if (monitor.enabled && currentMonitor.enabled)
-        updateMonitor = monitor.copy(enabledTime = currentMonitor.enabledTime)
+        updateMonitor = monitor.copy(enabledTime = currentMonitor.enabledTime)*/
 
     val indexRequest = IndexRequest(ScheduledJob.SCHEDULED_JOBS_INDEX)
         .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
-        .source(updateMonitor.toXContentWithUser(XContentFactory.jsonBuilder(), ToXContent.MapParams(mapOf("with_type" to "true"))))
+        .source(monitor.toXContentWithUser(XContentFactory.jsonBuilder(), ToXContent.MapParams(mapOf("with_type" to "true"))))
         .id(monitor.id)
         .timeout(AlertingSettings.INDEX_TIMEOUT.get(settings))
diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/TestHelpers.kt b/alerting/src/test/kotlin/org/opensearch/alerting/TestHelpers.kt
index 991ec33f3..d06e8fc63 100644
--- a/alerting/src/test/kotlin/org/opensearch/alerting/TestHelpers.kt
+++ b/alerting/src/test/kotlin/org/opensearch/alerting/TestHelpers.kt
@@ -330,11 +330,10 @@ fun randomDocLevelMonitorInput(
 
 fun randomFinding(
     id: String = OpenSearchRestTestCase.randomAlphaOfLength(10),
-    logEvent: Map<String, Any> = mapOf(
-        OpenSearchRestTestCase.randomAlphaOfLength(5) to OpenSearchRestTestCase.randomAlphaOfLength(5)
-    ),
+    relatedDocId: String = OpenSearchRestTestCase.randomAlphaOfLength(10),
     monitorId: String = OpenSearchRestTestCase.randomAlphaOfLength(10),
     monitorName: String = OpenSearchRestTestCase.randomAlphaOfLength(10),
+    index: String = OpenSearchRestTestCase.randomAlphaOfLength(10),
     queryId: String = OpenSearchRestTestCase.randomAlphaOfLength(10),
     queryTags: MutableList<String> = mutableListOf(),
     severity: String = "${randomInt(5)}",
@@ -344,9 +343,10 @@
 ): Finding {
     return Finding(
         id = id,
-        logEvent = logEvent,
+        relatedDocId = relatedDocId,
         monitorId = monitorId,
         monitorName = monitorName,
+        index = index,
         queryId = queryId,
         queryTags = queryTags,
         severity = severity,
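With `randomFinding` updated above, a natural follow-up would be a stream round-trip test covering the new fields and the now-optional trigger fields. A sketch of such a test (not part of this patch; the class name is hypothetical, and it assumes the usual `BytesStreamOutput`/`StreamInput` pattern these tests rely on):

```kotlin
package org.opensearch.alerting.model

import org.opensearch.alerting.randomFinding
import org.opensearch.common.io.stream.BytesStreamOutput
import org.opensearch.test.OpenSearchTestCase

class FindingSerializationTests : OpenSearchTestCase() {

    fun `test finding stream round trip`() {
        // GIVEN a randomized finding that exercises the new relatedDocId/index fields
        val finding = randomFinding()

        // WHEN it is written to and read back from a stream
        val out = BytesStreamOutput()
        finding.writeTo(out)
        val recreated = Finding.readFrom(out.bytes().streamInput())

        // THEN the fields survive the round trip, including the optional trigger fields
        assertEquals(finding.id, recreated.id)
        assertEquals(finding.relatedDocId, recreated.relatedDocId)
        assertEquals(finding.index, recreated.index)
        assertEquals(finding.triggerId, recreated.triggerId)
        assertEquals(finding.triggerName, recreated.triggerName)
    }
}
```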
diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/model/DocLevelMonitorInputTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/model/DocLevelMonitorInputTests.kt
index 7f85b1895..4311dee19 100644
--- a/alerting/src/test/kotlin/org/opensearch/alerting/model/DocLevelMonitorInputTests.kt
+++ b/alerting/src/test/kotlin/org/opensearch/alerting/model/DocLevelMonitorInputTests.kt
@@ -5,11 +5,15 @@
 
 package org.opensearch.alerting.model
 
+import org.opensearch.alerting.elasticapi.string
 import org.opensearch.alerting.model.action.Action
 import org.opensearch.alerting.model.docLevelInput.DocLevelMonitorInput
 import org.opensearch.alerting.model.docLevelInput.DocLevelQuery
 import org.opensearch.alerting.randomDocLevelMonitorInput
 import org.opensearch.alerting.randomDocLevelQuery
+import org.opensearch.common.xcontent.ToXContent
+import org.opensearch.common.xcontent.XContentBuilder
+import org.opensearch.common.xcontent.XContentType
 import org.opensearch.test.OpenSearchTestCase
 
 class DocLevelMonitorInputTests : OpenSearchTestCase() {
@@ -34,15 +38,34 @@ class DocLevelMonitorInputTests : OpenSearchTestCase() {
         // GIVEN
         val input = randomDocLevelMonitorInput()
+        // test
+        val inputString = input.toXContent(XContentBuilder.builder(XContentType.JSON.xContent()), ToXContent.EMPTY_PARAMS).string()
+        // assertEquals("test", inputString)
+        // test end
 
         // WHEN
         val templateArgs = input.asTemplateArg()
 
         // THEN
-        assertEquals("Template args 'description' field does not match:", templateArgs[DocLevelMonitorInput.DESCRIPTION_FIELD], input.description)
-        assertEquals("Template args 'indices' field does not match:", templateArgs[DocLevelMonitorInput.INDICES_FIELD], input.indices)
-        assertEquals("Template args 'queries' field does not contain the expected number of queries:", input.queries.size, (templateArgs[DocLevelMonitorInput.QUERIES_FIELD] as List<*>).size)
+        assertEquals(
+            "Template args 'description' field does not match:",
+            templateArgs[DocLevelMonitorInput.DESCRIPTION_FIELD],
+            input.description
+        )
+        assertEquals(
+            "Template args 'indices' field does not match:",
+            templateArgs[DocLevelMonitorInput.INDICES_FIELD],
+            input.indices
+        )
+        assertEquals(
+            "Template args 'queries' field does not contain the expected number of queries:",
+            input.queries.size,
+            (templateArgs[DocLevelMonitorInput.QUERIES_FIELD] as List<*>).size
+        )
         input.queries.forEach {
-            assertTrue("Template args 'queries' field does not match:", (templateArgs[DocLevelMonitorInput.QUERIES_FIELD] as List<*>).contains(it.asTemplateArg()))
+            assertTrue(
+                "Template args 'queries' field does not match:",
+                (templateArgs[DocLevelMonitorInput.QUERIES_FIELD] as List<*>).contains(it.asTemplateArg())
+            )
         }
     }
 }
diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/model/FindingTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/model/FindingTests.kt
index 4e0cde443..1ff468af1 100644
--- a/alerting/src/test/kotlin/org/opensearch/alerting/model/FindingTests.kt
+++ b/alerting/src/test/kotlin/org/opensearch/alerting/model/FindingTests.kt
@@ -18,14 +18,26 @@ class FindingTests : OpenSearchTestCase() {
 
         // THEN
         assertEquals("Template args 'id' field does not match:", templateArgs[Finding.FINDING_ID_FIELD], finding.id)
-        assertEquals("Template args 'logEvent' field does not match:", templateArgs[Finding.LOG_EVENT_FIELD], finding.logEvent)
+        assertEquals("Template args 'relatedDocId' field does not match:", templateArgs[Finding.RELATED_DOC_ID_FIELD], finding.relatedDocId)
         assertEquals("Template args 'monitorId' field does not match:", templateArgs[Finding.MONITOR_ID_FIELD], finding.monitorId)
-        assertEquals("Template args 'monitorName' field does not match:", templateArgs[Finding.MONITOR_NAME_FIELD], finding.monitorName)
+        assertEquals(
+            "Template args 'monitorName' field does not match:",
+            templateArgs[Finding.MONITOR_NAME_FIELD],
+            finding.monitorName
+        )
         assertEquals("Template args 'queryId' field does not match:", templateArgs[Finding.QUERY_ID_FIELD], finding.queryId)
         assertEquals("Template args 'queryTags' field does not match:", templateArgs[Finding.QUERY_TAGS_FIELD], finding.queryTags)
         assertEquals("Template args 'severity' field does not match:", templateArgs[Finding.SEVERITY_FIELD], finding.severity)
-        assertEquals("Template args 'timestamp' field does not match:", templateArgs[Finding.TIMESTAMP_FIELD], finding.timestamp.toEpochMilli())
+        assertEquals(
+            "Template args 'timestamp' field does not match:",
+            templateArgs[Finding.TIMESTAMP_FIELD],
+            finding.timestamp.toEpochMilli()
+        )
         assertEquals("Template args 'triggerId' field does not match:", templateArgs[Finding.TRIGGER_ID_FIELD], finding.triggerId)
-        assertEquals("Template args 'triggerName' field does not match:", templateArgs[Finding.TRIGGER_NAME_FIELD], finding.triggerName)
+        assertEquals(
+            "Template args 'triggerName' field does not match:",
+            templateArgs[Finding.TRIGGER_NAME_FIELD],
+            finding.triggerName
+        )
     }
 }