
Commit

Merge branch 'main' into others_apt
sbcd90 authored Feb 20, 2023
2 parents 3e8c70a + a705168 commit e66b116
Showing 47 changed files with 926 additions and 351 deletions.
19 changes: 19 additions & 0 deletions .github/workflows/add-untriaged.yml
@@ -0,0 +1,19 @@
name: Apply 'untriaged' label during issue lifecycle

on:
  issues:
    types: [opened, reopened, transferred]

jobs:
  apply-label:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/github-script@v6
        with:
          script: |
            github.rest.issues.addLabels({
              issue_number: context.issue.number,
              owner: context.repo.owner,
              repo: context.repo.repo,
              labels: ['untriaged']
            })
7 changes: 7 additions & 0 deletions CODEOWNERS
@@ -0,0 +1,7 @@
@amsiglan
@awshurneyt
@getsaurabh02
@lezzago
@praveensameneni
@sbcd90
@eirsep
14 changes: 9 additions & 5 deletions MAINTAINERS.md
@@ -4,8 +4,12 @@ This document contains a list of maintainers in this repo. See [opensearch-proje

## Current Maintainers

| Maintainer | GitHub ID | Affiliation |
| ---------------------- | ----------------------------------------------- | ----------- |
| Saurabh Singh | [getsaurabh02](https://github.com/getsaurabh02) | Amazon |
| Subhobrata Dey | [sbcd90](https://github.com/sbcd90) | Amazon |
| Surya Sashank Nistalai | [eirsep](https://github.com/eirsep) | Amazon |
| Maintainer | GitHub ID | Affiliation |
| ---------------- | ----------------------------------------------------- | ----------- |
| Ashish Agrawal | [lezzago](https://github.com/lezzago) | Amazon |
| Subhobrata Dey | [sbcd90](https://github.com/sbcd90) | Amazon |
| Thomas Hurney | [awshurneyt](https://github.com/AWSHurneyt) | Amazon |
| Surya Sashank Nistala | [eirsep](https://github.com/eirsep) | Amazon |
| Praveen Sameneni | [praveensameneni](https://github.com/praveensameneni) | Amazon |
| Amardeepsingh Siglani | [amsiglan](https://github.com/amsiglan) | Amazon |
| Saurabh Singh | [getsaurabh02](https://github.com/getsaurabh02) | Amazon |
Additional changed file:
@@ -99,19 +99,47 @@ public void onResponse(AcknowledgedResponse acknowledgedResponse) {
if (acknowledgedResponse.isAcknowledged() == false) {
log.warn("Upserting component template not ack'd!");
}
boolean updateConflictingTemplate = false;
// Find template which matches input index best
String templateName =
MetadataIndexTemplateService.findV2Template(
state.metadata(),
normalizeIndexName(indexName),
false
);
// If we find a conflicting template (regardless of priority) and that template was created by us,
// we silently update that template's index_pattern.
// Otherwise, we fail, since we don't want to change the index_pattern of a user-created index template.
Map<String, List<String>> conflictingTemplates =
MetadataIndexTemplateService.findConflictingV2Templates(
state,
computeIndexTemplateName(indexName),
List.of(computeIndexPattern(indexName))
);

// If there is exactly one conflict and it is with one of our own templates, we update that template's index_pattern field
if (conflictingTemplates.size() == 1) {
String conflictingTemplateName = conflictingTemplates.keySet().iterator().next();
if (conflictingTemplateName.startsWith(OPENSEARCH_SAP_INDEX_TEMPLATE_PREFIX)) {
templateName = conflictingTemplateName;
updateConflictingTemplate = true;
}
}

if (templateName == null && conflictingTemplates.size() > 0) {
String errorMessage = "Found conflicting templates: [" +
String.join(", ", conflictingTemplates.keySet()) + "]";
log.error(errorMessage);
actionListener.onFailure(SecurityAnalyticsException.wrap(new IllegalStateException(errorMessage)));
return;
}

String componentName = computeComponentTemplateName(indexName);

ComposableIndexTemplate template;
if (templateName == null) {
template = new ComposableIndexTemplate(
List.of(indexName.endsWith("*") == false ? indexName + "*": indexName),
List.of(computeIndexPattern(indexName)),
null,
List.of(componentName),
null,
@@ -123,10 +151,18 @@ public void onResponse(AcknowledgedResponse acknowledgedResponse) {
template = state.metadata().templatesV2().get(templateName);
// Check if we need to append our component to composedOf list
if (template.composedOf().contains(componentName) == false) {
List<String> newComposedOf = new ArrayList<>(template.composedOf());
newComposedOf.add(componentName);
List<String> newComposedOf;
List<String> indexPatterns;
if (updateConflictingTemplate) {
newComposedOf = new ArrayList<>(template.composedOf());
newComposedOf.add(componentName);
indexPatterns = List.of(computeIndexPattern(indexName));
} else {
newComposedOf = List.of(componentName);
indexPatterns = template.indexPatterns();
}
template = new ComposableIndexTemplate(
template.indexPatterns(),
indexPatterns,
template.template(),
newComposedOf,
template.priority(),
@@ -155,6 +191,10 @@ public void onFailure(Exception e) {

}

private String computeIndexPattern(String indexName) {
return indexName.endsWith("*") == false ? indexName + "*" : indexName;
}

private void upsertIndexTemplate(
IndicesAdminClient indicesClient,
boolean create,
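For reference, the pattern-derivation and conflict-handling rule added in this file can be illustrated with a small standalone sketch. This is hypothetical code, not part of the commit; the OPENSEARCH_SAP_INDEX_TEMPLATE_PREFIX value shown is assumed for illustration, and the real constant is defined elsewhere in the plugin.

// Hypothetical sketch, not part of the commit.
import java.util.List;
import java.util.Map;

public class TemplateConflictRuleSketch {

    static final String OPENSEARCH_SAP_INDEX_TEMPLATE_PREFIX = ".opensearch-sap-"; // assumed value

    // Mirrors computeIndexPattern: widen an exact index name, keep an existing pattern as-is.
    static String computeIndexPattern(String indexName) {
        return indexName.endsWith("*") ? indexName : indexName + "*";
    }

    // Returns the name of the template to update, or null to create a fresh one.
    static String resolveTemplate(String matchedTemplate, Map<String, List<String>> conflicts) {
        if (conflicts.size() == 1) {
            String onlyConflict = conflicts.keySet().iterator().next();
            if (onlyConflict.startsWith(OPENSEARCH_SAP_INDEX_TEMPLATE_PREFIX)) {
                return onlyConflict; // our own template: silently widen its index_patterns
            }
        }
        if (matchedTemplate == null && !conflicts.isEmpty()) {
            throw new IllegalStateException("Found conflicting templates: " + conflicts.keySet());
        }
        return matchedTemplate;
    }

    public static void main(String[] args) {
        System.out.println(computeIndexPattern("windows"));  // windows*
        System.out.println(computeIndexPattern("windows*")); // windows*
        System.out.println(resolveTemplate(null,
                Map.of(OPENSEARCH_SAP_INDEX_TEMPLATE_PREFIX + "windows", List.of("windows*"))));
    }
}

In short: an exact index name is widened to a prefix pattern, a single conflicting template is adopted only when it carries the plugin's own prefix, and any other conflict fails the request rather than touching a user-created template.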
Additional changed file:
@@ -5,9 +5,16 @@

package org.opensearch.securityanalytics.mapper;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
@@ -21,21 +28,12 @@
import org.opensearch.action.support.GroupedActionListener;
import org.opensearch.action.support.master.AcknowledgedResponse;
import org.opensearch.client.IndicesAdminClient;
import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.cluster.metadata.IndexNameExpressionResolver;
import org.opensearch.cluster.metadata.MappingMetadata;
import org.opensearch.cluster.service.ClusterService;
import org.opensearch.common.collect.ImmutableOpenMap;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.rest.RestStatus;
import org.opensearch.securityanalytics.action.GetIndexMappingsResponse;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import org.opensearch.securityanalytics.action.GetMappingsViewResponse;
import org.opensearch.securityanalytics.model.CreateMappingResult;
import org.opensearch.securityanalytics.util.IndexUtils;
@@ -248,7 +246,7 @@ public void getMappingAction(String indexName, ActionListener<GetIndexMappingsRe
resolveConcreteIndex(indexName, new ActionListener<>() {
@Override
public void onResponse(String concreteIndex) {
doGetMappingAction(concreteIndex, actionListener);
doGetMappingAction(indexName, concreteIndex, actionListener);
}

@Override
@@ -263,17 +261,16 @@ public void onFailure(Exception e) {
}
}

public void doGetMappingAction(String indexName, ActionListener<GetIndexMappingsResponse> actionListener) {
GetMappingsRequest getMappingsRequest = new GetMappingsRequest().indices(indexName);
public void doGetMappingAction(String indexName, String concreteIndexName, ActionListener<GetIndexMappingsResponse> actionListener) {
GetMappingsRequest getMappingsRequest = new GetMappingsRequest().indices(concreteIndexName);
indicesClient.getMappings(getMappingsRequest, new ActionListener<>() {
@Override
public void onResponse(GetMappingsResponse getMappingsResponse) {
try {
// Extract indexName and MappingMetadata
String indexName = getMappingsResponse.mappings().iterator().next().key;
// Extract MappingMetadata
MappingMetadata mappingMetadata = getMappingsResponse.mappings().iterator().next().value;
// List of all found applied aliases on index
List<String> appliedAliases = new ArrayList<>();
Set<String> appliedAliases = new HashSet<>();
// Get list of alias -> path pairs from index mappings
List<Pair<String, String>> indexAliasPathPairs = MapperUtils.getAllAliasPathPairs(mappingMetadata);

@@ -293,10 +290,6 @@ public void onResponse(GetMappingsResponse getMappingsResponse) {
}
}
}
// If we found all aliases we can stop searching further
if (indexAliasPathPairs.size() == appliedAliases.size()) {
break;
}
}

if (appliedAliases.size() == 0) {
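A possible reading of the appliedAliases change in this file, from a List to a HashSet, is that it guards against recording the same alias more than once while scanning the mappings, which would also explain why the size-based early exit was removed. A tiny JDK-only sketch with made-up alias names, not part of the commit:

import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class AppliedAliasSketch {
    public static void main(String[] args) {
        // Hypothetical alias names as they might be encountered while walking the mappings;
        // the same alias can show up more than once.
        List<String> encountered = List.of("timestamp", "source.ip", "timestamp");

        Set<String> appliedAliases = new HashSet<>();
        for (String alias : encountered) {
            appliedAliases.add(alias); // a Set records each alias at most once
        }

        System.out.println(appliedAliases.size()); // 2, not 3
    }
}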
Additional changed file:
@@ -214,7 +214,7 @@ private boolean shouldSkipNode(Map<String, Object> properties) {
return false;
}

public Map<String, Object> traverseAndCopyWithFilter(List<String> nodePathsToCopy) {
public Map<String, Object> traverseAndCopyWithFilter(Set<String> nodePathsToCopy) {

Map<String, Object> outRoot = new LinkedHashMap<>(Map.of(PROPERTIES, new LinkedHashMap()));
this.addListener(new MappingsTraverserListener() {
Additional changed file:
@@ -11,8 +11,11 @@
import org.opensearch.securityanalytics.rules.exceptions.SigmaLevelError;
import org.opensearch.securityanalytics.rules.exceptions.SigmaLogsourceError;
import org.opensearch.securityanalytics.rules.exceptions.SigmaStatusError;
import org.yaml.snakeyaml.DumperOptions;
import org.yaml.snakeyaml.LoaderOptions;
import org.yaml.snakeyaml.Yaml;
import org.yaml.snakeyaml.constructor.SafeConstructor;
import org.yaml.snakeyaml.representer.Representer;

import java.text.SimpleDateFormat;
import java.util.ArrayList;
@@ -168,7 +171,10 @@ protected static SigmaRule fromDict(Map<String, Object> rule, boolean collectErr
}

public static SigmaRule fromYaml(String rule, boolean collectErrors) throws SigmaError {
Yaml yaml = new Yaml(new SafeConstructor());
LoaderOptions loaderOptions = new LoaderOptions();
loaderOptions.setNestingDepthLimit(10);

Yaml yaml = new Yaml(new SafeConstructor(), new Representer(), new DumperOptions(), loaderOptions);
Map<String, Object> ruleMap = yaml.load(rule);
return fromDict(ruleMap, collectErrors);
}
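As context for the SnakeYAML change above, a small standalone sketch, hypothetical and not part of the commit, of what the nesting-depth limit does: documents nested more than 10 collections deep are rejected by the parser before the rule dictionary is ever built.

import org.yaml.snakeyaml.DumperOptions;
import org.yaml.snakeyaml.LoaderOptions;
import org.yaml.snakeyaml.Yaml;
import org.yaml.snakeyaml.constructor.SafeConstructor;
import org.yaml.snakeyaml.representer.Representer;

public class NestingLimitSketch {
    public static void main(String[] args) {
        LoaderOptions loaderOptions = new LoaderOptions();
        loaderOptions.setNestingDepthLimit(10);
        Yaml yaml = new Yaml(new SafeConstructor(), new Representer(), new DumperOptions(), loaderOptions);

        // Build a document of nested maps well past the limit: a:, then an indented a:, and so on.
        StringBuilder doc = new StringBuilder();
        for (int depth = 0; depth < 12; depth++) {
            doc.append("  ".repeat(depth)).append("a:\n");
        }
        doc.append("  ".repeat(12)).append("leaf: 1\n");

        try {
            yaml.load(doc.toString());
        } catch (Exception e) {
            // SnakeYAML reports that the nesting depth limit was exceeded
            System.out.println("Rejected: " + e.getMessage());
        }
    }
}

A shallow rule document still parses exactly as before; only pathologically nested input is rejected.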
Additional changed file:
@@ -4,18 +4,6 @@
*/
package org.opensearch.securityanalytics.transport;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
@@ -41,6 +29,8 @@
import org.opensearch.action.support.master.AcknowledgedResponse;
import org.opensearch.client.Client;
import org.opensearch.client.node.NodeClient;
import org.opensearch.cluster.metadata.IndexNameExpressionResolver;
import org.opensearch.cluster.metadata.MappingMetadata;
import org.opensearch.cluster.service.ClusterService;
import org.opensearch.common.inject.Inject;
import org.opensearch.common.io.stream.NamedWriteableRegistry;
@@ -59,6 +49,7 @@
import org.opensearch.commons.alerting.action.IndexMonitorRequest;
import org.opensearch.commons.alerting.action.IndexMonitorResponse;
import org.opensearch.commons.alerting.model.BucketLevelTrigger;
import org.opensearch.commons.alerting.model.CronSchedule;
import org.opensearch.commons.alerting.model.DataSources;
import org.opensearch.commons.alerting.model.DocLevelMonitorInput;
import org.opensearch.commons.alerting.model.DocLevelQuery;
@@ -69,8 +60,10 @@
import org.opensearch.commons.alerting.model.action.Action;
import org.opensearch.commons.authuser.User;
import org.opensearch.index.IndexNotFoundException;
import org.opensearch.index.query.BoolQueryBuilder;
import org.opensearch.index.query.QueryBuilder;
import org.opensearch.index.query.QueryBuilders;
import org.opensearch.index.query.RangeQueryBuilder;
import org.opensearch.index.reindex.BulkByScrollResponse;
import org.opensearch.index.seqno.SequenceNumbers;
import org.opensearch.rest.RestRequest;
@@ -80,11 +73,15 @@
import org.opensearch.search.SearchHit;
import org.opensearch.search.SearchHits;
import org.opensearch.search.builder.SearchSourceBuilder;
import org.opensearch.securityanalytics.action.GetIndexMappingsAction;
import org.opensearch.securityanalytics.action.GetIndexMappingsRequest;
import org.opensearch.securityanalytics.action.GetIndexMappingsResponse;
import org.opensearch.securityanalytics.action.IndexDetectorAction;
import org.opensearch.securityanalytics.action.IndexDetectorRequest;
import org.opensearch.securityanalytics.action.IndexDetectorResponse;
import org.opensearch.securityanalytics.config.monitors.DetectorMonitorConfig;
import org.opensearch.securityanalytics.mapper.MapperService;
import org.opensearch.securityanalytics.mapper.MapperUtils;
import org.opensearch.securityanalytics.model.Detector;
import org.opensearch.securityanalytics.model.DetectorInput;
import org.opensearch.securityanalytics.model.DetectorRule;
@@ -105,10 +102,24 @@
import org.opensearch.threadpool.ThreadPool;
import org.opensearch.transport.TransportService;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;

public class TransportIndexDetectorAction extends HandledTransportAction<IndexDetectorRequest, IndexDetectorResponse> implements SecureTransportAction {

public static final String PLUGIN_OWNER_FIELD = "security_analytics";
private static final Logger log = LogManager.getLogger(TransportIndexDetectorAction.class);
public static final String TIMESTAMP_FIELD_ALIAS = "timestamp";

private final Client client;

@@ -133,6 +144,8 @@ public class TransportIndexDetectorAction extends HandledTransportAction<IndexDe

private final NamedWriteableRegistry namedWriteableRegistry;

private final IndexNameExpressionResolver indexNameExpressionResolver;

private volatile TimeValue indexTimeout;
@Inject
public TransportIndexDetectorAction(TransportService transportService,
@@ -145,7 +158,8 @@ public TransportIndexDetectorAction(TransportService transportService,
MapperService mapperService,
ClusterService clusterService,
Settings settings,
NamedWriteableRegistry namedWriteableRegistry) {
NamedWriteableRegistry namedWriteableRegistry,
IndexNameExpressionResolver indexNameExpressionResolver) {
super(IndexDetectorAction.NAME, transportService, actionFilters, IndexDetectorRequest::new);
this.client = client;
this.xContentRegistry = xContentRegistry;
@@ -156,6 +170,7 @@ public TransportIndexDetectorAction(TransportService transportService,
this.clusterService = clusterService;
this.settings = settings;
this.namedWriteableRegistry = namedWriteableRegistry;
this.indexNameExpressionResolver = indexNameExpressionResolver;
this.threadPool = this.detectorIndices.getThreadPool();
this.indexTimeout = SecurityAnalyticsSettings.INDEX_TIMEOUT.get(this.settings);
this.filterByEnabled = SecurityAnalyticsSettings.FILTER_BY_BACKEND_ROLES.get(this.settings);
@@ -477,6 +492,39 @@ private IndexMonitorRequest createBucketLevelMonitorRequest(
// Build query string filter
.query(QueryBuilders.queryStringQuery(rule.getQueries().get(0).getValue()))
.aggregation(aggregationQueries.getAggBuilder());
String concreteIndex = IndexUtils.getNewIndexByCreationDate( // the index parameter of this method may also be an index pattern
clusterService.state(),
indexNameExpressionResolver,
index
);
try {
GetIndexMappingsResponse getIndexMappingsResponse = client.execute(
GetIndexMappingsAction.INSTANCE,
new GetIndexMappingsRequest(concreteIndex))
.actionGet();
MappingMetadata mappingMetadata = getIndexMappingsResponse.mappings().get(concreteIndex);
List<Pair<String, String>> pairs = MapperUtils.getAllAliasPathPairs(mappingMetadata);
boolean timeStampAliasPresent = pairs.
stream()
.anyMatch(p ->
TIMESTAMP_FIELD_ALIAS.equals(p.getLeft()) || TIMESTAMP_FIELD_ALIAS.equals(p.getRight()));
if(timeStampAliasPresent) {
BoolQueryBuilder boolQueryBuilder = searchSourceBuilder.query() == null
? new BoolQueryBuilder()
: QueryBuilders.boolQuery().must(searchSourceBuilder.query());
RangeQueryBuilder timeRangeFilter = QueryBuilders.rangeQuery(TIMESTAMP_FIELD_ALIAS)
.gt("{{period_end}}||-1h")
.lte("{{period_end}}")
.format("epoch_millis");
boolQueryBuilder.must(timeRangeFilter);
searchSourceBuilder.query(boolQueryBuilder);
}
} catch (Exception e) {
log.error(
String.format(Locale.getDefault(),
"Unable to verify presence of timestamp alias for index [%s] in detector [%s]. Not setting time range filter for bucket level monitor.",
concreteIndex, detector.getName()), e);
}

List<SearchInput> bucketLevelMonitorInputs = new ArrayList<>();
bucketLevelMonitorInputs.add(new SearchInput(Arrays.asList(index), searchSourceBuilder));
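For reference, a standalone sketch, hypothetical and not part of the commit, of the time-range filter built above for bucket-level monitors: the rule's query-string query is wrapped in a bool query together with a range filter on the timestamp alias covering the hour before {{period_end}}, a placeholder the alerting plugin resolves at execution time. The query string here is an arbitrary example.

import org.opensearch.index.query.BoolQueryBuilder;
import org.opensearch.index.query.QueryBuilders;
import org.opensearch.index.query.RangeQueryBuilder;

public class TimestampFilterSketch {
    public static void main(String[] args) {
        // Wrap an arbitrary rule query in a bool query, as the transport action does above.
        BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery()
                .must(QueryBuilders.queryStringQuery("EventID: 4624")); // made-up rule query

        // Restrict the aggregation to the hour preceding each monitor run.
        RangeQueryBuilder timeRangeFilter = QueryBuilders.rangeQuery("timestamp")
                .gt("{{period_end}}||-1h")
                .lte("{{period_end}}")
                .format("epoch_millis");
        boolQueryBuilder.must(timeRangeFilter);

        System.out.println(boolQueryBuilder); // prints the assembled query as JSON
    }
}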