Adding back [Time series based workload desc order optimization through reverse segment read (opensearch-project#7244)] with fixes (opensearch-project#7967)

* Revert "Revert "Time series based workload desc order optimization through reverse segment read (opensearch-project#7244)" (opensearch-project#7892)"

This reverts commit bb26536.

Signed-off-by: gashutos <[email protected]>

* Enable the time series optimization only if the index is not an index-sorted index; also, the ASC-order reverse should be considered only for the @timestamp field (sketched below)

Signed-off-by: gashutos <[email protected]>

* Modifying CHANGELOG

Signed-off-by: gashutos <[email protected]>

* Adding an integration test for the scroll API where sort by _doc gets early termination

Signed-off-by: gashutos <[email protected]>

---------

Signed-off-by: gashutos <[email protected]>
Signed-off-by: Rishab Nahata <[email protected]>
gashutos authored and imRishN committed Jun 27, 2023
1 parent cdb6e99 commit 0391cdc
Showing 12 changed files with 288 additions and 6 deletions.
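
A minimal sketch of the guard described in the commit message, assuming the usual IndexSettings#getIndexSortConfig accessor; the class and method names below are illustrative and not part of this commit. The reverse leaf sorter is chosen only when the index has no index sort and its mapping contains a date-typed @timestamp field:

import java.util.Comparator;

import org.apache.lucene.index.LeafReader;
import org.opensearch.cluster.metadata.DataStream;
import org.opensearch.index.IndexSettings;
import org.opensearch.index.mapper.MapperService;

// Sketch only: class and method names are illustrative, not the commit's actual wiring.
final class LeafSorterGuardSketch {
    static Comparator<LeafReader> timeSeriesLeafSorterOrNull(IndexSettings indexSettings, MapperService mapperService) {
        // An index-sorted index already fixes the document order, so the reverse
        // segment read optimization must not be applied there.
        boolean hasIndexSort = indexSettings.getIndexSortConfig().hasIndexSort();
        if (hasIndexSort == false && mapperService != null && mapperService.containsTimeStampField()) {
            // Only a plain index with a date-typed @timestamp field gets the
            // newest-segment-first read order.
            return DataStream.TIMESERIES_LEAF_SORTER;
        }
        return null;
    }
}
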
2 changes: 2 additions & 0 deletions CHANGELOG.md
@@ -95,6 +95,8 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
- Add task cancellation monitoring service ([#7642](https://github.com/opensearch-project/OpenSearch/pull/7642))
- Add TokenManager Interface ([#7452](https://github.com/opensearch-project/OpenSearch/pull/7452))
- Add Remote store as a segment replication source ([#7653](https://github.com/opensearch-project/OpenSearch/pull/7653))
- Add descending order search optimization through reverse segment read. ([#7967](https://github.com/opensearch-project/OpenSearch/pull/7967))


### Dependencies
- Bump `jackson` from 2.15.1 to 2.15.2 ([#7897](https://github.com/opensearch-project/OpenSearch/pull/7897))
@@ -0,0 +1,161 @@
---
"Basic scroll on time series workload for reversed leaf sorter":
- do:
indices.create:
index: test_scroll_time_series
body:
mappings:
properties:
name:
type: keyword
'@timestamp':
type: date

- do:
bulk:
refresh: true
index: test_scroll_time_series
body:
- '{"index": {}}'
- '{"name": "1", "@timestamp": "2010-03-12T01:07:00"}'
- '{"index": {}}'
- '{"name": "2", "@timestamp": "2010-03-12T01:07:01"}'
- '{"index": {}}'
- '{"name": "3", "@timestamp": "2010-03-12T01:07:02"}'
- '{"index": {}}'
- '{"name": "4", "@timestamp": "2010-03-12T01:07:03"}'
- '{"index": {}}'
- '{"name": "5", "@timestamp": "2010-03-12T01:07:04"}'
- '{"index": {}}'
- '{"name": "6", "@timestamp": "2010-03-12T01:07:05"}'
- '{"index": {}}'
- '{"name": "7", "@timestamp": "2010-03-12T01:07:06"}'
- '{"index": {}}'
- '{"name": "8", "@timestamp": "2010-03-12T01:07:07"}'
- '{"index": {}}'
- '{"name": "9", "@timestamp": "2010-03-12T01:07:08"}'
- '{"index": {}}'
- '{"name": "10", "@timestamp": "2010-03-12T01:07:09"}'
- do:
indices.refresh: {}
- do:
bulk:
refresh: true
index: test_scroll_time_series
body:
- '{"index": {}}'
- '{"name": "11", "@timestamp": "2010-03-12T01:07:10"}'
- '{"index": {}}'
- '{"name": "12", "@timestamp": "2010-03-12T01:07:11"}'
- '{"index": {}}'
- '{"name": "13", "@timestamp": "2010-03-12T01:07:12"}'
- '{"index": {}}'
- '{"name": "14", "@timestamp": "2010-03-12T01:07:13"}'
- '{"index": {}}'
- '{"name": "15", "@timestamp": "2010-03-12T01:07:14"}'
- '{"index": {}}'
- '{"name": "16", "@timestamp": "2010-03-12T01:07:15"}'
- '{"index": {}}'
- '{"name": "17", "@timestamp": "2010-03-12T01:07:16"}'
- '{"index": {}}'
- '{"name": "18", "@timestamp": "2010-03-12T01:07:17"}'
- '{"index": {}}'
- '{"name": "19", "@timestamp": "2010-03-12T01:07:18"}'
- '{"index": {}}'
- '{"name": "20", "@timestamp": "2010-03-12T01:07:19"}'
- do:
indices.refresh: { }
- do:
bulk:
refresh: true
index: test_scroll_time_series
body:
- '{"index": {}}'
- '{"name": "21", "@timestamp": "2010-03-12T01:07:20"}'
- '{"index": {}}'
- '{"name": "22", "@timestamp": "2010-03-12T01:07:21"}'
- '{"index": {}}'
- '{"name": "23", "@timestamp": "2010-03-12T01:07:22"}'
- '{"index": {}}'
- '{"name": "24", "@timestamp": "2010-03-12T01:07:23"}'
- '{"index": {}}'
- '{"name": "25", "@timestamp": "2010-03-12T01:07:24"}'
- '{"index": {}}'
- '{"name": "26", "@timestamp": "2010-03-12T01:07:25"}'
- '{"index": {}}'
- '{"name": "27", "@timestamp": "2010-03-12T01:07:26"}'
- '{"index": {}}'
- '{"name": "28", "@timestamp": "2010-03-12T01:07:27"}'
- '{"index": {}}'
- '{"name": "29", "@timestamp": "2010-03-12T01:07:28"}'
- '{"index": {}}'
- '{"name": "30", "@timestamp": "2010-03-12T01:07:29"}'
- do:
indices.refresh: { }

- do:
search:
rest_total_hits_as_int: true
index: test_scroll_time_series
size: 5
scroll: 1m
sort: _doc
body:
query:
match_all: {}

- set: {_scroll_id: scroll_id}
- match: {hits.total: 30 }
- length: {hits.hits: 5 }

- do:
scroll:
rest_total_hits_as_int: true
body: { "scroll_id": "$scroll_id", "scroll": "1m"}

- match: {hits.total: 30 }
- length: {hits.hits: 5 }

- do:
scroll:
rest_total_hits_as_int: true
body: { "scroll_id": "$scroll_id", "scroll": "1m" }

- match: { hits.total: 30 }
- length: { hits.hits: 5 }

- do:
scroll:
rest_total_hits_as_int: true
body: { "scroll_id": "$scroll_id", "scroll": "1m" }

- match: { hits.total: 30 }
- length: { hits.hits: 5 }

- do:
scroll:
rest_total_hits_as_int: true
body: { "scroll_id": "$scroll_id", "scroll": "1m" }

- match: { hits.total: 30 }
- length: { hits.hits: 5 }

- do:
scroll:
rest_total_hits_as_int: true
body: { "scroll_id": "$scroll_id", "scroll": "1m" }

- match: { hits.total: 30 }
- length: { hits.hits: 5 }

- do:
scroll:
rest_total_hits_as_int: true
body: { "scroll_id": "$scroll_id", "scroll": "1m" }

- match: { hits.total: 30 }
- length: { hits.hits: 0 }

- do:
clear_scroll:
scroll_id: $scroll_id
@@ -31,6 +31,10 @@

package org.opensearch.cluster.metadata;

import org.apache.lucene.document.LongPoint;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.PointValues;
import org.opensearch.OpenSearchException;
import org.opensearch.cluster.AbstractDiffable;
import org.opensearch.cluster.Diff;
import org.opensearch.core.ParseField;
@@ -46,6 +50,7 @@
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
@@ -59,6 +64,24 @@
public final class DataStream extends AbstractDiffable<DataStream> implements ToXContentObject {

public static final String BACKING_INDEX_PREFIX = ".ds-";
public static final String TIMESERIES_FIELDNAME = "@timestamp";
public static final Comparator<LeafReader> TIMESERIES_LEAF_SORTER = Comparator.comparingLong((LeafReader r) -> {
try {
PointValues points = r.getPointValues(TIMESERIES_FIELDNAME);
if (points != null) {
// could be a multipoint (probably not) but get the maximum time value anyway
byte[] sortValue = points.getMaxPackedValue();
// decode the first dimension because this should not be a multi dimension field
// it's a bug in the date field if it is
return LongPoint.decodeDimension(sortValue, 0);
} else {
// segment does not have a timestamp field, just return the minimum value
return Long.MIN_VALUE;
}
} catch (IOException e) {
throw new OpenSearchException("Not a timeseries Index! Field [{}] not found!", TIMESERIES_FIELDNAME);
}
}).reversed();

private final String name;
private final TimestampField timeStampField;
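
A minimal sketch of the ordering this comparator produces; the helper below is illustrative and not part of the commit. Segments are compared by the maximum @timestamp point value and the comparator is reversed, so the segment holding the newest data comes first and a descending sort on @timestamp can collect its top hits from the first segments and terminate early:

import java.util.List;
import java.util.stream.Collectors;

import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.opensearch.cluster.metadata.DataStream;

// Sketch only: this helper is illustrative and not part of the commit.
final class LeafOrderSketch {
    static List<LeafReader> newestSegmentFirst(DirectoryReader reader) {
        // Segments whose maximum @timestamp is newest sort first; segments without the
        // field report Long.MIN_VALUE and therefore end up last after the reversal.
        return reader.leaves()
            .stream()
            .map(LeafReaderContext::reader)
            .sorted(DataStream.TIMESERIES_LEAF_SORTER)
            .collect(Collectors.toList());
    }
}
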
12 changes: 12 additions & 0 deletions server/src/main/java/org/opensearch/index/IndexSettings.java
@@ -665,6 +665,7 @@ private void setRetentionLeaseMillis(final TimeValue retentionLease) {
private volatile long mappingTotalFieldsLimit;
private volatile long mappingDepthLimit;
private volatile long mappingFieldNameLengthLimit;
private volatile boolean searchSegmentOrderReversed;

/**
* The maximum number of refresh listeners allows on this shard.
@@ -905,6 +906,10 @@ public IndexSettings(final IndexMetadata indexMetadata, final Settings nodeSetti
scopedSettings.addSettingsUpdateConsumer(DEFAULT_SEARCH_PIPELINE, this::setDefaultSearchPipeline);
}

private void setSearchSegmentOrderReversed(boolean reversed) {
this.searchSegmentOrderReversed = reversed;
}

private void setSearchIdleAfter(TimeValue searchIdleAfter) {
this.searchIdleAfter = searchIdleAfter;
}
@@ -1084,6 +1089,13 @@ public Settings getNodeSettings() {
return nodeSettings;
}

/**
* Returns true if the index-level setting for the leaf reverse-order search optimization is enabled
*/
public boolean getSearchSegmentOrderReversed() {
return this.searchSegmentOrderReversed;
}

/**
* Updates the settings and index metadata and notifies all registered settings consumers with the new settings iff at least one
* setting has changed.
19 changes: 19 additions & 0 deletions server/src/main/java/org/opensearch/index/engine/EngineConfig.java
@@ -33,6 +33,7 @@

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.MergePolicy;
import org.apache.lucene.search.QueryCache;
import org.apache.lucene.search.QueryCachingPolicy;
@@ -59,6 +60,7 @@
import org.opensearch.indices.breaker.CircuitBreakerService;
import org.opensearch.threadpool.ThreadPool;

import java.util.Comparator;
import java.util.List;
import java.util.Objects;
import java.util.function.BooleanSupplier;
@@ -102,6 +104,7 @@ public final class EngineConfig {
private final Supplier<RetentionLeases> retentionLeasesSupplier;
private final boolean isReadOnlyReplica;
private final BooleanSupplier primaryModeSupplier;
private final Comparator<LeafReader> leafSorter;

/**
* A supplier of the outstanding retention leases. This is used during merged operations to determine which operations that have been
@@ -204,6 +207,7 @@ private EngineConfig(Builder builder) {
this.isReadOnlyReplica = builder.isReadOnlyReplica;
this.primaryModeSupplier = builder.primaryModeSupplier;
this.translogFactory = builder.translogFactory;
this.leafSorter = builder.leafSorter;
}

/**
@@ -451,6 +455,15 @@ public TranslogDeletionPolicyFactory getCustomTranslogDeletionPolicyFactory() {
return translogDeletionPolicyFactory;
}

/**
* Returns the subReaderSorter for org.apache.lucene.index.BaseCompositeReader.
* It is used by the Lucene IndexReader and decides the order in which segments are read.
* @return comparator
*/
public Comparator<LeafReader> getLeafSorter() {
return this.leafSorter;
}

/**
* Builder for EngineConfig class
*
@@ -483,6 +496,7 @@ public static class Builder {
private boolean isReadOnlyReplica;
private BooleanSupplier primaryModeSupplier;
private TranslogFactory translogFactory = new InternalTranslogFactory();
Comparator<LeafReader> leafSorter;

public Builder shardId(ShardId shardId) {
this.shardId = shardId;
@@ -614,6 +628,11 @@ public Builder translogFactory(TranslogFactory translogFactory) {
return this;
}

public Builder leafSorter(Comparator<LeafReader> leafSorter) {
this.leafSorter = leafSorter;
return this;
}

public EngineConfig build() {
return new EngineConfig(this);
}
@@ -10,6 +10,7 @@

import org.apache.logging.log4j.Logger;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.MergePolicy;
import org.apache.lucene.search.QueryCache;
import org.apache.lucene.search.QueryCachingPolicy;
@@ -36,6 +37,7 @@

import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Optional;
import java.util.function.BooleanSupplier;
@@ -151,7 +153,8 @@ public EngineConfig newEngineConfig(
EngineConfig.TombstoneDocSupplier tombstoneDocSupplier,
boolean isReadOnlyReplica,
BooleanSupplier primaryModeSupplier,
TranslogFactory translogFactory
TranslogFactory translogFactory,
Comparator<LeafReader> leafSorter
) {
CodecService codecServiceToUse = codecService;
if (codecService == null && this.codecServiceFactory != null) {
@@ -184,6 +187,7 @@
.readOnlyReplica(isReadOnlyReplica)
.primaryModeSupplier(primaryModeSupplier)
.translogFactory(translogFactory)
.leafSorter(leafSorter)
.build();
}

@@ -2322,6 +2322,9 @@ private IndexWriterConfig getIndexWriterConfig() {
if (config().getIndexSort() != null) {
iwc.setIndexSort(config().getIndexSort());
}
if (config().getLeafSorter() != null) {
iwc.setLeafSorter(config().getLeafSorter()); // The default segment search order
}
return iwc;
}

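
A standalone Lucene sketch of the hook used in this hunk (illustrative only, not code from this commit): IndexWriterConfig#setLeafSorter makes readers opened from the writer present their segments in the comparator's order rather than in segment creation order.

import java.io.IOException;

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.opensearch.cluster.metadata.DataStream;

// Sketch only: a self-contained Lucene example, not code from this commit.
final class LeafSorterConfigSketch {
    static void openReaderWithLeafSorter() throws IOException {
        try (ByteBuffersDirectory dir = new ByteBuffersDirectory()) {
            IndexWriterConfig iwc = new IndexWriterConfig(new StandardAnalyzer());
            // Same call as in getIndexWriterConfig() above: readers opened from this
            // writer present their segments in the comparator's order (newest
            // @timestamp segment first) instead of segment creation order.
            iwc.setLeafSorter(DataStream.TIMESERIES_LEAF_SORTER);
            try (IndexWriter writer = new IndexWriter(dir, iwc);
                 DirectoryReader reader = DirectoryReader.open(writer)) {
                reader.leaves(); // ordered by the leaf sorter once segments exist
            }
        }
    }
}
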
@@ -33,6 +33,7 @@
package org.opensearch.index.mapper;

import org.apache.lucene.analysis.Analyzer;
import org.opensearch.cluster.metadata.DataStream;
import org.opensearch.index.IndexSettings;
import org.opensearch.index.analysis.FieldNameAnalyzer;

@@ -261,6 +262,15 @@ public String getNestedScope(String path) {
return null;
}

/**
* Returns true if this index contains an @timestamp field of date type.
* @return true if the mapping has a date-typed @timestamp field, false otherwise
*/
public boolean containsTimeStampField() {
MappedFieldType timeSeriesFieldType = this.fieldTypeLookup.get(DataStream.TIMESERIES_FIELDNAME);
return timeSeriesFieldType != null && timeSeriesFieldType instanceof DateFieldMapper.DateFieldType; // has to be Date field type
}

private static String parentObject(String field) {
int lastDot = field.lastIndexOf('.');
if (lastDot == -1) {