
Merge branch 'master' into ccr
* master: (22 commits)
  Update Tika version to 1.15
  Aggregations: bucket_sort pipeline aggregation (#27152)
  Introduce templating support to timezone/locale in DateProcessor (#27089)
  Increase logging on qa:mixed-cluster tests
  Update to AWS SDK 1.11.223 (#27278)
  Improve error message for parse failures of completion fields (#27297)
  Ensure external refreshes will also refresh internal searcher to minimize segment creation (#27253)
  Remove optimisations to reuse objects when applying a new `ClusterState` (#27317)
  Decouple `ChannelFactory` from Tcp classes (#27286)
  Fix find remote when building BWC
  Remove colons from task and configuration names
  Add unreleased 5.6.5 version number
  testCreateSplitIndexToN: do not set `routing_partition_size` to >= `number_of_routing_shards`
  Snapshot/Restore: better handle incorrect chunk_size settings in FS repo (#26844)
  Add limits for ngram and shingle settings (#27211) (#27318)
  Correct comment in index shard test
  Roll translog generation on primary promotion
  ObjectParser: Replace IllegalStateException with ParsingException (#27302)
  scripted_metric _agg parameter disappears if params are provided (#27159)
  Update discovery-ec2.asciidoc
  ...
jasontedor committed Nov 9, 2017
2 parents ddd3ed4 + ac5fd6a commit 2032881
Showing 106 changed files with 2,225 additions and 293 deletions.
2 changes: 1 addition & 1 deletion build.gradle
@@ -186,7 +186,7 @@ task verifyVersions {
* after the backport of the backcompat code is complete.
*/
allprojects {
- ext.bwc_tests_enabled = false
+ ext.bwc_tests_enabled = true
}

task verifyBwcTestsEnabled {
@@ -239,7 +239,7 @@ class BuildPlugin implements Plugin<Project> {

/** Return the configuration name used for finding transitive deps of the given dependency. */
private static String transitiveDepConfigName(String groupId, String artifactId, String version) {
return "_transitive_${groupId}:${artifactId}:${version}"
return "_transitive_${groupId}_${artifactId}_${version}"
}

/**
@@ -425,7 +425,7 @@ class ClusterFormationTasks {

Project pluginProject = plugin.getValue()
verifyProjectHasBuildPlugin(name, node.nodeVersion, project, pluginProject)
- String configurationName = "_plugin_${prefix}_${pluginProject.path}"
+ String configurationName = pluginConfigurationName(prefix, pluginProject)
Configuration configuration = project.configurations.findByName(configurationName)
if (configuration == null) {
configuration = project.configurations.create(configurationName)
@@ -454,13 +454,21 @@
return copyPlugins
}

+ private static String pluginConfigurationName(final String prefix, final Project project) {
+     return "_plugin_${prefix}_${project.path}".replace(':', '_')
+ }
+
+ private static String pluginBwcConfigurationName(final String prefix, final Project project) {
+     return "_plugin_bwc_${prefix}_${project.path}".replace(':', '_')
+ }
+
/** Configures task to copy a plugin based on a zip file resolved using dependencies for an older version */
static Task configureCopyBwcPluginsTask(String name, Project project, Task setup, NodeInfo node, String prefix) {
Configuration bwcPlugins = project.configurations.getByName("${prefix}_elasticsearchBwcPlugins")
for (Map.Entry<String, Project> plugin : node.config.plugins.entrySet()) {
Project pluginProject = plugin.getValue()
verifyProjectHasBuildPlugin(name, node.nodeVersion, project, pluginProject)
- String configurationName = "_plugin_bwc_${prefix}_${pluginProject.path}"
+ String configurationName = pluginBwcConfigurationName(prefix, pluginProject)
Configuration configuration = project.configurations.findByName(configurationName)
if (configuration == null) {
configuration = project.configurations.create(configurationName)
@@ -499,9 +507,9 @@
static Task configureInstallPluginTask(String name, Project project, Task setup, NodeInfo node, Project plugin, String prefix) {
final FileCollection pluginZip;
if (node.nodeVersion != VersionProperties.elasticsearch) {
pluginZip = project.configurations.getByName("_plugin_bwc_${prefix}_${plugin.path}")
pluginZip = project.configurations.getByName(pluginBwcConfigurationName(prefix, plugin))
} else {
pluginZip = project.configurations.getByName("_plugin_${prefix}_${plugin.path}")
pluginZip = project.configurations.getByName(pluginConfigurationName(prefix, plugin))
}
// delay reading the file location until execution time by wrapping in a closure within a GString
final Object file = "${-> new File(node.pluginsTmpDir, pluginZip.singleFile.getName()).toURI().toURL().toString()}"
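Both Groovy changes above serve the same goal as the earlier `transitiveDepConfigName` fix: Gradle reserves `:` as its project-path separator, so embedding a project path such as `:plugins:analysis-icu` in a configuration name produces names that collide with task-path syntax. A minimal Java sketch of the sanitization (the prefix and project path are assumed examples, not taken from the diff):

```java
// Hypothetical illustration of the helpers added above; ':' from the
// Gradle project path is folded to '_' so the configuration name is legal.
String prefix = "integTest";                  // assumed cluster prefix
String projectPath = ":plugins:analysis-icu"; // assumed plugin project path
String configurationName = ("_plugin_" + prefix + "_" + projectPath).replace(':', '_');
// -> "_plugin_integTest__plugins_analysis-icu"
```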
4 changes: 4 additions & 0 deletions core/src/main/java/org/elasticsearch/Version.java
@@ -100,6 +100,8 @@ public class Version implements Comparable<Version> {
public static final Version V_5_6_3 = new Version(V_5_6_3_ID, org.apache.lucene.util.Version.LUCENE_6_6_1);
public static final int V_5_6_4_ID = 5060499;
public static final Version V_5_6_4 = new Version(V_5_6_4_ID, org.apache.lucene.util.Version.LUCENE_6_6_1);
+ public static final int V_5_6_5_ID = 5060599;
+ public static final Version V_5_6_5 = new Version(V_5_6_5_ID, org.apache.lucene.util.Version.LUCENE_6_6_1);
public static final int V_6_0_0_alpha1_ID = 6000001;
public static final Version V_6_0_0_alpha1 =
new Version(V_6_0_0_alpha1_ID, org.apache.lucene.util.Version.LUCENE_7_0_0);
@@ -160,6 +162,8 @@ public static Version fromId(int id) {
return V_6_0_0_alpha2;
case V_6_0_0_alpha1_ID:
return V_6_0_0_alpha1;
+ case V_5_6_5_ID:
+     return V_5_6_5;
case V_5_6_4_ID:
return V_5_6_4;
case V_5_6_3_ID:
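For context, the numeric ids above follow Elasticsearch's packed scheme: major, minor, and revision each take two decimal digits, and the last two digits are 99 for GA releases or 01, 02, … for alpha/beta/rc builds (so 6000001 is 6.0.0-alpha1). A hedged sketch with a hypothetical helper, not part of the diff:

```java
// Hypothetical helper showing how ids like V_5_6_5_ID = 5060599 are derived.
static int versionId(int major, int minor, int revision, int build) {
    return major * 1_000_000 + minor * 10_000 + revision * 100 + build;
}
// versionId(5, 6, 5, 99) == 5060599  (5.6.5 GA)
// versionId(6, 0, 0,  1) == 6000001  (6.0.0-alpha1)
```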
@@ -27,7 +27,7 @@ public class ResizeAction extends Action<ResizeRequest, ResizeResponse, ResizeRe

public static final ResizeAction INSTANCE = new ResizeAction();
public static final String NAME = "indices:admin/resize";
- public static final Version COMPATIBILITY_VERSION = Version.V_7_0_0_alpha1; // TODO remove this once it's backported
+ public static final Version COMPATIBILITY_VERSION = Version.V_6_1_0; // TODO remove this once it's backported

private ResizeAction() {
super(NAME);
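Lowering `COMPATIBILITY_VERSION` from 7.0.0-alpha1 to 6.1.0 reflects that the resize action is now available on the 6.1 branch. A sketch, with assumed method and parameter names, of how such a marker is typically consumed:

```java
// Hedged sketch (names assumed): callers gate the action on the remote
// node's version until every node in the cluster understands it.
static boolean supportsResize(org.elasticsearch.Version remoteNodeVersion) {
    return remoteNodeVersion.onOrAfter(ResizeAction.COMPATIBILITY_VERSION);
}
```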
@@ -114,6 +114,8 @@ public final class IndexScopedSettings extends AbstractScopedSettings {
IndexSettings.MAX_INNER_RESULT_WINDOW_SETTING,
IndexSettings.MAX_DOCVALUE_FIELDS_SEARCH_SETTING,
IndexSettings.MAX_SCRIPT_FIELDS_SETTING,
+ IndexSettings.MAX_NGRAM_DIFF_SETTING,
+ IndexSettings.MAX_SHINGLE_DIFF_SETTING,
IndexSettings.MAX_RESCORE_WINDOW_SETTING,
IndexSettings.MAX_ADJACENCY_MATRIX_FILTERS_SETTING,
IndexSettings.INDEX_TRANSLOG_SYNC_INTERVAL_SETTING,
@@ -150,6 +152,7 @@ public final class IndexScopedSettings extends AbstractScopedSettings {
EngineConfig.INDEX_CODEC_SETTING,
EngineConfig.INDEX_OPTIMIZE_AUTO_GENERATED_IDS,
IndexMetaData.SETTING_WAIT_FOR_ACTIVE_SHARDS,

// validate that built-in similarities don't get redefined
Setting.groupSetting("index.similarity.", (s) -> {
Map<String, Settings> groups = s.getAsGroups();
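Listing the two limits here is what makes them settable at all: only settings registered in this built-in set pass index-creation validation. A minimal sketch mirroring the declaration pattern the file uses:

```java
// Sketch of the registered setting: index-scoped, dynamically updatable
// integer with default 1 and minimum 0, as declared in IndexSettings.
Setting<Integer> maxNgramDiff =
    Setting.intSetting("index.max_ngram_diff", 1, 0, Property.Dynamic, Property.IndexScope);
int limit = maxNgramDiff.get(Settings.builder().put("index.max_ngram_diff", 2).build()); // 2
```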
@@ -147,7 +147,7 @@ public Value parse(XContentParser parser, Value value, Context context) throws I
} else {
token = parser.nextToken();
if (token != XContentParser.Token.START_OBJECT) {
- throw new IllegalStateException("[" + name + "] Expected START_OBJECT but was: " + token);
+ throw new ParsingException(parser.getTokenLocation(), "[" + name + "] Expected START_OBJECT but was: " + token);
}
}

@@ -159,13 +159,13 @@
fieldParser = getParser(currentFieldName);
} else {
if (currentFieldName == null) {
- throw new IllegalStateException("[" + name + "] no field found");
+ throw new ParsingException(parser.getTokenLocation(), "[" + name + "] no field found");
}
if (fieldParser == null) {
assert ignoreUnknownFields : "this should only be possible if configured to ignore known fields";
parser.skipChildren(); // noop if parser points to a value, skips children if parser is start object or start array
} else {
- fieldParser.assertSupports(name, token, currentFieldName);
+ fieldParser.assertSupports(name, token, currentFieldName, parser.getTokenLocation());
parseSub(parser, fieldParser, currentFieldName, value, context);
}
fieldParser = null;
@@ -330,7 +330,7 @@ private void parseSub(XContentParser parser, FieldParser fieldParser, String cur
case END_OBJECT:
case END_ARRAY:
case FIELD_NAME:
- throw new IllegalStateException("[" + name + "]" + token + " is unexpected");
+ throw new ParsingException(parser.getTokenLocation(), "[" + name + "]" + token + " is unexpected");
case VALUE_STRING:
case VALUE_NUMBER:
case VALUE_BOOLEAN:
@@ -361,12 +361,12 @@ private class FieldParser {
this.type = type;
}

- void assertSupports(String parserName, XContentParser.Token token, String currentFieldName) {
+ void assertSupports(String parserName, XContentParser.Token token, String currentFieldName, XContentLocation location) {
if (parseField.match(currentFieldName) == false) {
- throw new IllegalStateException("[" + parserName + "] parsefield doesn't accept: " + currentFieldName);
+ throw new ParsingException(location, "[" + parserName + "] parsefield doesn't accept: " + currentFieldName);
}
if (supportedTokens.contains(token) == false) {
- throw new IllegalArgumentException(
+ throw new ParsingException(location,
"[" + parserName + "] " + currentFieldName + " doesn't support values of type: " + token);
}
}
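The practical effect of swapping `IllegalStateException` for `ParsingException` is that failures now carry the `XContentLocation` of the offending token, so they can be reported with a line and column instead of surfacing as a bare server-side state error. A hedged sketch of the caller side, with an assumed parser instance:

```java
// PARSER here is a hypothetical ObjectParser-built parser; the catch block
// shows the location information the old IllegalStateException lacked.
try {
    PARSER.parse(parser, null);
} catch (ParsingException e) {
    System.err.println("parse failure at line " + e.getLineNumber()
            + ", column " + e.getColumnNumber() + ": " + e.getMessage());
}
```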
@@ -735,7 +735,6 @@ boolean processNextCommittedClusterState(String reason) {

final ClusterState newClusterState = pendingStatesQueue.getNextClusterStateToProcess();
final ClusterState currentState = committedState.get();
- final ClusterState adaptedNewClusterState;
// all pending states have been processed
if (newClusterState == null) {
return false;
@@ -773,54 +772,23 @@
if (currentState.blocks().hasGlobalBlock(discoverySettings.getNoMasterBlock())) {
// its a fresh update from the master as we transition from a start of not having a master to having one
logger.debug("got first state from fresh master [{}]", newClusterState.nodes().getMasterNodeId());
- adaptedNewClusterState = newClusterState;
- } else if (newClusterState.nodes().isLocalNodeElectedMaster() == false) {
- // some optimizations to make sure we keep old objects where possible
- ClusterState.Builder builder = ClusterState.builder(newClusterState);
-
- // if the routing table did not change, use the original one
- if (newClusterState.routingTable().version() == currentState.routingTable().version()) {
- builder.routingTable(currentState.routingTable());
- }
- // same for metadata
- if (newClusterState.metaData().version() == currentState.metaData().version()) {
- builder.metaData(currentState.metaData());
- } else {
- // if its not the same version, only copy over new indices or ones that changed the version
- MetaData.Builder metaDataBuilder = MetaData.builder(newClusterState.metaData()).removeAllIndices();
- for (IndexMetaData indexMetaData : newClusterState.metaData()) {
- IndexMetaData currentIndexMetaData = currentState.metaData().index(indexMetaData.getIndex());
- if (currentIndexMetaData != null && currentIndexMetaData.isSameUUID(indexMetaData.getIndexUUID()) &&
- currentIndexMetaData.getVersion() == indexMetaData.getVersion()) {
- // safe to reuse
- metaDataBuilder.put(currentIndexMetaData, false);
- } else {
- metaDataBuilder.put(indexMetaData, false);
- }
- }
- builder.metaData(metaDataBuilder);
- }
-
- adaptedNewClusterState = builder.build();
- } else {
- adaptedNewClusterState = newClusterState;
- }

- if (currentState == adaptedNewClusterState) {
+ if (currentState == newClusterState) {
return false;
}

- committedState.set(adaptedNewClusterState);
+ committedState.set(newClusterState);

// update failure detection only after the state has been updated to prevent race condition with handleLeaveRequest
// and handleNodeFailure as those check the current state to determine whether the failure is to be handled by this node
- if (adaptedNewClusterState.nodes().isLocalNodeElectedMaster()) {
+ if (newClusterState.nodes().isLocalNodeElectedMaster()) {
// update the set of nodes to ping
- nodesFD.updateNodesAndPing(adaptedNewClusterState);
+ nodesFD.updateNodesAndPing(newClusterState);
} else {
// check to see that we monitor the correct master of the cluster
- if (masterFD.masterNode() == null || !masterFD.masterNode().equals(adaptedNewClusterState.nodes().getMasterNode())) {
- masterFD.restart(adaptedNewClusterState.nodes().getMasterNode(),
+ if (masterFD.masterNode() == null || !masterFD.masterNode().equals(newClusterState.nodes().getMasterNode())) {
+ masterFD.restart(newClusterState.nodes().getMasterNode(),
"new cluster state received and we are monitoring the wrong master [" + masterFD.masterNode() + "]");
}
}
40 changes: 40 additions & 0 deletions core/src/main/java/org/elasticsearch/index/IndexSettings.java
@@ -107,6 +107,26 @@ public final class IndexSettings {
public static final Setting<Integer> MAX_SCRIPT_FIELDS_SETTING =
Setting.intSetting("index.max_script_fields", 32, 0, Property.Dynamic, Property.IndexScope);

+ /**
+  * Index setting describing, for NGramTokenizer and NGramTokenFilter, the
+  * maximum difference between max_gram (maximum length of characters in a
+  * gram) and min_gram (minimum length of characters in a gram).
+  * The default value of 1 matches the default difference in NGramTokenizer
+  * and is defensive, as it prevents generating too many index terms.
+  */
+ public static final Setting<Integer> MAX_NGRAM_DIFF_SETTING =
+     Setting.intSetting("index.max_ngram_diff", 1, 0, Property.Dynamic, Property.IndexScope);
+
+ /**
+  * Index setting describing, for ShingleTokenFilter, the maximum difference
+  * between max_shingle_size and min_shingle_size.
+  * The default value of 3 is defensive, as it prevents generating too many tokens.
+  */
+ public static final Setting<Integer> MAX_SHINGLE_DIFF_SETTING =
+     Setting.intSetting("index.max_shingle_diff", 3, 0, Property.Dynamic, Property.IndexScope);
+
/**
* Index setting describing the maximum value of allowed `docvalue_fields` that can be retrieved
* per search request. The default maximum of 100 is defensive for the reason that retrieving
@@ -239,6 +259,8 @@
private volatile int maxRescoreWindow;
private volatile int maxDocvalueFields;
private volatile int maxScriptFields;
+ private volatile int maxNgramDiff;
+ private volatile int maxShingleDiff;
private volatile boolean TTLPurgeDisabled;
/**
* The maximum number of refresh listeners allows on this shard.
@@ -342,6 +364,8 @@ public IndexSettings(final IndexMetaData indexMetaData, final Settings nodeSetti
maxRescoreWindow = scopedSettings.get(MAX_RESCORE_WINDOW_SETTING);
maxDocvalueFields = scopedSettings.get(MAX_DOCVALUE_FIELDS_SEARCH_SETTING);
maxScriptFields = scopedSettings.get(MAX_SCRIPT_FIELDS_SETTING);
+ maxNgramDiff = scopedSettings.get(MAX_NGRAM_DIFF_SETTING);
+ maxShingleDiff = scopedSettings.get(MAX_SHINGLE_DIFF_SETTING);
TTLPurgeDisabled = scopedSettings.get(INDEX_TTL_DISABLE_PURGE_SETTING);
maxRefreshListeners = scopedSettings.get(MAX_REFRESH_LISTENERS_PER_SHARD);
maxSlicesPerScroll = scopedSettings.get(MAX_SLICES_PER_SCROLL);
@@ -373,6 +397,8 @@ public IndexSettings(final IndexMetaData indexMetaData, final Settings nodeSetti
scopedSettings.addSettingsUpdateConsumer(MAX_RESCORE_WINDOW_SETTING, this::setMaxRescoreWindow);
scopedSettings.addSettingsUpdateConsumer(MAX_DOCVALUE_FIELDS_SEARCH_SETTING, this::setMaxDocvalueFields);
scopedSettings.addSettingsUpdateConsumer(MAX_SCRIPT_FIELDS_SETTING, this::setMaxScriptFields);
+ scopedSettings.addSettingsUpdateConsumer(MAX_NGRAM_DIFF_SETTING, this::setMaxNgramDiff);
+ scopedSettings.addSettingsUpdateConsumer(MAX_SHINGLE_DIFF_SETTING, this::setMaxShingleDiff);
scopedSettings.addSettingsUpdateConsumer(INDEX_WARMER_ENABLED_SETTING, this::setEnableWarmer);
scopedSettings.addSettingsUpdateConsumer(INDEX_GC_DELETES_SETTING, this::setGCDeletes);
scopedSettings.addSettingsUpdateConsumer(INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING, this::setTranslogFlushThresholdSize);
@@ -641,6 +667,20 @@ private void setMaxDocvalueFields(int maxDocvalueFields) {
this.maxDocvalueFields = maxDocvalueFields;
}

+ /**
+  * Returns the maximum allowed difference between max and min length of ngram
+  */
+ public int getMaxNgramDiff() { return this.maxNgramDiff; }
+
+ private void setMaxNgramDiff(int maxNgramDiff) { this.maxNgramDiff = maxNgramDiff; }
+
+ /**
+  * Returns the maximum allowed difference between max and min shingle_size
+  */
+ public int getMaxShingleDiff() { return this.maxShingleDiff; }
+
+ private void setMaxShingleDiff(int maxShingleDiff) { this.maxShingleDiff = maxShingleDiff; }
+
/**
* Returns the maximum number of allowed script_fields to retrieve in a search request
*/
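Since both new limits are declared with `Property.Dynamic` and `Property.IndexScope`, they can be supplied at index creation or updated live. A small example with arbitrary values:

```java
// Hedged example (values arbitrary): raise the allowed ngram and shingle
// spreads for an index whose analysis genuinely needs wider ranges.
Settings settings = Settings.builder()
        .put("index.max_ngram_diff", 2)    // max_gram - min_gram may be up to 2
        .put("index.max_shingle_diff", 4)  // max_shingle_size - min_shingle_size up to 4
        .build();
```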
@@ -21,6 +21,7 @@

import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.ngram.NGramTokenizer;
+ import org.elasticsearch.Version;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
@@ -84,8 +85,21 @@ static CharMatcher parseTokenChars(List<String> characterClasses) {

public NGramTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) {
super(indexSettings, name, settings);
+ int maxAllowedNgramDiff = indexSettings.getMaxNgramDiff();
this.minGram = settings.getAsInt("min_gram", NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE);
this.maxGram = settings.getAsInt("max_gram", NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE);
+ int ngramDiff = maxGram - minGram;
+ if (ngramDiff > maxAllowedNgramDiff) {
+     if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0_alpha1)) {
+         throw new IllegalArgumentException(
+             "The difference between max_gram and min_gram in NGram Tokenizer must be less than or equal to: ["
+             + maxAllowedNgramDiff + "] but was [" + ngramDiff + "]. This limit can be set by changing the ["
+             + IndexSettings.MAX_NGRAM_DIFF_SETTING.getKey() + "] index level setting.");
+     } else {
+         deprecationLogger.deprecated("Deprecated big difference between max_gram and min_gram in NGram Tokenizer, "
+             + "expected difference must be less than or equal to: [" + maxAllowedNgramDiff + "]");
+     }
+ }
this.matcher = parseTokenChars(settings.getAsList("token_chars"));
}

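With the default `index.max_ngram_diff` of 1, a tokenizer definition like the sketch below now fails hard on indices created on or after 7.0.0-alpha1, and only logs a deprecation warning on older indices, because `max_gram - min_gram = 4` exceeds the limit:

```java
// Illustration only (assumed tokenizer definition): diff = 5 - 1 = 4 > 1,
// so construction throws IllegalArgumentException on 7.0+ indices unless
// index.max_ngram_diff is raised to at least 4.
Settings tokenizerSettings = Settings.builder()
        .put("type", "ngram")
        .put("min_gram", 1)
        .put("max_gram", 5)
        .build();
```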