From 088e05b36849f0659d3b20cd0376c1a89e2b426b Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Fri, 13 Sep 2013 23:54:04 +0200 Subject: [PATCH] Migrate from Trove to Hppc. --- pom.xml | 12 +- .../classic/MapperQueryParser.java | 5 +- .../classic/QueryParserSettings.java | 8 +- .../analyzing/XAnalyzingSuggester.java | 8 +- .../IndexDeleteByQueryRequest.java | 4 +- .../ShardDeleteByQueryRequest.java | 4 +- .../action/get/MultiGetShardRequest.java | 16 +- .../action/get/MultiGetShardResponse.java | 8 +- .../TransportMultiPercolateAction.java | 14 +- ...ransportSearchDfsQueryThenFetchAction.java | 12 +- .../TransportSearchQueryThenFetchAction.java | 12 +- ...sportSearchScrollQueryThenFetchAction.java | 8 +- .../type/TransportSearchTypeAction.java | 4 +- .../MultiTermVectorsShardRequest.java | 8 +- .../MultiTermVectorsShardResponse.java | 8 +- .../action/termvector/TermVectorFields.java | 33 +++- .../cache/recycler/CacheRecycler.java | 150 +++++++++--------- .../cluster/metadata/MetaData.java | 42 +++-- .../cluster/routing/RoutingNodes.java | 12 +- .../allocator/EvenShardsCountAllocator.java | 6 +- .../decider/AwarenessAllocationDecider.java | 18 +-- .../org/elasticsearch/common/Strings.java | 5 +- .../elasticsearch/common/collect/XMaps.java | 58 ------- .../elasticsearch/common/hppc/HppcMaps.java | 81 ++++++++++ .../common/io/stream/HandlesStreamInput.java | 6 +- .../common/io/stream/HandlesStreamOutput.java | 29 ++-- .../lucene/search/MultiPhrasePrefixQuery.java | 8 +- .../common/transport/PortsRange.java | 7 +- .../common/trove/ExtTDoubleObjectHashMap.java | 53 ------- .../common/trove/ExtTHashMap.java | 54 ------- .../common/trove/ExtTIntArrayList.java | 43 ----- .../common/trove/ExtTLongObjectHashMap.java | 53 ------- .../common/trove/ExtTObjectIntHasMap.java | 55 ------- .../trove/StringIdentityHashingStrategy.java | 39 ----- .../gateway/local/LocalGateway.java | 53 ++++--- .../gateway/local/LocalGatewayAllocator.java | 34 ++-- .../index/analysis/NumericDoubleAnalyzer.java | 6 +- .../index/analysis/NumericFloatAnalyzer.java | 6 +- .../analysis/NumericIntegerAnalyzer.java | 6 +- .../index/analysis/NumericLongAnalyzer.java | 6 +- .../index/cache/id/simple/SimpleIdCache.java | 12 +- .../id/simple/SimpleIdReaderTypeCache.java | 39 +++-- .../index/fielddata/FieldDataStats.java | 49 +++--- .../index/fielddata/ShardFieldData.java | 6 +- .../query/CustomFiltersScoreQueryBuilder.java | 4 +- .../query/CustomFiltersScoreQueryParser.java | 4 +- .../index/query/MultiMatchQueryBuilder.java | 15 +- .../index/query/QueryStringQueryBuilder.java | 15 +- .../index/query/QueryStringQueryParser.java | 7 +- .../index/search/child/ChildrenQuery.java | 72 ++++----- .../index/search/child/HasChildFilter.java | 16 +- .../index/search/child/HasParentFilter.java | 12 +- .../index/search/child/ParentIdsFilter.java | 17 +- .../index/search/child/ParentQuery.java | 17 +- .../index/search/child/TopChildrenQuery.java | 29 ++-- .../cache/filter/IndicesFilterCache.java | 4 +- .../cluster/IndicesClusterStateService.java | 4 +- .../percolator/PercolatorService.java | 11 +- .../percolator/QueryCollector.java | 6 +- .../search/builder/SearchSourceBuilder.java | 17 +- .../controller/SearchPhaseController.java | 48 +++--- .../search/dfs/AggregatedDfs.java | 41 ++--- .../elasticsearch/search/dfs/DfsPhase.java | 79 +++++++-- .../search/dfs/DfsSearchResult.java | 42 ++--- .../CountDateHistogramFacetExecutor.java | 30 ++-- .../InternalCountDateHistogramFacet.java | 25 +-- .../InternalFullDateHistogramFacet.java | 7 +- 
.../ValueDateHistogramFacetExecutor.java | 19 ++- ...ValueScriptDateHistogramFacetExecutor.java | 19 ++- .../CountHistogramFacetExecutor.java | 25 +-- .../histogram/FullHistogramFacetExecutor.java | 19 ++- .../InternalCountHistogramFacet.java | 19 ++- .../histogram/InternalFullHistogramFacet.java | 7 +- .../ScriptHistogramFacetExecutor.java | 19 ++- .../ValueHistogramFacetExecutor.java | 19 ++- .../ValueScriptHistogramFacetExecutor.java | 19 ++- .../doubles/InternalDoubleTermsFacet.java | 18 ++- .../doubles/TermsDoubleFacetExecutor.java | 38 +++-- .../terms/longs/InternalLongTermsFacet.java | 18 ++- .../terms/longs/TermsLongFacetExecutor.java | 39 +++-- .../facet/terms/strings/HashedAggregator.java | 8 +- .../strings/InternalStringTermsFacet.java | 20 ++- .../ScriptTermsStringFieldFacetExecutor.java | 34 ++-- .../InternalTermsStatsDoubleFacet.java | 8 +- .../TermsStatsDoubleFacetExecutor.java | 24 ++- .../longs/InternalTermsStatsLongFacet.java | 8 +- .../longs/TermsStatsLongFacetExecutor.java | 25 ++- .../InternalTermsStatsStringFacet.java | 8 +- .../TermsStatsStringFacetExecutor.java | 23 ++- .../search/fetch/FetchSearchRequest.java | 6 +- .../search/internal/InternalSearchHits.java | 6 +- .../AnalyzingCompletionLookupProvider.java | 10 +- .../suggest/completion/CompletionStats.java | 50 +++--- .../StringMapAdjustOrPutBenchmark.java | 46 +++--- .../search/child/ChildSearchBenchmark.java | 5 +- .../allocation/AwarenessAllocationTests.java | 20 +-- .../index/fielddata/LongFieldDataTests.java | 42 ++--- .../recovery/RelocationTests.java | 22 +-- 98 files changed, 1124 insertions(+), 1141 deletions(-) create mode 100644 src/main/java/org/elasticsearch/common/hppc/HppcMaps.java delete mode 100644 src/main/java/org/elasticsearch/common/trove/ExtTDoubleObjectHashMap.java delete mode 100644 src/main/java/org/elasticsearch/common/trove/ExtTHashMap.java delete mode 100644 src/main/java/org/elasticsearch/common/trove/ExtTIntArrayList.java delete mode 100644 src/main/java/org/elasticsearch/common/trove/ExtTLongObjectHashMap.java delete mode 100644 src/main/java/org/elasticsearch/common/trove/ExtTObjectIntHasMap.java delete mode 100644 src/main/java/org/elasticsearch/common/trove/StringIdentityHashingStrategy.java rename src/test/java/org/elasticsearch/benchmark/{trove => hppc}/StringMapAdjustOrPutBenchmark.java (83%) diff --git a/pom.xml b/pom.xml index 86a5600ff049c..aab1e0bc7a37b 100644 --- a/pom.xml +++ b/pom.xml @@ -170,9 +170,9 @@ - net.sf.trove4j - trove4j - 3.0.3 + com.carrotsearch + hppc + 0.5.2 @@ -439,7 +439,7 @@ com.google.guava:guava - net.sf.trove4j:trove4j + com.carrotsearch:hppc org.mvel:mvel2 com.fasterxml.jackson.core:jackson-core com.fasterxml.jackson.dataformat:jackson-dataformat-smile @@ -455,8 +455,8 @@ org.elasticsearch.common - gnu.trove - org.elasticsearch.common.trove + com.carrotsearch.hppc + org.elasticsearch.common.hppc jsr166y diff --git a/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java b/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java index 970d78581a3c6..c1b15c190f35a 100644 --- a/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java +++ b/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java @@ -755,7 +755,10 @@ protected Query getBooleanQuery(List clauses, boolean disableCoor private void applyBoost(String field, Query q) { if (settings.boosts() != null) { - float boost = settings.boosts().get(field); + float boost = 1f; + if (settings.boosts().containsKey(field)) { + boost 
= settings.boosts().lget(); + } q.setBoost(boost); } } diff --git a/src/main/java/org/apache/lucene/queryparser/classic/QueryParserSettings.java b/src/main/java/org/apache/lucene/queryparser/classic/QueryParserSettings.java index 73c0d69de219d..7fff0454045dc 100644 --- a/src/main/java/org/apache/lucene/queryparser/classic/QueryParserSettings.java +++ b/src/main/java/org/apache/lucene/queryparser/classic/QueryParserSettings.java @@ -19,7 +19,7 @@ package org.apache.lucene.queryparser.classic; -import gnu.trove.map.hash.TObjectFloatHashMap; +import com.carrotsearch.hppc.ObjectFloatOpenHashMap; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.search.FuzzyQuery; import org.apache.lucene.search.MultiTermQuery; @@ -63,7 +63,7 @@ public class QueryParserSettings { List fields = null; Collection queryTypes = null; - TObjectFloatHashMap boosts = null; + ObjectFloatOpenHashMap boosts = null; float tieBreaker = 0.0f; boolean useDisMax = true; @@ -272,11 +272,11 @@ public void queryTypes(Collection queryTypes) { this.queryTypes = queryTypes; } - public TObjectFloatHashMap boosts() { + public ObjectFloatOpenHashMap boosts() { return boosts; } - public void boosts(TObjectFloatHashMap boosts) { + public void boosts(ObjectFloatOpenHashMap boosts) { this.boosts = boosts; } diff --git a/src/main/java/org/apache/lucene/search/suggest/analyzing/XAnalyzingSuggester.java b/src/main/java/org/apache/lucene/search/suggest/analyzing/XAnalyzingSuggester.java index 51249984a2fc1..ca2e3bcb40c07 100644 --- a/src/main/java/org/apache/lucene/search/suggest/analyzing/XAnalyzingSuggester.java +++ b/src/main/java/org/apache/lucene/search/suggest/analyzing/XAnalyzingSuggester.java @@ -18,7 +18,7 @@ */ package org.apache.lucene.search.suggest.analyzing; -import gnu.trove.map.hash.TObjectIntHashMap; +import com.carrotsearch.hppc.ObjectIntOpenHashMap; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.TokenStreamToAutomaton; @@ -33,6 +33,7 @@ import org.apache.lucene.util.fst.FST.BytesReader; import org.apache.lucene.util.fst.PairOutputs.Pair; import org.apache.lucene.util.fst.Util.MinResult; +import org.elasticsearch.common.hppc.HppcMaps; import java.io.File; import java.io.IOException; @@ -917,7 +918,7 @@ public static class XBuilder { private BytesRef analyzed = new BytesRef(); private final SurfaceFormAndPayload[] surfaceFormsAndPayload; private int count; - private TObjectIntHashMap seenSurfaceForms = new TObjectIntHashMap(256, 0.75f, -1); + private ObjectIntOpenHashMap seenSurfaceForms = HppcMaps.Object.Integer.ensureNoNullKeys(256, 0.75f); public XBuilder(int maxSurfaceFormsPerAnalyzedForm, boolean hasPayloads) { this.outputs = new PairOutputs(PositiveIntOutputs.getSingleton(), ByteSequenceOutputs.getSingleton()); @@ -969,7 +970,8 @@ public void addSurface(BytesRef surface, BytesRef payload, long cost) throws IOE return; } BytesRef surfaceCopy; - if (count > 0 && (surfaceIndex = seenSurfaceForms.get(surface)) >= 0) { + if (count > 0 && seenSurfaceForms.containsKey(surface)) { + surfaceIndex = seenSurfaceForms.lget(); SurfaceFormAndPayload surfaceFormAndPayload = surfaceFormsAndPayload[surfaceIndex]; if (encodedWeight >= surfaceFormAndPayload.weight) { return; diff --git a/src/main/java/org/elasticsearch/action/deletebyquery/IndexDeleteByQueryRequest.java b/src/main/java/org/elasticsearch/action/deletebyquery/IndexDeleteByQueryRequest.java index b63ef92c3c9e1..3e171f33fb535 100644 --- 
a/src/main/java/org/elasticsearch/action/deletebyquery/IndexDeleteByQueryRequest.java +++ b/src/main/java/org/elasticsearch/action/deletebyquery/IndexDeleteByQueryRequest.java @@ -19,7 +19,6 @@ package org.elasticsearch.action.deletebyquery; -import gnu.trove.set.hash.THashSet; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.replication.IndexReplicationOperationRequest; import org.elasticsearch.common.Nullable; @@ -30,6 +29,7 @@ import org.elasticsearch.common.unit.TimeValue; import java.io.IOException; +import java.util.HashSet; import java.util.Set; import static org.elasticsearch.action.ValidateActions.addValidationError; @@ -102,7 +102,7 @@ public void readFrom(StreamInput in) throws IOException { } int routingSize = in.readVInt(); if (routingSize > 0) { - routing = new THashSet(routingSize); + routing = new HashSet(routingSize); for (int i = 0; i < routingSize; i++) { routing.add(in.readString()); } diff --git a/src/main/java/org/elasticsearch/action/deletebyquery/ShardDeleteByQueryRequest.java b/src/main/java/org/elasticsearch/action/deletebyquery/ShardDeleteByQueryRequest.java index a31dcfcae34c7..005b2e1cb8673 100644 --- a/src/main/java/org/elasticsearch/action/deletebyquery/ShardDeleteByQueryRequest.java +++ b/src/main/java/org/elasticsearch/action/deletebyquery/ShardDeleteByQueryRequest.java @@ -19,7 +19,6 @@ package org.elasticsearch.action.deletebyquery; -import gnu.trove.set.hash.THashSet; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.replication.ShardReplicationOperationRequest; import org.elasticsearch.common.Nullable; @@ -31,6 +30,7 @@ import java.io.IOException; import java.util.Arrays; +import java.util.HashSet; import java.util.Set; import static org.elasticsearch.action.ValidateActions.addValidationError; @@ -101,7 +101,7 @@ public void readFrom(StreamInput in) throws IOException { types = in.readStringArray(); int routingSize = in.readVInt(); if (routingSize > 0) { - routing = new THashSet(routingSize); + routing = new HashSet(routingSize); for (int i = 0; i < routingSize; i++) { routing.add(in.readString()); } diff --git a/src/main/java/org/elasticsearch/action/get/MultiGetShardRequest.java b/src/main/java/org/elasticsearch/action/get/MultiGetShardRequest.java index 4b9e0e879ce34..b6a330a06179d 100644 --- a/src/main/java/org/elasticsearch/action/get/MultiGetShardRequest.java +++ b/src/main/java/org/elasticsearch/action/get/MultiGetShardRequest.java @@ -19,8 +19,8 @@ package org.elasticsearch.action.get; -import gnu.trove.list.array.TIntArrayList; -import gnu.trove.list.array.TLongArrayList; +import com.carrotsearch.hppc.IntArrayList; +import com.carrotsearch.hppc.LongArrayList; import org.elasticsearch.action.support.single.shard.SingleShardOperationRequest; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; @@ -39,11 +39,11 @@ public class MultiGetShardRequest extends SingleShardOperationRequest types; List ids; List fields; - TLongArrayList versions; + LongArrayList versions; List versionTypes; List fetchSourceContexts; @@ -54,11 +54,11 @@ public class MultiGetShardRequest extends SingleShardOperationRequest(); ids = new ArrayList(); fields = new ArrayList(); - versions = new TLongArrayList(); + versions = new LongArrayList(); versionTypes = new ArrayList(); fetchSourceContexts = new ArrayList(); } @@ -113,11 +113,11 @@ public void add(int location, @Nullable String type, String id, String[] fields, 
public void readFrom(StreamInput in) throws IOException { super.readFrom(in); int size = in.readVInt(); - locations = new TIntArrayList(size); + locations = new IntArrayList(size); types = new ArrayList(size); ids = new ArrayList(size); fields = new ArrayList(size); - versions = new TLongArrayList(size); + versions = new LongArrayList(size); versionTypes = new ArrayList(size); fetchSourceContexts = new ArrayList(size); for (int i = 0; i < size; i++) { diff --git a/src/main/java/org/elasticsearch/action/get/MultiGetShardResponse.java b/src/main/java/org/elasticsearch/action/get/MultiGetShardResponse.java index 661c565b26003..b5437d12f78bf 100644 --- a/src/main/java/org/elasticsearch/action/get/MultiGetShardResponse.java +++ b/src/main/java/org/elasticsearch/action/get/MultiGetShardResponse.java @@ -19,7 +19,7 @@ package org.elasticsearch.action.get; -import gnu.trove.list.array.TIntArrayList; +import com.carrotsearch.hppc.IntArrayList; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -30,12 +30,12 @@ public class MultiGetShardResponse extends ActionResponse { - TIntArrayList locations; + IntArrayList locations; List responses; List failures; MultiGetShardResponse() { - locations = new TIntArrayList(); + locations = new IntArrayList(); responses = new ArrayList(); failures = new ArrayList(); } @@ -56,7 +56,7 @@ public void add(int location, MultiGetResponse.Failure failure) { public void readFrom(StreamInput in) throws IOException { super.readFrom(in); int size = in.readVInt(); - locations = new TIntArrayList(size); + locations = new IntArrayList(size); responses = new ArrayList(size); failures = new ArrayList(size); for (int i = 0; i < size; i++) { diff --git a/src/main/java/org/elasticsearch/action/percolate/TransportMultiPercolateAction.java b/src/main/java/org/elasticsearch/action/percolate/TransportMultiPercolateAction.java index 5ed98a5024d74..95149102c6f5b 100644 --- a/src/main/java/org/elasticsearch/action/percolate/TransportMultiPercolateAction.java +++ b/src/main/java/org/elasticsearch/action/percolate/TransportMultiPercolateAction.java @@ -19,7 +19,7 @@ package org.elasticsearch.action.percolate; -import gnu.trove.list.array.TIntArrayList; +import com.carrotsearch.hppc.IntArrayList; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.get.*; @@ -76,7 +76,7 @@ protected void doExecute(final MultiPercolateRequest request, final ActionListen final List percolateRequests = new ArrayList(request.requests().size()); // Can have a mixture of percolate requests. 
(normal percolate requests & percolate existing doc), // so we need to keep track for what percolate request we had a get request - final TIntArrayList getRequestSlots = new TIntArrayList(); + final IntArrayList getRequestSlots = new IntArrayList(); List existingDocsRequests = new ArrayList(); for (int slot = 0; slot < request.requests().size(); slot++) { PercolateRequest percolateRequest = request.requests().get(slot); @@ -139,7 +139,7 @@ private class ASyncAction { final Map requestsByShard; final List percolateRequests; - final Map shardToSlots; + final Map shardToSlots; final AtomicInteger expectedOperations; final AtomicArray reducedResponses; final AtomicReferenceArray expectedOperationsPerItem; @@ -155,7 +155,7 @@ private class ASyncAction { // Resolving concrete indices and routing and grouping the requests by shard requestsByShard = new HashMap(); // Keep track what slots belong to what shard, in case a request to a shard fails on all copies - shardToSlots = new HashMap(); + shardToSlots = new HashMap(); int expectedResults = 0; for (int slot = 0; slot < percolateRequests.size(); slot++) { Object element = percolateRequests.get(slot); @@ -180,9 +180,9 @@ private class ASyncAction { logger.trace("Adding shard[{}] percolate request for item[{}]", shardId, slot); requests.add(new TransportShardMultiPercolateAction.Request.Item(slot, new PercolateShardRequest(shardId, percolateRequest))); - TIntArrayList items = shardToSlots.get(shardId); + IntArrayList items = shardToSlots.get(shardId); if (items == null) { - shardToSlots.put(shardId, items = new TIntArrayList()); + shardToSlots.put(shardId, items = new IntArrayList()); } items.add(slot); } @@ -257,7 +257,7 @@ void onShardResponse(ShardId shardId, TransportShardMultiPercolateAction.Respons void onShardFailure(ShardId shardId, Throwable e) { logger.debug("{} Shard multi percolate failure", e, shardId); try { - TIntArrayList slots = shardToSlots.get(shardId); + IntArrayList slots = shardToSlots.get(shardId); for (int i = 0; i < slots.size(); i++) { int slot = slots.get(i); AtomicReferenceArray shardResults = responsesByItemAndShard.get(slot); diff --git a/src/main/java/org/elasticsearch/action/search/type/TransportSearchDfsQueryThenFetchAction.java b/src/main/java/org/elasticsearch/action/search/type/TransportSearchDfsQueryThenFetchAction.java index 706f3d0905836..4fecf854234d1 100644 --- a/src/main/java/org/elasticsearch/action/search/type/TransportSearchDfsQueryThenFetchAction.java +++ b/src/main/java/org/elasticsearch/action/search/type/TransportSearchDfsQueryThenFetchAction.java @@ -19,6 +19,7 @@ package org.elasticsearch.action.search.type; +import com.carrotsearch.hppc.IntArrayList; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.search.ReduceSearchPhaseException; import org.elasticsearch.action.search.SearchOperationThreading; @@ -28,7 +29,6 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.trove.ExtTIntArrayList; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.action.SearchServiceListener; @@ -66,13 +66,13 @@ private class AsyncAction extends BaseAsyncAction { final AtomicArray queryResults; final AtomicArray fetchResults; - final AtomicArray docIdsToLoad; + final AtomicArray docIdsToLoad; private AsyncAction(SearchRequest request, ActionListener listener) { 
super(request, listener); queryResults = new AtomicArray(firstResults.length()); fetchResults = new AtomicArray(firstResults.length()); - docIdsToLoad = new AtomicArray(firstResults.length()); + docIdsToLoad = new AtomicArray(firstResults.length()); } @Override @@ -192,7 +192,7 @@ void innerExecuteFetchPhase() { final AtomicInteger counter = new AtomicInteger(docIdsToLoad.asList().size()); int localOperations = 0; - for (final AtomicArray.Entry entry : docIdsToLoad.asList()) { + for (final AtomicArray.Entry entry : docIdsToLoad.asList()) { QuerySearchResult queryResult = queryResults.get(entry.index); DiscoveryNode node = nodes.get(queryResult.shardTarget().nodeId()); if (node.id().equals(nodes.localNodeId())) { @@ -208,7 +208,7 @@ void innerExecuteFetchPhase() { threadPool.executor(ThreadPool.Names.SEARCH).execute(new Runnable() { @Override public void run() { - for (final AtomicArray.Entry entry : docIdsToLoad.asList()) { + for (final AtomicArray.Entry entry : docIdsToLoad.asList()) { QuerySearchResult queryResult = queryResults.get(entry.index); DiscoveryNode node = nodes.get(queryResult.shardTarget().nodeId()); if (node.id().equals(nodes.localNodeId())) { @@ -220,7 +220,7 @@ public void run() { }); } else { boolean localAsync = request.operationThreading() == SearchOperationThreading.THREAD_PER_SHARD; - for (final AtomicArray.Entry entry : docIdsToLoad.asList()) { + for (final AtomicArray.Entry entry : docIdsToLoad.asList()) { final QuerySearchResult queryResult = queryResults.get(entry.index); final DiscoveryNode node = nodes.get(queryResult.shardTarget().nodeId()); if (node.id().equals(nodes.localNodeId())) { diff --git a/src/main/java/org/elasticsearch/action/search/type/TransportSearchQueryThenFetchAction.java b/src/main/java/org/elasticsearch/action/search/type/TransportSearchQueryThenFetchAction.java index a3ace10d496f8..1785657024097 100644 --- a/src/main/java/org/elasticsearch/action/search/type/TransportSearchQueryThenFetchAction.java +++ b/src/main/java/org/elasticsearch/action/search/type/TransportSearchQueryThenFetchAction.java @@ -19,6 +19,7 @@ package org.elasticsearch.action.search.type; +import com.carrotsearch.hppc.IntArrayList; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.search.ReduceSearchPhaseException; import org.elasticsearch.action.search.SearchOperationThreading; @@ -28,7 +29,6 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.trove.ExtTIntArrayList; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.action.SearchServiceListener; @@ -62,12 +62,12 @@ protected void doExecute(SearchRequest searchRequest, ActionListener { final AtomicArray fetchResults; - final AtomicArray docIdsToLoad; + final AtomicArray docIdsToLoad; private AsyncAction(SearchRequest request, ActionListener listener) { super(request, listener); fetchResults = new AtomicArray(firstResults.length()); - docIdsToLoad = new AtomicArray(firstResults.length()); + docIdsToLoad = new AtomicArray(firstResults.length()); } @Override @@ -93,7 +93,7 @@ protected void moveToSecondPhase() { final AtomicInteger counter = new AtomicInteger(docIdsToLoad.asList().size()); int localOperations = 0; - for (AtomicArray.Entry entry : docIdsToLoad.asList()) { + for (AtomicArray.Entry entry : docIdsToLoad.asList()) { QuerySearchResult queryResult = 
firstResults.get(entry.index); DiscoveryNode node = nodes.get(queryResult.shardTarget().nodeId()); if (node.id().equals(nodes.localNodeId())) { @@ -109,7 +109,7 @@ protected void moveToSecondPhase() { threadPool.executor(ThreadPool.Names.SEARCH).execute(new Runnable() { @Override public void run() { - for (AtomicArray.Entry entry : docIdsToLoad.asList()) { + for (AtomicArray.Entry entry : docIdsToLoad.asList()) { QuerySearchResult queryResult = firstResults.get(entry.index); DiscoveryNode node = nodes.get(queryResult.shardTarget().nodeId()); if (node.id().equals(nodes.localNodeId())) { @@ -121,7 +121,7 @@ public void run() { }); } else { boolean localAsync = request.operationThreading() == SearchOperationThreading.THREAD_PER_SHARD; - for (final AtomicArray.Entry entry : docIdsToLoad.asList()) { + for (final AtomicArray.Entry entry : docIdsToLoad.asList()) { final QuerySearchResult queryResult = firstResults.get(entry.index); final DiscoveryNode node = nodes.get(queryResult.shardTarget().nodeId()); if (node.id().equals(nodes.localNodeId())) { diff --git a/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollQueryThenFetchAction.java b/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollQueryThenFetchAction.java index b398fab16fcad..3a3d621563280 100644 --- a/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollQueryThenFetchAction.java +++ b/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollQueryThenFetchAction.java @@ -19,6 +19,7 @@ package org.elasticsearch.action.search.type; +import com.carrotsearch.hppc.IntArrayList; import org.apache.lucene.search.ScoreDoc; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.search.*; @@ -29,7 +30,6 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.trove.ExtTIntArrayList; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.search.action.SearchServiceListener; import org.elasticsearch.search.action.SearchServiceTransportAction; @@ -226,7 +226,7 @@ void onQueryPhaseFailure(final int shardIndex, final AtomicInteger counter, fina private void executeFetchPhase() { sortedShardList = searchPhaseController.sortDocs(queryResults); - AtomicArray docIdsToLoad = new AtomicArray(queryResults.length()); + AtomicArray docIdsToLoad = new AtomicArray(queryResults.length()); searchPhaseController.fillDocIdsToLoad(docIdsToLoad, sortedShardList); if (docIdsToLoad.asList().isEmpty()) { @@ -235,8 +235,8 @@ private void executeFetchPhase() { final AtomicInteger counter = new AtomicInteger(docIdsToLoad.asList().size()); - for (final AtomicArray.Entry entry : docIdsToLoad.asList()) { - ExtTIntArrayList docIds = entry.value; + for (final AtomicArray.Entry entry : docIdsToLoad.asList()) { + IntArrayList docIds = entry.value; final QuerySearchResult querySearchResult = queryResults.get(entry.index); FetchSearchRequest fetchSearchRequest = new FetchSearchRequest(request, querySearchResult.id(), docIds); DiscoveryNode node = nodes.get(querySearchResult.shardTarget().nodeId()); diff --git a/src/main/java/org/elasticsearch/action/search/type/TransportSearchTypeAction.java b/src/main/java/org/elasticsearch/action/search/type/TransportSearchTypeAction.java index 976b422e4e8c3..10234a6df671e 100644 --- a/src/main/java/org/elasticsearch/action/search/type/TransportSearchTypeAction.java +++ 
b/src/main/java/org/elasticsearch/action/search/type/TransportSearchTypeAction.java @@ -19,6 +19,7 @@ package org.elasticsearch.action.search.type; +import com.carrotsearch.hppc.IntArrayList; import org.apache.lucene.search.ScoreDoc; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.NoShardAvailableActionException; @@ -35,7 +36,6 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.trove.ExtTIntArrayList; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; @@ -356,7 +356,7 @@ protected final void addShardFailure(final int shardIndex, @Nullable SearchShard * Releases shard targets that are not used in the docsIdsToLoad. */ protected void releaseIrrelevantSearchContexts(AtomicArray queryResults, - AtomicArray docIdsToLoad) { + AtomicArray docIdsToLoad) { if (docIdsToLoad == null) { return; } diff --git a/src/main/java/org/elasticsearch/action/termvector/MultiTermVectorsShardRequest.java b/src/main/java/org/elasticsearch/action/termvector/MultiTermVectorsShardRequest.java index ef4e45654a3df..ce3d2f3df83cf 100644 --- a/src/main/java/org/elasticsearch/action/termvector/MultiTermVectorsShardRequest.java +++ b/src/main/java/org/elasticsearch/action/termvector/MultiTermVectorsShardRequest.java @@ -19,7 +19,7 @@ package org.elasticsearch.action.termvector; -import gnu.trove.list.array.TIntArrayList; +import com.carrotsearch.hppc.IntArrayList; import org.elasticsearch.action.support.single.shard.SingleShardOperationRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -33,7 +33,7 @@ public class MultiTermVectorsShardRequest extends SingleShardOperationRequest requests; MultiTermVectorsShardRequest() { @@ -43,7 +43,7 @@ public class MultiTermVectorsShardRequest extends SingleShardOperationRequest(); } @@ -75,7 +75,7 @@ public void add(int location, TermVectorRequest request) { public void readFrom(StreamInput in) throws IOException { super.readFrom(in); int size = in.readVInt(); - locations = new TIntArrayList(size); + locations = new IntArrayList(size); requests = new ArrayList(size); for (int i = 0; i < size; i++) { locations.add(in.readVInt()); diff --git a/src/main/java/org/elasticsearch/action/termvector/MultiTermVectorsShardResponse.java b/src/main/java/org/elasticsearch/action/termvector/MultiTermVectorsShardResponse.java index b610fc9f81ddb..0b9ef4553b43d 100644 --- a/src/main/java/org/elasticsearch/action/termvector/MultiTermVectorsShardResponse.java +++ b/src/main/java/org/elasticsearch/action/termvector/MultiTermVectorsShardResponse.java @@ -19,7 +19,7 @@ package org.elasticsearch.action.termvector; -import gnu.trove.list.array.TIntArrayList; +import com.carrotsearch.hppc.IntArrayList; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -30,12 +30,12 @@ public class MultiTermVectorsShardResponse extends ActionResponse { - TIntArrayList locations; + IntArrayList locations; List responses; List failures; MultiTermVectorsShardResponse() { - locations = new TIntArrayList(); + locations = new IntArrayList(); responses = new ArrayList(); failures = new ArrayList(); } @@ -56,7 +56,7 @@ public void add(int location, MultiTermVectorsResponse.Failure 
failure) { public void readFrom(StreamInput in) throws IOException { super.readFrom(in); int size = in.readVInt(); - locations = new TIntArrayList(size); + locations = new IntArrayList(size); responses = new ArrayList(size); failures = new ArrayList(size); for (int i = 0; i < size; i++) { diff --git a/src/main/java/org/elasticsearch/action/termvector/TermVectorFields.java b/src/main/java/org/elasticsearch/action/termvector/TermVectorFields.java index 027fdbe744001..72ac9e842c64c 100644 --- a/src/main/java/org/elasticsearch/action/termvector/TermVectorFields.java +++ b/src/main/java/org/elasticsearch/action/termvector/TermVectorFields.java @@ -19,14 +19,15 @@ package org.elasticsearch.action.termvector; -import gnu.trove.impl.Constants; -import gnu.trove.map.hash.TObjectLongHashMap; +import com.carrotsearch.hppc.ObjectLongOpenHashMap; +import com.carrotsearch.hppc.cursors.ObjectLongCursor; import org.apache.lucene.index.*; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.Bits; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.hppc.HppcMaps; import org.elasticsearch.common.io.stream.BytesStreamInput; import java.io.IOException; @@ -112,7 +113,7 @@ public final class TermVectorFields extends Fields { - final private TObjectLongHashMap fieldMap; + final private ObjectLongOpenHashMap fieldMap; final private BytesReference termVectors; final boolean hasTermStatistic; final boolean hasFieldStatistic; @@ -124,7 +125,7 @@ public final class TermVectorFields extends Fields { */ public TermVectorFields(BytesReference headerRef, BytesReference termVectors) throws IOException { BytesStreamInput header = new BytesStreamInput(headerRef); - fieldMap = new TObjectLongHashMap(Constants.DEFAULT_CAPACITY, Constants.DEFAULT_LOAD_FACTOR, -1); + fieldMap = new ObjectLongOpenHashMap(); // here we read the header to fill the field offset map String headerString = header.readString(); @@ -144,20 +145,36 @@ public TermVectorFields(BytesReference headerRef, BytesReference termVectors) th @Override public Iterator iterator() { - return fieldMap.keySet().iterator(); + final Iterator> iterator = fieldMap.iterator(); + return new Iterator() { + @Override + public boolean hasNext() { + return iterator.hasNext(); + } + + @Override + public String next() { + return iterator.next().key; + } + + @Override + public void remove() { + throw new UnsupportedOperationException(); + } + }; } @Override public Terms terms(String field) throws IOException { // first, find where in the termVectors bytes the actual term vector for // this field is stored - Long offset = fieldMap.get(field); - if (offset.longValue() < 0) { + if (!fieldMap.containsKey(field)) { return null; // we don't have it. } + long offset = fieldMap.lget(); final BytesStreamInput perFieldTermVectorInput = new BytesStreamInput(this.termVectors); perFieldTermVectorInput.reset(); - perFieldTermVectorInput.skip(offset.longValue()); + perFieldTermVectorInput.skip(offset); // read how many terms.... 
final long numTerms = perFieldTermVectorInput.readVLong(); diff --git a/src/main/java/org/elasticsearch/cache/recycler/CacheRecycler.java b/src/main/java/org/elasticsearch/cache/recycler/CacheRecycler.java index fcdbb01e07f06..074d9ec57990c 100644 --- a/src/main/java/org/elasticsearch/cache/recycler/CacheRecycler.java +++ b/src/main/java/org/elasticsearch/cache/recycler/CacheRecycler.java @@ -19,8 +19,7 @@ package org.elasticsearch.cache.recycler; -import gnu.trove.map.hash.*; -import gnu.trove.set.hash.THashSet; +import com.carrotsearch.hppc.*; import org.elasticsearch.ElasticSearchIllegalArgumentException; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; @@ -29,25 +28,22 @@ import org.elasticsearch.common.recycler.SoftThreadLocalRecycler; import org.elasticsearch.common.recycler.ThreadLocalRecycler; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.trove.ExtTDoubleObjectHashMap; -import org.elasticsearch.common.trove.ExtTHashMap; -import org.elasticsearch.common.trove.ExtTLongObjectHashMap; @SuppressWarnings("unchecked") public class CacheRecycler extends AbstractComponent { - public final Recycler hashMap; - public final Recycler hashSet; - public final Recycler doubleObjectMap; - public final Recycler longObjectMap; - public final Recycler longLongMap; - public final Recycler intIntMap; - public final Recycler floatIntMap; - public final Recycler doubleIntMap; - public final Recycler longIntMap; - public final Recycler objectIntMap; - public final Recycler intObjectMap; - public final Recycler objectFloatMap; + public final Recycler hashMap; + public final Recycler hashSet; + public final Recycler doubleObjectMap; + public final Recycler longObjectMap; + public final Recycler longLongMap; + public final Recycler intIntMap; + public final Recycler floatIntMap; + public final Recycler doubleIntMap; + public final Recycler longIntMap; + public final Recycler objectIntMap; + public final Recycler intObjectMap; + public final Recycler objectFloatMap; public void close() { hashMap.close(); @@ -71,185 +67,185 @@ public CacheRecycler(Settings settings) { int limit = settings.getAsInt("limit", 10); int smartSize = settings.getAsInt("smart_size", 1024); - hashMap = build(type, limit, smartSize, new Recycler.C() { + hashMap = build(type, limit, smartSize, new Recycler.C() { @Override - public ExtTHashMap newInstance(int sizing) { - return new ExtTHashMap(size(sizing)); + public ObjectObjectOpenHashMap newInstance(int sizing) { + return new ObjectObjectOpenHashMap(size(sizing)); } @Override - public void clear(ExtTHashMap value) { + public void clear(ObjectObjectOpenHashMap value) { value.clear(); } }); - hashSet = build(type, limit, smartSize, new Recycler.C() { + hashSet = build(type, limit, smartSize, new Recycler.C() { @Override - public THashSet newInstance(int sizing) { - return new THashSet(size(sizing)); + public ObjectOpenHashSet newInstance(int sizing) { + return new ObjectOpenHashSet(size(sizing), 0.5f); } @Override - public void clear(THashSet value) { + public void clear(ObjectOpenHashSet value) { value.clear(); } }); - doubleObjectMap = build(type, limit, smartSize, new Recycler.C() { + doubleObjectMap = build(type, limit, smartSize, new Recycler.C() { @Override - public ExtTDoubleObjectHashMap newInstance(int sizing) { - return new ExtTDoubleObjectHashMap(size(sizing)); + public DoubleObjectOpenHashMap newInstance(int sizing) { + return new DoubleObjectOpenHashMap(size(sizing)); } @Override 
- public void clear(ExtTDoubleObjectHashMap value) { + public void clear(DoubleObjectOpenHashMap value) { value.clear(); } }); - longObjectMap = build(type, limit, smartSize, new Recycler.C() { + longObjectMap = build(type, limit, smartSize, new Recycler.C() { @Override - public ExtTLongObjectHashMap newInstance(int sizing) { - return new ExtTLongObjectHashMap(size(sizing)); + public LongObjectOpenHashMap newInstance(int sizing) { + return new LongObjectOpenHashMap(size(sizing)); } @Override - public void clear(ExtTLongObjectHashMap value) { + public void clear(LongObjectOpenHashMap value) { value.clear(); } }); - longLongMap = build(type, limit, smartSize, new Recycler.C() { + longLongMap = build(type, limit, smartSize, new Recycler.C() { @Override - public TLongLongHashMap newInstance(int sizing) { - return new TLongLongHashMap(size(sizing)); + public LongLongOpenHashMap newInstance(int sizing) { + return new LongLongOpenHashMap(size(sizing)); } @Override - public void clear(TLongLongHashMap value) { + public void clear(LongLongOpenHashMap value) { value.clear(); } }); - intIntMap = build(type, limit, smartSize, new Recycler.C() { + intIntMap = build(type, limit, smartSize, new Recycler.C() { @Override - public TIntIntHashMap newInstance(int sizing) { - return new TIntIntHashMap(size(sizing)); + public IntIntOpenHashMap newInstance(int sizing) { + return new IntIntOpenHashMap(size(sizing)); } @Override - public void clear(TIntIntHashMap value) { + public void clear(IntIntOpenHashMap value) { value.clear(); } }); - floatIntMap = build(type, limit, smartSize, new Recycler.C() { + floatIntMap = build(type, limit, smartSize, new Recycler.C() { @Override - public TFloatIntHashMap newInstance(int sizing) { - return new TFloatIntHashMap(size(sizing)); + public FloatIntOpenHashMap newInstance(int sizing) { + return new FloatIntOpenHashMap(size(sizing)); } @Override - public void clear(TFloatIntHashMap value) { + public void clear(FloatIntOpenHashMap value) { value.clear(); } }); - doubleIntMap = build(type, limit, smartSize, new Recycler.C() { + doubleIntMap = build(type, limit, smartSize, new Recycler.C() { @Override - public TDoubleIntHashMap newInstance(int sizing) { - return new TDoubleIntHashMap(size(sizing)); + public DoubleIntOpenHashMap newInstance(int sizing) { + return new DoubleIntOpenHashMap(size(sizing)); } @Override - public void clear(TDoubleIntHashMap value) { + public void clear(DoubleIntOpenHashMap value) { value.clear(); } }); - longIntMap = build(type, limit, smartSize, new Recycler.C() { + longIntMap = build(type, limit, smartSize, new Recycler.C() { @Override - public TLongIntHashMap newInstance(int sizing) { - return new TLongIntHashMap(size(sizing)); + public LongIntOpenHashMap newInstance(int sizing) { + return new LongIntOpenHashMap(size(sizing)); } @Override - public void clear(TLongIntHashMap value) { + public void clear(LongIntOpenHashMap value) { value.clear(); } }); - objectIntMap = build(type, limit, smartSize, new Recycler.C() { + objectIntMap = build(type, limit, smartSize, new Recycler.C() { @Override - public TObjectIntHashMap newInstance(int sizing) { - return new TObjectIntHashMap(size(sizing)); + public ObjectIntOpenHashMap newInstance(int sizing) { + return new ObjectIntOpenHashMap(size(sizing)); } @Override - public void clear(TObjectIntHashMap value) { + public void clear(ObjectIntOpenHashMap value) { value.clear(); } }); - intObjectMap = build(type, limit, smartSize, new Recycler.C() { + intObjectMap = build(type, limit, smartSize, new Recycler.C() { 
@Override - public TIntObjectHashMap newInstance(int sizing) { - return new TIntObjectHashMap(size(sizing)); + public IntObjectOpenHashMap newInstance(int sizing) { + return new IntObjectOpenHashMap(size(sizing)); } @Override - public void clear(TIntObjectHashMap value) { + public void clear(IntObjectOpenHashMap value) { value.clear(); } }); - objectFloatMap = build(type, limit, smartSize, new Recycler.C() { + objectFloatMap = build(type, limit, smartSize, new Recycler.C() { @Override - public TObjectFloatHashMap newInstance(int sizing) { - return new TObjectFloatHashMap(size(sizing)); + public ObjectFloatOpenHashMap newInstance(int sizing) { + return new ObjectFloatOpenHashMap(size(sizing)); } @Override - public void clear(TObjectFloatHashMap value) { + public void clear(ObjectFloatOpenHashMap value) { value.clear(); } }); } - public Recycler.V> hashMap(int sizing) { + public Recycler.V> hashMap(int sizing) { return (Recycler.V) hashMap.obtain(sizing); } - public Recycler.V> hashSet(int sizing) { + public Recycler.V> hashSet(int sizing) { return (Recycler.V) hashSet.obtain(sizing); } - public Recycler.V> doubleObjectMap(int sizing) { + public Recycler.V> doubleObjectMap(int sizing) { return (Recycler.V) doubleObjectMap.obtain(sizing); } - public Recycler.V> longObjectMap(int sizing) { + public Recycler.V> longObjectMap(int sizing) { return (Recycler.V) longObjectMap.obtain(sizing); } - public Recycler.V longLongMap(int sizing) { + public Recycler.V longLongMap(int sizing) { return longLongMap.obtain(sizing); } - public Recycler.V intIntMap(int sizing) { + public Recycler.V intIntMap(int sizing) { return intIntMap.obtain(sizing); } - public Recycler.V floatIntMap(int sizing) { + public Recycler.V floatIntMap(int sizing) { return floatIntMap.obtain(sizing); } - public Recycler.V doubleIntMap(int sizing) { + public Recycler.V doubleIntMap(int sizing) { return doubleIntMap.obtain(sizing); } - public Recycler.V longIntMap(int sizing) { + public Recycler.V longIntMap(int sizing) { return longIntMap.obtain(sizing); } - public Recycler.V> objectIntMap(int sizing) { + public Recycler.V> objectIntMap(int sizing) { return (Recycler.V) objectIntMap.obtain(sizing); } - public Recycler.V> intObjectMap(int sizing) { + public Recycler.V> intObjectMap(int sizing) { return (Recycler.V) intObjectMap.obtain(sizing); } - public Recycler.V> objectFloatMap(int sizing) { + public Recycler.V> objectFloatMap(int sizing) { return (Recycler.V) objectFloatMap.obtain(sizing); } diff --git a/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java b/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java index f66774ea0f0e0..388dd7d75caa0 100644 --- a/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java +++ b/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java @@ -21,7 +21,6 @@ import com.google.common.base.Predicate; import com.google.common.collect.*; -import gnu.trove.set.hash.THashSet; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.ElasticSearchIllegalArgumentException; @@ -38,7 +37,6 @@ import org.elasticsearch.common.settings.ImmutableSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.loader.SettingsLoader; -import org.elasticsearch.common.trove.ExtTHashMap; import org.elasticsearch.common.xcontent.*; import org.elasticsearch.index.Index; import org.elasticsearch.indices.IndexMissingException; @@ -160,26 +158,24 @@ public static Custom.Factory 
lookupFactorySafe(String type this.allOpenIndices = allOpenIndices.toArray(new String[allOpenIndices.size()]); // build aliases map - ExtTHashMap> aliases = new ExtTHashMap>(numAliases); + Map> tmpAliases = new HashMap>(numAliases); for (IndexMetaData indexMetaData : indices.values()) { String index = indexMetaData.index(); for (AliasMetaData aliasMd : indexMetaData.aliases().values()) { - Map indexAliasMap = aliases.get(aliasMd.alias()); + Map indexAliasMap = tmpAliases.get(aliasMd.alias()); if (indexAliasMap == null) { - indexAliasMap = new ExtTHashMap(indices.size()); - aliases.put(aliasMd.alias(), indexAliasMap); + indexAliasMap = new HashMap(indices.size()); + tmpAliases.put(aliasMd.alias(), indexAliasMap); } indexAliasMap.put(index, aliasMd); } } - for (int i = 0; i < aliases.internalValues().length; i++) { - if (aliases.internalValues()[i] != null) { - aliases.internalValues()[i] = XMaps.makeReadOnly((Map) aliases.internalValues()[i]); - } + for (String alias : tmpAliases.keySet()) { + tmpAliases.put(alias, XMaps.makeReadOnly(tmpAliases.get(alias))); } - this.aliases = XMaps.makeReadOnly(aliases); + this.aliases = XMaps.makeReadOnly(tmpAliases); - ExtTHashMap aliasAndIndexToIndexMap = new ExtTHashMap(numAliases + numIndices); + Map aliasAndIndexToIndexMap = new HashMap(numAliases + numIndices); for (IndexMetaData indexMetaData : indices.values()) { StringArray indicesLst = aliasAndIndexToIndexMap.get(indexMetaData.index()); if (indicesLst == null) { @@ -198,8 +194,8 @@ public static Custom.Factory lookupFactorySafe(String type } } - for (StringArray stringArray : aliasAndIndexToIndexMap.values()) { - stringArray.trim(); + for (StringArray value : aliasAndIndexToIndexMap.values()) { + value.trim(); } this.aliasAndIndexToIndexMap = XMaps.makeReadOnly(aliasAndIndexToIndexMap); @@ -439,7 +435,7 @@ public Map> resolveSearchRouting(@Nullable String routing, S Map> routings = null; Set paramRouting = null; // List of indices that don't require any routing - Set norouting = new THashSet(); + Set norouting = new HashSet(); if (routing != null) { paramRouting = Strings.splitStringByCommaToSet(routing); } @@ -456,7 +452,7 @@ public Map> resolveSearchRouting(@Nullable String routing, S } Set r = routings.get(indexRouting.getKey()); if (r == null) { - r = new THashSet(); + r = new HashSet(); routings.put(indexRouting.getKey(), r); } r.addAll(indexRouting.getValue().searchRoutingValues()); @@ -471,7 +467,7 @@ public Map> resolveSearchRouting(@Nullable String routing, S if (!norouting.contains(indexRouting.getKey())) { norouting.add(indexRouting.getKey()); if (paramRouting != null) { - Set r = new THashSet(paramRouting); + Set r = new HashSet(paramRouting); if (routings == null) { routings = newHashMap(); } @@ -490,7 +486,7 @@ public Map> resolveSearchRouting(@Nullable String routing, S if (!norouting.contains(aliasOrIndex)) { norouting.add(aliasOrIndex); if (paramRouting != null) { - Set r = new THashSet(paramRouting); + Set r = new HashSet(paramRouting); if (routings == null) { routings = newHashMap(); } @@ -523,7 +519,7 @@ private Map> resolveSearchRoutingSingleValue(@Nullable Strin for (Map.Entry indexRouting : indexToRoutingMap.entrySet()) { if (!indexRouting.getValue().searchRoutingValues().isEmpty()) { // Routing alias - Set r = new THashSet(indexRouting.getValue().searchRoutingValues()); + Set r = new HashSet(indexRouting.getValue().searchRoutingValues()); if (paramRouting != null) { r.retainAll(paramRouting); } @@ -536,7 +532,7 @@ private Map> resolveSearchRoutingSingleValue(@Nullable 
Strin } else { // Non-routing alias if (paramRouting != null) { - Set r = new THashSet(paramRouting); + Set r = new HashSet(paramRouting); if (routings == null) { routings = newHashMap(); } @@ -619,7 +615,7 @@ public String[] concreteIndices(String[] aliasesOrIndices, IgnoreIndices ignoreI return aliasesOrIndices; } - Set actualIndices = new THashSet(); + Set actualIndices = new HashSet(); for (String index : aliasesOrIndices) { StringArray actualLst = aliasAndIndexToIndexMap.get(index); if (actualLst == null) { @@ -680,7 +676,7 @@ public String[] convertFromWildcards(String[] aliasesOrIndices, boolean wildcard } else if (aliasOrIndex.charAt(0) == '-') { // if its the first, fill it with all the indices... if (i == 0) { - result = new THashSet(Arrays.asList(wildcardOnlyOpen ? concreteAllOpenIndices() : concreteAllIndices())); + result = new HashSet(Arrays.asList(wildcardOnlyOpen ? concreteAllOpenIndices() : concreteAllIndices())); } add = false; aliasOrIndex = aliasOrIndex.substring(1); @@ -700,7 +696,7 @@ public String[] convertFromWildcards(String[] aliasesOrIndices, boolean wildcard } if (result == null) { // add all the previous ones... - result = new THashSet(); + result = new HashSet(); result.addAll(Arrays.asList(aliasesOrIndices).subList(0, i)); } String[] indices = wildcardOnlyOpen ? concreteAllOpenIndices() : concreteAllIndices(); diff --git a/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java b/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java index 0a78a6a33fd66..1f105a878dd8b 100644 --- a/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java +++ b/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java @@ -19,10 +19,10 @@ package org.elasticsearch.cluster.routing; +import com.carrotsearch.hppc.ObjectIntOpenHashMap; import com.google.common.base.Predicate; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Sets; -import gnu.trove.map.hash.TObjectIntHashMap; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -55,7 +55,7 @@ public class RoutingNodes implements Iterable { private Set clearPostAllocationFlag; - private final Map> nodesPerAttributeNames = new HashMap>(); + private final Map> nodesPerAttributeNames = new HashMap>(); public RoutingNodes(ClusterState clusterState) { this.metaData = clusterState.metaData(); @@ -188,15 +188,15 @@ public RoutingNode node(String nodeId) { return nodesToShards.get(nodeId); } - public TObjectIntHashMap nodesPerAttributesCounts(String attributeName) { - TObjectIntHashMap nodesPerAttributesCounts = nodesPerAttributeNames.get(attributeName); + public ObjectIntOpenHashMap nodesPerAttributesCounts(String attributeName) { + ObjectIntOpenHashMap nodesPerAttributesCounts = nodesPerAttributeNames.get(attributeName); if (nodesPerAttributesCounts != null) { return nodesPerAttributesCounts; } - nodesPerAttributesCounts = new TObjectIntHashMap(); + nodesPerAttributesCounts = new ObjectIntOpenHashMap(); for (RoutingNode routingNode : this) { String attrValue = routingNode.node().attributes().get(attributeName); - nodesPerAttributesCounts.adjustOrPutValue(attrValue, 1, 1); + nodesPerAttributesCounts.addTo(attrValue, 1); } nodesPerAttributeNames.put(attributeName, nodesPerAttributesCounts); return nodesPerAttributesCounts; diff --git a/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/EvenShardsCountAllocator.java 
b/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/EvenShardsCountAllocator.java index be1bec613075b..a2e4401e41ad3 100644 --- a/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/EvenShardsCountAllocator.java +++ b/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/EvenShardsCountAllocator.java @@ -19,7 +19,7 @@ package org.elasticsearch.cluster.routing.allocation.allocator; -import gnu.trove.map.hash.TObjectIntHashMap; +import com.carrotsearch.hppc.ObjectIntOpenHashMap; import org.elasticsearch.cluster.routing.MutableShardRouting; import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.RoutingNodes; @@ -225,12 +225,12 @@ public boolean move(MutableShardRouting shardRouting, RoutingNode node, RoutingA private RoutingNode[] sortedNodesLeastToHigh(RoutingAllocation allocation) { // create count per node id, taking into account relocations - final TObjectIntHashMap nodeCounts = new TObjectIntHashMap(); + final ObjectIntOpenHashMap nodeCounts = new ObjectIntOpenHashMap(); for (RoutingNode node : allocation.routingNodes()) { for (int i = 0; i < node.shards().size(); i++) { ShardRouting shardRouting = node.shards().get(i); String nodeId = shardRouting.relocating() ? shardRouting.relocatingNodeId() : shardRouting.currentNodeId(); - nodeCounts.adjustOrPutValue(nodeId, 1, 1); + nodeCounts.addTo(nodeId, 1); } } RoutingNode[] nodes = allocation.routingNodes().nodesToShards().values().toArray(new RoutingNode[allocation.routingNodes().nodesToShards().values().size()]); diff --git a/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java b/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java index 684d824326b1b..21ab3bbf28c10 100644 --- a/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java +++ b/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java @@ -19,8 +19,8 @@ package org.elasticsearch.cluster.routing.allocation.decider; +import com.carrotsearch.hppc.ObjectIntOpenHashMap; import com.google.common.collect.Maps; -import gnu.trove.map.hash.TObjectIntHashMap; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.MutableShardRouting; import org.elasticsearch.cluster.routing.RoutingNode; @@ -176,10 +176,10 @@ private boolean underCapacity(ShardRouting shardRouting, RoutingNode node, Routi } // build attr_value -> nodes map - TObjectIntHashMap nodesPerAttribute = allocation.routingNodes().nodesPerAttributesCounts(awarenessAttribute); + ObjectIntOpenHashMap nodesPerAttribute = allocation.routingNodes().nodesPerAttributesCounts(awarenessAttribute); // build the count of shards per attribute value - TObjectIntHashMap shardPerAttribute = new TObjectIntHashMap(); + ObjectIntOpenHashMap shardPerAttribute = new ObjectIntOpenHashMap(); for (RoutingNode routingNode : allocation.routingNodes()) { for (int i = 0; i < routingNode.shards().size(); i++) { MutableShardRouting nodeShardRouting = routingNode.shards().get(i); @@ -187,9 +187,9 @@ private boolean underCapacity(ShardRouting shardRouting, RoutingNode node, Routi // if the shard is relocating, then make sure we count it as part of the node it is relocating to if (nodeShardRouting.relocating()) { RoutingNode relocationNode = allocation.routingNodes().node(nodeShardRouting.relocatingNodeId()); - 
shardPerAttribute.adjustOrPutValue(relocationNode.node().attributes().get(awarenessAttribute), 1, 1); + shardPerAttribute.addTo(relocationNode.node().attributes().get(awarenessAttribute), 1); } else if (nodeShardRouting.started()) { - shardPerAttribute.adjustOrPutValue(routingNode.node().attributes().get(awarenessAttribute), 1, 1); + shardPerAttribute.addTo(routingNode.node().attributes().get(awarenessAttribute), 1); } } } @@ -199,11 +199,11 @@ private boolean underCapacity(ShardRouting shardRouting, RoutingNode node, Routi String nodeId = shardRouting.relocating() ? shardRouting.relocatingNodeId() : shardRouting.currentNodeId(); if (!node.nodeId().equals(nodeId)) { // we work on different nodes, move counts around - shardPerAttribute.adjustOrPutValue(allocation.routingNodes().node(nodeId).node().attributes().get(awarenessAttribute), -1, 0); - shardPerAttribute.adjustOrPutValue(node.node().attributes().get(awarenessAttribute), 1, 1); + shardPerAttribute.putOrAdd(allocation.routingNodes().node(nodeId).node().attributes().get(awarenessAttribute), 0, -1); + shardPerAttribute.addTo(node.node().attributes().get(awarenessAttribute), 1); } } else { - shardPerAttribute.adjustOrPutValue(node.node().attributes().get(awarenessAttribute), 1, 1); + shardPerAttribute.addTo(node.node().attributes().get(awarenessAttribute), 1); } } @@ -211,7 +211,7 @@ private boolean underCapacity(ShardRouting shardRouting, RoutingNode node, Routi String[] fullValues = forcedAwarenessAttributes.get(awarenessAttribute); if (fullValues != null) { for (String fullValue : fullValues) { - if (!shardPerAttribute.contains(fullValue)) { + if (!shardPerAttribute.containsKey(fullValue)) { numberOfAttributes++; } } diff --git a/src/main/java/org/elasticsearch/common/Strings.java b/src/main/java/org/elasticsearch/common/Strings.java index 7ee2a275c69cd..b75560f2e84f4 100644 --- a/src/main/java/org/elasticsearch/common/Strings.java +++ b/src/main/java/org/elasticsearch/common/Strings.java @@ -21,7 +21,7 @@ import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; -import gnu.trove.set.hash.THashSet; + import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.UnicodeUtil; import org.elasticsearch.ElasticSearchIllegalStateException; @@ -1021,7 +1021,8 @@ public static Set splitStringToSet(final String s, final char c) { count++; } } - final THashSet result = new THashSet(count); + // TODO (MvG): No push: hppc or jcf? + final Set result = new HashSet(count); final int len = chars.length; int start = 0; // starting index in chars of the current substring. int pos = 0; // current index in chars. 
diff --git a/src/main/java/org/elasticsearch/common/collect/XMaps.java b/src/main/java/org/elasticsearch/common/collect/XMaps.java index 52e51081285eb..842cbc8b04e72 100644 --- a/src/main/java/org/elasticsearch/common/collect/XMaps.java +++ b/src/main/java/org/elasticsearch/common/collect/XMaps.java @@ -19,11 +19,6 @@ package org.elasticsearch.common.collect; -import com.google.common.collect.ForwardingMap; -import gnu.trove.impl.Constants; -import org.elasticsearch.ElasticSearchIllegalArgumentException; -import org.elasticsearch.common.trove.ExtTHashMap; - import java.util.Collections; import java.util.Map; @@ -36,59 +31,6 @@ */ public final class XMaps { - public static final int DEFAULT_CAPACITY = Constants.DEFAULT_CAPACITY; - - /** - * Returns a new map with the given initial capacity - */ - public static Map newMap(int capacity) { - return new ExtTHashMap(capacity, Constants.DEFAULT_LOAD_FACTOR); - } - - /** - * Returns a new map with a default initial capacity of - * {@value #DEFAULT_CAPACITY} - */ - public static Map newMap() { - return newMap(DEFAULT_CAPACITY); - } - - /** - * Returns a map like {@link #newMap()} that does not accept null keys - */ - public static Map newNoNullKeysMap() { - Map delegate = newMap(); - return ensureNoNullKeys(delegate); - } - - /** - * Returns a map like {@link #newMap(in)} that does not accept null keys - */ - public static Map newNoNullKeysMap(int capacity) { - Map delegate = newMap(capacity); - return ensureNoNullKeys(delegate); - } - - /** - * Wraps the given map and prevent adding of null keys. - */ - public static Map ensureNoNullKeys(final Map delegate) { - return new ForwardingMap() { - @Override - public V put(K key, V value) { - if (key == null) { - throw new ElasticSearchIllegalArgumentException("Map key must not be null"); - } - return super.put(key, value); - } - - @Override - protected Map delegate() { - return delegate; - } - }; - } - /** * Wraps the given map into a read only implementation. */ diff --git a/src/main/java/org/elasticsearch/common/hppc/HppcMaps.java b/src/main/java/org/elasticsearch/common/hppc/HppcMaps.java new file mode 100644 index 0000000000000..f2bb0fc993861 --- /dev/null +++ b/src/main/java/org/elasticsearch/common/hppc/HppcMaps.java @@ -0,0 +1,81 @@ +package org.elasticsearch.common.hppc; + +import com.carrotsearch.hppc.ObjectIntOpenHashMap; +import com.carrotsearch.hppc.ObjectObjectOpenHashMap; +import org.elasticsearch.ElasticSearchIllegalArgumentException; + +/** + */ +public final class HppcMaps { + + private HppcMaps() { + } + + /** + * Returns a new map with the given initial capacity + */ + public static ObjectObjectOpenHashMap newMap(int capacity) { + return new ObjectObjectOpenHashMap(capacity); + } + + /** + * Returns a new map with a default initial capacity of + * {@value com.carrotsearch.hppc.HashContainerUtils#DEFAULT_CAPACITY} + */ + public static ObjectObjectOpenHashMap newMap() { + return newMap(16); + } + + /** + * Returns a map like {@link #newMap()} that does not accept null keys + */ + public static ObjectObjectOpenHashMap newNoNullKeysMap() { + return ensureNoNullKeys(16); + } + + /** + * Returns a map like {@link #newMap(int)} that does not accept null keys + */ + public static ObjectObjectOpenHashMap newNoNullKeysMap(int capacity) { + return ensureNoNullKeys(capacity); + } + + /** + * Wraps the given map and prevent adding of null keys. 
+ */ + public static ObjectObjectOpenHashMap ensureNoNullKeys(int capacity) { + return new ObjectObjectOpenHashMap(capacity) { + + @Override + public V put(K key, V value) { + if (key == null) { + throw new ElasticSearchIllegalArgumentException("Map key must not be null"); + } + return super.put(key, value); + } + + }; + } + + public final static class Object { + + public final static class Integer { + + public static ObjectIntOpenHashMap ensureNoNullKeys(int capacity, float loadFactor) { + return new ObjectIntOpenHashMap(capacity, loadFactor) { + + @Override + public int put(V key, int value) { + if (key == null) { + throw new ElasticSearchIllegalArgumentException("Map key must not be null"); + } + return super.put(key, value); + } + }; + } + + } + + } + +} diff --git a/src/main/java/org/elasticsearch/common/io/stream/HandlesStreamInput.java b/src/main/java/org/elasticsearch/common/io/stream/HandlesStreamInput.java index 4520daed2e2be..ebd4c9a2c3d0e 100644 --- a/src/main/java/org/elasticsearch/common/io/stream/HandlesStreamInput.java +++ b/src/main/java/org/elasticsearch/common/io/stream/HandlesStreamInput.java @@ -19,7 +19,7 @@ package org.elasticsearch.common.io.stream; -import gnu.trove.map.hash.TIntObjectHashMap; +import com.carrotsearch.hppc.IntObjectOpenHashMap; import org.elasticsearch.common.text.Text; import java.io.IOException; @@ -29,8 +29,8 @@ */ public class HandlesStreamInput extends AdapterStreamInput { - private final TIntObjectHashMap handles = new TIntObjectHashMap(); - private final TIntObjectHashMap handlesText = new TIntObjectHashMap(); + private final IntObjectOpenHashMap handles = new IntObjectOpenHashMap(); + private final IntObjectOpenHashMap handlesText = new IntObjectOpenHashMap(); HandlesStreamInput() { super(); diff --git a/src/main/java/org/elasticsearch/common/io/stream/HandlesStreamOutput.java b/src/main/java/org/elasticsearch/common/io/stream/HandlesStreamOutput.java index a092e7bd72a58..dfc8306c749d8 100644 --- a/src/main/java/org/elasticsearch/common/io/stream/HandlesStreamOutput.java +++ b/src/main/java/org/elasticsearch/common/io/stream/HandlesStreamOutput.java @@ -19,8 +19,7 @@ package org.elasticsearch.common.io.stream; -import gnu.trove.impl.Constants; -import gnu.trove.map.hash.TObjectIntHashMap; +import com.carrotsearch.hppc.ObjectIntOpenHashMap; import org.elasticsearch.common.text.Text; import java.io.IOException; @@ -30,8 +29,8 @@ */ public class HandlesStreamOutput extends AdapterStreamOutput { - private final TObjectIntHashMap handles = new TObjectIntHashMap(Constants.DEFAULT_CAPACITY, Constants.DEFAULT_LOAD_FACTOR, -1); - private final TObjectIntHashMap handlesText = new TObjectIntHashMap(Constants.DEFAULT_CAPACITY, Constants.DEFAULT_LOAD_FACTOR, -1); + private final ObjectIntOpenHashMap handles = new ObjectIntOpenHashMap(); + private final ObjectIntOpenHashMap handlesText = new ObjectIntOpenHashMap(); public HandlesStreamOutput(StreamOutput out) { super(out); @@ -39,16 +38,15 @@ public HandlesStreamOutput(StreamOutput out) { @Override public void writeSharedString(String str) throws IOException { - int handle = handles.get(str); - if (handle == -1) { - handle = handles.size(); + if (handles.containsKey(str)) { + out.writeByte((byte) 1); + out.writeVInt(handles.lget()); + } else { + int handle = handles.size(); handles.put(str, handle); out.writeByte((byte) 0); out.writeVInt(handle); out.writeString(str); - } else { - out.writeByte((byte) 1); - out.writeVInt(handle); } } @@ -59,16 +57,15 @@ public void writeString(String s) throws 
IOException { @Override public void writeSharedText(Text text) throws IOException { - int handle = handlesText.get(text); - if (handle == -1) { - handle = handlesText.size(); + if (handlesText.containsKey(text)) { + out.writeByte((byte) 1); + out.writeVInt(handlesText.lget()); + } else { + int handle = handlesText.size(); handlesText.put(text, handle); out.writeByte((byte) 0); out.writeVInt(handle); out.writeText(text); - } else { - out.writeByte((byte) 1); - out.writeVInt(handle); } } diff --git a/src/main/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQuery.java b/src/main/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQuery.java index 824f539c30a48..f480e6995f42e 100644 --- a/src/main/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQuery.java +++ b/src/main/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQuery.java @@ -19,7 +19,7 @@ package org.elasticsearch.common.lucene.search; -import gnu.trove.set.hash.THashSet; +import com.carrotsearch.hppc.ObjectOpenHashSet; import org.apache.lucene.index.*; import org.apache.lucene.search.MultiPhraseQuery; import org.apache.lucene.search.Query; @@ -138,7 +138,7 @@ public Query rewrite(IndexReader reader) throws IOException { } Term[] suffixTerms = termArrays.get(sizeMinus1); int position = positions.get(sizeMinus1); - Set terms = new THashSet(); + ObjectOpenHashSet terms = new ObjectOpenHashSet(); for (Term term : suffixTerms) { getPrefixTerms(terms, term, reader); if (terms.size() > maxExpansions) { @@ -148,11 +148,11 @@ public Query rewrite(IndexReader reader) throws IOException { if (terms.isEmpty()) { return MatchNoDocsQuery.INSTANCE; } - query.add(terms.toArray(new Term[terms.size()]), position); + query.add(terms.toArray(Term.class), position); return query.rewrite(reader); } - private void getPrefixTerms(Set terms, final Term prefix, final IndexReader reader) throws IOException { + private void getPrefixTerms(ObjectOpenHashSet terms, final Term prefix, final IndexReader reader) throws IOException { // SlowCompositeReaderWrapper could be used... but this would merge all terms from each segment into one terms // instance, which is very expensive. Therefore I think it is better to iterate over each leaf individually. 
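The containsKey()/lget() idiom introduced above in HandlesStreamOutput (and used again further down in SimpleIdReaderTypeCache, MultiMatchQueryBuilder and ChildrenQuery) replaces Trove's explicit no-entry value: the removed TObjectIntHashMap instances were built with -1 as the absent marker, whereas HPPC's primitive-valued maps hand back the default value (0) for a missing key, so absence has to be tested explicitly. After a successful containsKey() or get(), lget(), lkey() and lset() work on that last located slot without a second hash lookup. A rough sketch of the pattern, assuming the HPPC 0.5.x API; the handle bookkeeping mirrors writeSharedString and the string value is illustrative:

import com.carrotsearch.hppc.ObjectIntOpenHashMap;

public class LastSlotExample {
    public static void main(String[] args) {
        ObjectIntOpenHashMap<String> handles = new ObjectIntOpenHashMap<String>();

        String str = "shared-string";
        if (handles.containsKey(str)) {
            // lget() reads the value in the slot found by the containsKey() call
            // above; lset() would overwrite that same slot.
            int handle = handles.lget();
            System.out.println("existing handle: " + handle);
        } else {
            // Absent key: assign the next free handle id, as the patch does.
            int handle = handles.size();
            handles.put(str, handle);
            System.out.println("new handle: " + handle);
        }
    }
}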
TermsEnum termsEnum = null; diff --git a/src/main/java/org/elasticsearch/common/transport/PortsRange.java b/src/main/java/org/elasticsearch/common/transport/PortsRange.java index 13e9670f4aaeb..3c6ded0a4c6fc 100644 --- a/src/main/java/org/elasticsearch/common/transport/PortsRange.java +++ b/src/main/java/org/elasticsearch/common/transport/PortsRange.java @@ -19,7 +19,8 @@ package org.elasticsearch.common.transport; -import gnu.trove.list.array.TIntArrayList; + +import com.carrotsearch.hppc.IntArrayList; import java.util.StringTokenizer; @@ -35,7 +36,7 @@ public PortsRange(String portRange) { } public int[] ports() throws NumberFormatException { - final TIntArrayList ports = new TIntArrayList(); + final IntArrayList ports = new IntArrayList(); iterate(new PortCallback() { @Override public boolean onPortNumber(int portNumber) { @@ -43,7 +44,7 @@ public boolean onPortNumber(int portNumber) { return false; } }); - return ports.toArray(new int[ports.size()]); + return ports.toArray(); } public boolean iterate(PortCallback callback) throws NumberFormatException { diff --git a/src/main/java/org/elasticsearch/common/trove/ExtTDoubleObjectHashMap.java b/src/main/java/org/elasticsearch/common/trove/ExtTDoubleObjectHashMap.java deleted file mode 100644 index 95de35314267e..0000000000000 --- a/src/main/java/org/elasticsearch/common/trove/ExtTDoubleObjectHashMap.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Licensed to ElasticSearch and Shay Banon under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. ElasticSearch licenses this - * file to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.common.trove; - -import gnu.trove.map.TDoubleObjectMap; -import gnu.trove.map.hash.TDoubleObjectHashMap; - -public class ExtTDoubleObjectHashMap extends TDoubleObjectHashMap { - - public ExtTDoubleObjectHashMap() { - } - - public ExtTDoubleObjectHashMap(int initialCapacity) { - super(initialCapacity); - } - - public ExtTDoubleObjectHashMap(int initialCapacity, float loadFactor) { - super(initialCapacity, loadFactor); - } - - public ExtTDoubleObjectHashMap(int initialCapacity, float loadFactor, double noEntryKey) { - super(initialCapacity, loadFactor, noEntryKey); - } - - public ExtTDoubleObjectHashMap(TDoubleObjectMap vtDoubleObjectMap) { - super(vtDoubleObjectMap); - } - - /** - * Internal method to get the actual values associated. Some values might have "null" or no entry - * values. - */ - public Object[] internalValues() { - return this._values; - } -} \ No newline at end of file diff --git a/src/main/java/org/elasticsearch/common/trove/ExtTHashMap.java b/src/main/java/org/elasticsearch/common/trove/ExtTHashMap.java deleted file mode 100644 index 9fca8d6bb52fe..0000000000000 --- a/src/main/java/org/elasticsearch/common/trove/ExtTHashMap.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Licensed to ElasticSearch and Shay Banon under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. ElasticSearch licenses this - * file to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.common.trove; - -import gnu.trove.map.hash.THashMap; - -import java.util.Map; - -public class ExtTHashMap extends THashMap { - - public ExtTHashMap() { - } - - public ExtTHashMap(int initialCapacity) { - super(initialCapacity); - } - - public ExtTHashMap(int initialCapacity, float loadFactor) { - super(initialCapacity, loadFactor); - } - - public ExtTHashMap(Map kvMap) { - super(kvMap); - } - - public ExtTHashMap(THashMap kvtHashMap) { - super(kvtHashMap); - } - - /** - * Internal method to get the actual values associated. Some values might have "null" or no entry - * values. - */ - public Object[] internalValues() { - return this._values; - } -} \ No newline at end of file diff --git a/src/main/java/org/elasticsearch/common/trove/ExtTIntArrayList.java b/src/main/java/org/elasticsearch/common/trove/ExtTIntArrayList.java deleted file mode 100644 index 20229cedd1ac5..0000000000000 --- a/src/main/java/org/elasticsearch/common/trove/ExtTIntArrayList.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Licensed to ElasticSearch and Shay Banon under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. ElasticSearch licenses this - * file to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.common.trove; - -import gnu.trove.list.array.TIntArrayList; - -/** - * - */ -public class ExtTIntArrayList extends TIntArrayList { - - public ExtTIntArrayList() { - } - - public ExtTIntArrayList(int capacity) { - super(capacity); - } - - public ExtTIntArrayList(int[] values) { - super(values); - } - - public int[] unsafeArray() { - return _data; - } -} diff --git a/src/main/java/org/elasticsearch/common/trove/ExtTLongObjectHashMap.java b/src/main/java/org/elasticsearch/common/trove/ExtTLongObjectHashMap.java deleted file mode 100644 index 994585f48b50f..0000000000000 --- a/src/main/java/org/elasticsearch/common/trove/ExtTLongObjectHashMap.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Licensed to ElasticSearch and Shay Banon under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
ElasticSearch licenses this - * file to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.common.trove; - -import gnu.trove.map.TLongObjectMap; -import gnu.trove.map.hash.TLongObjectHashMap; - -public class ExtTLongObjectHashMap extends TLongObjectHashMap { - - public ExtTLongObjectHashMap() { - } - - public ExtTLongObjectHashMap(int initialCapacity) { - super(initialCapacity); - } - - public ExtTLongObjectHashMap(int initialCapacity, float loadFactor) { - super(initialCapacity, loadFactor); - } - - public ExtTLongObjectHashMap(int initialCapacity, float loadFactor, long noEntryKey) { - super(initialCapacity, loadFactor, noEntryKey); - } - - public ExtTLongObjectHashMap(TLongObjectMap vtLongObjectMap) { - super(vtLongObjectMap); - } - - /** - * Internal method to get the actual values associated. Some values might have "null" or no entry - * values. - */ - public Object[] internalValues() { - return this._values; - } -} \ No newline at end of file diff --git a/src/main/java/org/elasticsearch/common/trove/ExtTObjectIntHasMap.java b/src/main/java/org/elasticsearch/common/trove/ExtTObjectIntHasMap.java deleted file mode 100644 index 41422876a2515..0000000000000 --- a/src/main/java/org/elasticsearch/common/trove/ExtTObjectIntHasMap.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Licensed to ElasticSearch and Shay Banon under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. ElasticSearch licenses this - * file to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.common.trove; - -import gnu.trove.map.hash.TObjectIntHashMap; - -/** - * - */ -public class ExtTObjectIntHasMap extends TObjectIntHashMap { - - public ExtTObjectIntHasMap() { - } - - public ExtTObjectIntHasMap(int initialCapacity) { - super(initialCapacity); - } - - public ExtTObjectIntHasMap(int initialCapacity, float loadFactor) { - super(initialCapacity, loadFactor); - } - - public ExtTObjectIntHasMap(int initialCapacity, float loadFactor, int noEntryValue) { - super(initialCapacity, loadFactor, noEntryValue); - } - - /** - * Returns an already existing key, or null if it does not exists. - */ - public T key(T key) { - int index = index(key); - return index < 0 ? 
null : (T) _set[index]; - } - - public int _valuesSize() { - return _values.length; - } -} diff --git a/src/main/java/org/elasticsearch/common/trove/StringIdentityHashingStrategy.java b/src/main/java/org/elasticsearch/common/trove/StringIdentityHashingStrategy.java deleted file mode 100644 index 93de9ea80f997..0000000000000 --- a/src/main/java/org/elasticsearch/common/trove/StringIdentityHashingStrategy.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Licensed to ElasticSearch and Shay Banon under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. ElasticSearch licenses this - * file to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.common.trove; - -import gnu.trove.strategy.HashingStrategy; - -/** - * A string based hash code with identity equality. - */ -public class StringIdentityHashingStrategy implements HashingStrategy { - - static final long serialVersionUID = -5188534454583764905L; - - public int computeHashCode(String object) { - return object.hashCode(); - } - - @SuppressWarnings({"StringEquality"}) - public boolean equals(String o1, String o2) { - return o1 == o2; - } -} \ No newline at end of file diff --git a/src/main/java/org/elasticsearch/gateway/local/LocalGateway.java b/src/main/java/org/elasticsearch/gateway/local/LocalGateway.java index fd82e43fef814..4047a281ba6c7 100644 --- a/src/main/java/org/elasticsearch/gateway/local/LocalGateway.java +++ b/src/main/java/org/elasticsearch/gateway/local/LocalGateway.java @@ -19,8 +19,8 @@ package org.elasticsearch.gateway.local; +import com.carrotsearch.hppc.ObjectFloatOpenHashMap; import com.google.common.collect.Sets; -import gnu.trove.map.hash.TObjectIntHashMap; import org.elasticsearch.ElasticSearchException; import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.cluster.ClusterChangedEvent; @@ -136,7 +136,7 @@ public void performStateRecovery(final GatewayStateRecoveredListener listener) t } MetaData.Builder metaDataBuilder = MetaData.builder(); - TObjectIntHashMap indices = new TObjectIntHashMap(); + ObjectFloatOpenHashMap indices = new ObjectFloatOpenHashMap(); MetaData electedGlobalState = null; int found = 0; for (TransportNodesListGatewayMetaState.NodeLocalGatewayMetaState nodeState : nodesState) { @@ -150,7 +150,7 @@ public void performStateRecovery(final GatewayStateRecoveredListener listener) t electedGlobalState = nodeState.metaData(); } for (IndexMetaData indexMetaData : nodeState.metaData().indices().values()) { - indices.adjustOrPutValue(indexMetaData.index(), 1, 1); + indices.addTo(indexMetaData.index(), 1); } } if (found < requiredAllocation) { @@ -159,29 +159,34 @@ public void performStateRecovery(final GatewayStateRecoveredListener listener) t } // update the global state, and clean the indices, we elect them in the next phase metaDataBuilder.metaData(electedGlobalState).removeAllIndices(); - for (String index : indices.keySet()) { - IndexMetaData 
electedIndexMetaData = null; - int indexMetaDataCount = 0; - for (TransportNodesListGatewayMetaState.NodeLocalGatewayMetaState nodeState : nodesState) { - if (nodeState.metaData() == null) { - continue; + final boolean[] states = indices.allocated; + final Object[] keys = indices.keys; + for (int i = 0; i < states.length; i++) { + if (states[i]) { + String index = (String) keys[i]; + IndexMetaData electedIndexMetaData = null; + int indexMetaDataCount = 0; + for (TransportNodesListGatewayMetaState.NodeLocalGatewayMetaState nodeState : nodesState) { + if (nodeState.metaData() == null) { + continue; + } + IndexMetaData indexMetaData = nodeState.metaData().index(index); + if (indexMetaData == null) { + continue; + } + if (electedIndexMetaData == null) { + electedIndexMetaData = indexMetaData; + } else if (indexMetaData.version() > electedIndexMetaData.version()) { + electedIndexMetaData = indexMetaData; + } + indexMetaDataCount++; } - IndexMetaData indexMetaData = nodeState.metaData().index(index); - if (indexMetaData == null) { - continue; + if (electedIndexMetaData != null) { + if (indexMetaDataCount < requiredAllocation) { + logger.debug("[{}] found [{}], required [{}], not adding", index, indexMetaDataCount, requiredAllocation); + } + metaDataBuilder.put(electedIndexMetaData, false); } - if (electedIndexMetaData == null) { - electedIndexMetaData = indexMetaData; - } else if (indexMetaData.version() > electedIndexMetaData.version()) { - electedIndexMetaData = indexMetaData; - } - indexMetaDataCount++; - } - if (electedIndexMetaData != null) { - if (indexMetaDataCount < requiredAllocation) { - logger.debug("[{}] found [{}], required [{}], not adding", index, indexMetaDataCount, requiredAllocation); - } - metaDataBuilder.put(electedIndexMetaData, false); } } ClusterState.Builder builder = ClusterState.builder(); diff --git a/src/main/java/org/elasticsearch/gateway/local/LocalGatewayAllocator.java b/src/main/java/org/elasticsearch/gateway/local/LocalGatewayAllocator.java index d0cc63c67929b..cd098a7742a0c 100644 --- a/src/main/java/org/elasticsearch/gateway/local/LocalGatewayAllocator.java +++ b/src/main/java/org/elasticsearch/gateway/local/LocalGatewayAllocator.java @@ -19,10 +19,10 @@ package org.elasticsearch.gateway.local; +import com.carrotsearch.hppc.ObjectLongOpenHashMap; +import com.carrotsearch.hppc.cursors.ObjectLongCursor; import com.google.common.collect.Maps; import com.google.common.collect.Sets; -import gnu.trove.iterator.TObjectLongIterator; -import gnu.trove.map.hash.TObjectLongHashMap; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -66,7 +66,7 @@ public class LocalGatewayAllocator extends AbstractComponent implements GatewayA private final ConcurrentMap> cachedStores = ConcurrentCollections.newConcurrentMap(); - private final ConcurrentMap> cachedShardsState = ConcurrentCollections.newConcurrentMap(); + private final ConcurrentMap> cachedShardsState = ConcurrentCollections.newConcurrentMap(); private final TimeValue listTimeout; @@ -121,15 +121,21 @@ public boolean allocateUnassigned(RoutingAllocation allocation) { continue; } - TObjectLongHashMap nodesState = buildShardStates(nodes, shard); + ObjectLongOpenHashMap nodesState = buildShardStates(nodes, shard); int numberOfAllocationsFound = 0; long highestVersion = -1; Set nodesWithHighestVersion = Sets.newHashSet(); - for (TObjectLongIterator it = nodesState.iterator(); it.hasNext(); ) { - it.advance(); - DiscoveryNode 
node = it.key(); - long version = it.value(); + final boolean[] states = nodesState.allocated; + final Object[] keys = nodesState.keys; + final long[] values = nodesState.values; + for (int i = 0; i < states.length; i++) { + if (!states[i]) { + continue; + } + + DiscoveryNode node = (DiscoveryNode) keys[i]; + long version = values[i]; // since we don't check in NO allocation, we need to double check here if (allocation.shouldIgnoreShardForNode(shard.shardId(), node.id())) { continue; @@ -352,18 +358,18 @@ public boolean allocateUnassigned(RoutingAllocation allocation) { return changed; } - private TObjectLongHashMap buildShardStates(DiscoveryNodes nodes, MutableShardRouting shard) { - TObjectLongHashMap shardStates = cachedShardsState.get(shard.shardId()); + private ObjectLongOpenHashMap buildShardStates(DiscoveryNodes nodes, MutableShardRouting shard) { + ObjectLongOpenHashMap shardStates = cachedShardsState.get(shard.shardId()); Set nodeIds; if (shardStates == null) { - shardStates = new TObjectLongHashMap(); + shardStates = new ObjectLongOpenHashMap(); cachedShardsState.put(shard.shardId(), shardStates); nodeIds = nodes.dataNodes().keySet(); } else { // clean nodes that have failed - for (TObjectLongIterator it = shardStates.iterator(); it.hasNext(); ) { - it.advance(); - if (!nodes.nodeExists(it.key().id())) { + for (Iterator> it = shardStates.iterator(); it.hasNext(); ) { + DiscoveryNode node = it.next().key; + if (!nodes.nodeExists(node.id())) { it.remove(); } } diff --git a/src/main/java/org/elasticsearch/index/analysis/NumericDoubleAnalyzer.java b/src/main/java/org/elasticsearch/index/analysis/NumericDoubleAnalyzer.java index e23caae374440..ab0c283fe9ee2 100644 --- a/src/main/java/org/elasticsearch/index/analysis/NumericDoubleAnalyzer.java +++ b/src/main/java/org/elasticsearch/index/analysis/NumericDoubleAnalyzer.java @@ -19,7 +19,7 @@ package org.elasticsearch.index.analysis; -import gnu.trove.map.hash.TIntObjectHashMap; +import com.carrotsearch.hppc.IntObjectOpenHashMap; import org.apache.lucene.util.NumericUtils; import java.io.IOException; @@ -30,10 +30,10 @@ */ public class NumericDoubleAnalyzer extends NumericAnalyzer { - private final static TIntObjectHashMap builtIn; + private final static IntObjectOpenHashMap builtIn; static { - builtIn = new TIntObjectHashMap(); + builtIn = new IntObjectOpenHashMap(); builtIn.put(Integer.MAX_VALUE, new NamedAnalyzer("_double/max", AnalyzerScope.GLOBAL, new NumericDoubleAnalyzer(Integer.MAX_VALUE))); for (int i = 0; i <= 64; i += 4) { builtIn.put(i, new NamedAnalyzer("_double/" + i, AnalyzerScope.GLOBAL, new NumericDoubleAnalyzer(i))); diff --git a/src/main/java/org/elasticsearch/index/analysis/NumericFloatAnalyzer.java b/src/main/java/org/elasticsearch/index/analysis/NumericFloatAnalyzer.java index ee62c6b7d9598..108b33bb59a7f 100644 --- a/src/main/java/org/elasticsearch/index/analysis/NumericFloatAnalyzer.java +++ b/src/main/java/org/elasticsearch/index/analysis/NumericFloatAnalyzer.java @@ -19,7 +19,7 @@ package org.elasticsearch.index.analysis; -import gnu.trove.map.hash.TIntObjectHashMap; +import com.carrotsearch.hppc.IntObjectOpenHashMap; import org.apache.lucene.util.NumericUtils; import java.io.IOException; @@ -30,10 +30,10 @@ */ public class NumericFloatAnalyzer extends NumericAnalyzer { - private final static TIntObjectHashMap builtIn; + private final static IntObjectOpenHashMap builtIn; static { - builtIn = new TIntObjectHashMap(); + builtIn = new IntObjectOpenHashMap(); builtIn.put(Integer.MAX_VALUE, new 
NamedAnalyzer("_float/max", AnalyzerScope.GLOBAL, new NumericFloatAnalyzer(Integer.MAX_VALUE))); for (int i = 0; i <= 64; i += 4) { builtIn.put(i, new NamedAnalyzer("_float/" + i, AnalyzerScope.GLOBAL, new NumericFloatAnalyzer(i))); diff --git a/src/main/java/org/elasticsearch/index/analysis/NumericIntegerAnalyzer.java b/src/main/java/org/elasticsearch/index/analysis/NumericIntegerAnalyzer.java index 8ae78ffce120a..7208e54e6e34c 100644 --- a/src/main/java/org/elasticsearch/index/analysis/NumericIntegerAnalyzer.java +++ b/src/main/java/org/elasticsearch/index/analysis/NumericIntegerAnalyzer.java @@ -19,7 +19,7 @@ package org.elasticsearch.index.analysis; -import gnu.trove.map.hash.TIntObjectHashMap; +import com.carrotsearch.hppc.IntObjectOpenHashMap; import org.apache.lucene.util.NumericUtils; import java.io.IOException; @@ -30,10 +30,10 @@ */ public class NumericIntegerAnalyzer extends NumericAnalyzer { - private final static TIntObjectHashMap builtIn; + private final static IntObjectOpenHashMap builtIn; static { - builtIn = new TIntObjectHashMap(); + builtIn = new IntObjectOpenHashMap(); builtIn.put(Integer.MAX_VALUE, new NamedAnalyzer("_int/max", AnalyzerScope.GLOBAL, new NumericIntegerAnalyzer(Integer.MAX_VALUE))); for (int i = 0; i <= 64; i += 4) { builtIn.put(i, new NamedAnalyzer("_int/" + i, AnalyzerScope.GLOBAL, new NumericIntegerAnalyzer(i))); diff --git a/src/main/java/org/elasticsearch/index/analysis/NumericLongAnalyzer.java b/src/main/java/org/elasticsearch/index/analysis/NumericLongAnalyzer.java index 079fd04e950ff..92ad80051433f 100644 --- a/src/main/java/org/elasticsearch/index/analysis/NumericLongAnalyzer.java +++ b/src/main/java/org/elasticsearch/index/analysis/NumericLongAnalyzer.java @@ -19,7 +19,7 @@ package org.elasticsearch.index.analysis; -import gnu.trove.map.hash.TIntObjectHashMap; +import com.carrotsearch.hppc.IntObjectOpenHashMap; import org.apache.lucene.util.NumericUtils; import java.io.IOException; @@ -30,10 +30,10 @@ */ public class NumericLongAnalyzer extends NumericAnalyzer { - private final static TIntObjectHashMap builtIn; + private final static IntObjectOpenHashMap builtIn; static { - builtIn = new TIntObjectHashMap(); + builtIn = new IntObjectOpenHashMap(); builtIn.put(Integer.MAX_VALUE, new NamedAnalyzer("_long/max", AnalyzerScope.GLOBAL, new NumericLongAnalyzer(Integer.MAX_VALUE))); for (int i = 0; i <= 64; i += 4) { builtIn.put(i, new NamedAnalyzer("_long/" + i, AnalyzerScope.GLOBAL, new NumericLongAnalyzer(i))); diff --git a/src/main/java/org/elasticsearch/index/cache/id/simple/SimpleIdCache.java b/src/main/java/org/elasticsearch/index/cache/id/simple/SimpleIdCache.java index 34f04f6f65993..8196a5eb41700 100644 --- a/src/main/java/org/elasticsearch/index/cache/id/simple/SimpleIdCache.java +++ b/src/main/java/org/elasticsearch/index/cache/id/simple/SimpleIdCache.java @@ -19,10 +19,9 @@ package org.elasticsearch.index.cache.id.simple; -import gnu.trove.impl.Constants; +import com.carrotsearch.hppc.ObjectIntOpenHashMap; import org.apache.lucene.index.*; import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.UnicodeUtil; import org.elasticsearch.ElasticSearchException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.HashedBytesArray; @@ -30,7 +29,6 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.text.UTF8SortedAsUnicodeComparator; -import org.elasticsearch.common.trove.ExtTObjectIntHasMap; import 
org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.index.AbstractIndexComponent; import org.elasticsearch.index.Index; @@ -304,7 +302,7 @@ private boolean refreshNeeded(List atomicReaderContexts) { } static class TypeBuilder { - final ExtTObjectIntHasMap idToDoc = new ExtTObjectIntHasMap(Constants.DEFAULT_CAPACITY, Constants.DEFAULT_LOAD_FACTOR, -1); + final ObjectIntOpenHashMap idToDoc = new ObjectIntOpenHashMap(); final HashedBytesArray[] docToId; final ArrayList parentIdsValues = new ArrayList(); final int[] parentIdsOrdinals; @@ -321,7 +319,11 @@ static class TypeBuilder { * Returns an already stored instance if exists, if not, returns null; */ public HashedBytesArray canReuse(HashedBytesArray id) { - return idToDoc.key(id); + if (idToDoc.containsKey(id)) { + return idToDoc.lkey(); + } else { + return id; + } } } } diff --git a/src/main/java/org/elasticsearch/index/cache/id/simple/SimpleIdReaderTypeCache.java b/src/main/java/org/elasticsearch/index/cache/id/simple/SimpleIdReaderTypeCache.java index c31c043f0498a..e27f9d9290ce4 100644 --- a/src/main/java/org/elasticsearch/index/cache/id/simple/SimpleIdReaderTypeCache.java +++ b/src/main/java/org/elasticsearch/index/cache/id/simple/SimpleIdReaderTypeCache.java @@ -19,10 +19,9 @@ package org.elasticsearch.index.cache.id.simple; -import gnu.trove.impl.hash.TObjectHash; +import com.carrotsearch.hppc.ObjectIntOpenHashMap; import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.bytes.HashedBytesArray; -import org.elasticsearch.common.trove.ExtTObjectIntHasMap; import org.elasticsearch.index.cache.id.IdReaderTypeCache; /** @@ -32,7 +31,7 @@ public class SimpleIdReaderTypeCache implements IdReaderTypeCache { private final String type; - private final ExtTObjectIntHasMap idToDoc; + private final ObjectIntOpenHashMap idToDoc; private final HashedBytesArray[] docIdToId; @@ -42,12 +41,11 @@ public class SimpleIdReaderTypeCache implements IdReaderTypeCache { private long sizeInBytes = -1; - public SimpleIdReaderTypeCache(String type, ExtTObjectIntHasMap idToDoc, HashedBytesArray[] docIdToId, + public SimpleIdReaderTypeCache(String type, ObjectIntOpenHashMap idToDoc, HashedBytesArray[] docIdToId, HashedBytesArray[] parentIdsValues, int[] parentIdsOrdinals) { this.type = type; this.idToDoc = idToDoc; this.docIdToId = docIdToId; - this.idToDoc.trimToSize(); this.parentIdsValues = parentIdsValues; this.parentIdsOrdinals = parentIdsOrdinals; } @@ -61,7 +59,11 @@ public HashedBytesArray parentIdByDoc(int docId) { } public int docById(HashedBytesArray uid) { - return idToDoc.get(uid); + if (idToDoc.containsKey(uid)) { + return idToDoc.lget(); + } else { + return -1; + } } public HashedBytesArray idByDoc(int docId) { @@ -79,20 +81,29 @@ public long sizeInBytes() { * Returns an already stored instance if exists, if not, returns null; */ public HashedBytesArray canReuse(HashedBytesArray id) { - return idToDoc.key(id); + if (idToDoc.containsKey(id)) { + return idToDoc.lkey(); + } else { + return id; + } } long computeSizeInBytes() { long sizeInBytes = 0; // Ignore type field // sizeInBytes += ((type.length() * RamUsage.NUM_BYTES_CHAR) + (3 * RamUsage.NUM_BYTES_INT)) + RamUsage.NUM_BYTES_OBJECT_HEADER; - sizeInBytes += RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + (idToDoc._valuesSize() * RamUsageEstimator.NUM_BYTES_INT); - for (Object o : idToDoc._set) { - if (o == TObjectHash.FREE || o == TObjectHash.REMOVED) { - sizeInBytes += RamUsageEstimator.NUM_BYTES_OBJECT_REF; - } else { - HashedBytesArray 
bytesArray = (HashedBytesArray) o; - sizeInBytes += RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + (bytesArray.length() + RamUsageEstimator.NUM_BYTES_INT); + sizeInBytes += RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + (idToDoc.values.length * RamUsageEstimator.NUM_BYTES_INT); + sizeInBytes += RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + (idToDoc.allocated.length); + final boolean[] states = idToDoc.allocated; + final Object[] keys = idToDoc.keys; + for (int i = 0; i < states.length; i++) { + if (states[i]) { + HashedBytesArray bytesArray = (HashedBytesArray) keys[i]; + if (bytesArray != null) { + sizeInBytes += RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + (bytesArray.length() + RamUsageEstimator.NUM_BYTES_INT); + } else { + sizeInBytes += RamUsageEstimator.NUM_BYTES_OBJECT_REF; + } } } diff --git a/src/main/java/org/elasticsearch/index/fielddata/FieldDataStats.java b/src/main/java/org/elasticsearch/index/fielddata/FieldDataStats.java index 9c7fa15eb5ad6..5537d8a109762 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/FieldDataStats.java +++ b/src/main/java/org/elasticsearch/index/fielddata/FieldDataStats.java @@ -19,8 +19,7 @@ package org.elasticsearch.index.fielddata; -import gnu.trove.iterator.TObjectLongIterator; -import gnu.trove.map.hash.TObjectLongHashMap; +import com.carrotsearch.hppc.ObjectLongOpenHashMap; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -39,13 +38,13 @@ public class FieldDataStats implements Streamable, ToXContent { long memorySize; long evictions; @Nullable - TObjectLongHashMap fields; + ObjectLongOpenHashMap fields; public FieldDataStats() { } - public FieldDataStats(long memorySize, long evictions, @Nullable TObjectLongHashMap fields) { + public FieldDataStats(long memorySize, long evictions, @Nullable ObjectLongOpenHashMap fields) { this.memorySize = memorySize; this.evictions = evictions; this.fields = fields; @@ -55,10 +54,14 @@ public void add(FieldDataStats stats) { this.memorySize += stats.memorySize; this.evictions += stats.evictions; if (stats.fields != null) { - if (fields == null) fields = new TObjectLongHashMap(); - for (TObjectLongIterator it = stats.fields.iterator(); it.hasNext(); ) { - it.advance(); - fields.adjustOrPutValue(it.key(), it.value(), it.value()); + if (fields == null) fields = new ObjectLongOpenHashMap(); + final boolean[] states = stats.fields.allocated; + final Object[] keys = stats.fields.keys; + final long[] values = stats.fields.values; + for (int i = 0; i < states.length; i++) { + if (states[i]) { + fields.addTo((String) keys[i], values[i]); + } } } } @@ -76,7 +79,7 @@ public long getEvictions() { } @Nullable - public TObjectLongHashMap getFields() { + public ObjectLongOpenHashMap getFields() { return fields; } @@ -92,7 +95,7 @@ public void readFrom(StreamInput in) throws IOException { evictions = in.readVLong(); if (in.readBoolean()) { int size = in.readVInt(); - fields = new TObjectLongHashMap(size); + fields = new ObjectLongOpenHashMap(size); for (int i = 0; i < size; i++) { fields.put(in.readString(), in.readVLong()); } @@ -108,10 +111,14 @@ public void writeTo(StreamOutput out) throws IOException { } else { out.writeBoolean(true); out.writeVInt(fields.size()); - for (TObjectLongIterator it = fields.iterator(); it.hasNext(); ) { - it.advance(); - out.writeString(it.key()); - out.writeVLong(it.value()); + final boolean[] states = fields.allocated; + final Object[] keys = fields.keys; + final long[] values = 
fields.values; + for (int i = 0; i < states.length; i++) { + if (states[i]) { + out.writeString((String) keys[i]); + out.writeVLong(values[i]); + } } } } @@ -123,11 +130,15 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(Fields.EVICTIONS, getEvictions()); if (fields != null) { builder.startObject(Fields.FIELDS); - for (TObjectLongIterator it = fields.iterator(); it.hasNext(); ) { - it.advance(); - builder.startObject(it.key(), XContentBuilder.FieldCaseConversion.NONE); - builder.byteSizeField(Fields.MEMORY_SIZE_IN_BYTES, Fields.MEMORY_SIZE, it.value()); - builder.endObject(); + final boolean[] states = fields.allocated; + final Object[] keys = fields.keys; + final long[] values = fields.values; + for (int i = 0; i < states.length; i++) { + if (states[i]) { + builder.startObject((String) keys[i], XContentBuilder.FieldCaseConversion.NONE); + builder.byteSizeField(Fields.MEMORY_SIZE_IN_BYTES, Fields.MEMORY_SIZE, values[i]); + builder.endObject(); + } } builder.endObject(); } diff --git a/src/main/java/org/elasticsearch/index/fielddata/ShardFieldData.java b/src/main/java/org/elasticsearch/index/fielddata/ShardFieldData.java index 330f9caa49e97..7958927bacdcf 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/ShardFieldData.java +++ b/src/main/java/org/elasticsearch/index/fielddata/ShardFieldData.java @@ -19,7 +19,7 @@ package org.elasticsearch.index.fielddata; -import gnu.trove.map.hash.TObjectLongHashMap; +import com.carrotsearch.hppc.ObjectLongOpenHashMap; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.metrics.CounterMetric; @@ -49,9 +49,9 @@ public ShardFieldData(ShardId shardId, @IndexSettings Settings indexSettings) { } public FieldDataStats stats(String... 
fields) { - TObjectLongHashMap fieldTotals = null; + ObjectLongOpenHashMap fieldTotals = null; if (fields != null && fields.length > 0) { - fieldTotals = new TObjectLongHashMap(); + fieldTotals = new ObjectLongOpenHashMap(); for (Map.Entry entry : perFieldTotals.entrySet()) { for (String field : fields) { if (Regex.simpleMatch(field, entry.getKey())) { diff --git a/src/main/java/org/elasticsearch/index/query/CustomFiltersScoreQueryBuilder.java b/src/main/java/org/elasticsearch/index/query/CustomFiltersScoreQueryBuilder.java index 359a7fa75e1a0..8aa7a379cbbb1 100644 --- a/src/main/java/org/elasticsearch/index/query/CustomFiltersScoreQueryBuilder.java +++ b/src/main/java/org/elasticsearch/index/query/CustomFiltersScoreQueryBuilder.java @@ -19,8 +19,8 @@ package org.elasticsearch.index.query; +import com.carrotsearch.hppc.FloatArrayList; import com.google.common.collect.Maps; -import gnu.trove.list.array.TFloatArrayList; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder; @@ -50,7 +50,7 @@ public class CustomFiltersScoreQueryBuilder extends BaseQueryBuilder implements private ArrayList filters = new ArrayList(); private ArrayList scripts = new ArrayList(); - private TFloatArrayList boosts = new TFloatArrayList(); + private FloatArrayList boosts = new FloatArrayList(); public CustomFiltersScoreQueryBuilder(QueryBuilder queryBuilder) { this.queryBuilder = queryBuilder; diff --git a/src/main/java/org/elasticsearch/index/query/CustomFiltersScoreQueryParser.java b/src/main/java/org/elasticsearch/index/query/CustomFiltersScoreQueryParser.java index 276e2eae088b2..fb9bd5979f94b 100644 --- a/src/main/java/org/elasticsearch/index/query/CustomFiltersScoreQueryParser.java +++ b/src/main/java/org/elasticsearch/index/query/CustomFiltersScoreQueryParser.java @@ -19,7 +19,7 @@ package org.elasticsearch.index.query; -import gnu.trove.list.array.TFloatArrayList; +import com.carrotsearch.hppc.FloatArrayList; import org.apache.lucene.search.Filter; import org.apache.lucene.search.Query; import org.elasticsearch.common.Strings; @@ -66,7 +66,7 @@ public Query parse(QueryParseContext parseContext) throws IOException, QueryPars ArrayList filters = new ArrayList(); boolean filtersFound = false; ArrayList scripts = new ArrayList(); - TFloatArrayList boosts = new TFloatArrayList(); + FloatArrayList boosts = new FloatArrayList(); float maxBoost = Float.MAX_VALUE; String currentFieldName = null; diff --git a/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java b/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java index 1bb18defabc0d..95a03e488e685 100644 --- a/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java +++ b/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java @@ -19,9 +19,8 @@ package org.elasticsearch.index.query; +import com.carrotsearch.hppc.ObjectFloatOpenHashMap; import com.google.common.collect.Lists; -import gnu.trove.impl.Constants; -import gnu.trove.map.hash.TObjectFloatHashMap; import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; @@ -37,7 +36,7 @@ public class MultiMatchQueryBuilder extends BaseQueryBuilder implements Boostabl private final Object text; private final List fields; - private TObjectFloatHashMap fieldsBoosts; + private ObjectFloatOpenHashMap fieldsBoosts; private MatchQueryBuilder.Type type; @@ -96,7 +95,7 @@ public MultiMatchQueryBuilder field(String field) { public MultiMatchQueryBuilder 
field(String field, float boost) { fields.add(field); if (fieldsBoosts == null) { - fieldsBoosts = new TObjectFloatHashMap(Constants.DEFAULT_CAPACITY, Constants.DEFAULT_LOAD_FACTOR, -1); + fieldsBoosts = new ObjectFloatOpenHashMap(); } fieldsBoosts.put(field, boost); return this; @@ -230,12 +229,8 @@ public void doXContent(XContentBuilder builder, Params params) throws IOExceptio builder.field("query", text); builder.startArray("fields"); for (String field : fields) { - float boost = -1; - if (fieldsBoosts != null) { - boost = fieldsBoosts.get(field); - } - if (boost != -1) { - field += "^" + boost; + if (fieldsBoosts != null && fieldsBoosts.containsKey(field)) { + field += "^" + fieldsBoosts.lget(); } builder.value(field); } diff --git a/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java b/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java index 61158c7ae7280..7c282efd5e90e 100644 --- a/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java +++ b/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java @@ -19,8 +19,7 @@ package org.elasticsearch.index.query; -import gnu.trove.impl.Constants; -import gnu.trove.map.hash.TObjectFloatHashMap; +import com.carrotsearch.hppc.ObjectFloatOpenHashMap; import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; @@ -78,7 +77,7 @@ public static enum Operator { private List fields; - private TObjectFloatHashMap fieldsBoosts; + private ObjectFloatOpenHashMap fieldsBoosts; private Boolean useDisMax; @@ -125,7 +124,7 @@ public QueryStringQueryBuilder field(String field, float boost) { } fields.add(field); if (fieldsBoosts == null) { - fieldsBoosts = new TObjectFloatHashMap(Constants.DEFAULT_CAPACITY, Constants.DEFAULT_LOAD_FACTOR, -1); + fieldsBoosts = new ObjectFloatOpenHashMap(); } fieldsBoosts.put(field, boost); return this; @@ -323,12 +322,8 @@ protected void doXContent(XContentBuilder builder, Params params) throws IOExcep if (fields != null) { builder.startArray("fields"); for (String field : fields) { - float boost = -1; - if (fieldsBoosts != null) { - boost = fieldsBoosts.get(field); - } - if (boost != -1) { - field += "^" + boost; + if (fieldsBoosts != null && fieldsBoosts.containsKey(field)) { + field += "^" + fieldsBoosts.get(field); } builder.value(field); } diff --git a/src/main/java/org/elasticsearch/index/query/QueryStringQueryParser.java b/src/main/java/org/elasticsearch/index/query/QueryStringQueryParser.java index e9484a4c75676..07eea1fa9a5ce 100644 --- a/src/main/java/org/elasticsearch/index/query/QueryStringQueryParser.java +++ b/src/main/java/org/elasticsearch/index/query/QueryStringQueryParser.java @@ -19,9 +19,8 @@ package org.elasticsearch.index.query; +import com.carrotsearch.hppc.ObjectFloatOpenHashMap; import com.google.common.collect.Lists; -import gnu.trove.impl.Constants; -import gnu.trove.map.hash.TObjectFloatHashMap; import org.apache.lucene.queryparser.classic.MapperQueryParser; import org.apache.lucene.queryparser.classic.QueryParserSettings; import org.apache.lucene.search.BooleanQuery; @@ -104,7 +103,7 @@ public Query parse(QueryParseContext parseContext) throws IOException, QueryPars qpSettings.fields().add(field); if (fBoost != -1) { if (qpSettings.boosts() == null) { - qpSettings.boosts(new TObjectFloatHashMap(Constants.DEFAULT_CAPACITY, Constants.DEFAULT_LOAD_FACTOR, 1.0f)); + qpSettings.boosts(new ObjectFloatOpenHashMap()); } qpSettings.boosts().put(field, fBoost); } @@ -113,7 +112,7 @@ public Query 
parse(QueryParseContext parseContext) throws IOException, QueryPars qpSettings.fields().add(fField); if (fBoost != -1) { if (qpSettings.boosts() == null) { - qpSettings.boosts(new TObjectFloatHashMap(Constants.DEFAULT_CAPACITY, Constants.DEFAULT_LOAD_FACTOR, 1.0f)); + qpSettings.boosts(new ObjectFloatOpenHashMap()); } qpSettings.boosts().put(fField, fBoost); } diff --git a/src/main/java/org/elasticsearch/index/search/child/ChildrenQuery.java b/src/main/java/org/elasticsearch/index/search/child/ChildrenQuery.java index 2447941e2f363..9efa885796553 100644 --- a/src/main/java/org/elasticsearch/index/search/child/ChildrenQuery.java +++ b/src/main/java/org/elasticsearch/index/search/child/ChildrenQuery.java @@ -19,8 +19,8 @@ package org.elasticsearch.index.search.child; -import gnu.trove.map.hash.TObjectFloatHashMap; -import gnu.trove.map.hash.TObjectIntHashMap; +import com.carrotsearch.hppc.ObjectFloatOpenHashMap; +import com.carrotsearch.hppc.ObjectIntOpenHashMap; import org.apache.lucene.index.AtomicReaderContext; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.Term; @@ -64,8 +64,8 @@ public class ChildrenQuery extends Query implements SearchContext.Rewrite { private final int shortCircuitParentDocSet; private Query rewrittenChildQuery; - private Recycler.V> uidToScore; - private Recycler.V> uidToCount; + private Recycler.V> uidToScore; + private Recycler.V> uidToCount; public ChildrenQuery(SearchContext searchContext, String parentType, String childType, Filter parentFilter, Query childQuery, ScoreType scoreType, int shortCircuitParentDocSet) { this.searchContext = searchContext; @@ -177,10 +177,10 @@ public Weight createWeight(IndexSearcher searcher) throws IOException { Filter parentFilter; if (size == 1) { - BytesRef id = uidToScore.v().keySet().iterator().next().toBytesRef(); + BytesRef id = uidToScore.v().keys().iterator().next().value.toBytesRef(); parentFilter = new TermFilter(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(parentType, id))); } else if (size <= shortCircuitParentDocSet) { - parentFilter = new ParentIdsFilter(parentType, uidToScore.v().keySet()); + parentFilter = new ParentIdsFilter(parentType, uidToScore.v().keys, uidToScore.v().allocated); } else { parentFilter = this.parentFilter; } @@ -239,7 +239,7 @@ public Scorer scorer(AtomicReaderContext context, boolean scoreDocsInOrder, bool class ParentScorer extends Scorer { - final TObjectFloatHashMap uidToScore; + final ObjectFloatOpenHashMap uidToScore; final IdReaderTypeCache idTypeCache; final DocIdSetIterator parentsIterator; @@ -247,7 +247,7 @@ class ParentScorer extends Scorer { int currentDocId = -1; float currentScore; - ParentScorer(Weight weight, IdReaderTypeCache idTypeCache, TObjectFloatHashMap uidToScore, DocIdSetIterator parentsIterator) { + ParentScorer(Weight weight, IdReaderTypeCache idTypeCache, ObjectFloatOpenHashMap uidToScore, DocIdSetIterator parentsIterator) { super(weight); this.idTypeCache = idTypeCache; this.parentsIterator = parentsIterator; @@ -323,9 +323,9 @@ public long cost() { final class AvgParentScorer extends ParentScorer { HashedBytesArray currentUid; - final TObjectIntHashMap uidToCount; + final ObjectIntOpenHashMap uidToCount; - AvgParentScorer(Weight weight, IdReaderTypeCache idTypeCache, TObjectFloatHashMap uidToScore, TObjectIntHashMap uidToCount, DocIdSetIterator parentsIterator) { + AvgParentScorer(Weight weight, IdReaderTypeCache idTypeCache, ObjectFloatOpenHashMap uidToScore, ObjectIntOpenHashMap uidToCount, DocIdSetIterator parentsIterator) { 
super(weight, idTypeCache, uidToScore, parentsIterator); this.uidToCount = uidToCount; } @@ -371,11 +371,11 @@ public int advance(int target) throws IOException { static class ChildUidCollector extends ParentIdCollector { - final TObjectFloatHashMap uidToScore; + final ObjectFloatOpenHashMap uidToScore; final ScoreType scoreType; Scorer scorer; - ChildUidCollector(ScoreType scoreType, SearchContext searchContext, String childType, TObjectFloatHashMap uidToScore) { + ChildUidCollector(ScoreType scoreType, SearchContext searchContext, String childType, ObjectFloatOpenHashMap uidToScore) { super(childType, searchContext); this.uidToScore = uidToScore; this.scoreType = scoreType; @@ -388,27 +388,27 @@ public void setScorer(Scorer scorer) throws IOException { @Override protected void collect(int doc, HashedBytesArray parentUid) throws IOException { - float previousScore = uidToScore.get(parentUid); float currentScore = scorer.score(); - if (previousScore == 0) { - uidToScore.put(parentUid, currentScore); - } else { - switch (scoreType) { - case SUM: - uidToScore.adjustValue(parentUid, currentScore); - break; - case MAX: + switch (scoreType) { + case SUM: + uidToScore.addTo(parentUid, currentScore); + break; + case MAX: + if (uidToScore.containsKey(parentUid)) { + float previousScore = uidToScore.lget(); if (currentScore > previousScore) { - uidToScore.put(parentUid, currentScore); + uidToScore.lset(currentScore); } - break; - case AVG: - assert false : "AVG has it's own collector"; + } else { + uidToScore.put(parentUid, currentScore); + } + break; + case AVG: + assert false : "AVG has it's own collector"; - default: - assert false : "Are we missing a score type here? -- " + scoreType; - break; - } + default: + assert false : "Are we missing a score type here? 
-- " + scoreType; + break; } } @@ -416,9 +416,9 @@ protected void collect(int doc, HashedBytesArray parentUid) throws IOException { final static class AvgChildUidCollector extends ChildUidCollector { - final TObjectIntHashMap uidToCount; + final ObjectIntOpenHashMap uidToCount; - AvgChildUidCollector(ScoreType scoreType, SearchContext searchContext, String childType, TObjectFloatHashMap uidToScore, TObjectIntHashMap uidToCount) { + AvgChildUidCollector(ScoreType scoreType, SearchContext searchContext, String childType, ObjectFloatOpenHashMap uidToScore, ObjectIntOpenHashMap uidToCount) { super(scoreType, searchContext, childType, uidToScore); this.uidToCount = uidToCount; assert scoreType == ScoreType.AVG; @@ -426,15 +426,9 @@ final static class AvgChildUidCollector extends ChildUidCollector { @Override protected void collect(int doc, HashedBytesArray parentUid) throws IOException { - float previousScore = uidToScore.get(parentUid); float currentScore = scorer.score(); - if (previousScore == 0) { - uidToScore.put(parentUid, currentScore); - uidToCount.put(parentUid, 1); - } else { - uidToScore.adjustValue(parentUid, currentScore); - uidToCount.increment(parentUid); - } + uidToCount.addTo(parentUid, 1); + uidToScore.addTo(parentUid, currentScore); } } diff --git a/src/main/java/org/elasticsearch/index/search/child/HasChildFilter.java b/src/main/java/org/elasticsearch/index/search/child/HasChildFilter.java index fc24372271593..6ad4ab0bf8335 100644 --- a/src/main/java/org/elasticsearch/index/search/child/HasChildFilter.java +++ b/src/main/java/org/elasticsearch/index/search/child/HasChildFilter.java @@ -19,7 +19,7 @@ package org.elasticsearch.index.search.child; -import gnu.trove.set.hash.THashSet; +import com.carrotsearch.hppc.ObjectOpenHashSet; import org.apache.lucene.index.AtomicReaderContext; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.Term; @@ -56,7 +56,7 @@ public class HasChildFilter extends Filter implements SearchContext.Rewrite { Filter shortCircuitFilter; int remaining; - Recycler.V> collectedUids; + Recycler.V> collectedUids; public HasChildFilter(Query childQuery, String parentType, String childType, Filter parentFilter, SearchContext searchContext, int shortCircuitParentDocSet) { this.parentFilter = parentFilter; @@ -135,10 +135,10 @@ public void contextRewrite(SearchContext searchContext) throws Exception { if (remaining == 0) { shortCircuitFilter = Queries.MATCH_NO_FILTER; } else if (remaining == 1) { - BytesRef id = collectedUids.v().iterator().next().toBytesRef(); + BytesRef id = collectedUids.v().iterator().next().value.toBytesRef(); shortCircuitFilter = new TermFilter(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(parentType, id))); } else if (remaining <= shortCircuitParentDocSet) { - shortCircuitFilter = new ParentIdsFilter(parentType, collectedUids.v()); + shortCircuitFilter = new ParentIdsFilter(parentType, collectedUids.v().keys, collectedUids.v().allocated); } } @@ -158,10 +158,10 @@ public void contextClear() { final class ParentDocSet extends MatchDocIdSet { final IndexReader reader; - final THashSet parents; + final ObjectOpenHashSet parents; final IdReaderTypeCache typeCache; - ParentDocSet(IndexReader reader, Bits acceptDocs, THashSet parents, IdReaderTypeCache typeCache) { + ParentDocSet(IndexReader reader, Bits acceptDocs, ObjectOpenHashSet parents, IdReaderTypeCache typeCache) { super(reader.maxDoc(), acceptDocs); this.reader = reader; this.parents = parents; @@ -185,9 +185,9 @@ protected boolean matchDoc(int doc) { final 
static class UidCollector extends ParentIdCollector { - final THashSet collectedUids; + final ObjectOpenHashSet collectedUids; - UidCollector(String parentType, SearchContext context, THashSet collectedUids) { + UidCollector(String parentType, SearchContext context, ObjectOpenHashSet collectedUids) { super(parentType, context); this.collectedUids = collectedUids; } diff --git a/src/main/java/org/elasticsearch/index/search/child/HasParentFilter.java b/src/main/java/org/elasticsearch/index/search/child/HasParentFilter.java index ed94422c1dc9c..2df08c368923f 100644 --- a/src/main/java/org/elasticsearch/index/search/child/HasParentFilter.java +++ b/src/main/java/org/elasticsearch/index/search/child/HasParentFilter.java @@ -19,7 +19,7 @@ package org.elasticsearch.index.search.child; -import gnu.trove.set.hash.THashSet; +import com.carrotsearch.hppc.ObjectOpenHashSet; import org.apache.lucene.index.AtomicReaderContext; import org.apache.lucene.index.IndexReader; import org.apache.lucene.search.DocIdSet; @@ -47,7 +47,7 @@ public class HasParentFilter extends Filter implements SearchContext.Rewrite { final SearchContext context; final Filter childrenFilter; - Recycler.V> parents; + Recycler.V> parents; public HasParentFilter(Query parentQuery, String parentType, SearchContext context, Filter childrenFilter) { this.parentQuery = parentQuery; @@ -120,10 +120,10 @@ public void contextClear() { final static class ChildrenDocSet extends MatchDocIdSet { final IndexReader reader; - final THashSet parents; + final ObjectOpenHashSet parents; final IdReaderTypeCache idReaderTypeCache; - ChildrenDocSet(IndexReader reader, Bits acceptDocs, THashSet parents, IdReaderTypeCache idReaderTypeCache) { + ChildrenDocSet(IndexReader reader, Bits acceptDocs, ObjectOpenHashSet parents, IdReaderTypeCache idReaderTypeCache) { super(reader.maxDoc(), acceptDocs); this.reader = reader; this.parents = parents; @@ -139,13 +139,13 @@ protected boolean matchDoc(int doc) { final static class ParentUidsCollector extends NoopCollector { - final THashSet collectedUids; + final ObjectOpenHashSet collectedUids; final SearchContext context; final String parentType; IdReaderTypeCache typeCache; - ParentUidsCollector(THashSet collectedUids, SearchContext context, String parentType) { + ParentUidsCollector(ObjectOpenHashSet collectedUids, SearchContext context, String parentType) { this.collectedUids = collectedUids; this.context = context; this.parentType = parentType; diff --git a/src/main/java/org/elasticsearch/index/search/child/ParentIdsFilter.java b/src/main/java/org/elasticsearch/index/search/child/ParentIdsFilter.java index dd2fddbfe828f..adae685218b26 100644 --- a/src/main/java/org/elasticsearch/index/search/child/ParentIdsFilter.java +++ b/src/main/java/org/elasticsearch/index/search/child/ParentIdsFilter.java @@ -34,7 +34,6 @@ import org.elasticsearch.index.mapper.internal.UidFieldMapper; import java.io.IOException; -import java.util.Set; /** * Advantages over using this filter over Lucene's TermsFilter in the parent child context: @@ -46,11 +45,13 @@ final class ParentIdsFilter extends Filter { private final BytesRef parentTypeBr; - private final Set collectedUids; + private final Object[] keys; + private final boolean[] allocated; - public ParentIdsFilter(String parentType, Set collectedUids) { + public ParentIdsFilter(String parentType, Object[] keys, boolean[] allocated) { this.parentTypeBr = new BytesRef(parentType); - this.collectedUids = collectedUids; + this.keys = keys; + this.allocated = allocated; } @Override @@ 
-66,8 +67,12 @@ public DocIdSet getDocIdSet(AtomicReaderContext context, Bits acceptDocs) throws DocsEnum docsEnum = null; FixedBitSet result = null; - for (HashedBytesArray parentId : collectedUids) { - idSpare.bytes = parentId.toBytes(); + for (int i = 0; i < allocated.length; i++) { + if (!allocated[i]) { + continue; + } + + idSpare.bytes = ((HashedBytesArray) keys[i]).toBytes(); idSpare.length = idSpare.bytes.length; Uid.createUidAsBytes(parentTypeBr, idSpare, uidSpare); if (termsEnum.seekExact(uidSpare, false)) { diff --git a/src/main/java/org/elasticsearch/index/search/child/ParentQuery.java b/src/main/java/org/elasticsearch/index/search/child/ParentQuery.java index 8095ef0f4a9db..a7fba2fbb36aa 100644 --- a/src/main/java/org/elasticsearch/index/search/child/ParentQuery.java +++ b/src/main/java/org/elasticsearch/index/search/child/ParentQuery.java @@ -19,7 +19,7 @@ package org.elasticsearch.index.search.child; -import gnu.trove.map.hash.TObjectFloatHashMap; +import com.carrotsearch.hppc.ObjectFloatOpenHashMap; import org.apache.lucene.index.AtomicReaderContext; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.Term; @@ -27,7 +27,6 @@ import org.apache.lucene.util.Bits; import org.apache.lucene.util.ToStringUtils; import org.elasticsearch.ElasticSearchIllegalStateException; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.HashedBytesArray; import org.elasticsearch.common.lucene.docset.DocIdSets; import org.elasticsearch.common.lucene.search.ApplyAcceptedDocsFilter; @@ -54,7 +53,7 @@ public class ParentQuery extends Query implements SearchContext.Rewrite { private final Filter childrenFilter; private Query rewrittenParentQuery; - private Recycler.V> uidToScore; + private Recycler.V> uidToScore; public ParentQuery(SearchContext searchContext, Query parentQuery, String parentType, Filter childrenFilter) { this.searchContext = searchContext; @@ -152,14 +151,14 @@ public Weight createWeight(IndexSearcher searcher) throws IOException { static class ParentUidCollector extends NoopCollector { - final TObjectFloatHashMap uidToScore; + final ObjectFloatOpenHashMap uidToScore; final SearchContext searchContext; final String parentType; Scorer scorer; IdReaderTypeCache typeCache; - ParentUidCollector(TObjectFloatHashMap uidToScore, SearchContext searchContext, String parentType) { + ParentUidCollector(ObjectFloatOpenHashMap uidToScore, SearchContext searchContext, String parentType) { this.uidToScore = uidToScore; this.searchContext = searchContext; this.parentType = parentType; @@ -232,14 +231,14 @@ public Scorer scorer(AtomicReaderContext context, boolean scoreDocsInOrder, bool static class ChildScorer extends Scorer { - final TObjectFloatHashMap uidToScore; + final ObjectFloatOpenHashMap uidToScore; final DocIdSetIterator childrenIterator; final IdReaderTypeCache typeCache; int currentChildDoc = -1; float currentScore; - ChildScorer(Weight weight, TObjectFloatHashMap uidToScore, DocIdSetIterator childrenIterator, IdReaderTypeCache typeCache) { + ChildScorer(Weight weight, ObjectFloatOpenHashMap uidToScore, DocIdSetIterator childrenIterator, IdReaderTypeCache typeCache) { super(weight); this.uidToScore = uidToScore; this.childrenIterator = childrenIterator; @@ -271,7 +270,7 @@ public int nextDoc() throws IOException { return currentChildDoc; } - BytesReference uid = typeCache.parentIdByDoc(currentChildDoc); + HashedBytesArray uid = typeCache.parentIdByDoc(currentChildDoc); if (uid == null) { continue; } @@ -288,7 +287,7 @@ 
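For readers new to hppc: a minimal, self-contained sketch (not part of this patch) of the ObjectOpenHashSet idiom that replaces Trove's THashSet in the parent/child filters above. It shows the two ways the collected uids get read back: via cursors, and via the set's public keys/allocated backing arrays (the same pair now handed to ParentIdsFilter). The class name and the plain String uids are illustrative only.

import com.carrotsearch.hppc.ObjectOpenHashSet;
import com.carrotsearch.hppc.cursors.ObjectCursor;

public class ParentUidSetSketch {
    public static void main(String[] args) {
        // Collect "parent uids" the way the collectors above do, just with plain strings.
        ObjectOpenHashSet<String> collectedUids = new ObjectOpenHashSet<String>();
        collectedUids.add("parent#1");
        collectedUids.add("parent#2");

        // Option 1: cursor iteration; the element lives in cursor.value.
        for (ObjectCursor<String> cursor : collectedUids) {
            System.out.println("via cursor: " + cursor.value);
        }

        // Option 2: walk the backing arrays directly, as ParentIdsFilter now does
        // with the keys/allocated pair passed to its constructor.
        Object[] keys = collectedUids.keys;
        boolean[] allocated = collectedUids.allocated;
        for (int i = 0; i < allocated.length; i++) {
            if (!allocated[i]) {
                continue; // empty slot in the open-addressed table
            }
            System.out.println("via backing arrays: " + (String) keys[i]);
        }
    }
}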
public int advance(int target) throws IOException { if (currentChildDoc == DocIdSetIterator.NO_MORE_DOCS) { return currentChildDoc; } - BytesReference uid = typeCache.idByDoc(currentChildDoc); + HashedBytesArray uid = typeCache.idByDoc(currentChildDoc); if (uid == null) { return nextDoc(); } diff --git a/src/main/java/org/elasticsearch/index/search/child/TopChildrenQuery.java b/src/main/java/org/elasticsearch/index/search/child/TopChildrenQuery.java index 8e2b4c45f51bd..8d96229603180 100644 --- a/src/main/java/org/elasticsearch/index/search/child/TopChildrenQuery.java +++ b/src/main/java/org/elasticsearch/index/search/child/TopChildrenQuery.java @@ -19,7 +19,8 @@ package org.elasticsearch.index.search.child; -import gnu.trove.map.hash.TIntObjectHashMap; +import com.carrotsearch.hppc.IntObjectOpenHashMap; +import com.carrotsearch.hppc.ObjectObjectOpenHashMap; import org.apache.lucene.index.*; import org.apache.lucene.search.*; import org.apache.lucene.util.Bits; @@ -29,13 +30,11 @@ import org.elasticsearch.common.bytes.HashedBytesArray; import org.elasticsearch.common.lucene.search.EmptyScorer; import org.elasticsearch.common.recycler.Recycler; -import org.elasticsearch.common.trove.ExtTHashMap; import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; import java.util.Arrays; import java.util.Comparator; -import java.util.Map; import java.util.Set; /** @@ -64,7 +63,7 @@ public class TopChildrenQuery extends Query implements SearchContext.Rewrite { // This field will hold the rewritten form of originalChildQuery, so that we can reuse it private Query rewrittenChildQuery; - private Recycler.V> parentDocs; + private Recycler.V> parentDocs; // Note, the query is expected to already be filtered to only child type docs public TopChildrenQuery(Query childQuery, String childType, String parentType, ScoreType scoreType, int factor, int incrementalFactor, CacheRecycler cacheRecycler) { @@ -146,7 +145,7 @@ public void executionDone() { int resolveParentDocuments(TopDocs topDocs, SearchContext context) { int parentHitsResolved = 0; - Recycler.V>>> parentDocsPerReader = cacheRecycler.hashMap(context.searcher().getIndexReader().leaves().size()); + Recycler.V>>> parentDocsPerReader = cacheRecycler.hashMap(context.searcher().getIndexReader().leaves().size()); for (ScoreDoc scoreDoc : topDocs.scoreDocs) { int readerIndex = ReaderUtil.subIndex(scoreDoc.doc, context.searcher().getIndexReader().leaves()); AtomicReaderContext subContext = context.searcher().getIndexReader().leaves().get(readerIndex); @@ -166,7 +165,7 @@ int resolveParentDocuments(TopDocs topDocs, SearchContext context) { if (parentDocId != -1 && (liveDocs == null || liveDocs.get(parentDocId))) { // we found a match, add it and break - Recycler.V> readerParentDocs = parentDocsPerReader.v().get(indexReader.getCoreCacheKey()); + Recycler.V> readerParentDocs = parentDocsPerReader.v().get(indexReader.getCoreCacheKey()); if (readerParentDocs == null) { readerParentDocs = cacheRecycler.intObjectMap(indexReader.maxDoc()); parentDocsPerReader.v().put(indexReader.getCoreCacheKey(), readerParentDocs); @@ -191,12 +190,18 @@ int resolveParentDocuments(TopDocs topDocs, SearchContext context) { } } } - - for (Map.Entry>> entry : parentDocsPerReader.v().entrySet()) { - ParentDoc[] values = entry.getValue().v().values(new ParentDoc[entry.getValue().v().size()]); - Arrays.sort(values, PARENT_DOC_COMP); - parentDocs.v().put(entry.getKey(), values); - entry.getValue().release(); + boolean[] states = parentDocsPerReader.v().allocated; + 
Object[] keys = parentDocsPerReader.v().keys; + Object[] values = parentDocsPerReader.v().values; + for (int i = 0; i < states.length; i++) { + if (states[i]) { + Recycler.V> value = (Recycler.V>) values[i]; + ParentDoc[] parentDocs = value.v().values().toArray(ParentDoc.class); + Arrays.sort(parentDocs, PARENT_DOC_COMP); + + this.parentDocs.v().put(keys[i], parentDocs); + value.release(); + } } parentDocsPerReader.release(); return parentHitsResolved; diff --git a/src/main/java/org/elasticsearch/indices/cache/filter/IndicesFilterCache.java b/src/main/java/org/elasticsearch/indices/cache/filter/IndicesFilterCache.java index 1de146b3c90d5..2e490bf8e2e67 100644 --- a/src/main/java/org/elasticsearch/indices/cache/filter/IndicesFilterCache.java +++ b/src/main/java/org/elasticsearch/indices/cache/filter/IndicesFilterCache.java @@ -19,12 +19,12 @@ package org.elasticsearch.indices.cache.filter; +import com.carrotsearch.hppc.ObjectOpenHashSet; import com.google.common.base.Objects; import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; import com.google.common.cache.RemovalListener; import com.google.common.cache.RemovalNotification; -import gnu.trove.set.hash.THashSet; import org.apache.lucene.search.DocIdSet; import org.elasticsearch.cache.recycler.CacheRecycler; import org.elasticsearch.common.component.AbstractComponent; @@ -176,7 +176,7 @@ public void run() { threadPool.executor(ThreadPool.Names.GENERIC).execute(new Runnable() { @Override public void run() { - Recycler.V> keys = cacheRecycler.hashSet(-1); + Recycler.V> keys = cacheRecycler.hashSet(-1); try { for (Iterator it = readersKeysToClean.iterator(); it.hasNext(); ) { keys.v().add(it.next()); diff --git a/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java b/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java index 3131ebbca01ff..990310b559f5a 100644 --- a/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java +++ b/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java @@ -19,8 +19,8 @@ package org.elasticsearch.indices.cluster; +import com.carrotsearch.hppc.IntOpenHashSet; import com.google.common.collect.Lists; -import gnu.trove.set.hash.TIntHashSet; import org.elasticsearch.ElasticSearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.cluster.ClusterChangedEvent; @@ -277,7 +277,7 @@ private void applyDeletedShards(final ClusterChangedEvent event) { if (routingNode == null) { return; } - TIntHashSet newShardIds = new TIntHashSet(); + IntOpenHashSet newShardIds = new IntOpenHashSet(); for (IndexService indexService : indicesService) { String index = indexService.index().name(); IndexMetaData indexMetaData = event.state().metaData().index(index); diff --git a/src/main/java/org/elasticsearch/percolator/PercolatorService.java b/src/main/java/org/elasticsearch/percolator/PercolatorService.java index 0488b25f3d6d5..162fdf6c8619a 100644 --- a/src/main/java/org/elasticsearch/percolator/PercolatorService.java +++ b/src/main/java/org/elasticsearch/percolator/PercolatorService.java @@ -18,8 +18,8 @@ package org.elasticsearch.percolator; +import com.carrotsearch.hppc.ByteObjectOpenHashMap; import com.google.common.collect.ImmutableMap; -import gnu.trove.map.hash.TByteObjectHashMap; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.index.AtomicReaderContext; import org.apache.lucene.index.IndexableField; @@ -92,7 +92,7 @@ public class PercolatorService extends 
AbstractComponent { private final CloseableThreadLocal cache; private final IndicesService indicesService; - private final TByteObjectHashMap percolatorTypes; + private final ByteObjectOpenHashMap percolatorTypes; private final ClusterService clusterService; @@ -112,8 +112,8 @@ protected MemoryIndex initialValue() { return new ExtendedMemoryIndex(true, maxReuseBytes); } }; - - percolatorTypes = new TByteObjectHashMap(6); + + percolatorTypes = new ByteObjectOpenHashMap(6); percolatorTypes.put(countPercolator.id(), countPercolator); percolatorTypes.put(queryCountPercolator.id(), queryCountPercolator); percolatorTypes.put(matchPercolator.id(), matchPercolator); @@ -450,8 +450,7 @@ public ReduceResult reduce(List shardResults) { // Use a custom impl of AbstractBigArray for Object[]? List finalMatches = new ArrayList(requestedSize == 0 ? numMatches : requestedSize); - outer: - for (PercolateShardResponse response : shardResults) { + outer: for (PercolateShardResponse response : shardResults) { Text index = new StringText(response.getIndex()); for (int i = 0; i < response.matches().length; i++) { float score = response.scores().length == 0 ? NO_SCORE : response.scores()[i]; diff --git a/src/main/java/org/elasticsearch/percolator/QueryCollector.java b/src/main/java/org/elasticsearch/percolator/QueryCollector.java index 9ed21c38ade93..ac705e4d8714a 100644 --- a/src/main/java/org/elasticsearch/percolator/QueryCollector.java +++ b/src/main/java/org/elasticsearch/percolator/QueryCollector.java @@ -19,8 +19,8 @@ package org.elasticsearch.percolator; +import com.carrotsearch.hppc.FloatArrayList; import com.google.common.collect.ImmutableMap; -import gnu.trove.list.array.TFloatArrayList; import org.apache.lucene.index.AtomicReaderContext; import org.apache.lucene.search.*; import org.apache.lucene.util.BytesRef; @@ -218,7 +218,7 @@ final static class MatchAndScore extends QueryCollector { final List matches = new ArrayList(); final List> hls = new ArrayList>(); // TODO: Use thread local in order to cache the scores lists? 
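As context for the substitutions in this hunk and the previous one, a small sketch, separate from the patch, of the hppc primitive collections standing in for Trove here: ByteObjectOpenHashMap for the percolator type registry and FloatArrayList for the per-match scores. The placeholder string values and the class name are made up for illustration; assumes hppc 0.5.x.

import com.carrotsearch.hppc.ByteObjectOpenHashMap;
import com.carrotsearch.hppc.FloatArrayList;

public class PrimitiveCollectionsSketch {
    public static void main(String[] args) {
        // Byte-keyed map, the counterpart of Trove's TByteObjectHashMap used for
        // the percolator type registry above. Values are placeholder strings
        // rather than real percolator implementations.
        ByteObjectOpenHashMap<String> percolatorTypes = new ByteObjectOpenHashMap<String>(6);
        percolatorTypes.put((byte) 1, "count");
        percolatorTypes.put((byte) 2, "match");
        System.out.println(percolatorTypes.get((byte) 1)); // count

        // Primitive float list, the counterpart of Trove's TFloatArrayList,
        // used below for the per-match scores.
        FloatArrayList scores = new FloatArrayList();
        scores.add(1.5f);
        scores.add(0.25f);
        float[] asArray = scores.toArray(); // dense copy, no boxing
        System.out.println(asArray.length); // 2
    }
}

Like Trove, both structures store primitives directly, so the swap is mostly mechanical; the visible API differences show up in iteration and in accumulation calls, covered further down.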
- final TFloatArrayList scores = new TFloatArrayList(); + final FloatArrayList scores = new FloatArrayList(); final boolean limit; final int size; long counter = 0; @@ -278,7 +278,7 @@ List matches() { return matches; } - TFloatArrayList scores() { + FloatArrayList scores() { return scores; } diff --git a/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java b/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java index 7d0828241d470..af419eac82fb8 100644 --- a/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java +++ b/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java @@ -19,11 +19,10 @@ package org.elasticsearch.search.builder; +import com.carrotsearch.hppc.ObjectFloatOpenHashMap; import com.google.common.base.Charsets; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; -import gnu.trove.iterator.TObjectFloatIterator; -import gnu.trove.map.hash.TObjectFloatHashMap; import org.elasticsearch.ElasticSearchGenerationException; import org.elasticsearch.client.Requests; import org.elasticsearch.common.Nullable; @@ -112,7 +111,7 @@ public static HighlightBuilder highlight() { private RescoreBuilder rescoreBuilder; - private TObjectFloatHashMap indexBoost = null; + private ObjectFloatOpenHashMap indexBoost = null; private String[] stats; @@ -590,7 +589,7 @@ public SearchSourceBuilder partialField(String name, @Nullable String[] includes */ public SearchSourceBuilder indexBoost(String index, float indexBoost) { if (this.indexBoost == null) { - this.indexBoost = new TObjectFloatHashMap(); + this.indexBoost = new ObjectFloatOpenHashMap(); } this.indexBoost.put(index, indexBoost); return this; @@ -761,9 +760,13 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (indexBoost != null) { builder.startObject("indices_boost"); - for (TObjectFloatIterator it = indexBoost.iterator(); it.hasNext(); ) { - it.advance(); - builder.field(it.key(), it.value()); + final boolean[] states = indexBoost.allocated; + final Object[] keys = indexBoost.keys; + final float[] values = indexBoost.values; + for (int i = 0; i < states.length; i++) { + if (states[i]) { + builder.field((String) keys[i], values[i]); + } } builder.endObject(); } diff --git a/src/main/java/org/elasticsearch/search/controller/SearchPhaseController.java b/src/main/java/org/elasticsearch/search/controller/SearchPhaseController.java index c939817a79644..249fb97e0715a 100644 --- a/src/main/java/org/elasticsearch/search/controller/SearchPhaseController.java +++ b/src/main/java/org/elasticsearch/search/controller/SearchPhaseController.java @@ -19,16 +19,17 @@ package org.elasticsearch.search.controller; +import com.carrotsearch.hppc.IntArrayList; +import com.carrotsearch.hppc.ObjectObjectOpenHashMap; import com.google.common.collect.Lists; import org.apache.lucene.index.Term; import org.apache.lucene.search.*; import org.apache.lucene.util.PriorityQueue; import org.elasticsearch.cache.recycler.CacheRecycler; -import org.elasticsearch.common.collect.XMaps; import org.elasticsearch.common.component.AbstractComponent; +import org.elasticsearch.common.hppc.HppcMaps; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.trove.ExtTIntArrayList; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.search.dfs.AggregatedDfs; import org.elasticsearch.search.dfs.DfsSearchResult; @@ -79,8 +80,8 @@ public boolean 
optimizeSingleShard() { } public AggregatedDfs aggregateDfs(AtomicArray results) { - Map termStatistics = XMaps.newNoNullKeysMap(); - Map fieldStatistics = XMaps.newNoNullKeysMap(); + ObjectObjectOpenHashMap termStatistics = HppcMaps.newNoNullKeysMap(); + ObjectObjectOpenHashMap fieldStatistics = HppcMaps.newNoNullKeysMap(); long aggMaxDoc = 0; for (AtomicArray.Entry lEntry : results.asList()) { final Term[] terms = lEntry.value.terms(); @@ -101,19 +102,26 @@ public AggregatedDfs aggregateDfs(AtomicArray results) { } } - for (Map.Entry entry : lEntry.value.fieldStatistics().entrySet()) { - assert entry.getKey() != null; - CollectionStatistics existing = fieldStatistics.get(entry.getKey()); - if (existing != null) { - CollectionStatistics merged = new CollectionStatistics( - entry.getKey(), existing.maxDoc() + entry.getValue().maxDoc(), - optionalSum(existing.docCount(), entry.getValue().docCount()), - optionalSum(existing.sumTotalTermFreq(), entry.getValue().sumTotalTermFreq()), - optionalSum(existing.sumDocFreq(), entry.getValue().sumDocFreq()) - ); - fieldStatistics.put(entry.getKey(), merged); - } else { - fieldStatistics.put(entry.getKey(), entry.getValue()); + final boolean[] states = lEntry.value.fieldStatistics().allocated; + final Object[] keys = lEntry.value.fieldStatistics().keys; + final Object[] values = lEntry.value.fieldStatistics().values; + for (int i = 0; i < states.length; i++) { + if (states[i]) { + String key = (String) keys[i]; + CollectionStatistics value = (CollectionStatistics) values[i]; + assert key != null; + CollectionStatistics existing = fieldStatistics.get(key); + if (existing != null) { + CollectionStatistics merged = new CollectionStatistics( + key, existing.maxDoc() + value.maxDoc(), + optionalSum(existing.docCount(), value.docCount()), + optionalSum(existing.sumTotalTermFreq(), value.sumTotalTermFreq()), + optionalSum(existing.sumDocFreq(), value.sumDocFreq()) + ); + fieldStatistics.put(key, merged); + } else { + fieldStatistics.put(key, value); + } } } aggMaxDoc += lEntry.value.maxDoc(); @@ -285,11 +293,11 @@ public ScoreDoc[] sortDocs(AtomicArray resu /** * Builds an array, with potential null elements, with docs to load. 
*/ - public void fillDocIdsToLoad(AtomicArray docsIdsToLoad, ScoreDoc[] shardDocs) { + public void fillDocIdsToLoad(AtomicArray docsIdsToLoad, ScoreDoc[] shardDocs) { for (ScoreDoc shardDoc : shardDocs) { - ExtTIntArrayList list = docsIdsToLoad.get(shardDoc.shardIndex); + IntArrayList list = docsIdsToLoad.get(shardDoc.shardIndex); if (list == null) { - list = new ExtTIntArrayList(); // can't be shared!, uses unsafe on it later on + list = new IntArrayList(); // can't be shared!, uses unsafe on it later on docsIdsToLoad.set(shardDoc.shardIndex, list); } list.add(shardDoc.doc); diff --git a/src/main/java/org/elasticsearch/search/dfs/AggregatedDfs.java b/src/main/java/org/elasticsearch/search/dfs/AggregatedDfs.java index b0c5738ff9b81..952e17bb5f7f7 100644 --- a/src/main/java/org/elasticsearch/search/dfs/AggregatedDfs.java +++ b/src/main/java/org/elasticsearch/search/dfs/AggregatedDfs.java @@ -20,37 +20,37 @@ package org.elasticsearch.search.dfs; -import java.io.IOException; -import java.util.Map; - +import com.carrotsearch.hppc.ObjectObjectOpenHashMap; import org.apache.lucene.index.Term; import org.apache.lucene.search.CollectionStatistics; import org.apache.lucene.search.TermStatistics; -import org.elasticsearch.common.collect.XMaps; +import org.elasticsearch.common.hppc.HppcMaps; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; +import java.io.IOException; + public class AggregatedDfs implements Streamable { - private Map termStatistics; - private Map fieldStatistics; + private ObjectObjectOpenHashMap termStatistics; + private ObjectObjectOpenHashMap fieldStatistics; private long maxDoc; private AggregatedDfs() { } - public AggregatedDfs(Map termStatistics, Map fieldStatistics, long maxDoc) { + public AggregatedDfs(ObjectObjectOpenHashMap termStatistics, ObjectObjectOpenHashMap fieldStatistics, long maxDoc) { this.termStatistics = termStatistics; this.fieldStatistics = fieldStatistics; this.maxDoc = maxDoc; } - public Map termStatistics() { + public ObjectObjectOpenHashMap termStatistics() { return termStatistics; } - public Map fieldStatistics() { + public ObjectObjectOpenHashMap fieldStatistics() { return fieldStatistics; } @@ -67,7 +67,7 @@ public static AggregatedDfs readAggregatedDfs(StreamInput in) throws IOException @Override public void readFrom(StreamInput in) throws IOException { int size = in.readVInt(); - termStatistics = XMaps.newMap(size); + termStatistics = HppcMaps.newMap(size); for (int i = 0; i < size; i++) { Term term = new Term(in.readString(), in.readBytesRef()); TermStatistics stats = new TermStatistics(in.readBytesRef(), @@ -82,14 +82,19 @@ public void readFrom(StreamInput in) throws IOException { @Override public void writeTo(final StreamOutput out) throws IOException { out.writeVInt(termStatistics.size()); - for (Map.Entry termTermStatisticsEntry : termStatistics.entrySet()) { - Term term = termTermStatisticsEntry.getKey(); - out.writeString(term.field()); - out.writeBytesRef(term.bytes()); - TermStatistics stats = termTermStatisticsEntry.getValue(); - out.writeBytesRef(stats.term()); - out.writeVLong(stats.docFreq()); - out.writeVLong(DfsSearchResult.addOne(stats.totalTermFreq())); + final boolean[] states = termStatistics.allocated; + final Object[] keys = termStatistics.keys; + final Object[] values = termStatistics.values; + for (int i = 0; i < states.length; i++) { + if (states[i]) { + Term term = (Term) keys[i]; + out.writeString(term.field()); 
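The allocated/keys/values walk in the hunk above is the patch's recurring replacement for Trove's iterators. A short standalone sketch of the pattern follows, with a made-up field-to-count map instead of real CollectionStatistics, assuming hppc 0.5.x where these backing arrays are public fields.

import com.carrotsearch.hppc.ObjectObjectOpenHashMap;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;

public class BucketIterationSketch {
    public static void main(String[] args) {
        ObjectObjectOpenHashMap<String, Long> fieldStats = new ObjectObjectOpenHashMap<String, Long>();
        fieldStats.put("title", 10L);
        fieldStats.put("body", 42L);

        // Hot-path pattern used throughout the patch: walk the map's backing arrays.
        // allocated[i] marks occupied slots; keys/values are erased to Object[],
        // so reads need a cast.
        final boolean[] states = fieldStats.allocated;
        final Object[] keys = fieldStats.keys;
        final Object[] values = fieldStats.values;
        for (int i = 0; i < states.length; i++) {
            if (states[i]) {
                String field = (String) keys[i];
                Long maxDoc = (Long) values[i];
                System.out.println(field + " -> " + maxDoc);
            }
        }

        // Equivalent cursor form, which costs an iterator allocation per pass.
        for (ObjectObjectCursor<String, Long> cursor : fieldStats) {
            System.out.println(cursor.key + " -> " + cursor.value);
        }
    }
}

Walking the backing arrays avoids allocating an iterator on every reduce or serialization call, which is presumably why the patch prefers it on these paths; the cursor form is otherwise equivalent.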
+ out.writeBytesRef(term.bytes()); + TermStatistics stats = (TermStatistics) values[i]; + out.writeBytesRef(stats.term()); + out.writeVLong(stats.docFreq()); + out.writeVLong(DfsSearchResult.addOne(stats.totalTermFreq())); + } } DfsSearchResult.writeFieldStats(out, fieldStatistics); out.writeVLong(maxDoc); diff --git a/src/main/java/org/elasticsearch/search/dfs/DfsPhase.java b/src/main/java/org/elasticsearch/search/dfs/DfsPhase.java index 23db782c669e1..012067ada44e2 100644 --- a/src/main/java/org/elasticsearch/search/dfs/DfsPhase.java +++ b/src/main/java/org/elasticsearch/search/dfs/DfsPhase.java @@ -19,18 +19,23 @@ package org.elasticsearch.search.dfs; +import com.carrotsearch.hppc.ObjectObjectOpenHashMap; +import com.carrotsearch.hppc.ObjectOpenHashSet; +import com.carrotsearch.hppc.cursors.ObjectCursor; import com.google.common.collect.ImmutableMap; -import gnu.trove.set.hash.THashSet; import org.apache.lucene.index.IndexReaderContext; import org.apache.lucene.index.Term; import org.apache.lucene.index.TermContext; import org.apache.lucene.search.CollectionStatistics; import org.apache.lucene.search.TermStatistics; -import org.elasticsearch.common.collect.XMaps; +import org.elasticsearch.common.hppc.HppcMaps; import org.elasticsearch.search.SearchParseElement; import org.elasticsearch.search.SearchPhase; import org.elasticsearch.search.internal.SearchContext; +import java.util.AbstractSet; +import java.util.Collection; +import java.util.Iterator; import java.util.Map; /** @@ -38,10 +43,10 @@ */ public class DfsPhase implements SearchPhase { - private static ThreadLocal> cachedTermsSet = new ThreadLocal>() { + private static ThreadLocal> cachedTermsSet = new ThreadLocal>() { @Override - protected THashSet initialValue() { - return new THashSet(); + protected ObjectOpenHashSet initialValue() { + return new ObjectOpenHashSet(); } }; @@ -55,22 +60,21 @@ public void preProcess(SearchContext context) { } public void execute(SearchContext context) { - THashSet termsSet = null; + final ObjectOpenHashSet termsSet = cachedTermsSet.get(); try { if (!context.queryRewritten()) { context.updateRewriteQuery(context.searcher().rewrite(context.query())); } - termsSet = cachedTermsSet.get(); if (!termsSet.isEmpty()) { termsSet.clear(); } - context.query().extractTerms(termsSet); + context.query().extractTerms(new DelegateSet(termsSet)); if (context.rescore() != null) { - context.rescore().rescorer().extractTerms(context, context.rescore(), termsSet); + context.rescore().rescorer().extractTerms(context, context.rescore(), new DelegateSet(termsSet)); } - Term[] terms = termsSet.toArray(new Term[termsSet.size()]); + Term[] terms = termsSet.toArray(Term.class); TermStatistics[] termStatistics = new TermStatistics[terms.length]; IndexReaderContext indexReaderContext = context.searcher().getTopReaderContext(); for (int i = 0; i < terms.length; i++) { @@ -79,7 +83,7 @@ public void execute(SearchContext context) { termStatistics[i] = context.searcher().termStatistics(terms[i], termContext); } - Map fieldStatistics = XMaps.newNoNullKeysMap(); + ObjectObjectOpenHashMap fieldStatistics = HppcMaps.newNoNullKeysMap(); for (Term term : terms) { assert term.field() != null : "field is null"; if (!fieldStatistics.containsKey(term.field())) { @@ -94,9 +98,58 @@ public void execute(SearchContext context) { } catch (Exception e) { throw new DfsPhaseExecutionException(context, "Exception during dfs phase", e); } finally { - if (termsSet != null) { - termsSet.clear(); // don't hold on to terms + termsSet.clear(); // don't 
hold on to terms + } + } + + // We need to bridge to JCF world, b/c of Query#extractTerms + private static class DelegateSet extends AbstractSet { + + private final ObjectOpenHashSet delegate; + + private DelegateSet(ObjectOpenHashSet delegate) { + this.delegate = delegate; + } + + @Override + public boolean add(Term term) { + return delegate.add(term); + } + + @Override + public boolean addAll(Collection terms) { + boolean result = false; + for (Term term : terms) { + result = delegate.add(term); } + return result; + } + + @Override + public Iterator iterator() { + final Iterator> iterator = delegate.iterator(); + return new Iterator() { + @Override + public boolean hasNext() { + return iterator.hasNext(); + } + + @Override + public Term next() { + return iterator.next().value; + } + + @Override + public void remove() { + throw new UnsupportedOperationException(); + } + }; + } + + @Override + public int size() { + return delegate.size(); } } + } diff --git a/src/main/java/org/elasticsearch/search/dfs/DfsSearchResult.java b/src/main/java/org/elasticsearch/search/dfs/DfsSearchResult.java index 03c865bd96241..046b3dd7d15f4 100644 --- a/src/main/java/org/elasticsearch/search/dfs/DfsSearchResult.java +++ b/src/main/java/org/elasticsearch/search/dfs/DfsSearchResult.java @@ -19,20 +19,20 @@ package org.elasticsearch.search.dfs; -import java.io.IOException; -import java.util.Map; - +import com.carrotsearch.hppc.ObjectObjectOpenHashMap; import org.apache.lucene.index.Term; import org.apache.lucene.search.CollectionStatistics; import org.apache.lucene.search.TermStatistics; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.collect.XMaps; +import org.elasticsearch.common.hppc.HppcMaps; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.transport.TransportResponse; +import java.io.IOException; + /** * */ @@ -45,7 +45,7 @@ public class DfsSearchResult extends TransportResponse implements SearchPhaseRes private long id; private Term[] terms; private TermStatistics[] termStatistics; - private Map fieldStatistics = XMaps.newNoNullKeysMap(); + private ObjectObjectOpenHashMap fieldStatistics = HppcMaps.newNoNullKeysMap(); private int maxDoc; public DfsSearchResult() { @@ -85,7 +85,7 @@ public DfsSearchResult termsStatistics(Term[] terms, TermStatistics[] termStatis return this; } - public DfsSearchResult fieldStatistics(Map fieldStatistics) { + public DfsSearchResult fieldStatistics(ObjectObjectOpenHashMap fieldStatistics) { this.fieldStatistics = fieldStatistics; return this; } @@ -98,7 +98,7 @@ public TermStatistics[] termStatistics() { return termStatistics; } - public Map fieldStatistics() { + public ObjectObjectOpenHashMap fieldStatistics() { return fieldStatistics; } @@ -143,15 +143,21 @@ public void writeTo(StreamOutput out) throws IOException { out.writeVInt(maxDoc); } - public static void writeFieldStats(StreamOutput out, Map fieldStatistics) throws IOException { + public static void writeFieldStats(StreamOutput out, ObjectObjectOpenHashMap fieldStatistics) throws IOException { out.writeVInt(fieldStatistics.size()); - for (Map.Entry entry : fieldStatistics.entrySet()) { - out.writeString(entry.getKey()); - assert entry.getValue().maxDoc() >= 0; - out.writeVLong(entry.getValue().maxDoc()); - out.writeVLong(addOne(entry.getValue().docCount())); - 
out.writeVLong(addOne(entry.getValue().sumTotalTermFreq())); - out.writeVLong(addOne(entry.getValue().sumDocFreq())); + final boolean[] states = fieldStatistics.allocated; + Object[] keys = fieldStatistics.keys; + Object[] values = fieldStatistics.values; + for (int i = 0; i < states.length; i++) { + if (states[i]) { + out.writeString((String) keys[i]); + CollectionStatistics statistics = (CollectionStatistics) values[i]; + assert statistics.maxDoc() >= 0; + out.writeVLong(statistics.maxDoc()); + out.writeVLong(addOne(statistics.docCount())); + out.writeVLong(addOne(statistics.sumTotalTermFreq())); + out.writeVLong(addOne(statistics.sumDocFreq())); + } } } @@ -168,14 +174,14 @@ public static void writeSingleTermStats(StreamOutput out, TermStatistics termSt out.writeVLong(addOne(termStatistic.totalTermFreq())); } - public static Map readFieldStats(StreamInput in) throws IOException { + public static ObjectObjectOpenHashMap readFieldStats(StreamInput in) throws IOException { return readFieldStats(in, null); } - public static Map readFieldStats(StreamInput in, Map fieldStatistics) throws IOException { + public static ObjectObjectOpenHashMap readFieldStats(StreamInput in, ObjectObjectOpenHashMap fieldStatistics) throws IOException { final int numFieldStatistics = in.readVInt(); if (fieldStatistics == null) { - fieldStatistics = XMaps.newNoNullKeysMap(numFieldStatistics); + fieldStatistics = HppcMaps.newNoNullKeysMap(numFieldStatistics); } for (int i = 0; i < numFieldStatistics; i++) { final String field = in.readString(); diff --git a/src/main/java/org/elasticsearch/search/facet/datehistogram/CountDateHistogramFacetExecutor.java b/src/main/java/org/elasticsearch/search/facet/datehistogram/CountDateHistogramFacetExecutor.java index 3c512c5f3db74..9d60bb4970f47 100644 --- a/src/main/java/org/elasticsearch/search/facet/datehistogram/CountDateHistogramFacetExecutor.java +++ b/src/main/java/org/elasticsearch/search/facet/datehistogram/CountDateHistogramFacetExecutor.java @@ -19,8 +19,7 @@ package org.elasticsearch.search.facet.datehistogram; -import gnu.trove.iterator.TLongLongIterator; -import gnu.trove.map.hash.TLongLongHashMap; +import com.carrotsearch.hppc.LongLongOpenHashMap; import org.apache.lucene.index.AtomicReaderContext; import org.elasticsearch.cache.recycler.CacheRecycler; import org.elasticsearch.common.joda.TimeZoneRounding; @@ -43,7 +42,7 @@ public class CountDateHistogramFacetExecutor extends FacetExecutor { private final IndexNumericFieldData indexFieldData; final DateHistogramFacet.ComparatorType comparatorType; - final Recycler.V counts; + final Recycler.V counts; public CountDateHistogramFacetExecutor(IndexNumericFieldData indexFieldData, TimeZoneRounding tzRounding, DateHistogramFacet.ComparatorType comparatorType, CacheRecycler cacheRecycler) { this.comparatorType = comparatorType; @@ -60,14 +59,19 @@ public Collector collector() { @Override public InternalFacet buildFacet(String facetName) { - InternalCountDateHistogramFacet.CountEntry[] entries = new InternalCountDateHistogramFacet.CountEntry[counts.v().size()]; - int i = 0; - for (TLongLongIterator it = counts.v().iterator(); it.hasNext(); ) { - it.advance(); - entries[i++] = new InternalCountDateHistogramFacet.CountEntry(it.key(), it.value()); + InternalCountDateHistogramFacet.CountEntry[] countEntries = new InternalCountDateHistogramFacet.CountEntry[counts.v().size()]; + final boolean[] states = counts.v().allocated; + final long[] keys = counts.v().keys; + final long[] values = counts.v().values; + + int entryIndex = 0; 
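A brief standalone sketch, with made-up timestamp buckets, of hppc's addTo as the counterpart of Trove's adjustOrPutValue used by the histogram facets above; assumes hppc 0.5.x.

import com.carrotsearch.hppc.LongLongOpenHashMap;

public class AddToSketch {
    public static void main(String[] args) {
        // addTo(key, delta) inserts the key with delta if absent, otherwise adds
        // delta to the existing value -- a one-call replacement for Trove's
        // adjustOrPutValue(key, delta, initial) whenever initial == delta,
        // which is how the histogram counters above use it.
        LongLongOpenHashMap counts = new LongLongOpenHashMap();
        long[] bucketedTimestamps = {1000L, 2000L, 1000L, 1000L};
        for (long bucket : bucketedTimestamps) {
            counts.addTo(bucket, 1);
        }
        System.out.println(counts.get(1000L)); // 3
        System.out.println(counts.get(2000L)); // 1
    }
}

No separate "initial value" argument is needed because an absent key behaves as the map's default value of 0 before the delta is applied.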
+ for (int i = 0; i < states.length; i++) { + if (states[i]) { + countEntries[entryIndex++] = new InternalCountDateHistogramFacet.CountEntry(keys[i], values[i]); + } } counts.release(); - return new InternalCountDateHistogramFacet(facetName, comparatorType, entries); + return new InternalCountDateHistogramFacet(facetName, comparatorType, countEntries); } class Collector extends FacetExecutor.Collector { @@ -96,20 +100,20 @@ public void postCollection() { public static class DateHistogramProc extends LongFacetAggregatorBase { - private final TLongLongHashMap counts; + private final LongLongOpenHashMap counts; private final TimeZoneRounding tzRounding; - public DateHistogramProc(TLongLongHashMap counts, TimeZoneRounding tzRounding) { + public DateHistogramProc(LongLongOpenHashMap counts, TimeZoneRounding tzRounding) { this.counts = counts; this.tzRounding = tzRounding; } @Override public void onValue(int docId, long value) { - counts.adjustOrPutValue(tzRounding.calc(value), 1, 1); + counts.addTo(tzRounding.calc(value), 1); } - public TLongLongHashMap counts() { + public LongLongOpenHashMap counts() { return counts; } } diff --git a/src/main/java/org/elasticsearch/search/facet/datehistogram/InternalCountDateHistogramFacet.java b/src/main/java/org/elasticsearch/search/facet/datehistogram/InternalCountDateHistogramFacet.java index 1fd3395e6a73d..d879d0da006ef 100644 --- a/src/main/java/org/elasticsearch/search/facet/datehistogram/InternalCountDateHistogramFacet.java +++ b/src/main/java/org/elasticsearch/search/facet/datehistogram/InternalCountDateHistogramFacet.java @@ -19,8 +19,7 @@ package org.elasticsearch.search.facet.datehistogram; -import gnu.trove.iterator.TLongLongIterator; -import gnu.trove.map.hash.TLongLongHashMap; +import com.carrotsearch.hppc.LongLongOpenHashMap; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.HashedBytesArray; @@ -139,25 +138,29 @@ public Facet reduce(ReduceContext context) { return facets.get(0); } - Recycler.V counts = context.cacheRecycler().longLongMap(-1); + Recycler.V counts = context.cacheRecycler().longLongMap(-1); for (Facet facet : facets) { InternalCountDateHistogramFacet histoFacet = (InternalCountDateHistogramFacet) facet; for (CountEntry entry : histoFacet.entries) { - counts.v().adjustOrPutValue(entry.getTime(), entry.getCount(), entry.getCount()); + counts.v().addTo(entry.getTime(), entry.getCount()); } } - CountEntry[] entries = new CountEntry[counts.v().size()]; - int i = 0; - for (TLongLongIterator it = counts.v().iterator(); it.hasNext(); ) { - it.advance(); - entries[i++] = new CountEntry(it.key(), it.value()); + CountEntry[] countEntries = new CountEntry[counts.v().size()]; + final boolean[] states = counts.v().allocated; + final long[] keys = counts.v().keys; + final long[] values = counts.v().values; + int entriesIndex = 0; + for (int i = 0; i < states.length; i++) { + if (states[i]) { + countEntries[entriesIndex++] = new CountEntry(keys[i], values[i]); + } } counts.release(); - Arrays.sort(entries, comparatorType.comparator()); + Arrays.sort(countEntries, comparatorType.comparator()); - return new InternalCountDateHistogramFacet(getName(), comparatorType, entries); + return new InternalCountDateHistogramFacet(getName(), comparatorType, countEntries); } static final class Fields { diff --git a/src/main/java/org/elasticsearch/search/facet/datehistogram/InternalFullDateHistogramFacet.java 
b/src/main/java/org/elasticsearch/search/facet/datehistogram/InternalFullDateHistogramFacet.java index a5cbb075f5be1..f4fc8148df554 100644 --- a/src/main/java/org/elasticsearch/search/facet/datehistogram/InternalFullDateHistogramFacet.java +++ b/src/main/java/org/elasticsearch/search/facet/datehistogram/InternalFullDateHistogramFacet.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.facet.datehistogram; +import com.carrotsearch.hppc.LongObjectOpenHashMap; import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; @@ -26,7 +27,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.recycler.Recycler; -import org.elasticsearch.common.trove.ExtTLongObjectHashMap; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.search.facet.Facet; @@ -154,7 +154,7 @@ public Facet reduce(ReduceContext context) { return internalFacet; } - Recycler.V> map = context.cacheRecycler().longObjectMap(-1); + Recycler.V> map = context.cacheRecycler().longObjectMap(-1); for (Facet facet : facets) { InternalFullDateHistogramFacet histoFacet = (InternalFullDateHistogramFacet) facet; @@ -177,7 +177,8 @@ public Facet reduce(ReduceContext context) { } // sort - Object[] values = map.v().internalValues(); + // TODO: hppc - not happy with toArray + Object[] values = map.v().values().toArray(); Arrays.sort(values, (Comparator) comparatorType.comparator()); List ordered = new ArrayList(map.v().size()); for (int i = 0; i < map.v().size(); i++) { diff --git a/src/main/java/org/elasticsearch/search/facet/datehistogram/ValueDateHistogramFacetExecutor.java b/src/main/java/org/elasticsearch/search/facet/datehistogram/ValueDateHistogramFacetExecutor.java index 7fd27863b3e26..b580f0ea57ede 100644 --- a/src/main/java/org/elasticsearch/search/facet/datehistogram/ValueDateHistogramFacetExecutor.java +++ b/src/main/java/org/elasticsearch/search/facet/datehistogram/ValueDateHistogramFacetExecutor.java @@ -19,11 +19,11 @@ package org.elasticsearch.search.facet.datehistogram; +import com.carrotsearch.hppc.LongObjectOpenHashMap; import org.apache.lucene.index.AtomicReaderContext; import org.elasticsearch.cache.recycler.CacheRecycler; import org.elasticsearch.common.joda.TimeZoneRounding; import org.elasticsearch.common.recycler.Recycler; -import org.elasticsearch.common.trove.ExtTLongObjectHashMap; import org.elasticsearch.index.fielddata.DoubleValues; import org.elasticsearch.index.fielddata.IndexNumericFieldData; import org.elasticsearch.index.fielddata.LongValues; @@ -45,7 +45,7 @@ public class ValueDateHistogramFacetExecutor extends FacetExecutor { private final DateHistogramFacet.ComparatorType comparatorType; final TimeZoneRounding tzRounding; - final Recycler.V> entries; + final Recycler.V> entries; public ValueDateHistogramFacetExecutor(IndexNumericFieldData keyIndexFieldData, IndexNumericFieldData valueIndexFieldData, TimeZoneRounding tzRounding, DateHistogramFacet.ComparatorType comparatorType, CacheRecycler cacheRecycler) { this.comparatorType = comparatorType; @@ -63,7 +63,16 @@ public Collector collector() { @Override public InternalFacet buildFacet(String facetName) { - ArrayList entries1 = new ArrayList(entries.v().valueCollection()); + ArrayList entries1 = new ArrayList(entries.v().size()); + final boolean[] states = entries.v().allocated; + final Object[] 
values = entries.v().values; + for (int i = 0; i < states.length; i++) { + if (states[i]) { + InternalFullDateHistogramFacet.FullEntry value = (InternalFullDateHistogramFacet.FullEntry) values[i]; + entries1.add(value); + } + } + entries.release(); return new InternalFullDateHistogramFacet(facetName, comparatorType, entries1); } @@ -95,14 +104,14 @@ public void postCollection() { public static class DateHistogramProc extends LongFacetAggregatorBase { - final ExtTLongObjectHashMap entries; + final LongObjectOpenHashMap entries; private final TimeZoneRounding tzRounding; DoubleValues valueValues; final ValueAggregator valueAggregator = new ValueAggregator(); - public DateHistogramProc(TimeZoneRounding tzRounding, ExtTLongObjectHashMap entries) { + public DateHistogramProc(TimeZoneRounding tzRounding, LongObjectOpenHashMap entries) { this.tzRounding = tzRounding; this.entries = entries; } diff --git a/src/main/java/org/elasticsearch/search/facet/datehistogram/ValueScriptDateHistogramFacetExecutor.java b/src/main/java/org/elasticsearch/search/facet/datehistogram/ValueScriptDateHistogramFacetExecutor.java index df2da647014e4..0a1c847d361c9 100644 --- a/src/main/java/org/elasticsearch/search/facet/datehistogram/ValueScriptDateHistogramFacetExecutor.java +++ b/src/main/java/org/elasticsearch/search/facet/datehistogram/ValueScriptDateHistogramFacetExecutor.java @@ -19,12 +19,12 @@ package org.elasticsearch.search.facet.datehistogram; +import com.carrotsearch.hppc.LongObjectOpenHashMap; import org.apache.lucene.index.AtomicReaderContext; import org.apache.lucene.search.Scorer; import org.elasticsearch.cache.recycler.CacheRecycler; import org.elasticsearch.common.joda.TimeZoneRounding; import org.elasticsearch.common.recycler.Recycler; -import org.elasticsearch.common.trove.ExtTLongObjectHashMap; import org.elasticsearch.index.fielddata.IndexNumericFieldData; import org.elasticsearch.index.fielddata.LongValues; import org.elasticsearch.script.SearchScript; @@ -46,7 +46,7 @@ public class ValueScriptDateHistogramFacetExecutor extends FacetExecutor { final SearchScript valueScript; final TimeZoneRounding tzRounding; - final Recycler.V> entries; + final Recycler.V> entries; public ValueScriptDateHistogramFacetExecutor(IndexNumericFieldData keyIndexFieldData, SearchScript valueScript, TimeZoneRounding tzRounding, DateHistogramFacet.ComparatorType comparatorType, CacheRecycler cacheRecycler) { this.comparatorType = comparatorType; @@ -64,7 +64,16 @@ public Collector collector() { @Override public InternalFacet buildFacet(String facetName) { - ArrayList entries1 = new ArrayList(entries.v().valueCollection()); + ArrayList entries1 = new ArrayList(entries.v().size()); + final boolean[] states = entries.v().allocated; + final Object[] values = entries.v().values; + for (int i = 0; i < states.length; i++) { + if (states[i]) { + InternalFullDateHistogramFacet.FullEntry value = (InternalFullDateHistogramFacet.FullEntry) values[i]; + entries1.add(value); + } + } + entries.release(); return new InternalFullDateHistogramFacet(facetName, comparatorType, entries1); } @@ -104,9 +113,9 @@ public static class DateHistogramProc extends LongFacetAggregatorBase { private final TimeZoneRounding tzRounding; protected final SearchScript valueScript; - final ExtTLongObjectHashMap entries; + final LongObjectOpenHashMap entries; - public DateHistogramProc(TimeZoneRounding tzRounding, SearchScript valueScript, final ExtTLongObjectHashMap entries) { + public DateHistogramProc(TimeZoneRounding tzRounding, SearchScript valueScript, 
final LongObjectOpenHashMap entries) { this.tzRounding = tzRounding; this.valueScript = valueScript; this.entries = entries; diff --git a/src/main/java/org/elasticsearch/search/facet/histogram/CountHistogramFacetExecutor.java b/src/main/java/org/elasticsearch/search/facet/histogram/CountHistogramFacetExecutor.java index dace6825ac2fe..e8888d0b0411c 100644 --- a/src/main/java/org/elasticsearch/search/facet/histogram/CountHistogramFacetExecutor.java +++ b/src/main/java/org/elasticsearch/search/facet/histogram/CountHistogramFacetExecutor.java @@ -19,8 +19,7 @@ package org.elasticsearch.search.facet.histogram; -import gnu.trove.iterator.TLongLongIterator; -import gnu.trove.map.hash.TLongLongHashMap; +import com.carrotsearch.hppc.LongLongOpenHashMap; import org.apache.lucene.index.AtomicReaderContext; import org.elasticsearch.common.recycler.Recycler; import org.elasticsearch.index.fielddata.DoubleValues; @@ -42,7 +41,7 @@ public class CountHistogramFacetExecutor extends FacetExecutor { private final HistogramFacet.ComparatorType comparatorType; final long interval; - final Recycler.V counts; + final Recycler.V counts; public CountHistogramFacetExecutor(IndexNumericFieldData indexFieldData, long interval, HistogramFacet.ComparatorType comparatorType, SearchContext context) { this.comparatorType = comparatorType; @@ -60,10 +59,14 @@ public Collector collector() { @Override public InternalFacet buildFacet(String facetName) { InternalCountHistogramFacet.CountEntry[] entries = new InternalCountHistogramFacet.CountEntry[counts.v().size()]; - int i = 0; - for (TLongLongIterator it = counts.v().iterator(); it.hasNext(); ) { - it.advance(); - entries[i++] = new InternalCountHistogramFacet.CountEntry(it.key(), it.value()); + final boolean[] states = counts.v().allocated; + final long[] keys = counts.v().keys; + final long[] values = counts.v().values; + int entryIndex = 0; + for (int i = 0; i < states.length; i++) { + if (states[i]) { + entries[entryIndex++] = new InternalCountHistogramFacet.CountEntry(keys[i], values[i]); + } } counts.release(); return new InternalCountHistogramFacet(facetName, comparatorType, entries); @@ -100,9 +103,9 @@ public void postCollection() { public final static class HistogramProc extends DoubleFacetAggregatorBase { private final long interval; - private final TLongLongHashMap counts; + private final LongLongOpenHashMap counts; - public HistogramProc(long interval, TLongLongHashMap counts) { + public HistogramProc(long interval, LongLongOpenHashMap counts) { this.interval = interval; this.counts = counts; } @@ -110,10 +113,10 @@ public HistogramProc(long interval, TLongLongHashMap counts) { @Override public void onValue(int docId, double value) { long bucket = bucket(value, interval); - counts.adjustOrPutValue(bucket, 1, 1); + counts.addTo(bucket, 1); } - public TLongLongHashMap counts() { + public LongLongOpenHashMap counts() { return counts; } } diff --git a/src/main/java/org/elasticsearch/search/facet/histogram/FullHistogramFacetExecutor.java b/src/main/java/org/elasticsearch/search/facet/histogram/FullHistogramFacetExecutor.java index 1f4c218edb311..c0251094676f0 100644 --- a/src/main/java/org/elasticsearch/search/facet/histogram/FullHistogramFacetExecutor.java +++ b/src/main/java/org/elasticsearch/search/facet/histogram/FullHistogramFacetExecutor.java @@ -19,9 +19,9 @@ package org.elasticsearch.search.facet.histogram; +import com.carrotsearch.hppc.LongObjectOpenHashMap; import org.apache.lucene.index.AtomicReaderContext; import 
org.elasticsearch.common.recycler.Recycler; -import org.elasticsearch.common.trove.ExtTLongObjectHashMap; import org.elasticsearch.index.fielddata.DoubleValues; import org.elasticsearch.index.fielddata.IndexNumericFieldData; import org.elasticsearch.search.facet.DoubleFacetAggregatorBase; @@ -43,7 +43,7 @@ public class FullHistogramFacetExecutor extends FacetExecutor { private final HistogramFacet.ComparatorType comparatorType; final long interval; - final Recycler.V> entries; + final Recycler.V> entries; public FullHistogramFacetExecutor(IndexNumericFieldData indexFieldData, long interval, HistogramFacet.ComparatorType comparatorType, SearchContext context) { this.comparatorType = comparatorType; @@ -60,9 +60,16 @@ public Collector collector() { @Override public InternalFacet buildFacet(String facetName) { - List entries1 = new ArrayList(entries.v().valueCollection()); + List fullEntries = new ArrayList(entries.v().size()); + boolean[] states = entries.v().allocated; + Object[] values = entries.v().values; + for (int i = 0; i < states.length; i++) { + if (states[i]) { + fullEntries.add((InternalFullHistogramFacet.FullEntry) values[i]); + } + } entries.release(); - return new InternalFullHistogramFacet(facetName, comparatorType, entries1); + return new InternalFullHistogramFacet(facetName, comparatorType, fullEntries); } public static long bucket(double value, long interval) { @@ -96,9 +103,9 @@ public void postCollection() { public final static class HistogramProc extends DoubleFacetAggregatorBase { final long interval; - final ExtTLongObjectHashMap entries; + final LongObjectOpenHashMap entries; - public HistogramProc(long interval, ExtTLongObjectHashMap entries) { + public HistogramProc(long interval, LongObjectOpenHashMap entries) { this.interval = interval; this.entries = entries; } diff --git a/src/main/java/org/elasticsearch/search/facet/histogram/InternalCountHistogramFacet.java b/src/main/java/org/elasticsearch/search/facet/histogram/InternalCountHistogramFacet.java index 328d0e9f03565..ba70a2db69d60 100644 --- a/src/main/java/org/elasticsearch/search/facet/histogram/InternalCountHistogramFacet.java +++ b/src/main/java/org/elasticsearch/search/facet/histogram/InternalCountHistogramFacet.java @@ -19,8 +19,7 @@ package org.elasticsearch.search.facet.histogram; -import gnu.trove.iterator.TLongLongIterator; -import gnu.trove.map.hash.TLongLongHashMap; +import com.carrotsearch.hppc.LongLongOpenHashMap; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.HashedBytesArray; @@ -140,18 +139,22 @@ public Facet reduce(ReduceContext context) { return facets.get(0); } - Recycler.V counts = context.cacheRecycler().longLongMap(-1); + Recycler.V counts = context.cacheRecycler().longLongMap(-1); for (Facet facet : facets) { InternalCountHistogramFacet histoFacet = (InternalCountHistogramFacet) facet; for (Entry entry : histoFacet.entries) { - counts.v().adjustOrPutValue(entry.getKey(), entry.getCount(), entry.getCount()); + counts.v().addTo(entry.getKey(), entry.getCount()); } } + final boolean[] states = counts.v().allocated; + final long[] keys = counts.v().keys; + final long[] values = counts.v().values; CountEntry[] entries = new CountEntry[counts.v().size()]; - int i = 0; - for (TLongLongIterator it = counts.v().iterator(); it.hasNext(); ) { - it.advance(); - entries[i++] = new CountEntry(it.key(), it.value()); + int entryIndex = 0; + for (int i = 0; i < states.length; i++) { + if (states[i]) { + 
entries[entryIndex++] = new CountEntry(keys[i], values[i]); + } } counts.release(); diff --git a/src/main/java/org/elasticsearch/search/facet/histogram/InternalFullHistogramFacet.java b/src/main/java/org/elasticsearch/search/facet/histogram/InternalFullHistogramFacet.java index ebef61447f739..124f148e2a16d 100644 --- a/src/main/java/org/elasticsearch/search/facet/histogram/InternalFullHistogramFacet.java +++ b/src/main/java/org/elasticsearch/search/facet/histogram/InternalFullHistogramFacet.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.facet.histogram; +import com.carrotsearch.hppc.LongObjectOpenHashMap; import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; @@ -26,7 +27,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.recycler.Recycler; -import org.elasticsearch.common.trove.ExtTLongObjectHashMap; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.search.facet.Facet; @@ -151,7 +151,7 @@ public Facet reduce(ReduceContext context) { return internalFacet; } - Recycler.V> map = context.cacheRecycler().longObjectMap(-1); + Recycler.V> map = context.cacheRecycler().longObjectMap(-1); for (Facet facet : facets) { InternalFullHistogramFacet histoFacet = (InternalFullHistogramFacet) facet; @@ -174,7 +174,8 @@ public Facet reduce(ReduceContext context) { } // sort - Object[] values = map.v().internalValues(); + // TODO: hppc - toArray? + Object[] values = map.v().values().toArray(); Arrays.sort(values, (Comparator) comparatorType.comparator()); List ordered = new ArrayList(map.v().size()); for (int i = 0; i < map.v().size(); i++) { diff --git a/src/main/java/org/elasticsearch/search/facet/histogram/ScriptHistogramFacetExecutor.java b/src/main/java/org/elasticsearch/search/facet/histogram/ScriptHistogramFacetExecutor.java index 85fca2d808e61..d606aa499e862 100644 --- a/src/main/java/org/elasticsearch/search/facet/histogram/ScriptHistogramFacetExecutor.java +++ b/src/main/java/org/elasticsearch/search/facet/histogram/ScriptHistogramFacetExecutor.java @@ -19,10 +19,10 @@ package org.elasticsearch.search.facet.histogram; +import com.carrotsearch.hppc.LongObjectOpenHashMap; import org.apache.lucene.index.AtomicReaderContext; import org.apache.lucene.search.Scorer; import org.elasticsearch.common.recycler.Recycler; -import org.elasticsearch.common.trove.ExtTLongObjectHashMap; import org.elasticsearch.script.SearchScript; import org.elasticsearch.search.facet.FacetExecutor; import org.elasticsearch.search.facet.InternalFacet; @@ -43,7 +43,7 @@ public class ScriptHistogramFacetExecutor extends FacetExecutor { final long interval; private final HistogramFacet.ComparatorType comparatorType; - final Recycler.V> entries; + final Recycler.V> entries; public ScriptHistogramFacetExecutor(String scriptLang, String keyScript, String valueScript, Map params, long interval, HistogramFacet.ComparatorType comparatorType, SearchContext context) { this.keyScript = context.scriptService().search(context.lookup(), scriptLang, keyScript, params); @@ -61,7 +61,16 @@ public Collector collector() { @Override public InternalFacet buildFacet(String facetName) { - List entries1 = new ArrayList(entries.v().valueCollection()); + List entries1 = new ArrayList(entries.v().size()); + final boolean[] states = entries.v().allocated; + final Object[] 
values = entries.v().values; + for (int i = 0; i < states.length; i++) { + if (states[i]) { + InternalFullHistogramFacet.FullEntry value = (InternalFullHistogramFacet.FullEntry) values[i]; + entries1.add(value); + } + } + entries.release(); return new InternalFullHistogramFacet(facetName, comparatorType, entries1); } @@ -72,9 +81,9 @@ public static long bucket(double value, long interval) { class Collector extends FacetExecutor.Collector { - final ExtTLongObjectHashMap entries; + final LongObjectOpenHashMap entries; - Collector(ExtTLongObjectHashMap entries) { + Collector(LongObjectOpenHashMap entries) { this.entries = entries; } diff --git a/src/main/java/org/elasticsearch/search/facet/histogram/ValueHistogramFacetExecutor.java b/src/main/java/org/elasticsearch/search/facet/histogram/ValueHistogramFacetExecutor.java index 6248d4f578973..65db5484b3100 100644 --- a/src/main/java/org/elasticsearch/search/facet/histogram/ValueHistogramFacetExecutor.java +++ b/src/main/java/org/elasticsearch/search/facet/histogram/ValueHistogramFacetExecutor.java @@ -19,9 +19,9 @@ package org.elasticsearch.search.facet.histogram; +import com.carrotsearch.hppc.LongObjectOpenHashMap; import org.apache.lucene.index.AtomicReaderContext; import org.elasticsearch.common.recycler.Recycler; -import org.elasticsearch.common.trove.ExtTLongObjectHashMap; import org.elasticsearch.index.fielddata.DoubleValues; import org.elasticsearch.index.fielddata.IndexNumericFieldData; import org.elasticsearch.search.facet.DoubleFacetAggregatorBase; @@ -43,7 +43,7 @@ public class ValueHistogramFacetExecutor extends FacetExecutor { private final HistogramFacet.ComparatorType comparatorType; private final long interval; - final Recycler.V> entries; + final Recycler.V> entries; public ValueHistogramFacetExecutor(IndexNumericFieldData keyIndexFieldData, IndexNumericFieldData valueIndexFieldData, long interval, HistogramFacet.ComparatorType comparatorType, SearchContext context) { this.comparatorType = comparatorType; @@ -60,7 +60,16 @@ public Collector collector() { @Override public InternalFacet buildFacet(String facetName) { - List entries1 = new ArrayList(entries.v().valueCollection()); + List entries1 = new ArrayList(entries.v().size()); + final boolean [] states = entries.v().allocated; + final Object[] values = entries.v().values; + + for (int i = 0; i < states.length; i++) { + if (states[i]) { + InternalFullHistogramFacet.FullEntry value = (InternalFullHistogramFacet.FullEntry) values[i]; + entries1.add(value); + } + } entries.release(); return new InternalFullHistogramFacet(facetName, comparatorType, entries1); } @@ -93,13 +102,13 @@ public void postCollection() { public final static class HistogramProc extends DoubleFacetAggregatorBase { final long interval; - final ExtTLongObjectHashMap entries; + final LongObjectOpenHashMap entries; DoubleValues valueValues; final ValueAggregator valueAggregator = new ValueAggregator(); - public HistogramProc(long interval, ExtTLongObjectHashMap entries) { + public HistogramProc(long interval, LongObjectOpenHashMap entries) { this.interval = interval; this.entries = entries; } diff --git a/src/main/java/org/elasticsearch/search/facet/histogram/ValueScriptHistogramFacetExecutor.java b/src/main/java/org/elasticsearch/search/facet/histogram/ValueScriptHistogramFacetExecutor.java index 82ece41fada96..b2821cac14823 100644 --- a/src/main/java/org/elasticsearch/search/facet/histogram/ValueScriptHistogramFacetExecutor.java +++ 
b/src/main/java/org/elasticsearch/search/facet/histogram/ValueScriptHistogramFacetExecutor.java @@ -19,10 +19,10 @@ package org.elasticsearch.search.facet.histogram; +import com.carrotsearch.hppc.LongObjectOpenHashMap; import org.apache.lucene.index.AtomicReaderContext; import org.apache.lucene.search.Scorer; import org.elasticsearch.common.recycler.Recycler; -import org.elasticsearch.common.trove.ExtTLongObjectHashMap; import org.elasticsearch.index.fielddata.DoubleValues; import org.elasticsearch.index.fielddata.IndexNumericFieldData; import org.elasticsearch.script.SearchScript; @@ -47,7 +47,7 @@ public class ValueScriptHistogramFacetExecutor extends FacetExecutor { final SearchScript valueScript; final long interval; - final Recycler.V> entries; + final Recycler.V> entries; public ValueScriptHistogramFacetExecutor(IndexNumericFieldData indexFieldData, String scriptLang, String valueScript, Map params, long interval, HistogramFacet.ComparatorType comparatorType, SearchContext context) { this.comparatorType = comparatorType; @@ -65,7 +65,16 @@ public Collector collector() { @Override public InternalFacet buildFacet(String facetName) { - List entries1 = new ArrayList(entries.v().valueCollection()); + List entries1 = new ArrayList(entries.v().size()); + final boolean[] states = entries.v().allocated; + final Object[] values = entries.v().values; + for (int i = 0; i < states.length; i++) { + if (states[i]) { + InternalFullHistogramFacet.FullEntry value = (InternalFullHistogramFacet.FullEntry) values[i]; + entries1.add(value); + } + } + entries.release(); return new InternalFullHistogramFacet(facetName, comparatorType, entries1); } @@ -110,9 +119,9 @@ public static class HistogramProc extends DoubleFacetAggregatorBase { private final SearchScript valueScript; - final ExtTLongObjectHashMap entries; + final LongObjectOpenHashMap entries; - public HistogramProc(long interval, SearchScript valueScript, ExtTLongObjectHashMap entries) { + public HistogramProc(long interval, SearchScript valueScript, LongObjectOpenHashMap entries) { this.interval = interval; this.valueScript = valueScript; this.entries = entries; diff --git a/src/main/java/org/elasticsearch/search/facet/terms/doubles/InternalDoubleTermsFacet.java b/src/main/java/org/elasticsearch/search/facet/terms/doubles/InternalDoubleTermsFacet.java index 10b59a990ff38..955e17b7a5235 100644 --- a/src/main/java/org/elasticsearch/search/facet/terms/doubles/InternalDoubleTermsFacet.java +++ b/src/main/java/org/elasticsearch/search/facet/terms/doubles/InternalDoubleTermsFacet.java @@ -19,9 +19,8 @@ package org.elasticsearch.search.facet.terms.doubles; +import com.carrotsearch.hppc.DoubleIntOpenHashMap; import com.google.common.collect.ImmutableList; -import gnu.trove.iterator.TDoubleIntIterator; -import gnu.trove.map.hash.TDoubleIntHashMap; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.HashedBytesArray; @@ -173,7 +172,7 @@ public Facet reduce(ReduceContext context) { InternalDoubleTermsFacet first = null; - Recycler.V aggregated = context.cacheRecycler().doubleIntMap(-1); + Recycler.V aggregated = context.cacheRecycler().doubleIntMap(-1); long missing = 0; long total = 0; for (Facet facet : facets) { @@ -185,15 +184,20 @@ public Facet reduce(ReduceContext context) { missing += termsFacet.getMissingCount(); total += termsFacet.getTotalCount(); for (Entry entry : termsFacet.getEntries()) { - aggregated.v().adjustOrPutValue(((DoubleEntry) entry).term, 
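// Trove's adjustOrPutValue(key, adjust, put) with adjust == put maps one-to-one onto HPPC's addTo(key, increment)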
entry.getCount(), entry.getCount()); + aggregated.v().addTo(((DoubleEntry) entry).term, entry.getCount()); } } BoundedTreeSet ordered = new BoundedTreeSet(first.comparatorType.comparator(), first.requiredSize); - for (TDoubleIntIterator it = aggregated.v().iterator(); it.hasNext(); ) { - it.advance(); - ordered.add(new DoubleEntry(it.key(), it.value())); + final boolean[] states = aggregated.v().allocated; + final double[] keys = aggregated.v().keys; + final int[] values = aggregated.v().values; + for (int i = 0; i < states.length; i++) { + if (states[i]) { + ordered.add(new DoubleEntry(keys[i], values[i])); + } } + first.entries = ordered; first.missing = missing; first.total = total; diff --git a/src/main/java/org/elasticsearch/search/facet/terms/doubles/TermsDoubleFacetExecutor.java b/src/main/java/org/elasticsearch/search/facet/terms/doubles/TermsDoubleFacetExecutor.java index 821dd23c2483c..64f5527584990 100644 --- a/src/main/java/org/elasticsearch/search/facet/terms/doubles/TermsDoubleFacetExecutor.java +++ b/src/main/java/org/elasticsearch/search/facet/terms/doubles/TermsDoubleFacetExecutor.java @@ -19,11 +19,10 @@ package org.elasticsearch.search.facet.terms.doubles; +import com.carrotsearch.hppc.DoubleIntOpenHashMap; +import com.carrotsearch.hppc.DoubleOpenHashSet; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; -import gnu.trove.iterator.TDoubleIntIterator; -import gnu.trove.map.hash.TDoubleIntHashMap; -import gnu.trove.set.hash.TDoubleHashSet; import org.apache.lucene.index.AtomicReaderContext; import org.apache.lucene.search.Scorer; import org.apache.lucene.util.BytesRef; @@ -57,7 +56,7 @@ public class TermsDoubleFacetExecutor extends FacetExecutor { private final SearchScript script; private final ImmutableSet excluded; - final Recycler.V facets; + final Recycler.V facets; long missing; long total; @@ -121,11 +120,15 @@ public InternalFacet buildFacet(String facetName) { facets.release(); return new InternalDoubleTermsFacet(facetName, comparatorType, size, ImmutableList.of(), missing, total); } else { + final boolean[] states = facets.v().allocated; + final double[] keys = facets.v().keys; + final int[] values = facets.v().values; if (size < EntryPriorityQueue.LIMIT) { EntryPriorityQueue ordered = new EntryPriorityQueue(shardSize, comparatorType.comparator()); - for (TDoubleIntIterator it = facets.v().iterator(); it.hasNext(); ) { - it.advance(); - ordered.insertWithOverflow(new InternalDoubleTermsFacet.DoubleEntry(it.key(), it.value())); + for (int i = 0; i < states.length; i++) { + if (states[i]) { + ordered.insertWithOverflow(new InternalDoubleTermsFacet.DoubleEntry(keys[i], values[i])); + } } InternalDoubleTermsFacet.DoubleEntry[] list = new InternalDoubleTermsFacet.DoubleEntry[ordered.size()]; for (int i = ordered.size() - 1; i >= 0; i--) { @@ -135,9 +138,10 @@ public InternalFacet buildFacet(String facetName) { return new InternalDoubleTermsFacet(facetName, comparatorType, size, Arrays.asList(list), missing, total); } else { BoundedTreeSet ordered = new BoundedTreeSet(comparatorType.comparator(), shardSize); - for (TDoubleIntIterator it = facets.v().iterator(); it.hasNext(); ) { - it.advance(); - ordered.add(new InternalDoubleTermsFacet.DoubleEntry(it.key(), it.value())); + for (int i = 0; i < states.length; i++) { + if (states[i]) { + ordered.add(new InternalDoubleTermsFacet.DoubleEntry(keys[i], values[i])); + } } facets.release(); return new InternalDoubleTermsFacet(facetName, comparatorType, size, ordered, missing, 
total); @@ -189,15 +193,15 @@ public static class AggregatorValueProc extends StaticAggregatorValueProc { private final SearchScript script; - private final TDoubleHashSet excluded; + private final DoubleOpenHashSet excluded; - public AggregatorValueProc(TDoubleIntHashMap facets, Set excluded, SearchScript script) { + public AggregatorValueProc(DoubleIntOpenHashMap facets, Set excluded, SearchScript script) { super(facets); this.script = script; if (excluded == null || excluded.isEmpty()) { this.excluded = null; } else { - this.excluded = new TDoubleHashSet(excluded.size()); + this.excluded = new DoubleOpenHashSet(excluded.size()); for (BytesRef s : excluded) { this.excluded.add(Double.parseDouble(s.utf8ToString())); } @@ -230,18 +234,18 @@ public void onValue(int docId, double value) { public static class StaticAggregatorValueProc extends DoubleFacetAggregatorBase { - private final TDoubleIntHashMap facets; + private final DoubleIntOpenHashMap facets; - public StaticAggregatorValueProc(TDoubleIntHashMap facets) { + public StaticAggregatorValueProc(DoubleIntOpenHashMap facets) { this.facets = facets; } @Override public void onValue(int docId, double value) { - facets.adjustOrPutValue(value, 1, 1); + facets.addTo(value, 1); } - public final TDoubleIntHashMap facets() { + public final DoubleIntOpenHashMap facets() { return facets; } } diff --git a/src/main/java/org/elasticsearch/search/facet/terms/longs/InternalLongTermsFacet.java b/src/main/java/org/elasticsearch/search/facet/terms/longs/InternalLongTermsFacet.java index af89ec4d26a52..ba36487e19776 100644 --- a/src/main/java/org/elasticsearch/search/facet/terms/longs/InternalLongTermsFacet.java +++ b/src/main/java/org/elasticsearch/search/facet/terms/longs/InternalLongTermsFacet.java @@ -19,9 +19,8 @@ package org.elasticsearch.search.facet.terms.longs; +import com.carrotsearch.hppc.LongIntOpenHashMap; import com.google.common.collect.ImmutableList; -import gnu.trove.iterator.TLongIntIterator; -import gnu.trove.map.hash.TLongIntHashMap; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.HashedBytesArray; @@ -174,7 +173,7 @@ public Facet reduce(ReduceContext context) { InternalLongTermsFacet first = null; - Recycler.V aggregated = context.cacheRecycler().longIntMap(-1); + Recycler.V aggregated = context.cacheRecycler().longIntMap(-1); long missing = 0; long total = 0; for (Facet facet : facets) { @@ -186,14 +185,19 @@ public Facet reduce(ReduceContext context) { missing += termsFacet.getMissingCount(); total += termsFacet.getTotalCount(); for (Entry entry : termsFacet.getEntries()) { - aggregated.v().adjustOrPutValue(((LongEntry) entry).term, entry.getCount(), entry.getCount()); + aggregated.v().addTo(((LongEntry) entry).term, entry.getCount()); } } BoundedTreeSet ordered = new BoundedTreeSet(first.comparatorType.comparator(), first.requiredSize); - for (TLongIntIterator it = aggregated.v().iterator(); it.hasNext(); ) { - it.advance(); - ordered.add(new LongEntry(it.key(), it.value())); + LongIntOpenHashMap entries = aggregated.v(); + final boolean[] states = aggregated.v().allocated; + final long[] keys = aggregated.v().keys; + final int[] values = aggregated.v().values; + for (int i = 0; i < entries.allocated.length; i++) { + if (states[i]) { + ordered.add(new LongEntry(keys[i], values[i])); + } } first.entries = ordered; first.missing = missing; diff --git a/src/main/java/org/elasticsearch/search/facet/terms/longs/TermsLongFacetExecutor.java 
b/src/main/java/org/elasticsearch/search/facet/terms/longs/TermsLongFacetExecutor.java index 9c39268093154..60db59fcf3c03 100644 --- a/src/main/java/org/elasticsearch/search/facet/terms/longs/TermsLongFacetExecutor.java +++ b/src/main/java/org/elasticsearch/search/facet/terms/longs/TermsLongFacetExecutor.java @@ -19,11 +19,10 @@ package org.elasticsearch.search.facet.terms.longs; +import com.carrotsearch.hppc.LongIntOpenHashMap; +import com.carrotsearch.hppc.LongOpenHashSet; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; -import gnu.trove.iterator.TLongIntIterator; -import gnu.trove.map.hash.TLongIntHashMap; -import gnu.trove.set.hash.TLongHashSet; import org.apache.lucene.index.AtomicReaderContext; import org.apache.lucene.search.Scorer; import org.apache.lucene.util.BytesRef; @@ -57,7 +56,7 @@ public class TermsLongFacetExecutor extends FacetExecutor { private final SearchScript script; private final ImmutableSet excluded; - final Recycler.V facets; + final Recycler.V facets; long missing; long total; @@ -120,11 +119,16 @@ public InternalFacet buildFacet(String facetName) { facets.release(); return new InternalLongTermsFacet(facetName, comparatorType, size, ImmutableList.of(), missing, total); } else { + LongIntOpenHashMap facetEntries = facets.v(); + final boolean[] states = facets.v().allocated; + final long[] keys = facets.v().keys; + final int[] values = facets.v().values; if (size < EntryPriorityQueue.LIMIT) { EntryPriorityQueue ordered = new EntryPriorityQueue(shardSize, comparatorType.comparator()); - for (TLongIntIterator it = facets.v().iterator(); it.hasNext(); ) { - it.advance(); - ordered.insertWithOverflow(new InternalLongTermsFacet.LongEntry(it.key(), it.value())); + for (int i = 0; i < states.length; i++) { + if (states[i]) { + ordered.insertWithOverflow(new InternalLongTermsFacet.LongEntry(keys[i], values[i])); + } } InternalLongTermsFacet.LongEntry[] list = new InternalLongTermsFacet.LongEntry[ordered.size()]; for (int i = ordered.size() - 1; i >= 0; i--) { @@ -134,9 +138,10 @@ public InternalFacet buildFacet(String facetName) { return new InternalLongTermsFacet(facetName, comparatorType, size, Arrays.asList(list), missing, total); } else { BoundedTreeSet ordered = new BoundedTreeSet(comparatorType.comparator(), shardSize); - for (TLongIntIterator it = facets.v().iterator(); it.hasNext(); ) { - it.advance(); - ordered.add(new InternalLongTermsFacet.LongEntry(it.key(), it.value())); + for (int i = 0; i < states.length; i++) { + if (states[i]) { + ordered.add(new InternalLongTermsFacet.LongEntry(keys[i], values[i])); + } } facets.release(); return new InternalLongTermsFacet(facetName, comparatorType, size, ordered, missing, total); @@ -188,15 +193,15 @@ public static class AggregatorValueProc extends StaticAggregatorValueProc { private final SearchScript script; - private final TLongHashSet excluded; + private final LongOpenHashSet excluded; - public AggregatorValueProc(TLongIntHashMap facets, Set excluded, SearchScript script) { + public AggregatorValueProc(LongIntOpenHashMap facets, Set excluded, SearchScript script) { super(facets); this.script = script; if (excluded == null || excluded.isEmpty()) { this.excluded = null; } else { - this.excluded = new TLongHashSet(excluded.size()); + this.excluded = new LongOpenHashSet(excluded.size()); for (BytesRef s : excluded) { this.excluded.add(Long.parseLong(s.utf8ToString())); } @@ -229,18 +234,18 @@ public void onValue(int docId, long value) { public static class 
StaticAggregatorValueProc extends LongFacetAggregatorBase { - private final TLongIntHashMap facets; + private final LongIntOpenHashMap facets; - public StaticAggregatorValueProc(TLongIntHashMap facets) { + public StaticAggregatorValueProc(LongIntOpenHashMap facets) { this.facets = facets; } @Override public void onValue(int docId, long value) { - facets.adjustOrPutValue(value, 1, 1); + facets.addTo(value, 1); } - public final TLongIntHashMap facets() { + public final LongIntOpenHashMap facets() { return facets; } } diff --git a/src/main/java/org/elasticsearch/search/facet/terms/strings/HashedAggregator.java b/src/main/java/org/elasticsearch/search/facet/terms/strings/HashedAggregator.java index 7dff3883e738d..3a2ee854d16de 100644 --- a/src/main/java/org/elasticsearch/search/facet/terms/strings/HashedAggregator.java +++ b/src/main/java/org/elasticsearch/search/facet/terms/strings/HashedAggregator.java @@ -18,8 +18,8 @@ */ package org.elasticsearch.search.facet.terms.strings; +import com.carrotsearch.hppc.ObjectIntOpenHashMap; import com.google.common.collect.ImmutableList; -import gnu.trove.map.hash.TObjectIntHashMap; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefHash; @@ -235,12 +235,12 @@ private static final class AssertingHashCount implements HashCount { // simple // implemenation // for // assertions - private final TObjectIntHashMap valuesAndCount = new TObjectIntHashMap(); + private final ObjectIntOpenHashMap valuesAndCount = new ObjectIntOpenHashMap(); private HashedBytesRef spare = new HashedBytesRef(); @Override public boolean add(BytesRef value, int hashCode, BytesValues values) { - int adjustedValue = valuesAndCount.adjustOrPutValue(spare.reset(value, hashCode), 1, 1); + int adjustedValue = valuesAndCount.addTo(spare.reset(value, hashCode), 1); assert adjustedValue >= 1; if (adjustedValue == 1) { // only if we added the spare we create a // new instance @@ -268,7 +268,7 @@ public void release() { @Override public boolean addNoCount(BytesRef value, int hashCode, BytesValues values) { if (!valuesAndCount.containsKey(spare.reset(value, hashCode))) { - valuesAndCount.adjustOrPutValue(spare.reset(value, hashCode), 0, 0); + valuesAndCount.addTo(spare.reset(value, hashCode), 0); spare.bytes = values.makeSafe(spare.bytes); spare = new HashedBytesRef(); return true; diff --git a/src/main/java/org/elasticsearch/search/facet/terms/strings/InternalStringTermsFacet.java b/src/main/java/org/elasticsearch/search/facet/terms/strings/InternalStringTermsFacet.java index 491232ab12321..7fecd540b3aac 100644 --- a/src/main/java/org/elasticsearch/search/facet/terms/strings/InternalStringTermsFacet.java +++ b/src/main/java/org/elasticsearch/search/facet/terms/strings/InternalStringTermsFacet.java @@ -19,9 +19,8 @@ package org.elasticsearch.search.facet.terms.strings; +import com.carrotsearch.hppc.ObjectIntOpenHashMap; import com.google.common.collect.ImmutableList; -import gnu.trove.iterator.TObjectIntIterator; -import gnu.trove.map.hash.TObjectIntHashMap; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; @@ -179,7 +178,7 @@ public Facet reduce(ReduceContext context) { InternalStringTermsFacet first = null; - Recycler.V> aggregated = context.cacheRecycler().objectIntMap(-1); + Recycler.V> aggregated = context.cacheRecycler().objectIntMap(-1); long missing = 0; long total = 0; for (Facet facet : facets) { @@ -199,14 +198,21 @@ public Facet 
reduce(ReduceContext context) { } for (Entry entry : termsFacet.getEntries()) { - aggregated.v().adjustOrPutValue(entry.getTerm(), entry.getCount(), entry.getCount()); + aggregated.v().addTo(entry.getTerm(), entry.getCount()); } } BoundedTreeSet ordered = new BoundedTreeSet(first.comparatorType.comparator(), first.requiredSize); - for (TObjectIntIterator it = aggregated.v().iterator(); it.hasNext(); ) { - it.advance(); - ordered.add(new TermEntry(it.key(), it.value())); + ObjectIntOpenHashMap aggregatedEntries = aggregated.v(); + + final boolean[] states = aggregatedEntries.allocated; + Object[] keys = aggregatedEntries.keys; + int[] values = aggregatedEntries.values; + for (int i = 0; i < aggregatedEntries.allocated.length; i++) { + if (states[i]) { + Text key = (Text) keys[i]; + ordered.add(new TermEntry(key, values[i])); + } } first.entries = ordered; first.missing = missing; diff --git a/src/main/java/org/elasticsearch/search/facet/terms/strings/ScriptTermsStringFieldFacetExecutor.java b/src/main/java/org/elasticsearch/search/facet/terms/strings/ScriptTermsStringFieldFacetExecutor.java index a2201a9b7f109..526d6213a0db7 100644 --- a/src/main/java/org/elasticsearch/search/facet/terms/strings/ScriptTermsStringFieldFacetExecutor.java +++ b/src/main/java/org/elasticsearch/search/facet/terms/strings/ScriptTermsStringFieldFacetExecutor.java @@ -19,10 +19,9 @@ package org.elasticsearch.search.facet.terms.strings; +import com.carrotsearch.hppc.ObjectIntOpenHashMap; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; -import gnu.trove.iterator.TObjectIntIterator; -import gnu.trove.map.hash.TObjectIntHashMap; import org.apache.lucene.index.AtomicReaderContext; import org.apache.lucene.search.Scorer; import org.apache.lucene.util.BytesRef; @@ -54,7 +53,7 @@ public class ScriptTermsStringFieldFacetExecutor extends FacetExecutor { private final ImmutableSet excluded; private final int numberOfShards; - final Recycler.V> facets; + final Recycler.V> facets; long missing; long total; @@ -84,11 +83,16 @@ public InternalFacet buildFacet(String facetName) { facets.release(); return new InternalStringTermsFacet(facetName, comparatorType, size, ImmutableList.of(), missing, total); } else { + final boolean[] states = facets.v().allocated; + final Object[] keys = facets.v().keys; + final int[] values = facets.v().values; if (shardSize < EntryPriorityQueue.LIMIT) { EntryPriorityQueue ordered = new EntryPriorityQueue(shardSize, comparatorType.comparator()); - for (TObjectIntIterator it = facets.v().iterator(); it.hasNext(); ) { - it.advance(); - ordered.insertWithOverflow(new InternalStringTermsFacet.TermEntry(it.key(), it.value())); + for (int i = 0; i < states.length; i++) { + if (states[i]) { + BytesRef key = (BytesRef) keys[i]; + ordered.insertWithOverflow(new InternalStringTermsFacet.TermEntry(key, values[i])); + } } InternalStringTermsFacet.TermEntry[] list = new InternalStringTermsFacet.TermEntry[ordered.size()]; for (int i = ordered.size() - 1; i >= 0; i--) { @@ -98,9 +102,11 @@ public InternalFacet buildFacet(String facetName) { return new InternalStringTermsFacet(facetName, comparatorType, size, Arrays.asList(list), missing, total); } else { BoundedTreeSet ordered = new BoundedTreeSet(comparatorType.comparator(), shardSize); - for (TObjectIntIterator it = facets.v().iterator(); it.hasNext(); ) { - it.advance(); - ordered.add(new InternalStringTermsFacet.TermEntry(it.key(), it.value())); + for (int i = 0; i < states.length; i++) { + if (states[i]) { + 
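// HPPC exposes the keys of its generic maps as a plain Object[], so each live key is cast back to BytesRef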
BytesRef key = (BytesRef) keys[i]; + ordered.add(new InternalStringTermsFacet.TermEntry(key, values[i])); + } } facets.release(); return new InternalStringTermsFacet(facetName, comparatorType, size, ordered, missing, total); @@ -113,12 +119,12 @@ class Collector extends FacetExecutor.Collector { private final Matcher matcher; private final ImmutableSet excluded; private final SearchScript script; - private final TObjectIntHashMap facets; + private final ObjectIntOpenHashMap facets; long missing; long total; - Collector(Matcher matcher, ImmutableSet excluded, SearchScript script, TObjectIntHashMap facets) { + Collector(Matcher matcher, ImmutableSet excluded, SearchScript script, ObjectIntOpenHashMap facets) { this.matcher = matcher; this.excluded = excluded; this.script = script; @@ -150,7 +156,7 @@ public void collect(int doc) throws IOException { if (match(value)) { found = true; // LUCENE 4 UPGRADE: should be possible to convert directly to BR - facets.adjustOrPutValue(new BytesRef(value), 1, 1); + facets.addTo(new BytesRef(value), 1); total++; } } @@ -164,7 +170,7 @@ public void collect(int doc) throws IOException { if (match(value)) { found = true; // LUCENE 4 UPGRADE: should be possible to convert directly to BR - facets.adjustOrPutValue(new BytesRef(value), 1, 1); + facets.addTo(new BytesRef(value), 1); total++; } } @@ -175,7 +181,7 @@ public void collect(int doc) throws IOException { String value = o.toString(); if (match(value)) { // LUCENE 4 UPGRADE: should be possible to convert directly to BR - facets.adjustOrPutValue(new BytesRef(value), 1, 1); + facets.addTo(new BytesRef(value), 1); total++; } else { missing++; diff --git a/src/main/java/org/elasticsearch/search/facet/termsstats/doubles/InternalTermsStatsDoubleFacet.java b/src/main/java/org/elasticsearch/search/facet/termsstats/doubles/InternalTermsStatsDoubleFacet.java index 6a5ca0bba9565..b1e83a0545948 100644 --- a/src/main/java/org/elasticsearch/search/facet/termsstats/doubles/InternalTermsStatsDoubleFacet.java +++ b/src/main/java/org/elasticsearch/search/facet/termsstats/doubles/InternalTermsStatsDoubleFacet.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.facet.termsstats.doubles; +import com.carrotsearch.hppc.DoubleObjectOpenHashMap; import com.google.common.collect.ImmutableList; import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.common.Strings; @@ -29,7 +30,6 @@ import org.elasticsearch.common.recycler.Recycler; import org.elasticsearch.common.text.StringText; import org.elasticsearch.common.text.Text; -import org.elasticsearch.common.trove.ExtTDoubleObjectHashMap; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.search.facet.Facet; @@ -184,7 +184,7 @@ public Facet reduce(ReduceContext context) { return facets.get(0); } int missing = 0; - Recycler.V> map = context.cacheRecycler().doubleObjectMap(-1); + Recycler.V> map = context.cacheRecycler().doubleObjectMap(-1); for (Facet facet : facets) { InternalTermsStatsDoubleFacet tsFacet = (InternalTermsStatsDoubleFacet) facet; missing += tsFacet.missing; @@ -209,12 +209,12 @@ public Facet reduce(ReduceContext context) { // sort if (requiredSize == 0) { // all terms - DoubleEntry[] entries1 = map.v().values(new DoubleEntry[map.v().size()]); + DoubleEntry[] entries1 = map.v().values().toArray(DoubleEntry.class); Arrays.sort(entries1, comparatorType.comparator()); map.release(); return new InternalTermsStatsDoubleFacet(getName(), comparatorType, 
requiredSize, Arrays.asList(entries1), missing); } else { - Object[] values = map.v().internalValues(); + Object[] values = map.v().values; Arrays.sort(values, (Comparator) comparatorType.comparator()); List ordered = new ArrayList(map.v().size()); for (int i = 0; i < requiredSize; i++) { diff --git a/src/main/java/org/elasticsearch/search/facet/termsstats/doubles/TermsStatsDoubleFacetExecutor.java b/src/main/java/org/elasticsearch/search/facet/termsstats/doubles/TermsStatsDoubleFacetExecutor.java index e65cb134278e9..622a59a4c7778 100644 --- a/src/main/java/org/elasticsearch/search/facet/termsstats/doubles/TermsStatsDoubleFacetExecutor.java +++ b/src/main/java/org/elasticsearch/search/facet/termsstats/doubles/TermsStatsDoubleFacetExecutor.java @@ -19,12 +19,12 @@ package org.elasticsearch.search.facet.termsstats.doubles; +import com.carrotsearch.hppc.DoubleObjectOpenHashMap; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import org.apache.lucene.index.AtomicReaderContext; import org.apache.lucene.search.Scorer; import org.elasticsearch.common.recycler.Recycler; -import org.elasticsearch.common.trove.ExtTDoubleObjectHashMap; import org.elasticsearch.index.fielddata.DoubleValues; import org.elasticsearch.index.fielddata.IndexNumericFieldData; import org.elasticsearch.script.SearchScript; @@ -35,6 +35,7 @@ import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; import java.util.Comparator; import java.util.List; @@ -50,7 +51,7 @@ public class TermsStatsDoubleFacetExecutor extends FacetExecutor { private final int size; private final int shardSize; - final Recycler.V> entries; + final Recycler.V> entries; long missing; public TermsStatsDoubleFacetExecutor(IndexNumericFieldData keyIndexFieldData, IndexNumericFieldData valueIndexFieldData, SearchScript script, @@ -78,9 +79,18 @@ public InternalFacet buildFacet(String facetName) { } if (size == 0) { // all terms // all terms, just return the collection, we will sort it on the way back - return new InternalTermsStatsDoubleFacet(facetName, comparatorType, 0 /* indicates all terms*/, entries.v().valueCollection(), missing); + List doubleEntries = new ArrayList(entries.v().size()); + boolean[] states = entries.v().allocated; + Object[] values = entries.v().values; + for (int i = 0; i < states.length; i++) { + if (states[i]) { + doubleEntries.add((InternalTermsStatsDoubleFacet.DoubleEntry) values[i]); + } + } + entries.release(); + return new InternalTermsStatsDoubleFacet(facetName, comparatorType, 0 /* indicates all terms*/, doubleEntries, missing); } - Object[] values = entries.v().internalValues(); + Object[] values = entries.v().values; Arrays.sort(values, (Comparator) comparatorType.comparator()); int limit = shardSize; @@ -140,12 +150,12 @@ public void postCollection() { public static class Aggregator extends DoubleFacetAggregatorBase { - final ExtTDoubleObjectHashMap entries; + final DoubleObjectOpenHashMap entries; int missing; DoubleValues valueFieldData; final ValueAggregator valueAggregator = new ValueAggregator(); - public Aggregator(ExtTDoubleObjectHashMap entries) { + public Aggregator(DoubleObjectOpenHashMap entries) { this.entries = entries; } @@ -184,7 +194,7 @@ public static class ScriptAggregator extends Aggregator { private final SearchScript script; - public ScriptAggregator(ExtTDoubleObjectHashMap entries, SearchScript script) { + public ScriptAggregator(DoubleObjectOpenHashMap entries, SearchScript script) 
{ super(entries); this.script = script; } diff --git a/src/main/java/org/elasticsearch/search/facet/termsstats/longs/InternalTermsStatsLongFacet.java b/src/main/java/org/elasticsearch/search/facet/termsstats/longs/InternalTermsStatsLongFacet.java index 5e02f3adb3ef8..d31a40d3c0059 100644 --- a/src/main/java/org/elasticsearch/search/facet/termsstats/longs/InternalTermsStatsLongFacet.java +++ b/src/main/java/org/elasticsearch/search/facet/termsstats/longs/InternalTermsStatsLongFacet.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.facet.termsstats.longs; +import com.carrotsearch.hppc.LongObjectOpenHashMap; import com.google.common.collect.ImmutableList; import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.common.Strings; @@ -29,7 +30,6 @@ import org.elasticsearch.common.recycler.Recycler; import org.elasticsearch.common.text.StringText; import org.elasticsearch.common.text.Text; -import org.elasticsearch.common.trove.ExtTLongObjectHashMap; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.search.facet.Facet; @@ -184,7 +184,7 @@ public Facet reduce(ReduceContext context) { return facets.get(0); } int missing = 0; - Recycler.V> map = context.cacheRecycler().longObjectMap(-1); + Recycler.V> map = context.cacheRecycler().longObjectMap(-1); for (Facet facet : facets) { InternalTermsStatsLongFacet tsFacet = (InternalTermsStatsLongFacet) facet; missing += tsFacet.missing; @@ -209,12 +209,12 @@ public Facet reduce(ReduceContext context) { // sort if (requiredSize == 0) { // all terms - LongEntry[] entries1 = map.v().values(new LongEntry[map.v().size()]); + LongEntry[] entries1 = map.v().values().toArray(LongEntry.class); Arrays.sort(entries1, comparatorType.comparator()); map.release(); return new InternalTermsStatsLongFacet(getName(), comparatorType, requiredSize, Arrays.asList(entries1), missing); } else { - Object[] values = map.v().internalValues(); + Object[] values = map.v().values; Arrays.sort(values, (Comparator) comparatorType.comparator()); List ordered = new ArrayList(map.v().size()); for (int i = 0; i < requiredSize; i++) { diff --git a/src/main/java/org/elasticsearch/search/facet/termsstats/longs/TermsStatsLongFacetExecutor.java b/src/main/java/org/elasticsearch/search/facet/termsstats/longs/TermsStatsLongFacetExecutor.java index fe3451296f845..dfc7cb9bc5e40 100644 --- a/src/main/java/org/elasticsearch/search/facet/termsstats/longs/TermsStatsLongFacetExecutor.java +++ b/src/main/java/org/elasticsearch/search/facet/termsstats/longs/TermsStatsLongFacetExecutor.java @@ -19,12 +19,12 @@ package org.elasticsearch.search.facet.termsstats.longs; +import com.carrotsearch.hppc.LongObjectOpenHashMap; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import org.apache.lucene.index.AtomicReaderContext; import org.apache.lucene.search.Scorer; import org.elasticsearch.common.recycler.Recycler; -import org.elasticsearch.common.trove.ExtTLongObjectHashMap; import org.elasticsearch.index.fielddata.DoubleValues; import org.elasticsearch.index.fielddata.IndexNumericFieldData; import org.elasticsearch.index.fielddata.LongValues; @@ -37,6 +37,7 @@ import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; import java.util.Comparator; import java.util.List; @@ -51,7 +52,7 @@ public class TermsStatsLongFacetExecutor extends FacetExecutor { private final int size; private 
final int shardSize; - final Recycler.V> entries; + final Recycler.V> entries; long missing; public TermsStatsLongFacetExecutor(IndexNumericFieldData keyIndexFieldData, IndexNumericFieldData valueIndexFieldData, SearchScript script, @@ -79,11 +80,21 @@ public InternalFacet buildFacet(String facetName) { } if (size == 0) { // all terms // all terms, just return the collection, we will sort it on the way back - return new InternalTermsStatsLongFacet(facetName, comparatorType, 0 /* indicates all terms*/, entries.v().valueCollection(), missing); + List longEntries = new ArrayList(entries.v().size()); + boolean[] states = entries.v().allocated; + Object[] values = entries.v().values; + for (int i = 0; i < states.length; i++) { + if (states[i]) { + longEntries.add((InternalTermsStatsLongFacet.LongEntry) values[i]); + } + } + + entries.release(); + return new InternalTermsStatsLongFacet(facetName, comparatorType, 0 /* indicates all terms*/, longEntries, missing); } // we need to fetch facets of "size * numberOfShards" because of problems in how they are distributed across shards - Object[] values = entries.v().internalValues(); + Object[] values = entries.v().values; Arrays.sort(values, (Comparator) comparatorType.comparator()); int limit = shardSize; @@ -142,11 +153,11 @@ public void postCollection() { public static class Aggregator extends LongFacetAggregatorBase { - final ExtTLongObjectHashMap entries; + final LongObjectOpenHashMap entries; DoubleValues valueValues; final ValueAggregator valueAggregator = new ValueAggregator(); - public Aggregator(ExtTLongObjectHashMap entries) { + public Aggregator(LongObjectOpenHashMap entries) { this.entries = entries; } @@ -185,7 +196,7 @@ public static class ScriptAggregator extends Aggregator { private final SearchScript script; - public ScriptAggregator(ExtTLongObjectHashMap entries, SearchScript script) { + public ScriptAggregator(LongObjectOpenHashMap entries, SearchScript script) { super(entries); this.script = script; } diff --git a/src/main/java/org/elasticsearch/search/facet/termsstats/strings/InternalTermsStatsStringFacet.java b/src/main/java/org/elasticsearch/search/facet/termsstats/strings/InternalTermsStatsStringFacet.java index 667162a440f9e..ba2dcafc0db17 100644 --- a/src/main/java/org/elasticsearch/search/facet/termsstats/strings/InternalTermsStatsStringFacet.java +++ b/src/main/java/org/elasticsearch/search/facet/termsstats/strings/InternalTermsStatsStringFacet.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.facet.termsstats.strings; +import com.carrotsearch.hppc.ObjectObjectOpenHashMap; import com.google.common.collect.ImmutableList; import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.common.Strings; @@ -31,7 +32,6 @@ import org.elasticsearch.common.recycler.Recycler; import org.elasticsearch.common.text.BytesText; import org.elasticsearch.common.text.Text; -import org.elasticsearch.common.trove.ExtTHashMap; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.search.facet.Facet; @@ -189,7 +189,7 @@ public Facet reduce(ReduceContext context) { return tsFacet; } int missing = 0; - Recycler.V> map = context.cacheRecycler().hashMap(-1); + Recycler.V> map = context.cacheRecycler().hashMap(-1); for (Facet facet : facets) { InternalTermsStatsStringFacet tsFacet = (InternalTermsStatsStringFacet) facet; missing += tsFacet.missing; @@ -214,12 +214,12 @@ public Facet reduce(ReduceContext context) { // sort if (requiredSize == 0) 
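// HPPC's values() container produces a typed array via toArray(Class), replacing Trove's toArray(new T[size])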
{ // all terms - StringEntry[] entries1 = map.v().values().toArray(new StringEntry[map.v().size()]); + StringEntry[] entries1 = map.v().values().toArray(StringEntry.class); Arrays.sort(entries1, comparatorType.comparator()); map.release(); return new InternalTermsStatsStringFacet(getName(), comparatorType, requiredSize, Arrays.asList(entries1), missing); } else { - Object[] values = map.v().internalValues(); + Object[] values = map.v().values; Arrays.sort(values, (Comparator) comparatorType.comparator()); List ordered = new ArrayList(Math.min(map.v().size(), requiredSize)); for (int i = 0; i < requiredSize; i++) { diff --git a/src/main/java/org/elasticsearch/search/facet/termsstats/strings/TermsStatsStringFacetExecutor.java b/src/main/java/org/elasticsearch/search/facet/termsstats/strings/TermsStatsStringFacetExecutor.java index f389239ae7432..b73f7b43d0ce4 100644 --- a/src/main/java/org/elasticsearch/search/facet/termsstats/strings/TermsStatsStringFacetExecutor.java +++ b/src/main/java/org/elasticsearch/search/facet/termsstats/strings/TermsStatsStringFacetExecutor.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.facet.termsstats.strings; +import com.carrotsearch.hppc.ObjectObjectOpenHashMap; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import org.apache.lucene.index.AtomicReaderContext; @@ -26,7 +27,6 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.lucene.HashedBytesRef; import org.elasticsearch.common.recycler.Recycler; -import org.elasticsearch.common.trove.ExtTHashMap; import org.elasticsearch.index.fielddata.BytesValues; import org.elasticsearch.index.fielddata.DoubleValues; import org.elasticsearch.index.fielddata.IndexFieldData; @@ -40,6 +40,7 @@ import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; import java.util.Comparator; import java.util.List; @@ -53,7 +54,7 @@ public class TermsStatsStringFacetExecutor extends FacetExecutor { private final int size; private final int shardSize; - final Recycler.V> entries; + final Recycler.V> entries; long missing; public TermsStatsStringFacetExecutor(IndexFieldData keyIndexFieldData, IndexNumericFieldData valueIndexFieldData, SearchScript valueScript, @@ -81,9 +82,17 @@ public InternalFacet buildFacet(String facetName) { } if (size == 0) { // all terms // all terms, just return the collection, we will sort it on the way back - return new InternalTermsStatsStringFacet(facetName, comparatorType, 0/* indicates all terms*/, entries.v().values(), missing); + List stringEntries = new ArrayList(); + final boolean[] states = entries.v().allocated; + final Object[] values = entries.v().values; + for (int i = 0; i < states.length; i++) { + if (states[i]) { + stringEntries.add((InternalTermsStatsStringFacet.StringEntry) values[i]); + } + } + return new InternalTermsStatsStringFacet(facetName, comparatorType, 0 /* indicates all terms*/, stringEntries, missing); } - Object[] values = entries.v().internalValues(); + Object[] values = entries.v().values; Arrays.sort(values, (Comparator) comparatorType.comparator()); List ordered = Lists.newArrayList(); @@ -144,7 +153,7 @@ public void postCollection() { public static class Aggregator extends HashedAggregator { - final ExtTHashMap entries; + final ObjectObjectOpenHashMap entries; final HashedBytesRef spare = new HashedBytesRef(); int missing = 0; @@ -152,7 +161,7 @@ public static class Aggregator extends HashedAggregator { ValueAggregator 
valueAggregator = new ValueAggregator(); - public Aggregator(ExtTHashMap entries) { + public Aggregator(ObjectObjectOpenHashMap entries) { this.entries = entries; } @@ -191,7 +200,7 @@ public void onValue(int docId, double value) { public static class ScriptAggregator extends Aggregator { private final SearchScript script; - public ScriptAggregator(ExtTHashMap entries, SearchScript script) { + public ScriptAggregator(ObjectObjectOpenHashMap entries, SearchScript script) { super(entries); this.script = script; } diff --git a/src/main/java/org/elasticsearch/search/fetch/FetchSearchRequest.java b/src/main/java/org/elasticsearch/search/fetch/FetchSearchRequest.java index 244a675c75006..1bd823f830d23 100644 --- a/src/main/java/org/elasticsearch/search/fetch/FetchSearchRequest.java +++ b/src/main/java/org/elasticsearch/search/fetch/FetchSearchRequest.java @@ -19,9 +19,9 @@ package org.elasticsearch.search.fetch; +import com.carrotsearch.hppc.IntArrayList; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.trove.ExtTIntArrayList; import org.elasticsearch.transport.TransportRequest; import java.io.IOException; @@ -40,10 +40,10 @@ public class FetchSearchRequest extends TransportRequest { public FetchSearchRequest() { } - public FetchSearchRequest(TransportRequest request, long id, ExtTIntArrayList list) { + public FetchSearchRequest(TransportRequest request, long id, IntArrayList list) { super(request); this.id = id; - this.docIds = list.unsafeArray(); + this.docIds = list.buffer; this.size = list.size(); } diff --git a/src/main/java/org/elasticsearch/search/internal/InternalSearchHits.java b/src/main/java/org/elasticsearch/search/internal/InternalSearchHits.java index 7c618524fd764..01881326ca2b3 100644 --- a/src/main/java/org/elasticsearch/search/internal/InternalSearchHits.java +++ b/src/main/java/org/elasticsearch/search/internal/InternalSearchHits.java @@ -19,8 +19,8 @@ package org.elasticsearch.search.internal; +import com.carrotsearch.hppc.IntObjectOpenHashMap; import com.google.common.collect.Iterators; -import gnu.trove.map.hash.TIntObjectHashMap; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -51,7 +51,7 @@ public static enum ShardTargetType { } private IdentityHashMap shardHandleLookup = new IdentityHashMap(); - private TIntObjectHashMap handleShardLookup = new TIntObjectHashMap(); + private IntObjectOpenHashMap handleShardLookup = new IntObjectOpenHashMap(); private ShardTargetType streamShardTarget = ShardTargetType.STREAM; public StreamContext reset() { @@ -65,7 +65,7 @@ public IdentityHashMap shardHandleLookup() { return shardHandleLookup; } - public TIntObjectHashMap handleShardLookup() { + public IntObjectOpenHashMap handleShardLookup() { return handleShardLookup; } diff --git a/src/main/java/org/elasticsearch/search/suggest/completion/AnalyzingCompletionLookupProvider.java b/src/main/java/org/elasticsearch/search/suggest/completion/AnalyzingCompletionLookupProvider.java index 57c7d30ada146..7cb0a9d183879 100644 --- a/src/main/java/org/elasticsearch/search/suggest/completion/AnalyzingCompletionLookupProvider.java +++ b/src/main/java/org/elasticsearch/search/suggest/completion/AnalyzingCompletionLookupProvider.java @@ -19,7 +19,7 @@ package org.elasticsearch.search.suggest.completion; -import gnu.trove.map.hash.TObjectLongHashMap; +import 
com.carrotsearch.hppc.ObjectLongOpenHashMap; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.codecs.*; import org.apache.lucene.index.FieldInfo; @@ -259,9 +259,9 @@ public Lookup getLookup(FieldMapper mapper, CompletionSuggestionContext sugge @Override public CompletionStats stats(String... fields) { long sizeInBytes = 0; - TObjectLongHashMap completionFields = null; - if (fields != null && fields.length > 0) { - completionFields = new TObjectLongHashMap(fields.length); + ObjectLongOpenHashMap completionFields = null; + if (fields != null && fields.length > 0) { + completionFields = new ObjectLongOpenHashMap(fields.length); } for (Map.Entry entry : lookupMap.entrySet()) { @@ -273,7 +273,7 @@ public CompletionStats stats(String... fields) { // support for getting fields by regex as in fielddata if (Regex.simpleMatch(field, entry.getKey())) { long fstSize = entry.getValue().fst.sizeInBytes(); - completionFields.adjustOrPutValue(field, fstSize, fstSize); + completionFields.addTo(field, fstSize); } } } diff --git a/src/main/java/org/elasticsearch/search/suggest/completion/CompletionStats.java b/src/main/java/org/elasticsearch/search/suggest/completion/CompletionStats.java index 70ed78bf1823c..3714ee95fa1fd 100644 --- a/src/main/java/org/elasticsearch/search/suggest/completion/CompletionStats.java +++ b/src/main/java/org/elasticsearch/search/suggest/completion/CompletionStats.java @@ -18,8 +18,7 @@ */ package org.elasticsearch.search.suggest.completion; -import gnu.trove.iterator.TObjectLongIterator; -import gnu.trove.map.hash.TObjectLongHashMap; +import com.carrotsearch.hppc.ObjectLongOpenHashMap; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -39,12 +38,12 @@ public class CompletionStats implements Streamable, ToXContent { private long sizeInBytes; @Nullable - private TObjectLongHashMap fields; + private ObjectLongOpenHashMap fields; public CompletionStats() { } - public CompletionStats(long size, @Nullable TObjectLongHashMap fields) { + public CompletionStats(long size, @Nullable ObjectLongOpenHashMap fields) { this.sizeInBytes = size; this.fields = fields; } @@ -57,7 +56,7 @@ public ByteSizeValue getSize() { return new ByteSizeValue(sizeInBytes); } - public TObjectLongHashMap getFields() { + public ObjectLongOpenHashMap getFields() { return fields; } @@ -66,7 +65,7 @@ public void readFrom(StreamInput in) throws IOException { sizeInBytes = in.readVLong(); if (in.readBoolean()) { int size = in.readVInt(); - fields = new TObjectLongHashMap(size); + fields = new ObjectLongOpenHashMap(size); for (int i = 0; i < size; i++) { fields.put(in.readString(), in.readVLong()); } @@ -81,10 +80,14 @@ public void writeTo(StreamOutput out) throws IOException { } else { out.writeBoolean(true); out.writeVInt(fields.size()); - for (TObjectLongIterator it = fields.iterator(); it.hasNext(); ) { - it.advance(); - out.writeString(it.key()); - out.writeVLong(it.value()); + final boolean[] states = fields.allocated; + final Object[] keys = fields.keys; + final long[] values = fields.values; + for (int i = 0; i < states.length; i++) { + if (states[i]) { + out.writeString((String) keys[i]); + out.writeVLong(values[i]); + } } } } @@ -95,11 +98,15 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.byteSizeField(Fields.SIZE_IN_BYTES, Fields.SIZE, sizeInBytes); if (fields != null) { builder.startObject(Fields.FIELDS); - for (TObjectLongIterator it = 
fields.iterator(); it.hasNext(); ) { - it.advance(); - builder.startObject(it.key(), XContentBuilder.FieldCaseConversion.NONE); - builder.byteSizeField(Fields.SIZE_IN_BYTES, Fields.SIZE, it.value()); - builder.endObject(); + final boolean[] states = fields.allocated; + final Object[] keys = fields.keys; + final long[] values = fields.values; + for (int i = 0; i < states.length; i++) { + if (states[i]) { + builder.startObject((String) keys[i], XContentBuilder.FieldCaseConversion.NONE); + builder.byteSizeField(Fields.SIZE_IN_BYTES, Fields.SIZE, values[i]); + builder.endObject(); + } } builder.endObject(); } @@ -128,10 +135,15 @@ public void add(CompletionStats completion) { sizeInBytes += completion.getSizeInBytes(); if (completion.fields != null) { - if (fields == null) fields = new TObjectLongHashMap(); - for (TObjectLongIterator it = completion.fields.iterator(); it.hasNext(); ) { - it.advance(); - fields.adjustOrPutValue(it.key(), it.value(), it.value()); + if (fields == null) fields = new ObjectLongOpenHashMap(); + + final boolean[] states = completion.fields.allocated; + final Object[] keys = completion.fields.keys; + final long[] values = completion.fields.values; + for (int i = 0; i < states.length; i++) { + if (states[i]) { + fields.addTo((String) keys[i], values[i]); + } } } } diff --git a/src/test/java/org/elasticsearch/benchmark/trove/StringMapAdjustOrPutBenchmark.java b/src/test/java/org/elasticsearch/benchmark/hppc/StringMapAdjustOrPutBenchmark.java similarity index 83% rename from src/test/java/org/elasticsearch/benchmark/trove/StringMapAdjustOrPutBenchmark.java rename to src/test/java/org/elasticsearch/benchmark/hppc/StringMapAdjustOrPutBenchmark.java index 6ee725d3337c0..07d70881c48f8 100644 --- a/src/test/java/org/elasticsearch/benchmark/trove/StringMapAdjustOrPutBenchmark.java +++ b/src/test/java/org/elasticsearch/benchmark/hppc/StringMapAdjustOrPutBenchmark.java @@ -17,18 +17,15 @@ * under the License. 
*/ -package org.elasticsearch.benchmark.trove; +package org.elasticsearch.benchmark.hppc; +import com.carrotsearch.hppc.IntIntOpenHashMap; +import com.carrotsearch.hppc.IntObjectOpenHashMap; +import com.carrotsearch.hppc.ObjectIntOpenHashMap; +import com.carrotsearch.hppc.ObjectObjectOpenHashMap; import com.carrotsearch.randomizedtesting.generators.RandomStrings; -import gnu.trove.map.custom_hash.TObjectIntCustomHashMap; -import gnu.trove.map.hash.THashMap; -import gnu.trove.map.hash.TIntIntHashMap; -import gnu.trove.map.hash.TIntObjectHashMap; -import gnu.trove.map.hash.TObjectIntHashMap; -import gnu.trove.strategy.IdentityHashingStrategy; import jsr166y.ThreadLocalRandom; import org.elasticsearch.common.StopWatch; -import org.elasticsearch.common.trove.StringIdentityHashingStrategy; import org.elasticsearch.common.unit.SizeValue; import java.util.HashMap; @@ -53,15 +50,15 @@ public static void main(String[] args) { StopWatch stopWatch; stopWatch = new StopWatch().start(); - TObjectIntHashMap map = new TObjectIntHashMap(); + ObjectIntOpenHashMap map = new ObjectIntOpenHashMap(); for (long iter = 0; iter < ITERATIONS; iter++) { if (REUSE) { map.clear(); } else { - map = new TObjectIntHashMap(); + map = new ObjectIntOpenHashMap(); } for (long i = 0; i < PUT_OPERATIONS; i++) { - map.adjustOrPutValue(values[(int) (i % NUMBER_OF_KEYS)], 1, 1); + map.addTo(values[(int) (i % NUMBER_OF_KEYS)], 1); } } map.clear(); @@ -71,15 +68,16 @@ public static void main(String[] args) { System.out.println("TObjectIntHashMap: " + stopWatch.totalTime() + ", " + stopWatch.totalTime().millisFrac() / ITERATIONS + "ms"); stopWatch = new StopWatch().start(); - TObjectIntCustomHashMap iMap = new TObjectIntCustomHashMap(new StringIdentityHashingStrategy()); +// TObjectIntCustomHashMap iMap = new TObjectIntCustomHashMap(new StringIdentityHashingStrategy()); + ObjectIntOpenHashMap iMap = new ObjectIntOpenHashMap(); for (long iter = 0; iter < ITERATIONS; iter++) { if (REUSE) { iMap.clear(); } else { - iMap = new TObjectIntCustomHashMap(new StringIdentityHashingStrategy()); + iMap = new ObjectIntOpenHashMap(); } for (long i = 0; i < PUT_OPERATIONS; i++) { - iMap.adjustOrPutValue(values[(int) (i % NUMBER_OF_KEYS)], 1, 1); + iMap.addTo(values[(int) (i % NUMBER_OF_KEYS)], 1); } } stopWatch.stop(); @@ -88,15 +86,15 @@ public static void main(String[] args) { iMap = null; stopWatch = new StopWatch().start(); - iMap = new TObjectIntCustomHashMap(new IdentityHashingStrategy()); + iMap = new ObjectIntOpenHashMap(); for (long iter = 0; iter < ITERATIONS; iter++) { if (REUSE) { iMap.clear(); } else { - iMap = new TObjectIntCustomHashMap(new IdentityHashingStrategy()); + iMap = new ObjectIntOpenHashMap(); } for (long i = 0; i < PUT_OPERATIONS; i++) { - iMap.adjustOrPutValue(values[(int) (i % NUMBER_OF_KEYS)], 1, 1); + iMap.addTo(values[(int) (i % NUMBER_OF_KEYS)], 1); } } stopWatch.stop(); @@ -106,12 +104,12 @@ public static void main(String[] args) { // now test with THashMap stopWatch = new StopWatch().start(); - THashMap tMap = new THashMap(); + ObjectObjectOpenHashMap tMap = new ObjectObjectOpenHashMap(); for (long iter = 0; iter < ITERATIONS; iter++) { if (REUSE) { tMap.clear(); } else { - tMap = new THashMap(); + tMap = new ObjectObjectOpenHashMap(); } for (long i = 0; i < PUT_OPERATIONS; i++) { String key = values[(int) (i % NUMBER_OF_KEYS)]; @@ -189,16 +187,16 @@ public static void main(String[] args) { } stopWatch = new StopWatch().start(); - TIntIntHashMap intMap = new TIntIntHashMap(); + IntIntOpenHashMap intMap = new 
IntIntOpenHashMap(); for (long iter = 0; iter < ITERATIONS; iter++) { if (REUSE) { intMap.clear(); } else { - intMap = new TIntIntHashMap(); + intMap = new IntIntOpenHashMap(); } for (long i = 0; i < PUT_OPERATIONS; i++) { int key = iValues[(int) (i % NUMBER_OF_KEYS)]; - intMap.adjustOrPutValue(key, 1, 1); + intMap.addTo(key, 1); } } stopWatch.stop(); @@ -209,12 +207,12 @@ public static void main(String[] args) { // now test with THashMap stopWatch = new StopWatch().start(); - TIntObjectHashMap tIntMap = new TIntObjectHashMap(); + IntObjectOpenHashMap tIntMap = new IntObjectOpenHashMap(); for (long iter = 0; iter < ITERATIONS; iter++) { if (REUSE) { tIntMap.clear(); } else { - tIntMap = new TIntObjectHashMap(); + tIntMap = new IntObjectOpenHashMap(); } for (long i = 0; i < PUT_OPERATIONS; i++) { int key = iValues[(int) (i % NUMBER_OF_KEYS)]; diff --git a/src/test/java/org/elasticsearch/benchmark/search/child/ChildSearchBenchmark.java b/src/test/java/org/elasticsearch/benchmark/search/child/ChildSearchBenchmark.java index 8b8436e595507..37129a76b4ad0 100644 --- a/src/test/java/org/elasticsearch/benchmark/search/child/ChildSearchBenchmark.java +++ b/src/test/java/org/elasticsearch/benchmark/search/child/ChildSearchBenchmark.java @@ -42,6 +42,7 @@ import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.FilterBuilders.hasChildFilter; +import static org.elasticsearch.index.query.FilterBuilders.hasParentFilter; import static org.elasticsearch.index.query.FilterBuilders.rangeFilter; import static org.elasticsearch.index.query.QueryBuilders.*; import static org.elasticsearch.node.NodeBuilder.nodeBuilder; @@ -230,7 +231,7 @@ public static void main(String[] args) throws Exception { } System.out.println("--> has_child filter with exponential parent results Query Avg: " + (totalQueryTime / QUERY_COUNT) + "ms"); - /*// run parent child constant query + // run parent child constant query for (int j = 0; j < QUERY_WARMUP; j++) { SearchResponse searchResponse = client.prepareSearch(indexName) .setQuery( @@ -326,7 +327,7 @@ public static void main(String[] args) throws Exception { // } totalQueryTime += searchResponse.getTookInMillis(); } - System.out.println("--> top_children, with match_all Query Avg: " + (totalQueryTime / QUERY_COUNT) + "ms");*/ + System.out.println("--> top_children, with match_all Query Avg: " + (totalQueryTime / QUERY_COUNT) + "ms"); statsResponse = client.admin().cluster().prepareNodesStats() .setJvm(true).setIndices(true).execute().actionGet(); diff --git a/src/test/java/org/elasticsearch/cluster/allocation/AwarenessAllocationTests.java b/src/test/java/org/elasticsearch/cluster/allocation/AwarenessAllocationTests.java index 555f9fee14349..424c029831fef 100644 --- a/src/test/java/org/elasticsearch/cluster/allocation/AwarenessAllocationTests.java +++ b/src/test/java/org/elasticsearch/cluster/allocation/AwarenessAllocationTests.java @@ -19,7 +19,7 @@ package org.elasticsearch.cluster.allocation; -import gnu.trove.map.hash.TObjectIntHashMap; +import com.carrotsearch.hppc.ObjectIntOpenHashMap; import org.apache.lucene.util.LuceneTestCase.Slow; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.cluster.ClusterState; @@ -69,7 +69,7 @@ public void testSimpleAwareness() throws Exception { String node3 = 
cluster().startNode(ImmutableSettings.settingsBuilder().put(commonSettings).put("node.rack_id", "rack_2").build()); long start = System.currentTimeMillis(); - TObjectIntHashMap counts; + ObjectIntOpenHashMap counts; // On slow machines the initial relocation might be delayed do { Thread.sleep(100); @@ -81,11 +81,11 @@ public void testSimpleAwareness() throws Exception { ClusterState clusterState = client().admin().cluster().prepareState().execute().actionGet().getState(); //System.out.println(clusterState.routingTable().prettyPrint()); // verify that we have 10 shards on node3 - counts = new TObjectIntHashMap(); + counts = new ObjectIntOpenHashMap(); for (IndexRoutingTable indexRoutingTable : clusterState.routingTable()) { for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) { for (ShardRouting shardRouting : indexShardRoutingTable) { - counts.adjustOrPutValue(clusterState.nodes().get(shardRouting.currentNodeId()).name(), 1, 1); + counts.addTo(clusterState.nodes().get(shardRouting.currentNodeId()).name(), 1); } } } @@ -112,12 +112,12 @@ public void testAwarenessZones() throws InterruptedException { ClusterHealthResponse health = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().setWaitForNodes("4").setWaitForRelocatingShards(0).execute().actionGet(); assertThat(health.isTimedOut(), equalTo(false)); ClusterState clusterState = client().admin().cluster().prepareState().execute().actionGet().getState(); - TObjectIntHashMap counts = new TObjectIntHashMap(); + ObjectIntOpenHashMap counts = new ObjectIntOpenHashMap(); for (IndexRoutingTable indexRoutingTable : clusterState.routingTable()) { for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) { for (ShardRouting shardRouting : indexShardRoutingTable) { - counts.adjustOrPutValue(clusterState.nodes().get(shardRouting.currentNodeId()).name(), 1, 1); + counts.addTo(clusterState.nodes().get(shardRouting.currentNodeId()).name(), 1); } } } @@ -145,12 +145,12 @@ public void testAwarenessZonesIncrementalNodes() throws InterruptedException { ClusterHealthResponse health = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().setWaitForNodes("2").setWaitForRelocatingShards(0).execute().actionGet(); assertThat(health.isTimedOut(), equalTo(false)); ClusterState clusterState = client().admin().cluster().prepareState().execute().actionGet().getState(); - TObjectIntHashMap counts = new TObjectIntHashMap(); + ObjectIntOpenHashMap counts = new ObjectIntOpenHashMap(); for (IndexRoutingTable indexRoutingTable : clusterState.routingTable()) { for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) { for (ShardRouting shardRouting : indexShardRoutingTable) { - counts.adjustOrPutValue(clusterState.nodes().get(shardRouting.currentNodeId()).name(), 1, 1); + counts.addTo(clusterState.nodes().get(shardRouting.currentNodeId()).name(), 1); } } } @@ -167,12 +167,12 @@ public void testAwarenessZonesIncrementalNodes() throws InterruptedException { assertThat(health.isTimedOut(), equalTo(false)); clusterState = client().admin().cluster().prepareState().execute().actionGet().getState(); - counts = new TObjectIntHashMap(); + counts = new ObjectIntOpenHashMap(); for (IndexRoutingTable indexRoutingTable : clusterState.routingTable()) { for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) { for (ShardRouting shardRouting : indexShardRoutingTable) { - 
counts.adjustOrPutValue(clusterState.nodes().get(shardRouting.currentNodeId()).name(), 1, 1); + counts.addTo(clusterState.nodes().get(shardRouting.currentNodeId()).name(), 1); } } } diff --git a/src/test/java/org/elasticsearch/index/fielddata/LongFieldDataTests.java b/src/test/java/org/elasticsearch/index/fielddata/LongFieldDataTests.java index c16486a74d6f9..49b4e839520ee 100644 --- a/src/test/java/org/elasticsearch/index/fielddata/LongFieldDataTests.java +++ b/src/test/java/org/elasticsearch/index/fielddata/LongFieldDataTests.java @@ -19,11 +19,8 @@ package org.elasticsearch.index.fielddata; -import gnu.trove.iterator.TLongIterator; -import gnu.trove.set.TDoubleSet; -import gnu.trove.set.TLongSet; -import gnu.trove.set.hash.TDoubleHashSet; -import gnu.trove.set.hash.TLongHashSet; +import com.carrotsearch.hppc.DoubleOpenHashSet; +import com.carrotsearch.hppc.LongOpenHashSet; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.LongField; @@ -299,17 +296,22 @@ public long nextValue(Random r) { public abstract long nextValue(Random r); } - private void test(List values) throws Exception { + private void test(List values) throws Exception { StringField id = new StringField("_id", "", Field.Store.NO); for (int i = 0; i < values.size(); ++i) { Document doc = new Document(); id.setStringValue("" + i); doc.add(id); - final TLongSet v = values.get(i); - for (TLongIterator it = v.iterator(); it.hasNext(); ) { - LongField value = new LongField("value", it.next(), Field.Store.NO); - doc.add(value); + final LongOpenHashSet v = values.get(i); + final boolean[] states = v.allocated; + final long[] keys = v.keys; + + for (int j = 0; j < states.length; j++) { + if (states[j]) { + LongField value = new LongField("value", keys[j], Field.Store.NO); + doc.add(value); + } } writer.addDocument(doc); } @@ -319,10 +321,10 @@ private void test(List values) throws Exception { final AtomicNumericFieldData atomicFieldData = indexFieldData.load(refreshReader()); final LongValues data = atomicFieldData.getLongValues(); final DoubleValues doubleData = atomicFieldData.getDoubleValues(); - final TLongSet set = new TLongHashSet(); - final TDoubleSet doubleSet = new TDoubleHashSet(); + final LongOpenHashSet set = new LongOpenHashSet(); + final DoubleOpenHashSet doubleSet = new DoubleOpenHashSet(); for (int i = 0; i < values.size(); ++i) { - final TLongSet v = values.get(i); + final LongOpenHashSet v = values.get(i); assertThat(data.hasValue(i), equalTo(!v.isEmpty())); assertThat(doubleData.hasValue(i), equalTo(!v.isEmpty())); @@ -338,9 +340,13 @@ private void test(List values) throws Exception { } assertThat(set, equalTo(v)); - final TDoubleSet doubleV = new TDoubleHashSet(); - for (TLongIterator it = v.iterator(); it.hasNext(); ) { - doubleV.add((double) it.next()); + final DoubleOpenHashSet doubleV = new DoubleOpenHashSet(); + final boolean[] states = v.allocated; + final long[] keys = v.keys; + for (int j = 0; j < states.length; j++) { + if (states[j]) { + doubleV.add((double) keys[j]); + } } doubleSet.clear(); for (DoubleValues.Iter iter = doubleData.getIter(i); iter.hasNext(); ) { @@ -353,10 +359,10 @@ private void test(List values) throws Exception { private void test(Data data) throws Exception { Random r = getRandom(); final int numDocs = 1000 + r.nextInt(19000); - final List values = new ArrayList(numDocs); + final List values = new ArrayList(numDocs); for (int i = 0; i < numDocs; ++i) { final int numValues = data.numValues(r); - final TLongSet vals 
= new TLongHashSet(numValues); + final LongOpenHashSet vals = new LongOpenHashSet(numValues); for (int j = 0; j < numValues; ++j) { vals.add(data.nextValue(r)); } diff --git a/src/test/java/org/elasticsearch/recovery/RelocationTests.java b/src/test/java/org/elasticsearch/recovery/RelocationTests.java index ecfe4f3d864c3..7d128f1a213cc 100644 --- a/src/test/java/org/elasticsearch/recovery/RelocationTests.java +++ b/src/test/java/org/elasticsearch/recovery/RelocationTests.java @@ -19,9 +19,8 @@ package org.elasticsearch.recovery; -import gnu.trove.procedure.TIntProcedure; -import gnu.trove.set.TIntSet; -import gnu.trove.set.hash.TIntHashSet; +import com.carrotsearch.hppc.IntOpenHashSet; +import com.carrotsearch.hppc.procedures.IntProcedure; import org.apache.lucene.util.LuceneTestCase.Slow; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.bulk.BulkItemResponse; @@ -231,19 +230,20 @@ public void run() { for (int hit = 0; hit < indexCounter.get(); hit++) { hitIds[hit] = hit + 1; } - TIntSet set = new TIntHashSet(hitIds); + IntOpenHashSet set = IntOpenHashSet.from(hitIds); for (SearchHit hit : hits.hits()) { int id = Integer.parseInt(hit.id()); if (!set.remove(id)) { logger.error("Extra id [{}]", id); } } - set.forEach(new TIntProcedure() { + set.forEach(new IntProcedure() { + @Override - public boolean execute(int value) { + public void apply(int value) { logger.error("Missing id [{}]", value); - return true; } + }); } assertThat(hits.totalHits(), equalTo(indexCounter.get())); @@ -390,18 +390,18 @@ public void run() { for (int hit = 0; hit < indexCounter.get(); hit++) { hitIds[hit] = hit + 1; } - TIntSet set = new TIntHashSet(hitIds); + IntOpenHashSet set = IntOpenHashSet.from(hitIds); for (SearchHit hit : hits.hits()) { int id = Integer.parseInt(hit.id()); if (!set.remove(id)) { logger.error("Extra id [{}]", id); } } - set.forEach(new TIntProcedure() { + set.forEach(new IntProcedure() { + @Override - public boolean execute(int value) { + public void apply(int value) { logger.error("Missing id [{}]", value); - return true; } }); }
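
For reference, below is a minimal, illustrative sketch (not part of the patch) of the three HPPC idioms the test and benchmark changes above rely on: Trove's adjustOrPutValue(key, 1, 1) becoming addTo(key, 1), iterating a primitive open hash set via its public allocated/keys arrays instead of a Trove iterator, and IntProcedure.apply(int) replacing TIntProcedure.execute(int). Class names are the HPPC 0.5.x ones used in the diff; the standalone class, variable names, and sample data are hypothetical.

    // Illustrative sketch only; mirrors the idioms adopted in this patch.
    import com.carrotsearch.hppc.IntOpenHashSet;
    import com.carrotsearch.hppc.LongOpenHashSet;
    import com.carrotsearch.hppc.ObjectIntOpenHashMap;
    import com.carrotsearch.hppc.procedures.IntProcedure;

    public class HppcIdioms {
        public static void main(String[] args) {
            // Counting: Trove's map.adjustOrPutValue(key, 1, 1) becomes map.addTo(key, 1).
            // addTo() inserts the key with the given value if absent, otherwise adds to it.
            ObjectIntOpenHashMap<String> counts = new ObjectIntOpenHashMap<String>();
            for (String node : new String[]{"node_1", "node_2", "node_1"}) {
                counts.addTo(node, 1);
            }
            System.out.println("node_1 -> " + counts.get("node_1")); // 2

            // Iteration: HPPC sets expose their backing arrays, so instead of a
            // TLongIterator the code walks the slots and skips unassigned ones
            // (the pattern used in LongFieldDataTests above).
            LongOpenHashSet values = LongOpenHashSet.from(1L, 2L, 3L);
            final boolean[] states = values.allocated;
            final long[] keys = values.keys;
            for (int i = 0; i < states.length; i++) {
                if (states[i]) {
                    System.out.println("value: " + keys[i]);
                }
            }

            // Callbacks: TIntProcedure.execute(int) returning boolean becomes
            // IntProcedure.apply(int) returning void (the pattern used in RelocationTests above).
            IntOpenHashSet ids = IntOpenHashSet.from(1, 2, 3);
            ids.forEach(new IntProcedure() {
                @Override
                public void apply(int value) {
                    System.out.println("id: " + value);
                }
            });
        }
    }

Design note, hedged: HPPC deliberately exposes the hash-table internals (allocated, keys) rather than allocation-heavy iterators, which is why the migrated tests index the backing arrays directly; the boolean-returning Trove procedure allowed early termination, whereas HPPC's void apply() always visits every element, so the "return true" lines in the old callbacks simply disappear.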