Skip to content

Commit

Permalink
Remove docIdsToLoad method from SearchContext
Browse files Browse the repository at this point in the history
  • Loading branch information
romseygeek committed Oct 17, 2023
1 parent f52d5fa commit e9b1d1b
Show file tree
Hide file tree
Showing 10 changed files with 11 additions and 74 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -123,7 +123,6 @@ final class DefaultSearchContext extends SearchContext {
private Query query;
private ParsedQuery postFilter;
private Query aliasFilter;
private int[] docIdsToLoad;
private SearchContextAggregations aggregations;
private SearchHighlightContext highlight;
private SuggestionSearchContext suggest;
Expand Down Expand Up @@ -724,17 +723,6 @@ public void seqNoAndPrimaryTerm(boolean seqNoAndPrimaryTerm) {
this.seqAndPrimaryTerm = seqNoAndPrimaryTerm;
}

@Override
// Returns the doc ids selected by the query phase for the fetch phase to load.
// This accessor pair is deleted by this commit: the ids are now passed directly
// to FetchPhase.execute(context, docIdsToLoad) instead of being stashed on the
// SearchContext (see the SearchService hunks below).
public int[] docIdsToLoad() {
return docIdsToLoad;
}

@Override
// Fluent setter storing the ids on this context; returns this for chaining.
public SearchContext docIdsToLoad(int[] docIdsToLoad) {
this.docIdsToLoad = docIdsToLoad;
return this;
}

@Override
public DfsSearchResult dfsResult() {
return dfsResult;
Expand Down
10 changes: 4 additions & 6 deletions server/src/main/java/org/elasticsearch/search/SearchService.java
Original file line number Diff line number Diff line change
Expand Up @@ -707,8 +707,7 @@ private QueryFetchSearchResult executeFetchPhase(ReaderContext reader, SearchCon
Releasable scope = tracer.withScope(SpanId.forTask(context.getTask()));
SearchOperationListenerExecutor executor = new SearchOperationListenerExecutor(context, true, afterQueryTime)
) {
shortcutDocIdsToLoad(context);
fetchPhase.execute(context);
fetchPhase.execute(context, shortcutDocIdsToLoad(context));
if (reader.singleSession()) {
freeReaderContext(reader.id());
}
Expand Down Expand Up @@ -856,11 +855,10 @@ public void executeFetchPhase(ShardFetchRequest request, SearchShardTask task, A
}
searchContext.assignRescoreDocIds(readerContext.getRescoreDocIds(request.getRescoreDocIds()));
searchContext.searcher().setAggregatedDfs(readerContext.getAggregatedDfs(request.getAggregatedDfs()));
searchContext.docIdsToLoad(request.docIds());
try (
SearchOperationListenerExecutor executor = new SearchOperationListenerExecutor(searchContext, true, System.nanoTime())
) {
fetchPhase.execute(searchContext);
fetchPhase.execute(searchContext, request.docIds());
if (readerContext.singleSession()) {
freeReaderContext(request.contextId());
}
Expand Down Expand Up @@ -1471,7 +1469,7 @@ private void parseSource(DefaultSearchContext context, SearchSourceBuilder sourc
* Shortcut ids to load, we load only "from" and up to "size". The phase controller
* handles this as well since the result is always size * shards for Q_T_F
*/
private static void shortcutDocIdsToLoad(SearchContext context) {
private static int[] shortcutDocIdsToLoad(SearchContext context) {
final int[] docIdsToLoad;
int docsOffset = 0;
final Suggest suggest = context.queryResult().suggest();
Expand Down Expand Up @@ -1509,7 +1507,7 @@ private static void shortcutDocIdsToLoad(SearchContext context) {
docIdsToLoad[docsOffset++] = option.getDoc().doc;
}
}
context.docIdsToLoad(docIdsToLoad);
return docIdsToLoad;
}

private static void processScroll(InternalScrollSearchRequest request, ReaderContext reader, SearchContext context) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -191,8 +191,7 @@ public InternalAggregation buildAggregation(long owningBucketOrdinal) throws IOE
for (int i = 0; i < topDocs.scoreDocs.length; i++) {
docIdsToLoad[i] = topDocs.scoreDocs[i].doc;
}
subSearchContext.docIdsToLoad(docIdsToLoad);
subSearchContext.fetchPhase().execute(subSearchContext);
subSearchContext.fetchPhase().execute(subSearchContext, docIdsToLoad);
FetchSearchResult fetchResult = subSearchContext.fetchResult();
if (fetchProfiles != null) {
fetchProfiles.add(fetchResult.profileResult());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,7 @@ public FetchPhase(List<FetchSubPhase> fetchSubPhases) {
this.fetchSubPhases[fetchSubPhases.size()] = new InnerHitsPhase(this);
}

public void execute(SearchContext context) {
public void execute(SearchContext context, int[] docIdsToLoad) {
if (LOGGER.isTraceEnabled()) {
LOGGER.trace("{}", new SearchContextSourcePrinter(context));
}
Expand All @@ -66,7 +66,7 @@ public void execute(SearchContext context) {
throw new TaskCancelledException("cancelled");
}

if (context.docIdsToLoad() == null || context.docIdsToLoad().length == 0) {
if (docIdsToLoad == null || docIdsToLoad.length == 0) {
// no individual hits to process, so we shortcut
SearchHits hits = new SearchHits(new SearchHit[0], context.queryResult().getTotalHits(), context.queryResult().getMaxScore());
context.fetchResult().shardResult(hits, null);
Expand All @@ -76,7 +76,7 @@ public void execute(SearchContext context) {
Profiler profiler = context.getProfilers() == null ? Profiler.NOOP : Profilers.startProfilingFetchPhase();
SearchHits hits = null;
try {
hits = buildSearchHits(context, profiler);
hits = buildSearchHits(context, docIdsToLoad, profiler);
} finally {
// Always finish profiling
ProfileResult profileResult = profiler.finish();
Expand All @@ -97,7 +97,7 @@ public Source getSource(LeafReaderContext ctx, int doc) throws IOException {
}
}

private SearchHits buildSearchHits(SearchContext context, Profiler profiler) {
private SearchHits buildSearchHits(SearchContext context, int[] docIdsToLoad, Profiler profiler) {

FetchContext fetchContext = new FetchContext(context);
SourceLoader sourceLoader = context.newSourceLoader();
Expand Down Expand Up @@ -167,7 +167,7 @@ protected SearchHit nextDoc(int doc) throws IOException {
}
};

SearchHit[] hits = docsIterator.iterate(context.shardTarget(), context.searcher().getIndexReader(), context.docIdsToLoad());
SearchHit[] hits = docsIterator.iterate(context.shardTarget(), context.searcher().getIndexReader(), docIdsToLoad);

if (context.isCancelled()) {
throw new TaskCancelledException("cancelled");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -89,11 +89,10 @@ private void hitExecute(Map<String, InnerHitsContext.InnerHitSubContext> innerHi
for (int j = 0; j < topDoc.topDocs.scoreDocs.length; j++) {
docIdsToLoad[j] = topDoc.topDocs.scoreDocs[j].doc;
}
innerHitsContext.docIdsToLoad(docIdsToLoad);
innerHitsContext.setRootId(hit.getId());
innerHitsContext.setRootLookup(rootSource);

fetchPhase.execute(innerHitsContext);
fetchPhase.execute(innerHitsContext, docIdsToLoad);
FetchSearchResult fetchResult = innerHitsContext.fetchResult();
SearchHit[] internalHits = fetchResult.fetchResult().hits().getHits();
for (int j = 0; j < internalHits.length; j++) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -364,16 +364,6 @@ public void seqNoAndPrimaryTerm(boolean seqNoAndPrimaryTerm) {
in.seqNoAndPrimaryTerm(seqNoAndPrimaryTerm);
}

@Override
// Pure delegation to the wrapped context `in` — presumably this is the
// FilteredSearchContext decorator (enclosing class header not visible here).
// Removed by this commit along with the SearchContext accessors it forwards to.
public int[] docIdsToLoad() {
return in.docIdsToLoad();
}

@Override
// Forwards the setter to the wrapped context and returns its result unchanged.
public SearchContext docIdsToLoad(int[] docIdsToLoad) {
return in.docIdsToLoad(docIdsToLoad);
}

@Override
public DfsSearchResult dfsResult() {
return in.dfsResult();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -315,10 +315,6 @@ public Query rewrittenQuery() {
/** controls whether the sequence number and primary term of the last modification to each hit should be returned */
public abstract void seqNoAndPrimaryTerm(boolean seqNoAndPrimaryTerm);

// Abstract accessor pair on the SearchContext base class, deleted by this
// commit: callers now pass the doc-id array straight into
// FetchPhase.execute(context, docIdsToLoad), so the context no longer needs
// to carry this state.
public abstract int[] docIdsToLoad();

public abstract SearchContext docIdsToLoad(int[] docIdsToLoad);

public abstract DfsSearchResult dfsResult();

/**
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -41,8 +41,6 @@ public class SubSearchContext extends FilteredSearchContext {
private final FetchSearchResult fetchSearchResult;
private final QuerySearchResult querySearchResult;

private int[] docIdsToLoad;

private StoredFieldsContext storedFields;
private ScriptFieldsContext scriptFields;
private FetchSourceContext fetchSourceContext;
Expand Down Expand Up @@ -279,17 +277,6 @@ public void seqNoAndPrimaryTerm(boolean seqNoAndPrimaryTerm) {
this.seqNoAndPrimaryTerm = seqNoAndPrimaryTerm;
}

@Override
// SubSearchContext copy of the accessor pair, backed by the private
// `docIdsToLoad` field removed at the top of this hunk. Deleted by this
// commit in favour of passing the ids directly to FetchPhase.execute.
public int[] docIdsToLoad() {
return docIdsToLoad;
}

@Override
// Fluent setter; returns this SubSearchContext for chaining.
public SearchContext docIdsToLoad(int[] docIdsToLoad) {
this.docIdsToLoad = docIdsToLoad;
return this;
}

@Override
public CollapseContext collapse() {
return null;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -485,16 +485,6 @@ public void seqNoAndPrimaryTerm(boolean seqNoAndPrimaryTerm) {
throw new UnsupportedOperationException();
}

@Override
// This context variant never participates in the fetch phase, so both
// accessors reject use outright. (Enclosing class not visible in this hunk —
// likely a rank/eval or exchange context; removed by this commit.)
public int[] docIdsToLoad() {
throw new UnsupportedOperationException();
}

@Override
public SearchContext docIdsToLoad(int[] docIdsToLoad) {
throw new UnsupportedOperationException();
}

@Override
public DfsSearchResult dfsResult() {
throw new UnsupportedOperationException();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -457,16 +457,6 @@ public void seqNoAndPrimaryTerm(boolean seqNoAndPrimaryTerm) {

}

@Override
// Inert stub pair — getter reports no ids, setter discards its argument and
// returns null. Presumably a test fixture context (enclosing class header not
// visible); removed together with the abstract declarations it overrides.
public int[] docIdsToLoad() {
return new int[0];
}

@Override
public SearchContext docIdsToLoad(int[] docIdsToLoad) {
return null;
}

@Override
public DfsSearchResult dfsResult() {
return null;
Expand Down

0 comments on commit e9b1d1b

Please sign in to comment.