Skip to content

Commit

Permalink
Never wrap searcher for internal engine operations
Browse files Browse the repository at this point in the history
there is no need to wrap the searcher for internal operations like version
checks for indexing. In fact, this can even lead to subtle bugs if the wrapper
filters documents that are updated, such that we end up with either duplicated docs
when create is used, or we update the wrong version under certain conditions.
  • Loading branch information
s1monw committed Oct 12, 2015
1 parent 7a01277 commit fe7637d
Show file tree
Hide file tree
Showing 3 changed files with 55 additions and 4 deletions.
11 changes: 8 additions & 3 deletions core/src/main/java/org/elasticsearch/index/engine/Engine.java
Original file line number Diff line number Diff line change
Expand Up @@ -284,6 +284,10 @@ final protected GetResult getFromSearcher(Get get) throws EngineException {
* @see Searcher#close()
*/
public final Searcher acquireSearcher(String source) throws EngineException {
    // External callers always get a searcher that went through the configured
    // wrapping service; internal engine operations (version lookups, segment
    // stats) use the two-argument overload with maybeWrap == false instead.
    final boolean wrapForExternalUse = true;
    return acquireSearcher(source, wrapForExternalUse);
}

protected final Searcher acquireSearcher(String source, boolean maybeWrap) throws EngineException {
boolean success = false;
/* Acquire order here is store -> manager since we need
* to make sure that the store is not closed before
Expand All @@ -296,8 +300,9 @@ public final Searcher acquireSearcher(String source) throws EngineException {
final IndexSearcher searcher = manager.acquire();
try {
final Searcher retVal = newSearcher(source, searcher, manager);
final Searcher wrappedSearcher = maybeWrap ? config().getWrappingService().wrap(engineConfig, retVal) : retVal;
success = true;
return config().getWrappingService().wrap(engineConfig, retVal);
return wrappedSearcher;
} finally {
if (!success) {
manager.release(searcher);
Expand Down Expand Up @@ -356,7 +361,7 @@ protected static SegmentInfos readLastCommittedSegmentInfos(final SearcherManage
*/
public final SegmentsStats segmentsStats() {
ensureOpen();
try (final Searcher searcher = acquireSearcher("segments_stats")) {
try (final Searcher searcher = acquireSearcher("segments_stats", false)) {
SegmentsStats stats = new SegmentsStats();
for (LeafReaderContext reader : searcher.reader().leaves()) {
final SegmentReader segmentReader = segmentReader(reader.reader());
Expand Down Expand Up @@ -387,7 +392,7 @@ protected Segment[] getSegmentInfo(SegmentInfos lastCommittedSegmentInfos, boole
Map<String, Segment> segments = new HashMap<>();

// first, go over and compute the search ones...
Searcher searcher = acquireSearcher("segments");
Searcher searcher = acquireSearcher("segments", false);
try {
for (LeafReaderContext reader : searcher.reader().leaves()) {
SegmentCommitInfo info = segmentReader(reader.reader()).getSegmentInfo();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1023,7 +1023,7 @@ private Object dirtyLock(Term uid) {
}

private long loadCurrentVersionFromIndex(Term uid) throws IOException {
try (final Searcher searcher = acquireSearcher("load_version")) {
try (final Searcher searcher = acquireSearcher("load_version", false)) {
return Versions.loadVersion(searcher.reader(), uid);
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -529,6 +529,52 @@ public IndexSearcher wrap(EngineConfig engineConfig, IndexSearcher searcher) thr
IOUtils.close(store, engine);
}

@Test
public void testInternalSearcherNotWrapped() throws Exception {
    // A wrapper that rejects every wrap call: acquiring an external searcher
    // must fail, while internal operations (version lookup during indexing,
    // segment listing/stats) must bypass the wrapper entirely and succeed.
    IndexSearcherWrapper wrapper = new IndexSearcherWrapper() {

        @Override
        public DirectoryReader wrap(DirectoryReader reader) {
            throw new IllegalStateException("don't wrap internal ops");
        }

        @Override
        public IndexSearcher wrap(EngineConfig engineConfig, IndexSearcher searcher) throws EngineException {
            throw new IllegalStateException("don't wrap internal ops");
        }
    };
    Store store = createStore();
    Path translog = createTempDir("wrapper-test");
    InternalEngine engine = createEngine(store, translog, wrapper);
    try {
        // External acquisition goes through the wrapper and must fail.
        // try-with-resources closes the searcher if acquisition ever
        // unexpectedly succeeds, so it cannot leak before fail() fires.
        try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
            fail("wait what?");
        } catch (EngineException ex) {
            // all well
        }

        // create a document
        Document document = testDocumentWithTextField();
        document.add(new Field(SourceFieldMapper.NAME, B_1.toBytes(), SourceFieldMapper.Defaults.FIELD_TYPE));
        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, null);
        engine.create(new Engine.Create(newUid("1"), doc));

        engine.flush();
        // now do an update; this triggers an internal version lookup which
        // must not hit the throwing wrapper
        document = testDocument();
        document.add(new TextField("value", "test1", Field.Store.YES));
        document.add(new Field(SourceFieldMapper.NAME, B_2.toBytes(), SourceFieldMapper.Defaults.FIELD_TYPE));
        doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_2, null);
        engine.index(new Engine.Index(newUid("1"), doc));

        // segment introspection is internal as well and must succeed
        List<Segment> segments = engine.segments(randomBoolean());
        assertTrue(segments.size() >= 1);
        SegmentsStats segmentsStats = engine.segmentsStats();
        assertTrue(segmentsStats.getCount() >= 1);
    } finally {
        // Release resources even when an assertion above fails; previously
        // a failed assertion leaked both the store and the engine.
        IOUtils.close(store, engine);
    }
}

@Test
/* */
public void testConcurrentGetAndFlush() throws Exception {
Expand Down

0 comments on commit fe7637d

Please sign in to comment.