Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Continue registering pipelines after one pipeline parse failure. #28752

Merged
merged 11 commits into from
Mar 8, 2018
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@
import java.util.Collection;
import java.util.Collections;
import java.util.Map;
import java.util.function.Consumer;
import java.util.function.Function;

import static org.hamcrest.Matchers.equalTo;
Expand Down Expand Up @@ -64,6 +65,58 @@ protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() {
}
}

public void testScriptDisabled() throws Exception {
    // Two pipelines: one that needs the script engine, one that does not.
    String idNoScript = randomAlphaOfLengthBetween(5, 10);
    String idScript = idNoScript + "_script";
    internalCluster().startNode();

    BytesReference scriptPipelineSource = new BytesArray(
        "{\n" +
        " \"processors\" : [\n" +
        " {\"script\" : {\"lang\": \"" + MockScriptEngine.NAME + "\", \"source\": \"my_script\"}}\n" +
        " ]\n" +
        "}");
    BytesReference plainPipelineSource = new BytesArray(
        "{\n" +
        " \"processors\" : [\n" +
        " {\"set\" : {\"field\": \"y\", \"value\": 0}}\n" +
        " ]\n" +
        "}");

    // Asserts that a pipeline with the given id is present in cluster state.
    Consumer<String> assertPipelineStored = pipelineId -> {
        String storedId = client().admin().cluster().prepareGetPipeline(pipelineId)
            .get().pipelines().get(0).getId();
        assertThat(storedId, equalTo(pipelineId));
    };

    client().admin().cluster().preparePutPipeline(idScript, scriptPipelineSource, XContentType.JSON).get();
    client().admin().cluster().preparePutPipeline(idNoScript, plainPipelineSource, XContentType.JSON).get();

    assertPipelineStored.accept(idScript);
    assertPipelineStored.accept(idNoScript);

    // Restart with scripting disabled: both pipelines must still be registered,
    // even though the script pipeline can no longer be parsed into working processors.
    internalCluster().stopCurrentMasterNode();
    internalCluster().startNode(Settings.builder().put("script.allowed_types", "none"));

    assertPipelineStored.accept(idNoScript);
    assertPipelineStored.accept(idScript);

    // The script-free pipeline keeps working after the restart.
    client().prepareIndex("index", "doc", "1")
        .setSource("x", 0)
        .setPipeline(idNoScript)
        .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
        .get();

    // Indexing through the script pipeline fails with a parse-failure message.
    IllegalArgumentException failure = expectThrows(IllegalArgumentException.class,
        () -> client().prepareIndex("index", "doc", "2")
            .setSource("x", 0)
            .setPipeline(idScript)
            .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
            .get());
    assertThat(failure.getMessage(),
        equalTo("pipeline with id [" + idScript + "] was not parsed successfully." +
            " java.lang.IllegalArgumentException: cannot execute [inline] scripts"));

    // Confirm the set processor of the working pipeline actually ran.
    Map<String, Object> source = client().prepareGet("index", "doc", "1").get().getSource();
    assertThat(source.get("x"), equalTo(0));
    assertThat(source.get("y"), equalTo(0));
}

public void testPipelineWithScriptProcessorThatHasStoredScript() throws Exception {
internalCluster().startNode();

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,17 +25,13 @@
import java.util.List;
import java.util.Map;

import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.analysis.AnalysisRegistry;
import org.elasticsearch.plugins.IngestPlugin;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.threadpool.ThreadPool;

import static org.elasticsearch.common.settings.Setting.Property;

/**
* Holder class for several ingest related services.
*/
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -199,6 +199,9 @@ private Pipeline getPipeline(String pipelineId) {
Pipeline pipeline = store.get(pipelineId);
if (pipeline == null) {
throw new IllegalArgumentException("pipeline with id [" + pipelineId + "] does not exist");
} else if (pipeline.getId().equals("invalid_" + pipelineId)) {
throw new IllegalArgumentException(
"pipeline with id [" + pipelineId + "] was not parsed successfully. " + pipeline.getDescription());
}
return pipeline;
}
Expand Down
10 changes: 8 additions & 2 deletions server/src/main/java/org/elasticsearch/ingest/PipelineStore.java
Original file line number Diff line number Diff line change
Expand Up @@ -81,16 +81,22 @@ void innerUpdatePipelines(ClusterState previousState, ClusterState state) {
}

Map<String, Pipeline> pipelines = new HashMap<>();
ArrayList<ElasticsearchParseException> exceptions = new ArrayList<>();
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

s/ArrayList/List ?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

updated

for (PipelineConfiguration pipeline : ingestMetadata.getPipelines().values()) {
try {
pipelines.put(pipeline.getId(), factory.create(pipeline.getId(), pipeline.getConfigAsMap(), processorFactories));
} catch (ElasticsearchParseException e) {
throw e;
pipelines.put(pipeline.getId(), new Pipeline("invalid_" + pipeline.getId(), e.getMessage(),
Copy link
Member

@martijnvg martijnvg Feb 22, 2018

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

what if we create a pipeline that throws the exception for us (the fail processor is in ingest-common, but it is very simple to have that behaviour here too):

pipelines.put(pipeline.getId(), substitutePipeline(pipeline.getId(), e));

private Pipeline substitutePipeline(String id, ElasticsearchParseException e) {
        String tag = e.getHeaderKeys().contains("processor_tag") ? e.getHeader("processor_tag").get(0) : null;
        String type = e.getHeaderKeys().contains("processor_type") ? e.getHeader("processor_type").get(0) : "unknown";
        String errorMessage = "pipeline with id [" + id + "] could not be loaded, caused by [" + e.getDetailedMessage() + "]";
        Processor failureProcessor = new AbstractProcessor(tag) {
            @Override
            public void execute(IngestDocument ingestDocument) throws Exception {
                throw new IllegalStateException(errorMessage);
            }

            @Override
            public String getType() {
                return type;
            }
        };
        String description = "this is a place holder pipeline, because pipeline with id [" +  id + "] could not be loaded";
        return new Pipeline(id, description, null, new CompoundProcessor(failureProcessor));
    }

This way the if statement added PipelineExecutionService.java is not needed and this failure to load logic is in one place.

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Note, I updated the above comment.

Also I think it is important to throw an IllegalStateException instead of a IllegalArgumentException, because the fact that we failed to load the pipeline at this stage is a real issue and this is the best way we can deal with it?

Copy link
Contributor Author

@talevy talevy Mar 6, 2018

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The problem with this approach is that we will be putting these substituted pipelines in PUT requests.

this will return true: https://github.com/elastic/elasticsearch/pull/28752/files#diff-fd6036d9f6ce14fecb8751a3adb9fc59R280 (testPutWithPipelineFactoryError)

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

hmm. nevermind.. for some reason I saw that test fail. I might have done something wrong or a different test failed

null, new CompoundProcessor()));
exceptions.add(e);
} catch (Exception e) {
throw new ElasticsearchParseException("Error updating pipeline with id [" + pipeline.getId() + "]", e);
pipelines.put(pipeline.getId(), new Pipeline("invalid_" + pipeline.getId(), e.getMessage(),
null, new CompoundProcessor()));
exceptions.add(new ElasticsearchParseException("Error updating pipeline with id [" + pipeline.getId() + "]", e));
}
}
this.pipelines = Collections.unmodifiableMap(pipelines);
ExceptionsHelper.rethrowAndSuppress(exceptions);
}

/**
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -172,6 +172,10 @@ public void testBulkWithIngestFailures() throws Exception {
assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult());
}
}

// cleanup
WritePipelineResponse deletePipelineResponse = client().admin().cluster().prepareDeletePipeline("_id").get();
assertTrue(deletePipelineResponse.isAcknowledged());
}

public void testBulkWithUpsert() throws Exception {
Expand Down Expand Up @@ -271,5 +275,8 @@ public void testPutWithPipelineFactoryError() throws Exception {
assertNotNull(ex);
assertThat(ex.getMessage(), equalTo("processor [test] doesn't support one or more provided configuration parameters [unused]"));
}

GetPipelineResponse response = client().admin().cluster().prepareGetPipeline("_id").get();
assertFalse(response.isFound());
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -104,7 +104,10 @@ public void testFailStartNode() throws Exception {
installPlugin = false;
String node2 = internalCluster().startNode();
pipeline = internalCluster().getInstance(NodeService.class, node2).getIngestService().getPipelineStore().get("_id");
assertThat(pipeline, nullValue());

assertNotNull(pipeline);
assertThat(pipeline.getId(), equalTo("invalid__id"));
assertThat(pipeline.getDescription(), equalTo("No processor type exists with name [test]"));
}

}
Original file line number Diff line number Diff line change
Expand Up @@ -92,6 +92,24 @@ public void testExecuteIndexPipelineDoesNotExist() {
verify(completionHandler, never()).accept(anyBoolean());
}

public void testExecuteIndexPipelineExistsButFailedParsing() {
    // A pipeline stored under the "invalid_" prefix marks a parse failure at load time;
    // executing against it must surface the stored error message to the caller.
    when(store.get("_id")).thenReturn(new Pipeline("invalid__id", "error", null, new CompoundProcessor()));
    IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("_id");
    @SuppressWarnings("unchecked")
    Consumer<Exception> failureHandler = mock(Consumer.class);
    @SuppressWarnings("unchecked")
    Consumer<Boolean> completionHandler = mock(Consumer.class);
    // Use expectThrows (as the other tests in this change do) instead of the
    // try { ... fail(...) } catch pattern — it also fails if no exception is thrown.
    IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
        () -> executionService.executeIndexRequest(indexRequest, failureHandler, completionHandler));
    assertThat(e.getMessage(),
        equalTo("pipeline with id [_id] was not parsed successfully. error"));
    // The exception is thrown synchronously, so neither handler may have been invoked.
    verify(failureHandler, never()).accept(any(Exception.class));
    verify(completionHandler, never()).accept(anyBoolean());
}

public void testExecuteBulkPipelineDoesNotExist() {
CompoundProcessor processor = mock(CompoundProcessor.class);
when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", version, processor));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -165,7 +165,9 @@ public void testPutWithErrorResponse() {
assertThat(e.getMessage(), equalTo("[processors] required property is missing"));
}
pipeline = store.get(id);
assertThat(pipeline, nullValue());
assertNotNull(pipeline);
assertThat(pipeline.getId(), equalTo("invalid__id"));
assertThat(pipeline.getDescription(), equalTo("[processors] required property is missing"));
}

public void testDelete() {
Expand Down