diff --git a/MetadataMigration/src/main/java/org/opensearch/migrations/MigrateOrEvaluateArgs.java b/MetadataMigration/src/main/java/org/opensearch/migrations/MigrateOrEvaluateArgs.java
index 4c51a1868..01653a428 100644
--- a/MetadataMigration/src/main/java/org/opensearch/migrations/MigrateOrEvaluateArgs.java
+++ b/MetadataMigration/src/main/java/org/opensearch/migrations/MigrateOrEvaluateArgs.java
@@ -56,7 +56,7 @@ public class MigrateOrEvaluateArgs {
     public Version sourceVersion = null;
 
     @ParametersDelegate
-    public MetadataTransformerParams metadataTransformationParams = new MetadataTransformerParams();
+    public TransformerParams metadataTransformationParams = new MetadataTransformerParams();
 
     @Getter
     public static class MetadataTransformerParams implements TransformerParams {
diff --git a/MetadataMigration/src/main/java/org/opensearch/migrations/commands/Evaluate.java b/MetadataMigration/src/main/java/org/opensearch/migrations/commands/Evaluate.java
index 76a8197f4..b1c4724a1 100644
--- a/MetadataMigration/src/main/java/org/opensearch/migrations/commands/Evaluate.java
+++ b/MetadataMigration/src/main/java/org/opensearch/migrations/commands/Evaluate.java
@@ -25,10 +25,7 @@ public EvaluateResult execute(RootMetadataMigrationContext context) {
             var clusters = createClusters();
             evaluateResult.clusters(clusters);
 
-            var transformer = new CompositeTransformer(
-                getCustomTransformer(),
-                selectTransformer(clusters)
-            );
+            var transformer = selectTransformer(clusters);
             var items = migrateAllItems(migrationMode, clusters, transformer, context);
             evaluateResult.items(items);
diff --git a/MetadataMigration/src/main/java/org/opensearch/migrations/commands/Migrate.java b/MetadataMigration/src/main/java/org/opensearch/migrations/commands/Migrate.java
index 0f4e4335a..76106c41d 100644
--- a/MetadataMigration/src/main/java/org/opensearch/migrations/commands/Migrate.java
+++ b/MetadataMigration/src/main/java/org/opensearch/migrations/commands/Migrate.java
@@ -25,10 +25,7 @@ public MigrateResult execute(RootMetadataMigrationContext context) {
             var clusters = createClusters();
             migrateResult.clusters(clusters);
 
-            var transformer = new CompositeTransformer(
-                getCustomTransformer(),
-                selectTransformer(clusters)
-            );
+            var transformer = selectTransformer(clusters);
             var items = migrateAllItems(migrationMode, clusters, transformer, context);
             migrateResult.items(items);
diff --git a/MetadataMigration/src/main/java/org/opensearch/migrations/commands/MigratorEvaluatorBase.java b/MetadataMigration/src/main/java/org/opensearch/migrations/commands/MigratorEvaluatorBase.java
index ecc363ccc..8a2dd863f 100644
--- a/MetadataMigration/src/main/java/org/opensearch/migrations/commands/MigratorEvaluatorBase.java
+++ b/MetadataMigration/src/main/java/org/opensearch/migrations/commands/MigratorEvaluatorBase.java
@@ -5,6 +5,7 @@
 import org.opensearch.migrations.MigrateOrEvaluateArgs;
 import org.opensearch.migrations.MigrationMode;
+import org.opensearch.migrations.bulkload.transformers.CompositeTransformer;
 import org.opensearch.migrations.bulkload.transformers.TransformFunctions;
 import org.opensearch.migrations.bulkload.transformers.Transformer;
 import org.opensearch.migrations.bulkload.transformers.TransformerToIJsonTransformerAdapter;
@@ -59,7 +60,7 @@ protected Transformer getCustomTransformer() {
             log.atInfo().setMessage("Metadata Transformations config string: {}")
                 .addArgument(transformerConfig).log();
         } else {
-            log.atInfo().setMessage("Using Noop transformation config: {}")
+            log.atInfo().setMessage("Using Noop custom transformation config: {}")
                 .addArgument(NOOP_TRANSFORMATION_CONFIG).log();
             transformerConfig = NOOP_TRANSFORMATION_CONFIG;
         }
@@ -68,13 +69,15 @@ protected Transformer getCustomTransformer() {
     }
 
     protected Transformer selectTransformer(Clusters clusters) {
-        var transformer = TransformFunctions.getTransformer(
+        var versionTransformer = TransformFunctions.getTransformer(
             clusters.getSource().getVersion(),
             clusters.getTarget().getVersion(),
             arguments.minNumberOfReplicas
         );
-        log.atInfo().setMessage("Selected transformer: {}").addArgument(transformer).log();
-        return transformer;
+        var customTransformer = getCustomTransformer();
+        var compositeTransformer = new CompositeTransformer(customTransformer, versionTransformer);
+        log.atInfo().setMessage("Selected transformer: {}").addArgument(compositeTransformer).log();
+        return compositeTransformer;
     }
 
     protected Items migrateAllItems(MigrationMode migrationMode, Clusters clusters, Transformer transformer, RootMetadataMigrationContext context) {
diff --git a/MetadataMigration/src/main/resources/log4j2.properties b/MetadataMigration/src/main/resources/log4j2.properties
index a55b999a2..ed2037018 100644
--- a/MetadataMigration/src/main/resources/log4j2.properties
+++ b/MetadataMigration/src/main/resources/log4j2.properties
@@ -3,40 +3,10 @@ status = WARN
 property.logsDir = ${env:SHARED_LOGS_DIR_PATH:-./logs}
 property.failedLoggerFileNamePrefix = ${logsDir}/${hostName}/failedRequests/failedRequests
 property.metadataTuplesFileNamePrefix = ${logsDir}/${hostName}/metadataTuples/tuples
-
-appenders = console, FailedRequests, MetadataRun, MetadataTuples
-
-appender.FailedRequests.type = RollingRandomAccessFile
-appender.FailedRequests.name = FailedRequests
-appender.FailedRequests.fileName = ${failedLoggerFileNamePrefix}.log
-appender.FailedRequests.filePattern = ${failedLoggerFileNamePrefix}-%d{yyyy-MM-dd-HH:mm}{UTC}-%i.log.gz
-appender.FailedRequests.layout.type = PatternLayout
-appender.FailedRequests.layout.pattern = %m%n
-appender.FailedRequests.policies.type = Policies
-appender.FailedRequests.policies.size.type = SizeBasedTriggeringPolicy
-appender.FailedRequests.policies.size.size = 10 MB
-appender.FailedRequests.strategy.type = DefaultRolloverStrategy
-appender.FailedRequests.immediateFlush = false
-
-logger.FailedRequestsLogger.name = FailedRequestsLogger
-logger.FailedRequestsLogger.level = info
-logger.FailedRequestsLogger.additivity = false
-logger.FailedRequestsLogger.appenderRef.FailedRequests.ref = FailedRequests
-
 property.runTime = ${date:yyyy-MM-dd_HH-mm-ss}
 property.metadataRunLoggerFileNamePrefix = ${logsDir}/${hostName}/metadata/metadata_
-
-appender.MetadataRun.type = File
-appender.MetadataRun.name = MetadataRun
-appender.MetadataRun.fileName = ${metadataRunLoggerFileNamePrefix}${runTime}.log
-appender.MetadataRun.layout.type = PatternLayout
-appender.MetadataRun.layout.pattern = %d{yyyy-MM-dd HH:mm:ss,SSS}{UTC} %p %c{1.} [%t] %m%n
-appender.MetadataRun.immediateFlush = true
+property.metadataRunLoggerFileNamePrefix = ${logsDir}/${hostName}/metadata/metadata
 
-logger.MetadataLogger.name = MetadataLogger
-logger.MetadataLogger.level = debug
-logger.MetadataLogger.additivity = false
-logger.MetadataLogger.appenderRef.MetadataRun.ref = MetadataRun
+appenders = console, MetadataTuples, FailedRequests, MetadataRun
 
 appender.console.type = Console
 appender.console.name = Console
@@ -44,24 +14,26 @@ appender.console.target = SYSTEM_OUT
 appender.console.layout.type = PatternLayout
 appender.console.layout.pattern = %m%n
 
-rootLogger.level = info
+rootLogger.level = ERROR
 rootLogger.appenderRef.console.ref = MetadataRun
 
+# Metadata Migration
 logger.MetadataMigration.name = org.opensearch.migrations.MetadataMigration
 logger.MetadataMigration.level = info
 logger.MetadataMigration.additivity = false
 logger.MetadataMigration.appenderRef.stdout.ref = Console
 logger.MetadataMigration.appenderRef.MetadataRun.ref = MetadataRun
 
+# Metadata Tuples
 appender.MetadataTuples.type = RollingRandomAccessFile
 appender.MetadataTuples.name = MetadataTuples
 appender.MetadataTuples.fileName = ${metadataTuplesFileNamePrefix}.log
-appender.MetadataTuples.filePattern = ${metadataTuplesFileNamePrefix}-%d{yyyy-MM-dd-HH-mm}{UTC}-%i.log
+appender.MetadataTuples.filePattern = ${metadataTuplesFileNamePrefix}_${runTime}-%i.log
 appender.MetadataTuples.layout.type = PatternLayout
 appender.MetadataTuples.layout.pattern = %m%n
 appender.MetadataTuples.policies.type = Policies
-appender.MetadataTuples.policies.size.type = SizeBasedTriggeringPolicy
-appender.MetadataTuples.policies.size.size = 10 MB
+appender.MetadataTuples.policies.startup.type = OnStartupTriggeringPolicy
+appender.MetadataTuples.policies.startup.minSize = 0
 appender.MetadataTuples.strategy.type = DefaultRolloverStrategy
 appender.MetadataTuples.immediateFlush = false
@@ -69,3 +41,34 @@ logger.OutputTransformationJsonLogger.name = OutputTransformationJsonLogger
 logger.OutputTransformationJsonLogger.level = info
 logger.OutputTransformationJsonLogger.additivity = false
 logger.OutputTransformationJsonLogger.appenderRef.MetadataTuples.ref = MetadataTuples
+
+# MetadataRun Logs
+appender.MetadataRun.type = File
+appender.MetadataRun.name = MetadataRun
+appender.MetadataRun.fileName = ${metadataRunLoggerFileNamePrefix}${runTime}-%i.log
+appender.MetadataRun.layout.type = PatternLayout
+appender.MetadataRun.layout.pattern = %d{yyyy-MM-dd HH:mm:ss,SSS}{UTC} %p %c{1.} [%t] %m%n
+appender.MetadataRun.immediateFlush = false
+
+logger.MetadataLogger.name = MetadataLogger
+logger.MetadataLogger.level = debug
+logger.MetadataLogger.additivity = false
+logger.MetadataLogger.appenderRef.MetadataRun.ref = MetadataRun
+
+# Failed Requests
+appender.FailedRequests.type = RollingRandomAccessFile
+appender.FailedRequests.name = FailedRequests
+appender.FailedRequests.fileName = ${failedLoggerFileNamePrefix}.log
+appender.FailedRequests.filePattern = ${failedLoggerFileNamePrefix}-%d{yyyy-MM-dd-HH:mm}{UTC}-%i.log.gz
+appender.FailedRequests.layout.type = PatternLayout
+appender.FailedRequests.layout.pattern = %m%n
+appender.FailedRequests.policies.type = Policies
+appender.FailedRequests.policies.size.type = SizeBasedTriggeringPolicy
+appender.FailedRequests.policies.size.size = 10 MB
+appender.FailedRequests.strategy.type = DefaultRolloverStrategy
+appender.FailedRequests.immediateFlush = false
+
+logger.FailedRequestsLogger.name = FailedRequestsLogger
+logger.FailedRequestsLogger.level = info
+logger.FailedRequestsLogger.additivity = false
+logger.FailedRequestsLogger.appenderRef.FailedRequests.ref = FailedRequests
diff --git a/MetadataMigration/src/test/java/org/opensearch/migrations/CustomTransformationTest.java b/MetadataMigration/src/test/java/org/opensearch/migrations/CustomTransformationTest.java
new file mode 100644
index 000000000..e3c90d22d
--- /dev/null
+++ b/MetadataMigration/src/test/java/org/opensearch/migrations/CustomTransformationTest.java
@@ -0,0 +1,281 @@
+package org.opensearch.migrations;
+
+import java.io.File;
+import java.util.List;
+import java.util.concurrent.CompletableFuture;
+import java.util.stream.Stream;
+
+import org.opensearch.migrations.bulkload.SupportedClusters;
+import org.opensearch.migrations.bulkload.common.FileSystemSnapshotCreator;
+import org.opensearch.migrations.bulkload.common.OpenSearchClient;
+import org.opensearch.migrations.bulkload.common.http.ConnectionContextTestParams;
+import org.opensearch.migrations.bulkload.framework.SearchClusterContainer;
+import org.opensearch.migrations.bulkload.http.ClusterOperations;
+import org.opensearch.migrations.bulkload.models.DataFilterArgs;
+import org.opensearch.migrations.bulkload.worker.SnapshotRunner;
+import org.opensearch.migrations.commands.MigrationItemResult;
+import org.opensearch.migrations.metadata.tracing.MetadataMigrationTestContext;
+import org.opensearch.migrations.snapshot.creation.tracing.SnapshotTestContext;
+import org.opensearch.migrations.transform.TransformerParams;
+
+import lombok.Builder;
+import lombok.Data;
+import lombok.SneakyThrows;
+import lombok.extern.slf4j.Slf4j;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.io.TempDir;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.Arguments;
+import org.junit.jupiter.params.provider.MethodSource;
+
+import static org.hamcrest.CoreMatchers.containsString;
+import static org.hamcrest.CoreMatchers.equalTo;
+import static org.hamcrest.MatcherAssert.assertThat;
+
+/**
+ * Test class to verify custom transformations during metadata migrations.
+ */
+@Tag("isolatedTest")
+@Slf4j
+class CustomTransformationTest {
+
+    @TempDir
+    private File localDirectory;
+
+    private static Stream scenarios() {
+        // Define scenarios with different source and target cluster versions
+        return SupportedClusters.sources().stream()
+            .flatMap(sourceCluster ->
+                SupportedClusters.targets().stream()
+                    .map(targetCluster -> Arguments.of(sourceCluster, targetCluster))
+            );
+    }
+
+    @ParameterizedTest(name = "Custom Transformation From {0} to {1}")
+    @MethodSource(value = "scenarios")
+    void customTransformationMetadataMigration(
+        SearchClusterContainer.ContainerVersion sourceVersion,
+        SearchClusterContainer.ContainerVersion targetVersion) {
+        try (
+            final var sourceCluster = new SearchClusterContainer(sourceVersion);
+            final var targetCluster = new SearchClusterContainer(targetVersion)
+        ) {
+            performCustomTransformationTest(sourceCluster, targetCluster);
+        }
+    }
+
+    @SneakyThrows
+    private void performCustomTransformationTest(
+        final SearchClusterContainer sourceCluster,
+        final SearchClusterContainer targetCluster
+    ) {
+        // Start both source and target clusters asynchronously
+        CompletableFuture.allOf(
+            CompletableFuture.runAsync(sourceCluster::start),
+            CompletableFuture.runAsync(targetCluster::start)
+        ).join();
+
+        var sourceOperations = new ClusterOperations(sourceCluster.getUrl());
+        var targetOperations = new ClusterOperations(targetCluster.getUrl());
+
+        // Test data
+        var originalIndexName = "test_index";
+        var transformedIndexName = "transformed_index";
+        var documentId = "1";
+        var documentContent = "{\"field\":\"value\"}";
+
+        // Create index and add a document on the source cluster
+        sourceOperations.createIndex(originalIndexName);
+        sourceOperations.createDocument(originalIndexName, documentId, documentContent);
+
+        // Create legacy template
+        var legacyTemplateName = "legacy_template";
+        var legacyTemplatePattern = "legacy_*";
+        sourceOperations.createLegacyTemplate(legacyTemplateName, legacyTemplatePattern);
+
+        // Create index template
+        var indexTemplateName = "index_template";
+        var indexTemplatePattern = "index*";
+
+        // Create component template
+        var componentTemplateName = "component_template";
+        var componentTemplateMode = "mode_value"; // Replace with actual mode if applicable
+        boolean newComponentCompatible = sourceCluster.getContainerVersion().getVersion().getMajor() >= 7;
+        if (newComponentCompatible) {
+            sourceOperations.createIndexTemplate(indexTemplateName, "dummy", indexTemplatePattern);
+
+            var componentTemplateAdditionalParam = "additional_param"; // Replace with actual param if applicable
+            sourceOperations.createComponentTemplate(componentTemplateName, indexTemplateName, componentTemplateAdditionalParam, "index*");
+        }
+
+        // Create index that matches the templates
+        var legacyIndexName = "legacy_index";
+        var indexIndexName = "index_index";
+        sourceOperations.createIndex(legacyIndexName);
+        sourceOperations.createIndex(indexIndexName);
+
+        // Define custom transformations for index, legacy, and component templates
+        String customTransformationJson = "[\n" +
+            "  {\n" +
+            "    \"JsonConditionalTransformerProvider\": [\n" +
+            "      {\"JsonJMESPathPredicateProvider\": { \"script\": \"name == 'test_index'\"}},\n" +
+            "      [\n" +
+            "        {\"JsonJoltTransformerProvider\": { \n" +
+            "          \"script\": {\n" +
+            "            \"operation\": \"modify-overwrite-beta\",\n" +
+            "            \"spec\": {\n" +
+            "              \"name\": \"transformed_index\"\n" +
+            "            }\n" +
+            "          } \n" +
+            "        }}\n" +
+            "      ]\n" +
+            "    ]\n" +
+            "  },\n" +
+            "  {\n" +
+            "    \"JsonConditionalTransformerProvider\": [\n" +
+            "      {\"JsonJMESPathPredicateProvider\": { \"script\": \"type == 'template' && name == 'legacy_template'\"}},\n" +
+            "      [\n" +
+            "        {\"JsonJoltTransformerProvider\": { \n" +
+            "          \"script\": {\n" +
+            "            \"operation\": \"modify-overwrite-beta\",\n" +
+            "            \"spec\": {\n" +
+            "              \"name\": \"transformed_legacy_template\"\n" +
+            "            }\n" +
+            "          } \n" +
+            "        }}\n" +
+            "      ]\n" +
+            "    ]\n" +
+            "  },\n" +
+            "  {\n" +
+            "    \"JsonConditionalTransformerProvider\": [\n" +
+            "      {\"JsonJMESPathPredicateProvider\": { \"script\": \"type == 'index_template' && name == 'index_template'\"}},\n" +
+            "      [\n" +
+            "        {\"JsonJoltTransformerProvider\": { \n" +
+            "          \"script\": {\n" +
+            "            \"operation\": \"modify-overwrite-beta\",\n" +
+            "            \"spec\": {\n" +
+            "              \"name\": \"transformed_index_template\",\n" +
+            "              \"body\": {\n" +
+            "                \"composed_of\": {\n" +
+            "                  \"[0]\": \"transformed_component_template\"\n" +
+            "                }\n" +
+            "              }\n" +
+            "            }\n" +
+            "          }\n" +
+            "        }}\n" +
+            "      ]\n" +
+            "    ]\n" +
+            "  },\n" +
+            "  {\n" +
+            "    \"JsonConditionalTransformerProvider\": [\n" +
+            "      {\"JsonJMESPathPredicateProvider\": { \"script\": \"type == 'component_template' && name == 'component_template'\"}},\n" +
+            "      [\n" +
+            "        {\"JsonJoltTransformerProvider\": { \n" +
+            "          \"script\": {\n" +
+            "            \"operation\": \"modify-overwrite-beta\",\n" +
+            "            \"spec\": {\n" +
+            "              \"name\": \"transformed_component_template\"\n" +
+            "            }\n" +
+            "          } \n" +
+            "        }}\n" +
+            "      ]\n" +
+            "    ]\n" +
+            "  }\n" +
+            "]";
+
+        var arguments = new MigrateOrEvaluateArgs();
+
+        // Use SnapshotImage as the transfer medium
+        var snapshotName = "custom_transformation_snap";
+        var snapshotContext = SnapshotTestContext.factory().noOtelTracking();
+        var sourceClient = new OpenSearchClient(ConnectionContextTestParams.builder()
+            .host(sourceCluster.getUrl())
+            .insecure(true)
+            .build()
+            .toConnectionContext());
+        var snapshotCreator = new FileSystemSnapshotCreator(
+            snapshotName,
+            sourceClient,
+            SearchClusterContainer.CLUSTER_SNAPSHOT_DIR,
+            List.of(),
+            snapshotContext.createSnapshotCreateContext()
+        );
+        SnapshotRunner.runAndWaitForCompletion(snapshotCreator);
+        sourceCluster.copySnapshotData(localDirectory.toString());
+        arguments.fileSystemRepoPath = localDirectory.getAbsolutePath();
+        arguments.snapshotName = snapshotName;
+        arguments.sourceVersion = sourceCluster.getContainerVersion().getVersion();
+
+        arguments.targetArgs.host = targetCluster.getUrl();
+
+        // Set up data filters to include only the test index and templates
+        var dataFilterArgs = new DataFilterArgs();
+        dataFilterArgs.indexAllowlist = List.of(originalIndexName, legacyIndexName, indexIndexName, transformedIndexName);
+        dataFilterArgs.indexTemplateAllowlist = List.of(indexTemplateName, legacyTemplateName, "transformed_legacy_template", "transformed_index_template");
+        dataFilterArgs.componentTemplateAllowlist = List.of(componentTemplateName, "transformed_component_template");
+        arguments.dataFilterArgs = dataFilterArgs;
+
+        // Specify the custom transformer configuration
+        arguments.metadataTransformationParams = TestTransformationParams.builder()
+            .transformerConfig(customTransformationJson)
+            .build();
+
+        // Execute the migration with the custom transformation
+        var metadataContext = MetadataMigrationTestContext.factory().noOtelTracking();
+        var metadata = new MetadataMigration();
+
+        MigrationItemResult result = metadata.migrate(arguments).execute(metadataContext);
+
+        // Verify the migration result
+        log.info(result.asCliOutput());
+        assertThat(result.getExitCode(), equalTo(0));
+
+        // Verify that the transformed index exists on the target cluster
+        var res = targetOperations.get("/" + transformedIndexName);
+        assertThat(res.getKey(), equalTo(200));
+        assertThat(res.getValue(), containsString(transformedIndexName));
+
+        // Verify that the original index does not exist on the target cluster
+        res = targetOperations.get("/" + originalIndexName);
+        assertThat(res.getKey(), equalTo(404));
+
+        // Verify that the transformed legacy template exists on the target cluster
+        res = targetOperations.get("/_template/transformed_legacy_template");
+        assertThat(res.getKey(), equalTo(200));
+        assertThat(res.getValue(), containsString("transformed_legacy_template"));
+
+        // Verify that the original legacy template does not exist on the target cluster
+        res = targetOperations.get("/_template/" + legacyTemplateName);
+        assertThat(res.getKey(), equalTo(404));
+
+        if (newComponentCompatible) {
+            // Verify that the transformed index template exists on the target cluster
+            res = targetOperations.get("/_index_template/transformed_index_template");
+            assertThat(res.getKey(), equalTo(200));
+            assertThat(res.getValue(), containsString("transformed_index_template"));
+
+            // Verify that the original index template does not exist on the target cluster
+            res = targetOperations.get("/_index_template/" + indexTemplateName);
+            assertThat(res.getKey(), equalTo(404));
+
+            // Verify that the transformed component template exists on the target cluster
+            res = targetOperations.get("/_component_template/transformed_component_template");
+            assertThat(res.getKey(), equalTo(200));
+            assertThat(res.getValue(), containsString("transformed_component_template"));
+
+            // Verify that the original component template does not exist on the target cluster
+            res = targetOperations.get("/_component_template/" + componentTemplateName);
+            assertThat(res.getKey(), equalTo(404));
+        }
+    }
+
+    @Data
+    @Builder
+    private static class TestTransformationParams implements TransformerParams {
+        @Builder.Default
+        private String transformerConfigParameterArgPrefix = "";
+        private String transformerConfigEncoded;
+        private String transformerConfig;
+        private String transformerConfigFile;
+    }
+}
diff --git a/RFS/src/main/java/org/opensearch/migrations/bulkload/worker/IndexRunner.java b/RFS/src/main/java/org/opensearch/migrations/bulkload/worker/IndexRunner.java
index 7592dbd80..ddd9a0d85 100644
--- a/RFS/src/main/java/org/opensearch/migrations/bulkload/worker/IndexRunner.java
+++ b/RFS/src/main/java/org/opensearch/migrations/bulkload/worker/IndexRunner.java
@@ -36,17 +36,14 @@ public IndexMetadataResults migrateIndices(MigrationMode mode, ICreateIndexConte
         };
         var results = IndexMetadataResults.builder();
 
-        // log out filtered items
+        // Set results for filtered items
         repoDataProvider.getIndicesInSnapshot(snapshotName)
             .stream()
             .filter(Predicate.not(FilterScheme.filterIndicesByAllowList(indexAllowlist, logger)))
-            .forEach(index -> {
-                var indexMetadata = metadataFactory.fromRepo(snapshotName, index.getName());
-                log.atInfo().setMessage("{ \"before\": {},\n\"after\":{}}")
-                    .addArgument(indexMetadata)
-                    .addArgument("Removed due to index filter")
-                    .log();
-            });
+            .forEach(index -> results.index(CreationResult.builder()
+                .name(index.getName())
+                .failureType(CreationFailureType.SKIPPED_DUE_TO_FILTER)
+                .build()));
 
         repoDataProvider.getIndicesInSnapshot(snapshotName)
@@ -60,10 +57,6 @@ public IndexMetadataResults migrateIndices(MigrationMode mode, ICreateIndexConte
             var indexMetadata = originalIndexMetadata.deepCopy();
             try {
                 indexMetadata = transformer.transformIndexMetadata(indexMetadata);
-                log.atInfo().setMessage("{ \"before\": {},\n\"after\":{}}")
-                    .addArgument(originalIndexMetadata)
-                    .addArgument(indexMetadata)
-                    .log();
                 indexResult = indexCreator.create(indexMetadata, mode, context);
             } catch (Throwable t) {
                 indexResult = CreationResult.builder()
diff --git a/RFS/src/main/java/org/opensearch/migrations/metadata/CreationResult.java b/RFS/src/main/java/org/opensearch/migrations/metadata/CreationResult.java
index 08e2ae2ed..243347d42 100644
--- a/RFS/src/main/java/org/opensearch/migrations/metadata/CreationResult.java
+++ b/RFS/src/main/java/org/opensearch/migrations/metadata/CreationResult.java
@@ -29,7 +29,8 @@ public boolean wasFatal() {
     public static enum CreationFailureType {
         ALREADY_EXISTS(false, "already exists"),
         UNABLE_TO_TRANSFORM_FAILURE(true, "failed to transform to the target version"),
-        TARGET_CLUSTER_FAILURE(true, "failed on target cluster");
+        TARGET_CLUSTER_FAILURE(true, "failed on target cluster"),
+        SKIPPED_DUE_TO_FILTER(false, "skipped due to filter");
 
         private final boolean fatal;
         private final String message;
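
Note on the transformer wiring in MigratorEvaluatorBase above: selectTransformer() now builds the composite itself, so both migrate and evaluate always run the user-supplied custom transformer before the version-specific transformer. The sketch below is illustrative only; it shows that ordering with a hypothetical UnaryOperator-based stand-in rather than the repository's actual Transformer and CompositeTransformer types.

import java.util.List;
import java.util.function.UnaryOperator;

// Illustrative sketch: applies delegates in the order they are passed,
// mirroring new CompositeTransformer(customTransformer, versionTransformer).
class CompositeTransformerSketch implements UnaryOperator<String> {
    private final List<UnaryOperator<String>> delegates;

    CompositeTransformerSketch(List<UnaryOperator<String>> delegates) {
        this.delegates = delegates;
    }

    @Override
    public String apply(String metadataJson) {
        var transformed = metadataJson;
        for (var delegate : delegates) {
            // The custom transformation runs first, then the version mapping.
            transformed = delegate.apply(transformed);
        }
        return transformed;
    }
}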