diff --git a/airbyte-config/init/src/main/resources/config/STANDARD_DESTINATION_DEFINITION/424892c4-daac-4491-b35d-c6688ba547ba.json b/airbyte-config/init/src/main/resources/config/STANDARD_DESTINATION_DEFINITION/424892c4-daac-4491-b35d-c6688ba547ba.json index b90ab00a62c1c..acf0a7d031470 100644 --- a/airbyte-config/init/src/main/resources/config/STANDARD_DESTINATION_DEFINITION/424892c4-daac-4491-b35d-c6688ba547ba.json +++ b/airbyte-config/init/src/main/resources/config/STANDARD_DESTINATION_DEFINITION/424892c4-daac-4491-b35d-c6688ba547ba.json @@ -2,7 +2,7 @@ "destinationDefinitionId": "424892c4-daac-4491-b35d-c6688ba547ba", "name": "Snowflake", "dockerRepository": "airbyte/destination-snowflake", - "dockerImageTag": "0.4.2", + "dockerImageTag": "0.4.3", "documentationUrl": "https://docs.airbyte.io/integrations/destinations/snowflake", "icon": "snowflake.svg" } diff --git a/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml b/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml index 6f120e18eca22..712996128cd71 100644 --- a/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml @@ -179,7 +179,7 @@ - name: Snowflake destinationDefinitionId: 424892c4-daac-4491-b35d-c6688ba547ba dockerRepository: airbyte/destination-snowflake - dockerImageTag: 0.4.2 + dockerImageTag: 0.4.3 documentationUrl: https://docs.airbyte.io/integrations/destinations/snowflake icon: snowflake.svg - name: MariaDB ColumnStore diff --git a/airbyte-config/init/src/main/resources/seed/destination_specs.yaml b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml index 6f9cb6856d36c..dd34e8ef899b4 100644 --- a/airbyte-config/init/src/main/resources/seed/destination_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml @@ -3786,7 +3786,7 @@ supported_destination_sync_modes: - "overwrite" - "append" -- dockerImage: "airbyte/destination-snowflake:0.4.2" +- dockerImage: "airbyte/destination-snowflake:0.4.3" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/snowflake" connectionSpecification: @@ -3974,6 +3974,14 @@ \ memory requirement. Modify this with care." title: "Stream Part Size" order: 5 + purge_staging_data: + title: "Purge Staging Files and Tables" + type: "boolean" + description: "Whether to delete the staging files from S3 after completing\ + \ the sync. See the docs for details. Only relevant for COPY. Defaults\ + \ to true." 
+ default: true + order: 6 - title: "GCS Staging" additionalProperties: false description: "Writes large batches of records to a file, uploads the file\ diff --git a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/s3/S3CopyConfig.java b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/s3/S3CopyConfig.java index bfce8529dfceb..6b13de6f73c5f 100644 --- a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/s3/S3CopyConfig.java +++ b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/s3/S3CopyConfig.java @@ -22,4 +22,9 @@ public static boolean shouldPurgeStagingData(final JsonNode config) { } } + public static S3CopyConfig getS3CopyConfig(final JsonNode config) { + return new S3CopyConfig(S3CopyConfig.shouldPurgeStagingData(config), + S3DestinationConfig.getS3DestinationConfig(config)); + } + } diff --git a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/s3/S3StreamCopier.java b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/s3/S3StreamCopier.java index 5f7aef024cbd3..e6a2988b7b66a 100644 --- a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/s3/S3StreamCopier.java +++ b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/s3/S3StreamCopier.java @@ -5,6 +5,7 @@ package io.airbyte.integrations.destination.jdbc.copy.s3; import com.amazonaws.services.s3.AmazonS3; +import com.google.common.annotations.VisibleForTesting; import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.integrations.destination.ExtendedNameTransformer; import io.airbyte.integrations.destination.jdbc.SqlOperations; @@ -201,6 +202,16 @@ protected static String getFullS3Path(final String s3BucketName, final String s3 return String.join("/", "s3:/", s3BucketName, s3StagingFile); } + @VisibleForTesting + public String getTmpTableName() { + return tmpTableName; + } + + @VisibleForTesting + public Map getStagingWritersByFile() { + return stagingWritersByFile; + } + public abstract void copyS3CsvFileIntoTable(JdbcDatabase database, String s3FileLocation, String schema, diff --git a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftCopyS3Destination.java b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftCopyS3Destination.java index 1b5249d0ed7fa..8b8212bb8cf7e 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftCopyS3Destination.java +++ b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftCopyS3Destination.java @@ -43,7 +43,7 @@ public AirbyteMessageConsumer getConsumer(final JsonNode config, getDatabase(config), getSqlOperations(), getNameTransformer(), - new S3CopyConfig(S3CopyConfig.shouldPurgeStagingData(config), getS3DestinationConfig(config)), + S3CopyConfig.getS3CopyConfig(config), catalog, new RedshiftStreamCopierFactory(), getConfiguredSchema(config)); diff --git 
a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftStreamCopier.java b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftStreamCopier.java index c296ddf1d1265..bd6e878d36835 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftStreamCopier.java +++ b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftStreamCopier.java @@ -110,11 +110,6 @@ public void removeFileAndDropTmpTable() throws Exception { } } - @VisibleForTesting - String getTmpTableName() { - return tmpTableName; - } - /** * Creates the contents of a manifest file given the `s3StagingFiles`. There must be at least one * entry in a manifest file otherwise it is not considered valid for the COPY command. diff --git a/airbyte-integrations/connectors/destination-snowflake/Dockerfile b/airbyte-integrations/connectors/destination-snowflake/Dockerfile index f854615fb1c72..b0e293930d9cc 100644 --- a/airbyte-integrations/connectors/destination-snowflake/Dockerfile +++ b/airbyte-integrations/connectors/destination-snowflake/Dockerfile @@ -18,5 +18,5 @@ COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar RUN tar xf ${APPLICATION}.tar --strip-components=1 -LABEL io.airbyte.version=0.4.2 +LABEL io.airbyte.version=0.4.3 LABEL io.airbyte.name=airbyte/destination-snowflake diff --git a/airbyte-integrations/connectors/destination-snowflake/README.md b/airbyte-integrations/connectors/destination-snowflake/README.md index b21bda7163d7c..e48eaa79fc85f 100644 --- a/airbyte-integrations/connectors/destination-snowflake/README.md +++ b/airbyte-integrations/connectors/destination-snowflake/README.md @@ -22,7 +22,7 @@ Put the contents of the `Snowflake Integration Test Config` secret on Rippling under the `Engineering` folder into `secrets/config.json` to be able to run integration tests locally. 1. Put the contents of the `destination snowflake - insert test creds` LastPass secret into `secrets/insert_config.json`. -1. Put the contents of the `destination snowflake - insert staging test creds` secret into `insert_staging_config.json`. +1. Put the contents of the `destination snowflake - insert staging test creds` secret into `internal_staging_config.json`. 1. Put the contents of the `destination snowflake - gcs copy test creds` secret into `secrets/copy_gcs_config.json` 1. 
Put the contents of the `destination snowflake - s3 copy test creds` secret into `secrets/copy_s3_config.json` diff --git a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeCopyS3Destination.java b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeCopyS3Destination.java index 0e58e705c6690..53fd764e1227a 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeCopyS3Destination.java +++ b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeCopyS3Destination.java @@ -11,6 +11,7 @@ import io.airbyte.integrations.destination.jdbc.SqlOperations; import io.airbyte.integrations.destination.jdbc.copy.CopyConsumerFactory; import io.airbyte.integrations.destination.jdbc.copy.CopyDestination; +import io.airbyte.integrations.destination.jdbc.copy.s3.S3CopyConfig; import io.airbyte.integrations.destination.s3.S3Destination; import io.airbyte.integrations.destination.s3.S3DestinationConfig; import io.airbyte.protocol.models.AirbyteMessage; @@ -28,7 +29,7 @@ public AirbyteMessageConsumer getConsumer(final JsonNode config, getDatabase(config), getSqlOperations(), getNameTransformer(), - getS3DestinationConfig(config), + S3CopyConfig.getS3CopyConfig(config.get("loading_method")), catalog, new SnowflakeS3StreamCopierFactory(), getConfiguredSchema(config)); diff --git a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeS3StreamCopier.java b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeS3StreamCopier.java index e2f886c111527..d25e00a7675c7 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeS3StreamCopier.java +++ b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeS3StreamCopier.java @@ -5,26 +5,65 @@ package io.airbyte.integrations.destination.snowflake; import com.amazonaws.services.s3.AmazonS3; +import com.google.common.annotations.VisibleForTesting; import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.integrations.destination.ExtendedNameTransformer; import io.airbyte.integrations.destination.jdbc.SqlOperations; -import io.airbyte.integrations.destination.jdbc.copy.s3.LegacyS3StreamCopier; +import io.airbyte.integrations.destination.jdbc.copy.s3.S3CopyConfig; +import io.airbyte.integrations.destination.jdbc.copy.s3.S3StreamCopier; import io.airbyte.integrations.destination.s3.S3DestinationConfig; -import io.airbyte.protocol.models.DestinationSyncMode; +import io.airbyte.protocol.models.ConfiguredAirbyteStream; import java.sql.SQLException; +import java.sql.Timestamp; +import java.time.Instant; -public class SnowflakeS3StreamCopier extends LegacyS3StreamCopier { +public class SnowflakeS3StreamCopier extends S3StreamCopier { + + // From https://docs.aws.amazon.com/redshift/latest/dg/t_loading-tables-from-s3.html + // "Split your load data files so that the files are about equal size, between 1 MB and 1 GB after + // compression" + public static final int MAX_PARTS_PER_FILE = 4; public SnowflakeS3StreamCopier(final String stagingFolder, - final DestinationSyncMode destSyncMode, final String schema, - final 
String streamName, final AmazonS3 client, final JdbcDatabase db, - final S3DestinationConfig s3Config, + final S3CopyConfig config, final ExtendedNameTransformer nameTransformer, - final SqlOperations sqlOperations) { - super(stagingFolder, destSyncMode, schema, streamName, client, db, s3Config, nameTransformer, sqlOperations); + final SqlOperations sqlOperations, + final ConfiguredAirbyteStream configuredAirbyteStream) { + this( + stagingFolder, + schema, + client, + db, + config, + nameTransformer, + sqlOperations, + Timestamp.from(Instant.now()), + configuredAirbyteStream); + } + + @VisibleForTesting + SnowflakeS3StreamCopier(final String stagingFolder, + final String schema, + final AmazonS3 client, + final JdbcDatabase db, + final S3CopyConfig config, + final ExtendedNameTransformer nameTransformer, + final SqlOperations sqlOperations, + final Timestamp uploadTime, + final ConfiguredAirbyteStream configuredAirbyteStream) { + super(stagingFolder, + schema, + client, + db, + config, + nameTransformer, + sqlOperations, + configuredAirbyteStream, + uploadTime, + MAX_PARTS_PER_FILE); } @Override diff --git a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeS3StreamCopierFactory.java b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeS3StreamCopierFactory.java index d2d9139af5182..52d203e660a34 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeS3StreamCopierFactory.java +++ b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeS3StreamCopierFactory.java @@ -9,24 +9,24 @@ import io.airbyte.integrations.destination.ExtendedNameTransformer; import io.airbyte.integrations.destination.jdbc.SqlOperations; import io.airbyte.integrations.destination.jdbc.copy.StreamCopier; -import io.airbyte.integrations.destination.jdbc.copy.s3.LegacyS3StreamCopierFactory; -import io.airbyte.integrations.destination.s3.S3DestinationConfig; -import io.airbyte.protocol.models.DestinationSyncMode; +import io.airbyte.integrations.destination.jdbc.copy.s3.S3CopyConfig; +import io.airbyte.integrations.destination.jdbc.copy.s3.S3StreamCopierFactory; +import io.airbyte.protocol.models.ConfiguredAirbyteStream; -public class SnowflakeS3StreamCopierFactory extends LegacyS3StreamCopierFactory { +public class SnowflakeS3StreamCopierFactory extends S3StreamCopierFactory { @Override - public StreamCopier create(final String stagingFolder, - final DestinationSyncMode syncMode, - final String schema, - final String streamName, - final AmazonS3 s3Client, - final JdbcDatabase db, - final S3DestinationConfig s3Config, - final ExtendedNameTransformer nameTransformer, - final SqlOperations sqlOperations) + protected StreamCopier create(final String stagingFolder, + final String schema, + final AmazonS3 s3Client, + final JdbcDatabase db, + final S3CopyConfig config, + final ExtendedNameTransformer nameTransformer, + final SqlOperations sqlOperations, + final ConfiguredAirbyteStream configuredStream) throws Exception { - return new SnowflakeS3StreamCopier(stagingFolder, syncMode, schema, streamName, s3Client, db, s3Config, nameTransformer, sqlOperations); + return new SnowflakeS3StreamCopier(stagingFolder, schema, s3Client, db, config, nameTransformer, + sqlOperations, configuredStream); } } diff --git 
a/airbyte-integrations/connectors/destination-snowflake/src/main/resources/spec.json b/airbyte-integrations/connectors/destination-snowflake/src/main/resources/spec.json index de555da19b948..914aee0d1aac1 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/destination-snowflake/src/main/resources/spec.json @@ -178,6 +178,13 @@ "description": "Optional. Increase this if syncing tables larger than 100GB. Only relevant for COPY. Files are streamed to S3 in parts. This determines the size of each part, in MBs. As S3 has a limit of 10,000 parts per file, part size affects the table size. This is 10MB by default, resulting in a default limit of 100GB tables. Note, a larger part size will result in larger memory requirements. A rule of thumb is to multiply the part size by 10 to get the memory requirement. Modify this with care.", "title": "Stream Part Size", "order": 5 + }, + "purge_staging_data": { + "title": "Purge Staging Files and Tables", + "type": "boolean", + "description": "Whether to delete the staging files from S3 after completing the sync. See the docs for details. Only relevant for COPY. Defaults to true.", + "default": true, + "order": 6 } } }, diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test/java/io/airbyte/integrations/destination/snowflake/SnowflakeS3StreamCopierTest.java b/airbyte-integrations/connectors/destination-snowflake/src/test/java/io/airbyte/integrations/destination/snowflake/SnowflakeS3StreamCopierTest.java new file mode 100644 index 0000000000000..77913d82e6fa0 --- /dev/null +++ b/airbyte-integrations/connectors/destination-snowflake/src/test/java/io/airbyte/integrations/destination/snowflake/SnowflakeS3StreamCopierTest.java @@ -0,0 +1,90 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.destination.snowflake; + +import static io.airbyte.integrations.destination.snowflake.SnowflakeS3StreamCopier.MAX_PARTS_PER_FILE; +import static org.mockito.Mockito.RETURNS_DEEP_STUBS; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; + +import com.amazonaws.services.s3.AmazonS3Client; +import io.airbyte.db.jdbc.JdbcDatabase; +import io.airbyte.integrations.destination.ExtendedNameTransformer; +import io.airbyte.integrations.destination.jdbc.SqlOperations; +import io.airbyte.integrations.destination.jdbc.copy.s3.S3CopyConfig; +import io.airbyte.integrations.destination.s3.S3DestinationConfig; +import io.airbyte.protocol.models.AirbyteStream; +import io.airbyte.protocol.models.ConfiguredAirbyteStream; +import io.airbyte.protocol.models.DestinationSyncMode; +import java.sql.Timestamp; +import java.time.Instant; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +class SnowflakeS3StreamCopierTest { + + private static final int PART_SIZE = 5; + + // equivalent to Thu, 09 Dec 2021 19:17:54 GMT + private static final Timestamp UPLOAD_TIME = Timestamp.from(Instant.ofEpochMilli(1639077474000L)); + + private AmazonS3Client s3Client; + private JdbcDatabase db; + private SqlOperations sqlOperations; + private SnowflakeS3StreamCopier copier; + + @BeforeEach + public void setup() { + s3Client = mock(AmazonS3Client.class, RETURNS_DEEP_STUBS); + db = mock(JdbcDatabase.class); + sqlOperations = mock(SqlOperations.class); + + copier = new SnowflakeS3StreamCopier( + // In reality, this is normally a UUID - see CopyConsumerFactory#createWriteConfigs + "fake-staging-folder", + "fake-schema", + s3Client, + db, + new S3CopyConfig( + true, + new S3DestinationConfig( + "fake-endpoint", + "fake-bucket", + "fake-bucketPath", + "fake-region", + "fake-access-key-id", + "fake-secret-access-key", + PART_SIZE, + null)), + new ExtendedNameTransformer(), + sqlOperations, + UPLOAD_TIME, + new ConfiguredAirbyteStream() + .withDestinationSyncMode(DestinationSyncMode.APPEND) + .withStream(new AirbyteStream() + .withName("fake-stream") + .withNamespace("fake-namespace"))); + } + + @Test + public void copiesCorrectFilesToTable() throws Exception { + // Generate two files + for (int i = 0; i < MAX_PARTS_PER_FILE + 1; i++) { + copier.prepareStagingFile(); + } + + copier.copyStagingFileToTemporaryTable(); + + for (String fileName : copier.getStagingWritersByFile().keySet()) { + verify(db).execute(String.format("COPY INTO fake-schema.%s FROM " + + "'s3://fake-bucket/%s'" + + " CREDENTIALS=(aws_key_id='fake-access-key-id' aws_secret_key='fake-secret-access-key') " + + "file_format = (type = csv field_delimiter = ',' skip_header = 0 FIELD_OPTIONALLY_ENCLOSED_BY = '\"');", + copier.getTmpTableName(), fileName)); + } + + } + +} diff --git a/docs/integrations/destinations/snowflake.md b/docs/integrations/destinations/snowflake.md index 8c9885b71a862..fff0a73cf611b 100644 --- a/docs/integrations/destinations/snowflake.md +++ b/docs/integrations/destinations/snowflake.md @@ -160,6 +160,25 @@ Internal named stages are storage location objects within a Snowflake database/s For AWS S3, you will need to create a bucket and provide credentials to access the bucket. We recommend creating a bucket that is only used for Airbyte to stage data to Snowflake. Airbyte needs read/write access to interact with this bucket. +Provide the required S3 info. 
+ +* **S3 Bucket Name** + * See [this](https://docs.aws.amazon.com/AmazonS3/latest/userguide/create-bucket-overview.html) to create an S3 bucket. +* **S3 Bucket Region** + * Place the S3 bucket in the same region as your Snowflake instance to save on networking costs. +* **Access Key Id** + * See [this](https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html#access-keys-and-secret-access-keys) on how to generate an access key. + * We recommend creating an Airbyte-specific user. This user will require [read and write permissions](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_examples_s3_rw-bucket.html) to objects in the staging bucket. +* **Secret Access Key** + * Corresponding key to the above key id. +* **Part Size** + * Affects the size limit of an individual Snowflake table. Optional. Increase this if syncing tables larger than 100GB. Files are streamed to S3 in parts. This determines the size of each part, in MBs. As S3 has a limit of 10,000 parts per file, part size affects the table size. This is 10MB by default, resulting in a default table limit of 100GB. Note, a larger part size will result in larger memory requirements. A rule of thumb is to multiply the part size by 10 to get the memory requirement. Modify this with care. + +Optional parameters: +* **Purge Staging Data** + * Whether to delete the staging files from S3 after completing the sync. Specifically, the connector will create CSV files named `bucketPath/namespace/streamName/syncDate_epochMillis_randomUuid.csv` containing three columns (`ab_id`, `data`, `emitted_at`). Normally these files are deleted after the `COPY` command completes; if you want to keep them for other purposes, set `purge_staging_data` to `false`. + + ### Google Cloud Storage \(GCS\) First you will need to create a GCS bucket. @@ -198,6 +217,7 @@ Finally, you need to add read/write permissions to your bucket with that email. | Version | Date | Pull Request | Subject | |:--------|:-----------| :----- | :------ | +| 0.4.3 | 2022-01-20 | [#9531](https://github.com/airbytehq/airbyte/pull/9531) | Started using the new S3StreamCopier and exposed the purgeStagingData option | | 0.4.2 | 2022-01-10 | [#9141](https://github.com/airbytehq/airbyte/pull/9141) | Fixed duplicate rows on retries | | 0.4.1 | 2022-01-06 | [#9311](https://github.com/airbytehq/airbyte/pull/9311) | Updated schema creation during check | | 0.4.0 | 2021-12-27 | [#9063](https://github.com/airbytehq/airbyte/pull/9063) | Updated normalization to produce permanent tables |
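
For context, a minimal sketch (not part of this change) of how the new `purge_staging_data` flag reaches the copier: `SnowflakeCopyS3Destination` above passes only the `loading_method` subtree of the connector config to the `S3CopyConfig.getS3CopyConfig` helper added in this PR, and the resulting flag is what the shared `S3StreamCopier` consults when deciding whether to delete the staged CSV files. `purge_staging_data` and `S3CopyConfig.getS3CopyConfig` come from this diff; the surrounding wrapper class, the concrete values, and the exact S3 credential key names are illustrative assumptions and should be checked against `spec.json`.

```java
// Illustrative sketch only: parsing a pared-down Snowflake destination config with the
// new purge_staging_data flag into an S3CopyConfig. Key names and values are placeholders.
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.airbyte.integrations.destination.jdbc.copy.s3.S3CopyConfig;

public class PurgeStagingDataSketch {

  public static void main(final String[] args) throws Exception {
    // In production this JSON comes from the UI/API and is validated against spec.json.
    final JsonNode config = new ObjectMapper().readTree(
        "{\"loading_method\": {"
            + "\"method\": \"S3 Staging\","
            + "\"s3_bucket_name\": \"fake-bucket\","
            + "\"s3_bucket_region\": \"us-east-2\","
            + "\"access_key_id\": \"fake-access-key-id\","
            + "\"secret_access_key\": \"fake-secret-access-key\","
            + "\"purge_staging_data\": false"
            + "}}");

    // Mirrors SnowflakeCopyS3Destination#getConsumer: only the loading_method subtree is
    // handed to the helper added in this PR. With purge_staging_data = false, the staged
    // CSVs under <bucket>/<schema>/<stream>/... are kept after the COPY completes.
    final S3CopyConfig copyConfig = S3CopyConfig.getS3CopyConfig(config.get("loading_method"));
  }
}
```

Leaving `purge_staging_data` unset keeps the previous behaviour: the spec defaults it to `true`, so staging files and tables are removed once the sync finishes.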