Add try catch to make sure all handlers are closed (#10627)
* Add try catch to make sure all handlers are closed

* Handle exceptions while initializing writers

* Bump version of connectors

* Bump version in seed

* Fix BigQuery denormalized tests

* Bump version in seed for destination-bigquery-denormalized
ChristopheDuong authored Feb 25, 2022
1 parent cf6bc0a commit 20642de
Showing 16 changed files with 389 additions and 318 deletions.
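The first two commit-message items describe the substance of the change: per-stream write handlers are now closed inside try/catch blocks, and an exception thrown while initializing writers no longer leaves already-opened handlers dangling. A minimal sketch of that pattern is below, assuming a hypothetical `RecordWriter` handle and `WriterLifecycle` helper; it illustrates the general approach under those assumptions, not the actual Airbyte connector code in this commit.

```java
import java.util.ArrayList;
import java.util.List;

// Hypothetical per-stream handle (e.g. a GCS or BigQuery writer).
interface RecordWriter extends AutoCloseable {
  void open() throws Exception;
}

final class WriterLifecycle {

  private WriterLifecycle() {}

  // Initialize all writers; if any writer fails to open, close the ones
  // already opened before re-throwing, so no handle is leaked.
  static List<RecordWriter> openAll(final List<RecordWriter> writers) throws Exception {
    final List<RecordWriter> opened = new ArrayList<>();
    try {
      for (final RecordWriter writer : writers) {
        writer.open();
        opened.add(writer);
      }
      return opened;
    } catch (final Exception e) {
      closeAll(opened);
      throw e;
    }
  }

  // Attempt to close every writer even if some close() calls throw; the first
  // failure is kept (with later failures suppressed) and re-thrown at the end.
  static void closeAll(final List<RecordWriter> writers) throws Exception {
    Exception firstFailure = null;
    for (final RecordWriter writer : writers) {
      try {
        writer.close();
      } catch (final Exception e) {
        if (firstFailure == null) {
          firstFailure = e;
        } else {
          firstFailure.addSuppressed(e);
        }
      }
    }
    if (firstFailure != null) {
      throw firstFailure;
    }
  }
}
```

The seed-file bumps below (destination-bigquery, destination-bigquery-denormalized, destination-gcs, destination-redshift, destination-s3, destination-snowflake) publish new connector image versions that pick up this change.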
@@ -13,13 +13,13 @@
- name: BigQuery
destinationDefinitionId: 22f6c74f-5699-40ff-833c-4a879ea40133
dockerRepository: airbyte/destination-bigquery
dockerImageTag: 0.6.9
dockerImageTag: 0.6.10
documentationUrl: https://docs.airbyte.io/integrations/destinations/bigquery
icon: bigquery.svg
- name: BigQuery (denormalized typed struct)
destinationDefinitionId: 079d5540-f236-4294-ba7c-ade8fd918496
dockerRepository: airbyte/destination-bigquery-denormalized
dockerImageTag: 0.2.8
dockerImageTag: 0.2.9
documentationUrl: https://docs.airbyte.io/integrations/destinations/bigquery
icon: bigquery.svg
- name: Cassandra
@@ -60,7 +60,7 @@
- name: Google Cloud Storage (GCS)
destinationDefinitionId: ca8f6566-e555-4b40-943a-545bf123117a
dockerRepository: airbyte/destination-gcs
dockerImageTag: 0.1.23
dockerImageTag: 0.1.24
documentationUrl: https://docs.airbyte.io/integrations/destinations/gcs
icon: googlecloudstorage.svg
- name: Google Firestore
@@ -162,7 +162,7 @@
- name: Redshift
destinationDefinitionId: f7a7d195-377f-cf5b-70a5-be6b819019dc
dockerRepository: airbyte/destination-redshift
dockerImageTag: 0.3.25
dockerImageTag: 0.3.26
documentationUrl: https://docs.airbyte.io/integrations/destinations/redshift
icon: redshift.svg
- name: Rockset
@@ -173,7 +173,7 @@
- name: S3
destinationDefinitionId: 4816b78f-1489-44c1-9060-4b19d5fa9362
dockerRepository: airbyte/destination-s3
dockerImageTag: 0.2.8
dockerImageTag: 0.2.9
documentationUrl: https://docs.airbyte.io/integrations/destinations/s3
icon: s3.svg
- name: SFTP-JSON
@@ -185,7 +185,7 @@
- name: Snowflake
destinationDefinitionId: 424892c4-daac-4491-b35d-c6688ba547ba
dockerRepository: airbyte/destination-snowflake
dockerImageTag: 0.4.15
dockerImageTag: 0.4.16
documentationUrl: https://docs.airbyte.io/integrations/destinations/snowflake
icon: snowflake.svg
resourceRequirements:
@@ -188,7 +188,7 @@
supportsDBT: false
supported_destination_sync_modes:
- "append"
- dockerImage: "airbyte/destination-bigquery:0.6.9"
- dockerImage: "airbyte/destination-bigquery:0.6.10"
spec:
documentationUrl: "https://docs.airbyte.io/integrations/destinations/bigquery"
connectionSpecification:
@@ -383,7 +383,7 @@
- "overwrite"
- "append"
- "append_dedup"
- dockerImage: "airbyte/destination-bigquery-denormalized:0.2.8"
- dockerImage: "airbyte/destination-bigquery-denormalized:0.2.9"
spec:
documentationUrl: "https://docs.airbyte.io/integrations/destinations/bigquery"
connectionSpecification:
@@ -1162,7 +1162,7 @@
- "overwrite"
- "append"
supportsNamespaces: true
- dockerImage: "airbyte/destination-gcs:0.1.23"
- dockerImage: "airbyte/destination-gcs:0.1.24"
spec:
documentationUrl: "https://docs.airbyte.io/integrations/destinations/gcs"
connectionSpecification:
@@ -3272,7 +3272,7 @@
supported_destination_sync_modes:
- "overwrite"
- "append"
- dockerImage: "airbyte/destination-redshift:0.3.25"
- dockerImage: "airbyte/destination-redshift:0.3.26"
spec:
documentationUrl: "https://docs.airbyte.io/integrations/destinations/redshift"
connectionSpecification:
@@ -3454,7 +3454,7 @@
supported_destination_sync_modes:
- "append"
- "overwrite"
- dockerImage: "airbyte/destination-s3:0.2.8"
- dockerImage: "airbyte/destination-s3:0.2.9"
spec:
documentationUrl: "https://docs.airbyte.io/integrations/destinations/s3"
connectionSpecification:
@@ -3825,7 +3825,7 @@
supported_destination_sync_modes:
- "overwrite"
- "append"
- dockerImage: "airbyte/destination-snowflake:0.4.15"
- dockerImage: "airbyte/destination-snowflake:0.4.16"
spec:
documentationUrl: "https://docs.airbyte.io/integrations/destinations/snowflake"
connectionSpecification:
@@ -13,10 +13,10 @@ FROM airbyte/integration-base-java:dev
WORKDIR /airbyte

ENV APPLICATION destination-bigquery-denormalized
ENV APPLICATION_VERSION 0.2.8
ENV APPLICATION_VERSION 0.2.9
ENV ENABLE_SENTRY true

COPY --from=build /airbyte /airbyte

LABEL io.airbyte.version=0.2.8
LABEL io.airbyte.version=0.2.9
LABEL io.airbyte.name=airbyte/destination-bigquery-denormalized
@@ -49,9 +49,9 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class BigQueryDenormalizedGscDestinationAcceptanceTest extends DestinationAcceptanceTest {
public class BigQueryDenormalizedGcsDestinationAcceptanceTest extends DestinationAcceptanceTest {

private static final Logger LOGGER = LoggerFactory.getLogger(BigQueryDenormalizedGscDestinationAcceptanceTest.class);
private static final Logger LOGGER = LoggerFactory.getLogger(BigQueryDenormalizedGcsDestinationAcceptanceTest.class);

private static final Path CREDENTIALS_PATH = Path.of("secrets/credentials.json");

@@ -16,6 +16,7 @@
import static io.airbyte.integrations.destination.bigquery.util.BigQueryDenormalizedTestDataUtils.getSchemaWithInvalidArrayType;
import static io.airbyte.integrations.destination.bigquery.util.BigQueryDenormalizedTestDataUtils.getSchemaWithReferenceDefinition;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.params.provider.Arguments.arguments;

import com.amazonaws.services.s3.AmazonS3;
@@ -71,12 +72,12 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

class BigQueryDenormalizedGscDestinationTest {
class BigQueryDenormalizedGcsDestinationTest {

private static final Path CREDENTIALS_PATH = Path.of("secrets/credentials.json");
private static final Set<String> AIRBYTE_METADATA_FIELDS = Set.of(JavaBaseConstants.COLUMN_NAME_EMITTED_AT, JavaBaseConstants.COLUMN_NAME_AB_ID);

private static final Logger LOGGER = LoggerFactory.getLogger(BigQueryDenormalizedGscDestinationTest.class);
private static final Logger LOGGER = LoggerFactory.getLogger(BigQueryDenormalizedGcsDestinationTest.class);

private static final String BIG_QUERY_CLIENT_CHUNK_SIZE = "big_query_client_buffer_size_mb";
private static final Instant NOW = Instant.now();
@@ -276,8 +277,11 @@ void testWriteWithFormat() throws Exception {
Field.of("updated_at", StandardSQLTypeName.TIMESTAMP),
Field.of(JavaBaseConstants.COLUMN_NAME_AB_ID, StandardSQLTypeName.STRING),
Field.of(JavaBaseConstants.COLUMN_NAME_EMITTED_AT, StandardSQLTypeName.TIMESTAMP));
final Schema actualSchema = BigQueryUtils.getTableDefinition(bigquery, dataset.getDatasetId().getDataset(), USERS_STREAM_NAME).getSchema();

assertEquals(BigQueryUtils.getTableDefinition(bigquery, dataset.getDatasetId().getDataset(), USERS_STREAM_NAME).getSchema(), expectedSchema);
assertNotNull(actualSchema);
actualSchema.getFields().forEach(actualField -> assertEquals(expectedSchema.getFields().get(actualField.getName()),
Field.of(actualField.getName(), actualField.getType())));
}

@Test
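The last hunk above rewrites the schema assertion in `testWriteWithFormat`: instead of comparing the fetched `Schema` object to `expectedSchema` with a single `assertEquals`, the test now asserts the schema is non-null and checks each field by name and type. Pulled out of the diff, the comparison amounts to the helper sketched below; the `SchemaAssertions` class name is hypothetical, while the loop body is taken from the diff.

```java
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;

import com.google.cloud.bigquery.Field;
import com.google.cloud.bigquery.Schema;

// Hypothetical helper wrapping the assertion introduced in the diff above.
final class SchemaAssertions {

  private SchemaAssertions() {}

  // Assert the fetched schema exists, then check every actual field against the
  // expected field of the same name, comparing name and type only. This ignores
  // field order and any extra metadata (e.g. mode) on the actual fields.
  static void assertSchemaFieldsMatch(final Schema expectedSchema, final Schema actualSchema) {
    assertNotNull(actualSchema);
    actualSchema.getFields().forEach(actualField ->
        assertEquals(
            expectedSchema.getFields().get(actualField.getName()),
            Field.of(actualField.getName(), actualField.getType())));
  }
}
```

Note the check is one-directional: an actual field missing from the expected schema fails the lookup, but an expected field absent from the actual schema is not visited by the loop and so would not, on its own, fail the test.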