Destination-S3: Remove configurable part size (#13043)
* Destination-S3: Remove configurable part size

* Changelog updated

* auto-bump connector version

Co-authored-by: Octavia Squidington III <[email protected]>
2 people authored and suhomud committed May 23, 2022
1 parent 82d2d3d commit 9c56202
Showing 8 changed files with 8 additions and 61 deletions.
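For context on the knob being removed: the connector writes to S3 via multipart uploads, where each "part" is buffered in memory before it is sent, so part size trades memory use against throughput and maximum object size. A minimal sketch of pinning a part size with the AWS SDK for Java v1 TransferManager follows; the bucket, key, and file names are illustrative, and this is not the connector's actual upload path:

```java
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.transfer.TransferManager;
import com.amazonaws.services.s3.transfer.TransferManagerBuilder;
import java.io.File;

public class PartSizeSketch {
  public static void main(String[] args) throws InterruptedException {
    final AmazonS3 s3 = AmazonS3ClientBuilder.defaultClient();

    // Each in-flight part sits in memory until it is sent, so part size
    // bounds per-stream memory use. Bigger parts mean fewer requests and
    // a larger maximum object (S3 allows at most 10,000 parts per upload)
    // at the cost of more buffered memory: the trade-off the removed
    // part_size_mb option used to expose to users.
    final TransferManager tm = TransferManagerBuilder.standard()
        .withS3Client(s3)
        .withMinimumUploadPartSize(5L * 1024 * 1024) // 5 MB, the S3 minimum
        .build();

    tm.upload("my-bucket", "path/data.csv", new File("data.csv"))
        .waitForCompletion();
    tm.shutdownNow();
  }
}
```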
@@ -244,7 +244,7 @@
- name: S3
destinationDefinitionId: 4816b78f-1489-44c1-9060-4b19d5fa9362
dockerRepository: airbyte/destination-s3
dockerImageTag: 0.3.5
dockerImageTag: 0.3.6
documentationUrl: https://docs.airbyte.io/integrations/destinations/s3
icon: s3.svg
resourceRequirements:
@@ -3802,7 +3802,7 @@
supported_destination_sync_modes:
- "append"
- "overwrite"
- dockerImage: "airbyte/destination-s3:0.3.5"
- dockerImage: "airbyte/destination-s3:0.3.6"
spec:
documentationUrl: "https://docs.airbyte.io/integrations/destinations/s3"
connectionSpecification:
@@ -4000,17 +4000,6 @@
- "snappy"
default: "snappy"
order: 1
part_size_mb:
title: "Block Size (MB) for Amazon S3 multipart upload (Optional)"
description: "This is the size of a \"Part\" being buffered in memory.\
\ It limits the memory usage when writing. Larger values will allow\
\ to upload a bigger files and improve the speed, but consumes9\
\ more memory. Allowed values: min=5MB, max=525MB Default: 5MB."
type: "integer"
default: 5
examples:
- 5
order: 2
- title: "CSV: Comma-Separated Values"
required:
- "format_type"
@@ -4031,16 +4020,6 @@
enum:
- "No flattening"
- "Root level flattening"
part_size_mb:
title: "Block Size (MB) For Amazon S3 Multipart Upload (Optional)"
description: "This is the size of a \"Part\" being buffered in memory.\
\ It limits the memory usage when writing. Larger values will allow\
\ to upload a bigger files and improve the speed, but consumes9\
\ more memory. Allowed values: min=5MB, max=525MB Default: 5MB."
type: "integer"
default: 5
examples:
- 5
compression:
title: "Compression"
type: "object"
@@ -4076,16 +4055,6 @@
enum:
- "JSONL"
default: "JSONL"
part_size_mb:
title: "Block Size (MB) For Amazon S3 Multipart Upload (Optional)"
description: "This is the size of a \"Part\" being buffered in memory.\
\ It limits the memory usage when writing. Larger values will allow\
\ to upload a bigger files and improve the speed, but consumes9\
\ more memory. Allowed values: min=5MB, max=525MB Default: 5MB."
type: "integer"
default: 5
examples:
- 5
compression:
title: "Compression"
type: "object"
2 changes: 1 addition & 1 deletion airbyte-integrations/connectors/destination-s3/Dockerfile
@@ -16,5 +16,5 @@ ENV APPLICATION destination-s3

COPY --from=build /airbyte /airbyte

LABEL io.airbyte.version=0.3.5
LABEL io.airbyte.version=0.3.6
LABEL io.airbyte.name=airbyte/destination-s3
@@ -33,6 +33,7 @@ public class S3DestinationConfig {
private final String bucketRegion;
private final String pathFormat;
private final S3CredentialConfig credentialConfig;
@Deprecated
private final Integer partSize;
private final S3FormatConfig formatConfig;

@@ -204,13 +205,12 @@ public boolean equals(final Object o) {
return Objects.equals(endpoint, that.endpoint) && Objects.equals(bucketName, that.bucketName) && Objects.equals(
bucketPath, that.bucketPath) && Objects.equals(bucketRegion, that.bucketRegion)
&& Objects.equals(credentialConfig, that.credentialConfig)
&& Objects.equals(partSize, that.partSize)
&& Objects.equals(formatConfig, that.formatConfig);
}

@Override
public int hashCode() {
return Objects.hash(endpoint, bucketName, bucketPath, bucketRegion, credentialConfig, partSize, formatConfig);
return Objects.hash(endpoint, bucketName, bucketPath, bucketRegion, credentialConfig, formatConfig);
}

public static class Builder {
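The equals() and hashCode() edits above travel together because Java's Object contract requires equal objects to return equal hash codes, so a field dropped from one must be dropped from the other. A minimal sketch of the invariant, with illustrative class and field names rather than the connector's real ones:

```java
import java.util.Objects;

final class ConfigSketch {
  private final String bucketName; // compared by equality
  private final Integer partSize;  // deprecated: ignored by equality

  ConfigSketch(String bucketName, Integer partSize) {
    this.bucketName = bucketName;
    this.partSize = partSize;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (!(o instanceof ConfigSketch)) return false;
    final ConfigSketch that = (ConfigSketch) o;
    // partSize deliberately excluded, matching the diff above.
    return Objects.equals(bucketName, that.bucketName);
  }

  @Override
  public int hashCode() {
    // Must hash exactly the fields equals() compares; otherwise two
    // equal configs could land in different HashMap buckets.
    return Objects.hash(bucketName);
  }
}
```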
@@ -50,6 +50,7 @@ public String getValue() {
}

private final Flattening flattening;
@Deprecated
private final Long partSize;
private final CompressionType compressionType;

@@ -112,13 +113,12 @@ public boolean equals(final Object o) {
}
final S3CsvFormatConfig that = (S3CsvFormatConfig) o;
return flattening == that.flattening
&& Objects.equals(partSize, that.partSize)
&& Objects.equals(compressionType, that.compressionType);
}

@Override
public int hashCode() {
return Objects.hash(flattening, partSize, compressionType);
return Objects.hash(flattening, compressionType);
}

}
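Marking the field @Deprecated rather than deleting it points at an accept-and-ignore migration: a saved config that still carries part_size_mb keeps deserializing, but nothing downstream reads the value. A hedged sketch of that pattern with Jackson; the constructor shape and key handling are assumptions, not the connector's exact code:

```java
import com.fasterxml.jackson.databind.JsonNode;

public class FormatConfigSketch {
  @Deprecated
  private final Long partSize; // retained so older saved configs still parse

  public FormatConfigSketch(final JsonNode formatConfig) {
    // Read the legacy key if present, but nothing downstream consumes it.
    this.partSize = formatConfig.has("part_size_mb")
        ? formatConfig.get("part_size_mb").asLong()
        : null;
  }

  @Deprecated
  public Long getPartSize() {
    return partSize; // kept only for source compatibility during removal
  }
}
```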
@@ -200,14 +200,6 @@
}
],
"order": 1
},
"part_size_mb": {
"title": "Block Size (MB) for Amazon S3 multipart upload (Optional)",
"description": "This is the size of a \"Part\" being buffered in memory. It limits the memory usage when writing. Larger values will allow to upload a bigger files and improve the speed, but consumes9 more memory. Allowed values: min=5MB, max=525MB Default: 5MB.",
"type": "integer",
"default": 5,
"examples": [5],
"order": 2
}
}
},
@@ -228,13 +220,6 @@
"default": "No flattening",
"enum": ["No flattening", "Root level flattening"]
},
"part_size_mb": {
"title": "Block Size (MB) For Amazon S3 Multipart Upload (Optional)",
"description": "This is the size of a \"Part\" being buffered in memory. It limits the memory usage when writing. Larger values will allow to upload a bigger files and improve the speed, but consumes9 more memory. Allowed values: min=5MB, max=525MB Default: 5MB.",
"type": "integer",
"default": 5,
"examples": [5]
},
"compression": {
"title": "Compression",
"type": "object",
@@ -276,13 +261,6 @@
"enum": ["JSONL"],
"default": "JSONL"
},
"part_size_mb": {
"title": "Block Size (MB) For Amazon S3 Multipart Upload (Optional)",
"description": "This is the size of a \"Part\" being buffered in memory. It limits the memory usage when writing. Larger values will allow to upload a bigger files and improve the speed, but consumes9 more memory. Allowed values: min=5MB, max=525MB Default: 5MB.",
"type": "integer",
"default": 5,
"examples": [5]
},
"compression": {
"title": "Compression",
"type": "object",
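One reason dropping part_size_mb is safe for already-saved connector configs: the spec above is a JSON Schema, and JSON Schema ignores unknown keys unless additionalProperties is set to false, so an old config still carrying the key continues to validate against the slimmer schema. A small sketch of that behavior using the everit-org json-schema validator; the library choice and schema snippet are assumptions for illustration, not necessarily what the platform uses:

```java
import org.everit.json.schema.Schema;
import org.everit.json.schema.loader.SchemaLoader;
import org.json.JSONObject;

public class SpecCompatSketch {
  public static void main(String[] args) {
    // Slimmed-down stand-in for the connector spec after this commit:
    // part_size_mb is gone and additionalProperties is not set to false.
    Schema schema = SchemaLoader.load(new JSONObject(
        "{\"type\":\"object\",\"properties\":{\"flattening\":{\"type\":\"string\"}}}"));

    // An old saved config that still carries the removed key...
    JSONObject legacyConfig =
        new JSONObject("{\"flattening\":\"No flattening\",\"part_size_mb\":5}");

    // ...still validates, because unknown keys are allowed by default.
    schema.validate(legacyConfig); // throws ValidationException on failure
    System.out.println("legacy config still valid");
  }
}
```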
@@ -46,7 +46,6 @@ public void testCreateAndModify() {
assertEquals(newBucketName, modifiedConfig.getBucketName());
assertEquals(newBucketPath, modifiedConfig.getBucketPath());
assertEquals(newBucketRegion, modifiedConfig.getBucketRegion());
assertEquals(newPartSize, modifiedConfig.getPartSize());

final S3AccessKeyCredentialConfig credentialConfig = (S3AccessKeyCredentialConfig) modifiedConfig.getS3CredentialConfig();
assertEquals(newKey, credentialConfig.getAccessKeyId());
1 change: 1 addition & 0 deletions docs/integrations/destinations/s3.md
@@ -315,6 +315,7 @@ In order for everything to work correctly, it is also necessary that the user wh

| Version | Date | Pull Request | Subject |
|:--------| :--- | :--- |:---------------------------------------------------------------------------------------------------------------------------|
| 0.3.6 | 2022-05-19 | [\#13043](https://github.com/airbytehq/airbyte/pull/13043) | Destination S3: Remove configurable part size. |
| 0.3.5 | 2022-05-12 | [\#12797](https://github.com/airbytehq/airbyte/pull/12797) | Update spec to replace markdown. |
| 0.3.4 | 2022-05-04 | [\#12578](https://github.com/airbytehq/airbyte/pull/12578) | In JSON to Avro conversion, log JSON field values that do not follow Avro schema for debugging. |
| 0.3.3 | 2022-04-20 | [\#12167](https://github.com/airbytehq/airbyte/pull/12167) | Add gzip compression option for CSV and JSONL formats. |
Expand Down
