diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 6c8ad205e653a..403d81d73b642 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.29.17-alpha +current_version = 0.29.19-alpha commit = False tag = False parse = (?P\d+)\.(?P\d+)\.(?P\d+)(\-[a-z]+)? diff --git a/.env b/.env index 9e2e0c8d9b5c7..595a95fc264f6 100644 --- a/.env +++ b/.env @@ -1,4 +1,4 @@ -VERSION=0.29.17-alpha +VERSION=0.29.19-alpha # Airbyte Internal Job Database, see https://docs.airbyte.io/operator-guides/configuring-airbyte-db DATABASE_USER=docker diff --git a/.github/ISSUE_TEMPLATE/new-integration-request.md b/.github/ISSUE_TEMPLATE/new-integration-request.md index 5a07428910ae6..676b89d0899f5 100644 --- a/.github/ISSUE_TEMPLATE/new-integration-request.md +++ b/.github/ISSUE_TEMPLATE/new-integration-request.md @@ -12,9 +12,10 @@ assignees: '' * Do you need a specific version of the underlying data source e.g: you specifically need support for an older version of the API or DB? ## Describe the context around this new connector -* Which team in your company wants this integration, what for? This helps us understand the use case. +* Why do you need this integration? How does your team intend to use the data? This helps us understand the use case. * How often do you want to run syncs? * If this is an API source connector, which entities/endpoints do you need supported? +* If the connector is for a paid service, can we name you as a mutual user when we subscribe for an account? Which company should we name? ## Describe the alternative you are considering or using What are you considering doing if you don’t have this integration through Airbyte? diff --git a/.github/workflows/helm.yaml b/.github/workflows/helm.yaml new file mode 100644 index 0000000000000..6c738d4e7c006 --- /dev/null +++ b/.github/workflows/helm.yaml @@ -0,0 +1,74 @@ +name: Helm +on: + push: + paths: + - '.github/workflows/helm.yaml' + - 'charts/**' + pull_request: + paths: + - '.github/workflows/helm.yaml' + - 'charts/**' +jobs: + lint: + name: Lint + runs-on: ubuntu-latest + timeout-minutes: 20 + steps: + - uses: actions/checkout@v2 + - name: Setup Kubectl + uses: azure/setup-kubectl@v1 + - name: Setup Helm + uses: azure/setup-helm@v1 + with: + version: '3.6.3' + - name: Lint Chart + working-directory: ./charts/airbyte + run: ./ci.sh lint + + generate-docs: + name: Generate Docs Parameters + runs-on: ubuntu-latest + timeout-minutes: 10 + steps: + - uses: actions/checkout@v2 + - name: Checkout bitnami-labs/readme-generator-for-helm + uses: actions/checkout@v2 + with: + repository: 'bitnami-labs/readme-generator-for-helm' + ref: '55cab5dd2191c4ffa7245cfefa428d4d9bb12730' + path: readme-generator-for-helm + - name: Install readme-generator-for-helm dependencies + working-directory: readme-generator-for-helm + run: npm install -g + - name: Test can update README with generated parameters + working-directory: charts/airbyte + run: ./ci.sh check-docs-updated + + install: + name: Install + runs-on: ubuntu-latest + timeout-minutes: 20 + steps: + - uses: actions/checkout@v2 + - name: Setup Kubectl + uses: azure/setup-kubectl@v1 + - name: Setup Helm + uses: azure/setup-helm@v1 + with: + version: '3.6.3' + - name: Setup Kind Cluster + uses: helm/kind-action@v1.2.0 + with: + version: "v0.11.1" + image: "kindest/node:v1.21.1" + - name: Install airbyte chart + working-directory: ./charts/airbyte + run: ./ci.sh install + - if: always() + name: Print diagnostics + working-directory: ./charts/airbyte + run: ./ci.sh 
diagnostics + - if: success() + name: Test airbyte chart + working-directory: ./charts/airbyte + run: ./ci.sh test diff --git a/.github/workflows/publish-command.yml b/.github/workflows/publish-command.yml index 1b8b882c4cb30..e21fde645be23 100644 --- a/.github/workflows/publish-command.yml +++ b/.github/workflows/publish-command.yml @@ -178,6 +178,7 @@ jobs: SOURCE_CLOSE_COM_CREDS: ${{ secrets.SOURCE_CLOSE_COM_CREDS }} SOURCE_BAMBOO_HR_CREDS: ${{ secrets.SOURCE_BAMBOO_HR_CREDS }} SOURCE_BIGCOMMERCE_CREDS: ${{ secrets.SOURCE_BIGCOMMERCE_CREDS }} + DESTINATION_DATABRICKS_CREDS: ${{ secrets.DESTINATION_DATABRICKS_CREDS }} - run: | echo "$SPEC_CACHE_SERVICE_ACCOUNT_KEY" > spec_cache_key_file.json && docker login -u airbytebot -p ${DOCKER_PASSWORD} ./tools/integrations/manage.sh publish airbyte-integrations/${{ github.event.inputs.connector }} ${{ github.event.inputs.run-tests }} --publish_spec_to_cache diff --git a/.github/workflows/test-command.yml b/.github/workflows/test-command.yml index 35733a488067c..d32505fd0bbee 100644 --- a/.github/workflows/test-command.yml +++ b/.github/workflows/test-command.yml @@ -173,6 +173,7 @@ jobs: SOURCE_CLOSE_COM_CREDS: ${{ secrets.SOURCE_CLOSE_COM_CREDS }} SOURCE_BAMBOO_HR_CREDS: ${{ secrets.SOURCE_BAMBOO_HR_CREDS }} SOURCE_BIGCOMMERCE_CREDS: ${{ secrets.SOURCE_BIGCOMMERCE_CREDS }} + DESTINATION_DATABRICKS_CREDS: ${{ secrets.DESTINATION_DATABRICKS_CREDS }} - run: | ./tools/bin/ci_integration_test.sh ${{ github.event.inputs.connector }} name: test ${{ github.event.inputs.connector }} @@ -194,6 +195,16 @@ jobs: **/normalization_test_output/**/build/compiled/airbyte_utils/** **/normalization_test_output/**/build/run/airbyte_utils/** **/normalization_test_output/**/models/generated/** + + - name: Test coverage reports artifacts + if: github.event.inputs.comment-id && success() + uses: actions/upload-artifact@v2 + with: + name: test-reports + path: | + **/${{ github.event.inputs.connector }}/htmlcov/** + retention-days: 3 + - name: Report Status if: github.ref == 'refs/heads/master' && always() run: ./tools/status/report.sh ${{ github.event.inputs.connector }} ${{github.repository}} ${{github.run_id}} ${{steps.test.outcome}} @@ -208,6 +219,7 @@ jobs: comment-id: ${{ github.event.inputs.comment-id }} body: | > :white_check_mark: ${{github.event.inputs.connector}} https://github.com/${{github.repository}}/actions/runs/${{github.run_id}} + ${{env.PYTHON_UNITTEST_COVERAGE_REPORT}} - name: Add Failure Comment if: github.event.inputs.comment-id && failure() uses: peter-evans/create-or-update-comment@v1 diff --git a/.gitignore b/.gitignore index def1cbdab64f6..f02f5b77b68ba 100644 --- a/.gitignore +++ b/.gitignore @@ -10,6 +10,9 @@ data .project .settings +# Logs +acceptance_tests_logs/ + # Secrets secrets !airbyte-integrations/connector-templates/**/secrets @@ -26,6 +29,21 @@ __pycache__ .ipynb_checkpoints .pytest_ +# Python unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + # dbt profiles.yml diff --git a/airbyte-api/src/main/openapi/config.yaml b/airbyte-api/src/main/openapi/config.yaml index 583a149090882..bfb5c84acd90e 100644 --- a/airbyte-api/src/main/openapi/config.yaml +++ b/airbyte-api/src/main/openapi/config.yaml @@ -1234,7 +1234,7 @@ paths: post: tags: - oauth - summary: Given a source def ID and optional workspaceID generate an access/refresh token etc. + summary: Given a source def ID generate an access/refresh token etc. 
operationId: completeSourceOAuth requestBody: content: @@ -1280,7 +1280,7 @@ paths: post: tags: - oauth - summary: + summary: Given a destination def ID generate an access/refresh token etc. operationId: completeDestinationOAuth requestBody: content: @@ -1906,6 +1906,34 @@ components: description: The specification for what values are required to configure the sourceDefinition. type: object example: { user: { type: string } } + SourceAuthSpecification: + $ref: "#/components/schemas/AuthSpecification" + AuthSpecification: + type: object + properties: + auth_type: + type: string + enum: ["oauth2.0"] # Future auth types should be added here + oauth2Specification: + "$ref": "#/components/schemas/OAuth2Specification" + OAuth2Specification: + description: An object containing any metadata needed to describe this connector's Oauth flow + type: object + properties: + oauthFlowInitParameters: + description: + "Pointers to the fields in the ConnectorSpecification which are needed to obtain the initial refresh/access tokens for the OAuth flow. + Each inner array represents the path in the ConnectorSpecification of the referenced field. + For example. + Assume the ConnectorSpecification contains params 'app_secret', 'app_id' which are needed to get the initial refresh token. + If they are not nested in the config, then the array would look like this [['app_secret'], ['app_id']] + If they are nested inside, say, an object called 'auth_params' then this array would be [['auth_params', 'app_secret'], ['auth_params', 'app_id']]" + type: array + items: + description: A list of strings which describes each parameter's path inside the ConnectionSpecification + type: array + items: + type: string SourceDefinitionSpecificationRead: type: object required: @@ -1918,6 +1946,8 @@ components: type: string connectionSpecification: $ref: "#/components/schemas/SourceDefinitionSpecification" + authSpecification: + $ref: "#/components/schemas/SourceAuthSpecification" jobInfo: $ref: "#/components/schemas/SynchronousJobRead" # SOURCE @@ -2018,6 +2048,8 @@ components: DestinationDefinitionId: type: string format: uuid + DestinationAuthSpecification: + $ref: "#/components/schemas/AuthSpecification" DestinationDefinitionIdRequestBody: type: object required: @@ -2101,6 +2133,8 @@ components: type: string connectionSpecification: $ref: "#/components/schemas/DestinationDefinitionSpecification" + authSpecification: + $ref: "#/components/schemas/DestinationAuthSpecification" jobInfo: $ref: "#/components/schemas/SynchronousJobRead" supportedDestinationSyncModes: diff --git a/airbyte-cdk/python/CHANGELOG.md b/airbyte-cdk/python/CHANGELOG.md index 9e4f9312c012d..b25d00e39b238 100644 --- a/airbyte-cdk/python/CHANGELOG.md +++ b/airbyte-cdk/python/CHANGELOG.md @@ -1,5 +1,16 @@ # Changelog +## 0.1.21 +Resolve nested schema references and move external references to single schema definitions. + +## 0.1.20 +- Allow using `requests.auth.AuthBase` as authenticators instead of custom CDK authenticators. +- Implement Oauth2Authenticator, MultipleTokenAuthenticator and TokenAuthenticator authenticators. +- Add support for both legacy and requests native authenticator to HttpStream class. + +## 0.1.19 +No longer prints full config files on validation error to prevent exposing secrets to log file: https://github.com/airbytehq/airbyte/pull/5879 + ## 0.1.18 Fix incremental stream not saved state when internal limit config set. 
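To make the 0.1.20 changelog entry above concrete, here is a minimal sketch of how a connector stream can pass one of the new requests-native authenticators (added later in this diff) straight through to the underlying `requests.Session`. The stream class, endpoint, and credentials are hypothetical placeholders, not part of this change.

```python
# Minimal sketch only: ExampleStream, the URL, and all credentials are hypothetical.
from typing import Any, Iterable, Mapping, Optional

import requests
from airbyte_cdk.sources.streams.http import HttpStream
from airbyte_cdk.sources.streams.http.requests_native_auth import Oauth2Authenticator, TokenAuthenticator


class ExampleStream(HttpStream):
    url_base = "https://api.example.com/v1/"
    primary_key = "id"

    def path(self, **kwargs) -> str:
        return "items"

    def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]:
        return None  # no pagination in this sketch

    def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping[str, Any]]:
        yield from response.json()


# A requests-native authenticator is attached to the stream's requests.Session (session.auth).
token_stream = ExampleStream(authenticator=TokenAuthenticator(token="my-api-token"))
oauth_stream = ExampleStream(
    authenticator=Oauth2Authenticator(
        token_refresh_endpoint="https://api.example.com/oauth/token",
        client_id="my-client-id",
        client_secret="my-client-secret",
        refresh_token="my-refresh-token",
    )
)
```

A legacy `HttpAuthenticator` can still be passed the same way; as the `HttpStream.__init__` change below shows, it is kept on `stream._authenticator` instead of `session.auth`.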
diff --git a/airbyte-cdk/python/airbyte_cdk/sources/streams/http/auth/core.py b/airbyte-cdk/python/airbyte_cdk/sources/streams/http/auth/core.py index 5db5cfea1f750..97bcc3b58645b 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/streams/http/auth/core.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/streams/http/auth/core.py @@ -26,7 +26,10 @@ from abc import ABC, abstractmethod from typing import Any, Mapping +from deprecated import deprecated + +@deprecated(version="0.1.20", reason="Use requests.auth.AuthBase instead") class HttpAuthenticator(ABC): """ Base abstract class for various HTTP Authentication strategies. Authentication strategies are generally @@ -40,6 +43,7 @@ def get_auth_header(self) -> Mapping[str, Any]: """ +@deprecated(version="0.1.20", reason="Set `authenticator=None` instead") class NoAuth(HttpAuthenticator): def get_auth_header(self) -> Mapping[str, Any]: return {} diff --git a/airbyte-cdk/python/airbyte_cdk/sources/streams/http/auth/oauth.py b/airbyte-cdk/python/airbyte_cdk/sources/streams/http/auth/oauth.py index d7799e25ab736..b76cf962ffb94 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/streams/http/auth/oauth.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/streams/http/auth/oauth.py @@ -27,10 +27,12 @@ import pendulum import requests +from deprecated import deprecated from .core import HttpAuthenticator +@deprecated(version="0.1.20", reason="Use airbyte_cdk.sources.streams.http.requests_native_auth.Oauth2Authenticator instead") class Oauth2Authenticator(HttpAuthenticator): """ Generates OAuth2.0 access tokens from an OAuth2.0 refresh token and client credentials. diff --git a/airbyte-cdk/python/airbyte_cdk/sources/streams/http/auth/token.py b/airbyte-cdk/python/airbyte_cdk/sources/streams/http/auth/token.py index 294e19175d3e4..64da6c61f8e12 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/streams/http/auth/token.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/streams/http/auth/token.py @@ -26,9 +26,12 @@ from itertools import cycle from typing import Any, List, Mapping +from deprecated import deprecated + from .core import HttpAuthenticator +@deprecated(version="0.1.20", reason="Use airbyte_cdk.sources.streams.http.requests_native_auth.TokenAuthenticator instead") class TokenAuthenticator(HttpAuthenticator): def __init__(self, token: str, auth_method: str = "Bearer", auth_header: str = "Authorization"): self.auth_method = auth_method @@ -39,6 +42,7 @@ def get_auth_header(self) -> Mapping[str, Any]: return {self.auth_header: f"{self.auth_method} {self._token}"} +@deprecated(version="0.1.20", reason="Use airbyte_cdk.sources.streams.http.requests_native_auth.MultipleTokenAuthenticator instead") class MultipleTokenAuthenticator(HttpAuthenticator): def __init__(self, tokens: List[str], auth_method: str = "Bearer", auth_header: str = "Authorization"): self.auth_method = auth_method diff --git a/airbyte-cdk/python/airbyte_cdk/sources/streams/http/http.py b/airbyte-cdk/python/airbyte_cdk/sources/streams/http/http.py index 83f1e00a06437..9d7575bd81540 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/streams/http/http.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/streams/http/http.py @@ -29,6 +29,7 @@ import requests from airbyte_cdk.models import SyncMode from airbyte_cdk.sources.streams.core import Stream +from requests.auth import AuthBase from .auth.core import HttpAuthenticator, NoAuth from .exceptions import DefaultBackoffException, RequestBodyException, UserDefinedBackoffException @@ -46,10 +47,16 @@ class HttpStream(Stream, ABC): 
source_defined_cursor = True # Most HTTP streams use a source defined cursor (i.e: the user can't configure it like on a SQL table) page_size = None # Use this variable to define page size for API http requests with pagination support - def __init__(self, authenticator: HttpAuthenticator = NoAuth()): - self._authenticator = authenticator + # TODO: remove legacy HttpAuthenticator authenticator references + def __init__(self, authenticator: Union[AuthBase, HttpAuthenticator] = None): self._session = requests.Session() + self._authenticator = NoAuth() + if isinstance(authenticator, AuthBase): + self._session.auth = authenticator + elif authenticator: + self._authenticator = authenticator + @property @abstractmethod def url_base(self) -> str: diff --git a/airbyte-cdk/python/airbyte_cdk/sources/streams/http/requests_native_auth/__init__.py b/airbyte-cdk/python/airbyte_cdk/sources/streams/http/requests_native_auth/__init__.py new file mode 100644 index 0000000000000..8b62c71c24da3 --- /dev/null +++ b/airbyte-cdk/python/airbyte_cdk/sources/streams/http/requests_native_auth/__init__.py @@ -0,0 +1,32 @@ +# +# MIT License +# +# Copyright (c) 2020 Airbyte +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# + +from .oauth import Oauth2Authenticator +from .token import MultipleTokenAuthenticator, TokenAuthenticator + +__all__ = [ + "Oauth2Authenticator", + "TokenAuthenticator", + "MultipleTokenAuthenticator", +] diff --git a/airbyte-cdk/python/airbyte_cdk/sources/streams/http/requests_native_auth/oauth.py b/airbyte-cdk/python/airbyte_cdk/sources/streams/http/requests_native_auth/oauth.py new file mode 100644 index 0000000000000..ee90164a70e9e --- /dev/null +++ b/airbyte-cdk/python/airbyte_cdk/sources/streams/http/requests_native_auth/oauth.py @@ -0,0 +1,104 @@ +# +# MIT License +# +# Copyright (c) 2020 Airbyte +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# + + +from typing import Any, List, Mapping, MutableMapping, Tuple + +import pendulum +import requests +from requests.auth import AuthBase + + +class Oauth2Authenticator(AuthBase): + """ + Generates OAuth2.0 access tokens from an OAuth2.0 refresh token and client credentials. + The generated access token is attached to each request via the Authorization header. + """ + + def __init__( + self, + token_refresh_endpoint: str, + client_id: str, + client_secret: str, + refresh_token: str, + scopes: List[str] = None, + token_expiry_date: pendulum.datetime = None, + access_token_name: str = "access_token", + expires_in_name: str = "expires_in", + ): + self.token_refresh_endpoint = token_refresh_endpoint + self.client_secret = client_secret + self.client_id = client_id + self.refresh_token = refresh_token + self.scopes = scopes + self.access_token_name = access_token_name + self.expires_in_name = expires_in_name + + self._token_expiry_date = token_expiry_date or pendulum.now().subtract(days=1) + self._access_token = None + + def __call__(self, request): + request.headers.update(self.get_auth_header()) + return request + + def get_auth_header(self) -> Mapping[str, Any]: + return {"Authorization": f"Bearer {self.get_access_token()}"} + + def get_access_token(self): + if self.token_has_expired(): + t0 = pendulum.now() + token, expires_in = self.refresh_access_token() + self._access_token = token + self._token_expiry_date = t0.add(seconds=expires_in) + + return self._access_token + + def token_has_expired(self) -> bool: + return pendulum.now() > self._token_expiry_date + + def get_refresh_request_body(self) -> Mapping[str, Any]: + """Override to define additional parameters""" + payload: MutableMapping[str, Any] = { + "grant_type": "refresh_token", + "client_id": self.client_id, + "client_secret": self.client_secret, + "refresh_token": self.refresh_token, + } + + if self.scopes: + payload["scopes"] = self.scopes + + return payload + + def refresh_access_token(self) -> Tuple[str, int]: + """ + returns a tuple of (access_token, token_lifespan_in_seconds) + """ + try: + response = requests.request(method="POST", url=self.token_refresh_endpoint, data=self.get_refresh_request_body()) + response.raise_for_status() + response_json = response.json() + return response_json[self.access_token_name], response_json[self.expires_in_name] + except Exception as e: + raise Exception(f"Error while refreshing access token: {e}") from e diff --git a/airbyte-cdk/python/airbyte_cdk/sources/streams/http/requests_native_auth/token.py b/airbyte-cdk/python/airbyte_cdk/sources/streams/http/requests_native_auth/token.py new file mode 100644 index 0000000000000..925962993fba9 --- /dev/null +++ b/airbyte-cdk/python/airbyte_cdk/sources/streams/http/requests_native_auth/token.py @@ -0,0 +1,59 @@ +# +# MIT License +# +# Copyright (c) 2020 Airbyte +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including 
without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# + +from itertools import cycle +from typing import Any, List, Mapping + +from requests.auth import AuthBase + + +class MultipleTokenAuthenticator(AuthBase): + """ + Builds auth header, based on the list of tokens provided. + Auth header is changed per each `get_auth_header` call, using each token in cycle. + The token is attached to each request via the `auth_header` header. + """ + + def __init__(self, tokens: List[str], auth_method: str = "Bearer", auth_header: str = "Authorization"): + self.auth_method = auth_method + self.auth_header = auth_header + self._tokens = tokens + self._tokens_iter = cycle(self._tokens) + + def __call__(self, request): + request.headers.update(self.get_auth_header()) + return request + + def get_auth_header(self) -> Mapping[str, Any]: + return {self.auth_header: f"{self.auth_method} {next(self._tokens_iter)}"} + + +class TokenAuthenticator(MultipleTokenAuthenticator): + """ + Builds auth header, based on the token provided. + The token is attached to each request via the `auth_header` header. 
+ """ + + def __init__(self, token: str, auth_method: str = "Bearer", auth_header: str = "Authorization"): + super().__init__([token], auth_method, auth_header) diff --git a/airbyte-cdk/python/airbyte_cdk/sources/utils/schema_helpers.py b/airbyte-cdk/python/airbyte_cdk/sources/utils/schema_helpers.py index c687c8272a8c6..496d416b5b520 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/utils/schema_helpers.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/utils/schema_helpers.py @@ -23,77 +23,20 @@ # +import importlib import json import os import pkgutil from typing import Any, ClassVar, Dict, Mapping, Tuple -import pkg_resources +import jsonref from airbyte_cdk.logger import AirbyteLogger from airbyte_cdk.models import ConnectorSpecification -from jsonschema import RefResolver, validate +from jsonschema import validate from jsonschema.exceptions import ValidationError from pydantic import BaseModel, Field -class JsonSchemaResolver: - """Helper class to expand $ref items in json schema""" - - def __init__(self, shared_schemas_path: str): - self._shared_refs = self._load_shared_schema_refs(shared_schemas_path) - - @staticmethod - def _load_shared_schema_refs(shared_schemas_path: str): - shared_file_names = [f.name for f in os.scandir(shared_schemas_path) if f.is_file()] - shared_schema_refs = {} - for shared_file in shared_file_names: - with open(os.path.join(shared_schemas_path, shared_file)) as data_file: - shared_schema_refs[shared_file] = json.load(data_file) - - return shared_schema_refs - - def _resolve_schema_references(self, schema: dict, resolver: RefResolver) -> dict: - if "$ref" in schema: - reference_path = schema.pop("$ref", None) - resolved = resolver.resolve(reference_path)[1] - schema.update(resolved) - return self._resolve_schema_references(schema, resolver) - - if "properties" in schema: - for k, val in schema["properties"].items(): - schema["properties"][k] = self._resolve_schema_references(val, resolver) - - if "patternProperties" in schema: - for k, val in schema["patternProperties"].items(): - schema["patternProperties"][k] = self._resolve_schema_references(val, resolver) - - if "items" in schema: - schema["items"] = self._resolve_schema_references(schema["items"], resolver) - - if "anyOf" in schema: - for i, element in enumerate(schema["anyOf"]): - schema["anyOf"][i] = self._resolve_schema_references(element, resolver) - - return schema - - def resolve(self, schema: dict, refs: Dict[str, dict] = None) -> dict: - """Resolves and replaces json-schema $refs with the appropriate dict. - Recursively walks the given schema dict, converting every instance - of $ref in a 'properties' structure with a resolved dict. - This modifies the input schema and also returns it. 
- Arguments:
- schema:
- the schema dict
- refs:
- a dict of which forms a store of referenced schemata
- Returns:
- schema
- """
- refs = refs or {}
- refs = {**self._shared_refs, **refs}
- return self._resolve_schema_references(schema, RefResolver("", schema, store=refs))
-
-
class ResourceSchemaLoader:
"""JSONSchema loader from package resources"""
@@ -124,10 +67,63 @@ def get_schema(self, name: str) -> dict:
print(f"Invalid JSON file format for file {schema_filename}")
raise
- shared_schemas_folder = pkg_resources.resource_filename(self.package_name, "schemas/shared/")
- if os.path.exists(shared_schemas_folder):
- return JsonSchemaResolver(shared_schemas_folder).resolve(raw_schema)
- return raw_schema
+ return self.__resolve_schema_references(raw_schema)
+
+ def __resolve_schema_references(self, raw_schema: dict) -> dict:
+ """
+ Resolve links to external references and move them to a local "definitions" map.
+ :param raw_schema jsonschema in which to look up external links.
+ :return JSON serializable object with references without external dependencies.
+ """
+
+ class JsonFileLoader:
+ """
+ Custom json file loader to resolve references to resources located in the "shared" directory.
+ We need this for compatibility with existing schemas because all of them have references
+ pointing to shared_schema.json file instead of shared/shared_schema.json
+ """
+
+ def __init__(self, uri_base: str, shared: str):
+ self.shared = shared
+ self.uri_base = uri_base
+
+ def __call__(self, uri: str) -> Dict[str, Any]:
+ uri = uri.replace(self.uri_base, f"{self.uri_base}/{self.shared}/")
+ return json.load(open(uri))
+
+ package = importlib.import_module(self.package_name)
+ base = os.path.dirname(package.__file__) + "/"
+
+ def create_definitions(obj: dict, definitions: dict) -> Dict[str, Any]:
+ """
+ Scan resolved schema and compose definitions section, also convert
+ jsonref.JsonRef object to JSON serializable dict.
+ :param obj - jsonschema object with ref fields resolved.
+ :param definitions - object for storing generated definitions.
+ :return JSON serializable object with references without external dependencies.
+ """
+ if isinstance(obj, jsonref.JsonRef):
+ def_key = obj.__reference__["$ref"]
+ def_key = def_key.replace("#/definitions/", "").replace(".json", "_")
+ definition = create_definitions(obj.__subject__, definitions)
+ # Omit existing definitions for external resources since
+ # we don't need them anymore.
+ definition.pop("definitions", None) + definitions[def_key] = definition + return {"$ref": "#/definitions/" + def_key} + elif isinstance(obj, dict): + return {k: create_definitions(v, definitions) for k, v in obj.items()} + elif isinstance(obj, list): + return [create_definitions(item, definitions) for item in obj] + else: + return obj + + resolved = jsonref.JsonRef.replace_refs(raw_schema, loader=JsonFileLoader(base, "schemas/shared"), base_uri=base) + definitions = {} + resolved = create_definitions(resolved, definitions) + if definitions: + resolved["definitions"] = definitions + return resolved def check_config_against_spec_or_exit(config: Mapping[str, Any], spec: ConnectorSpecification, logger: AirbyteLogger): @@ -142,7 +138,7 @@ def check_config_against_spec_or_exit(config: Mapping[str, Any], spec: Connector try: validate(instance=config, schema=spec_schema) except ValidationError as validation_error: - raise Exception("Config validation error: " + validation_error.message) + raise Exception("Config validation error: " + validation_error.message) from None class InternalConfig(BaseModel): @@ -159,7 +155,8 @@ def split_config(config: Mapping[str, Any]) -> Tuple[dict, InternalConfig]: Break config map object into 2 instances: first is a dict with user defined configuration and second is internal config that contains private keys for acceptance test configuration. - :param config - Dict object that has been loaded from config file. + :param + config - Dict object that has been loaded from config file. :return tuple of user defined config dict with filtered out internal parameters and SAT internal config object. """ diff --git a/airbyte-cdk/python/setup.py b/airbyte-cdk/python/setup.py index 7e5223f33a268..8be2e3ce70e6d 100644 --- a/airbyte-cdk/python/setup.py +++ b/airbyte-cdk/python/setup.py @@ -35,7 +35,7 @@ setup( name="airbyte-cdk", - version="0.1.18", + version="0.1.21", description="A framework for writing Airbyte Connectors.", long_description=README, long_description_content_type="text/markdown", @@ -67,10 +67,12 @@ install_requires=[ "backoff", "jsonschema~=3.2.0", + "jsonref~=0.2", "pendulum", "pydantic~=1.6", "PyYAML~=5.4", "requests", + "Deprecated~=1.2", ], python_requires=">=3.7.0", extras_require={"dev": ["MyPy~=0.812", "pytest", "pytest-cov", "pytest-mock", "requests-mock"]}, diff --git a/airbyte-cdk/python/unit_tests/sources/streams/http/requests_native_auth/test_requests_native_auth.py b/airbyte-cdk/python/unit_tests/sources/streams/http/requests_native_auth/test_requests_native_auth.py new file mode 100644 index 0000000000000..f1a88dadc585a --- /dev/null +++ b/airbyte-cdk/python/unit_tests/sources/streams/http/requests_native_auth/test_requests_native_auth.py @@ -0,0 +1,164 @@ +# +# MIT License +# +# Copyright (c) 2020 Airbyte +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# + + +import logging + +import requests +from airbyte_cdk.sources.streams.http.requests_native_auth import MultipleTokenAuthenticator, Oauth2Authenticator, TokenAuthenticator +from requests import Response + +LOGGER = logging.getLogger(__name__) + + +def test_token_authenticator(): + """ + Should match passed in token, no matter how many times token is retrieved. + """ + token_auth = TokenAuthenticator(token="test-token") + header1 = token_auth.get_auth_header() + header2 = token_auth.get_auth_header() + + prepared_request = requests.PreparedRequest() + prepared_request.headers = {} + token_auth(prepared_request) + + assert {"Authorization": "Bearer test-token"} == prepared_request.headers + assert {"Authorization": "Bearer test-token"} == header1 + assert {"Authorization": "Bearer test-token"} == header2 + + +def test_multiple_token_authenticator(): + multiple_token_auth = MultipleTokenAuthenticator(tokens=["token1", "token2"]) + header1 = multiple_token_auth.get_auth_header() + header2 = multiple_token_auth.get_auth_header() + header3 = multiple_token_auth.get_auth_header() + + prepared_request = requests.PreparedRequest() + prepared_request.headers = {} + multiple_token_auth(prepared_request) + + assert {"Authorization": "Bearer token2"} == prepared_request.headers + assert {"Authorization": "Bearer token1"} == header1 + assert {"Authorization": "Bearer token2"} == header2 + assert {"Authorization": "Bearer token1"} == header3 + + +class TestOauth2Authenticator: + """ + Test class for OAuth2Authenticator. + """ + + refresh_endpoint = "refresh_end" + client_id = "client_id" + client_secret = "client_secret" + refresh_token = "refresh_token" + + def test_get_auth_header_fresh(self, mocker): + """ + Should not retrieve new token if current token is valid. + """ + oauth = Oauth2Authenticator( + token_refresh_endpoint=TestOauth2Authenticator.refresh_endpoint, + client_id=TestOauth2Authenticator.client_id, + client_secret=TestOauth2Authenticator.client_secret, + refresh_token=TestOauth2Authenticator.refresh_token, + ) + + mocker.patch.object(Oauth2Authenticator, "refresh_access_token", return_value=("access_token", 1000)) + header = oauth.get_auth_header() + assert {"Authorization": "Bearer access_token"} == header + + def test_get_auth_header_expired(self, mocker): + """ + Should retrieve new token if current token is expired. + """ + oauth = Oauth2Authenticator( + token_refresh_endpoint=TestOauth2Authenticator.refresh_endpoint, + client_id=TestOauth2Authenticator.client_id, + client_secret=TestOauth2Authenticator.client_secret, + refresh_token=TestOauth2Authenticator.refresh_token, + ) + + expire_immediately = 0 + mocker.patch.object(Oauth2Authenticator, "refresh_access_token", return_value=("access_token_1", expire_immediately)) + oauth.get_auth_header() # Set the first expired token. 
+ + valid_100_secs = 100 + mocker.patch.object(Oauth2Authenticator, "refresh_access_token", return_value=("access_token_2", valid_100_secs)) + header = oauth.get_auth_header() + assert {"Authorization": "Bearer access_token_2"} == header + + def test_refresh_request_body(self): + """ + Request body should match given configuration. + """ + scopes = ["scope1", "scope2"] + oauth = Oauth2Authenticator( + token_refresh_endpoint=TestOauth2Authenticator.refresh_endpoint, + client_id=TestOauth2Authenticator.client_id, + client_secret=TestOauth2Authenticator.client_secret, + refresh_token=TestOauth2Authenticator.refresh_token, + scopes=scopes, + ) + body = oauth.get_refresh_request_body() + expected = { + "grant_type": "refresh_token", + "client_id": "client_id", + "client_secret": "client_secret", + "refresh_token": "refresh_token", + "scopes": scopes, + } + assert body == expected + + def test_refresh_access_token(self, mocker): + oauth = Oauth2Authenticator( + token_refresh_endpoint=TestOauth2Authenticator.refresh_endpoint, + client_id=TestOauth2Authenticator.client_id, + client_secret=TestOauth2Authenticator.client_secret, + refresh_token=TestOauth2Authenticator.refresh_token, + ) + resp = Response() + resp.status_code = 200 + + mocker.patch.object(requests, "request", return_value=resp) + mocker.patch.object(resp, "json", return_value={"access_token": "access_token", "expires_in": 1000}) + token = oauth.refresh_access_token() + + assert ("access_token", 1000) == token + + def test_auth_call_method(self, mocker): + oauth = Oauth2Authenticator( + token_refresh_endpoint=TestOauth2Authenticator.refresh_endpoint, + client_id=TestOauth2Authenticator.client_id, + client_secret=TestOauth2Authenticator.client_secret, + refresh_token=TestOauth2Authenticator.refresh_token, + ) + + mocker.patch.object(Oauth2Authenticator, "refresh_access_token", return_value=("access_token", 1000)) + prepared_request = requests.PreparedRequest() + prepared_request.headers = {} + oauth(prepared_request) + + assert {"Authorization": "Bearer access_token"} == prepared_request.headers diff --git a/airbyte-cdk/python/unit_tests/sources/streams/http/test_http.py b/airbyte-cdk/python/unit_tests/sources/streams/http/test_http.py index 84a53835243d3..591e2cc8003ee 100644 --- a/airbyte-cdk/python/unit_tests/sources/streams/http/test_http.py +++ b/airbyte-cdk/python/unit_tests/sources/streams/http/test_http.py @@ -32,15 +32,18 @@ import requests from airbyte_cdk.models import SyncMode from airbyte_cdk.sources.streams.http import HttpStream +from airbyte_cdk.sources.streams.http.auth import NoAuth +from airbyte_cdk.sources.streams.http.auth import TokenAuthenticator as HttpTokenAuthenticator from airbyte_cdk.sources.streams.http.exceptions import DefaultBackoffException, RequestBodyException, UserDefinedBackoffException +from airbyte_cdk.sources.streams.http.requests_native_auth import TokenAuthenticator class StubBasicReadHttpStream(HttpStream): url_base = "https://test_base_url.com" primary_key = "" - def __init__(self): - super().__init__() + def __init__(self, **kwargs): + super().__init__(**kwargs) self.resp_counter = 1 def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: @@ -63,6 +66,24 @@ def parse_response( yield stubResp +def test_default_authenticator(): + stream = StubBasicReadHttpStream() + assert isinstance(stream.authenticator, NoAuth) + assert stream._session.auth is None + + +def test_requests_native_token_authenticator(): + stream = 
StubBasicReadHttpStream(authenticator=TokenAuthenticator("test-token")) + assert isinstance(stream.authenticator, NoAuth) + assert isinstance(stream._session.auth, TokenAuthenticator) + + +def test_http_token_authenticator(): + stream = StubBasicReadHttpStream(authenticator=HttpTokenAuthenticator("test-token")) + assert isinstance(stream.authenticator, HttpTokenAuthenticator) + assert stream._session.auth is None + + def test_request_kwargs_used(mocker, requests_mock): stream = StubBasicReadHttpStream() request_kwargs = {"cert": None, "proxies": "google.com"} diff --git a/airbyte-cdk/python/unit_tests/sources/utils/test_schema_helpers.py b/airbyte-cdk/python/unit_tests/sources/utils/test_schema_helpers.py index 8e9b4404c9b14..b4713c200ed98 100644 --- a/airbyte-cdk/python/unit_tests/sources/utils/test_schema_helpers.py +++ b/airbyte-cdk/python/unit_tests/sources/utils/test_schema_helpers.py @@ -27,19 +27,25 @@ import os import shutil import sys +import traceback from collections.abc import Mapping from pathlib import Path -from airbyte_cdk.sources.utils.schema_helpers import ResourceSchemaLoader +import jsonref +from airbyte_cdk.logger import AirbyteLogger +from airbyte_cdk.models.airbyte_protocol import ConnectorSpecification +from airbyte_cdk.sources.utils.schema_helpers import ResourceSchemaLoader, check_config_against_spec_or_exit from pytest import fixture +from pytest import raises as pytest_raises + +logger = AirbyteLogger() + MODULE = sys.modules[__name__] MODULE_NAME = MODULE.__name__.split(".")[0] SCHEMAS_ROOT = "/".join(os.path.abspath(MODULE.__file__).split("/")[:-1]) / Path("schemas") -# TODO (sherif) refactor ResourceSchemaLoader to completely separate the functionality for reading data from the package. See https://github.com/airbytehq/airbyte/issues/3222 -# and the functionality for resolving schemas. 
See https://github.com/airbytehq/airbyte/issues/3222 @fixture(autouse=True, scope="session") def create_and_teardown_schemas_dir(): os.mkdir(SCHEMAS_ROOT) @@ -53,6 +59,38 @@ def create_schema(name: str, content: Mapping): f.write(json.dumps(content)) +@fixture +def spec_object(): + spec = { + "connectionSpecification": { + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "required": ["api_token"], + "additionalProperties": False, + "properties": { + "api_token": {"title": "API Token", "type": "string"}, + }, + }, + } + yield ConnectorSpecification.parse_obj(spec) + + +def test_check_config_against_spec_or_exit_does_not_print_schema(capsys, spec_object): + config = {"super_secret_token": "really_a_secret"} + with pytest_raises(Exception) as ex_info: + check_config_against_spec_or_exit(config, spec_object, logger) + exc = ex_info.value + traceback.print_exception(type(exc), exc, exc.__traceback__) + out, err = capsys.readouterr() + assert "really_a_secret" not in out + err + + +def test_should_not_fail_validation_for_valid_config(spec_object): + config = {"api_token": "something"} + check_config_against_spec_or_exit(config, spec_object, logger) + assert True, "should pass validation with valid config" + + class TestResourceSchemaLoader: # Test that a simple schema is loaded correctly @staticmethod @@ -78,8 +116,9 @@ def test_shared_schemas_resolves(): "properties": { "str": {"type": "string"}, "int": {"type": "integer"}, - "obj": {"type": ["null", "object"], "properties": {"k1": {"type": "string"}}}, + "obj": {"$ref": "#/definitions/shared_schema_"}, }, + "definitions": {"shared_schema_": {"type": ["null", "object"], "properties": {"k1": {"type": "string"}}}}, } partial_schema = { @@ -96,3 +135,43 @@ def test_shared_schemas_resolves(): actual_schema = resolver.get_schema("complex_schema") assert actual_schema == expected_schema + + @staticmethod + def test_shared_schemas_resolves_nested(): + expected_schema = { + "type": ["null", "object"], + "properties": { + "str": {"type": "string"}, + "int": {"type": "integer"}, + "one_of": {"oneOf": [{"type": "string"}, {"$ref": "#/definitions/shared_schema_type_one"}]}, + "obj": {"$ref": "#/definitions/shared_schema_type_one"}, + }, + "definitions": {"shared_schema_type_one": {"type": ["null", "object"], "properties": {"k1": {"type": "string"}}}}, + } + partial_schema = { + "type": ["null", "object"], + "properties": { + "str": {"type": "string"}, + "int": {"type": "integer"}, + "one_of": {"oneOf": [{"type": "string"}, {"$ref": "shared_schema.json#/definitions/type_one"}]}, + "obj": {"$ref": "shared_schema.json#/definitions/type_one"}, + }, + } + + referenced_schema = { + "definitions": { + "type_one": {"$ref": "shared_schema.json#/definitions/type_nested"}, + "type_nested": {"type": ["null", "object"], "properties": {"k1": {"type": "string"}}}, + } + } + + create_schema("complex_schema", partial_schema) + create_schema("shared/shared_schema", referenced_schema) + + resolver = ResourceSchemaLoader(MODULE_NAME) + + actual_schema = resolver.get_schema("complex_schema") + assert actual_schema == expected_schema + # Make sure generated schema is JSON serializable + assert json.dumps(actual_schema) + assert jsonref.JsonRef.replace_refs(actual_schema) diff --git a/airbyte-cdk/python/unit_tests/test_entrypoint.py b/airbyte-cdk/python/unit_tests/test_entrypoint.py index be6e628afd177..a8e5c97293ed3 100644 --- a/airbyte-cdk/python/unit_tests/test_entrypoint.py +++ b/airbyte-cdk/python/unit_tests/test_entrypoint.py @@ -161,9 
+161,8 @@ def test_config_validate(entrypoint: AirbyteEntrypoint, mocker, config_mock, sch messages = list(entrypoint.run(parsed_args)) assert [_wrap_message(check_value)] == messages else: - with pytest.raises(Exception) as ex_info: + with pytest.raises(Exception, match=r"(?i)Config Validation Error:.*"): list(entrypoint.run(parsed_args)) - assert "Config validation error:" in str(ex_info.value) def test_run_check(entrypoint: AirbyteEntrypoint, mocker, spec_mock, config_mock): diff --git a/airbyte-config/init/src/main/resources/config/STANDARD_DESTINATION_DEFINITION/9f760101-60ae-462f-9ee6-b7a9dafd454d.json b/airbyte-config/init/src/main/resources/config/STANDARD_DESTINATION_DEFINITION/9f760101-60ae-462f-9ee6-b7a9dafd454d.json index 06ca1971d4568..657a83d19b6da 100644 --- a/airbyte-config/init/src/main/resources/config/STANDARD_DESTINATION_DEFINITION/9f760101-60ae-462f-9ee6-b7a9dafd454d.json +++ b/airbyte-config/init/src/main/resources/config/STANDARD_DESTINATION_DEFINITION/9f760101-60ae-462f-9ee6-b7a9dafd454d.json @@ -2,6 +2,6 @@ "destinationDefinitionId": "9f760101-60ae-462f-9ee6-b7a9dafd454d", "name": "Kafka", "dockerRepository": "airbyte/destination-kafka", - "dockerImageTag": "0.1.1", + "dockerImageTag": "0.1.2", "documentationUrl": "https://docs.airbyte.io/integrations/destinations/kafka" } diff --git a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/af6d50ee-dddf-4126-a8ee-7faee990774f.json b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/af6d50ee-dddf-4126-a8ee-7faee990774f.json index da4acab7745e4..8494bbc934f92 100644 --- a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/af6d50ee-dddf-4126-a8ee-7faee990774f.json +++ b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/af6d50ee-dddf-4126-a8ee-7faee990774f.json @@ -2,6 +2,6 @@ "sourceDefinitionId": "af6d50ee-dddf-4126-a8ee-7faee990774f", "name": "PostHog", "dockerRepository": "airbyte/source-posthog", - "dockerImageTag": "0.1.3", + "dockerImageTag": "0.1.4", "documentationUrl": "https://docs.airbyte.io/integrations/sources/posthog" } diff --git a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/e094cb9a-26de-4645-8761-65c0c425d1de.json b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/e094cb9a-26de-4645-8761-65c0c425d1de.json index a4bed61a7ee75..efec2dea3ecee 100644 --- a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/e094cb9a-26de-4645-8761-65c0c425d1de.json +++ b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/e094cb9a-26de-4645-8761-65c0c425d1de.json @@ -2,7 +2,7 @@ "sourceDefinitionId": "e094cb9a-26de-4645-8761-65c0c425d1de", "name": "Stripe", "dockerRepository": "airbyte/source-stripe", - "dockerImageTag": "0.1.16", + "dockerImageTag": "0.1.17", "documentationUrl": "https://docs.airbyte.io/integrations/sources/stripe", "icon": "stripe.svg" } diff --git a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/e7778cfc-e97c-4458-9ecb-b4f2bba8946c.json b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/e7778cfc-e97c-4458-9ecb-b4f2bba8946c.json index 4b24104474044..3e649449fa893 100644 --- a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/e7778cfc-e97c-4458-9ecb-b4f2bba8946c.json +++ b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/e7778cfc-e97c-4458-9ecb-b4f2bba8946c.json @@ -2,7 +2,7 @@ "sourceDefinitionId": 
"e7778cfc-e97c-4458-9ecb-b4f2bba8946c", "name": "Facebook Marketing", "dockerRepository": "airbyte/source-facebook-marketing", - "dockerImageTag": "0.2.14", + "dockerImageTag": "0.2.17", "documentationUrl": "https://docs.airbyte.io/integrations/sources/facebook-marketing", "icon": "facebook.svg" } diff --git a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/ef69ef6e-aa7f-4af1-a01d-ef775033524e.json b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/ef69ef6e-aa7f-4af1-a01d-ef775033524e.json index 473207582bdb4..7c78ccf19c0cd 100644 --- a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/ef69ef6e-aa7f-4af1-a01d-ef775033524e.json +++ b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/ef69ef6e-aa7f-4af1-a01d-ef775033524e.json @@ -2,7 +2,7 @@ "sourceDefinitionId": "ef69ef6e-aa7f-4af1-a01d-ef775033524e", "name": "GitHub", "dockerRepository": "airbyte/source-github", - "dockerImageTag": "0.1.10", + "dockerImageTag": "0.1.11", "documentationUrl": "https://docs.airbyte.io/integrations/sources/github", "icon": "github.svg" } diff --git a/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml b/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml index 6f76ea01f376e..fb9ae85282a08 100644 --- a/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml @@ -78,7 +78,7 @@ - destinationDefinitionId: 9f760101-60ae-462f-9ee6-b7a9dafd454d name: Kafka dockerRepository: airbyte/destination-kafka - dockerImageTag: 0.1.1 + dockerImageTag: 0.1.2 documentationUrl: https://docs.airbyte.io/integrations/destinations/kafka - destinationDefinitionId: 8ccd8909-4e99-4141-b48d-4984b70b2d89 name: DynamoDB diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml index 2499291fd05d9..2279ca821191a 100644 --- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml @@ -39,7 +39,7 @@ - sourceDefinitionId: ef69ef6e-aa7f-4af1-a01d-ef775033524e name: GitHub dockerRepository: airbyte/source-github - dockerImageTag: 0.1.10 + dockerImageTag: 0.1.11 documentationUrl: https://docs.airbyte.io/integrations/sources/github icon: github.svg - sourceDefinitionId: b5ea17b1-f170-46dc-bc31-cc744ca984c1 @@ -67,7 +67,7 @@ - sourceDefinitionId: af6d50ee-dddf-4126-a8ee-7faee990774f name: PostHog dockerRepository: airbyte/source-posthog - dockerImageTag: 0.1.3 + dockerImageTag: 0.1.4 documentationUrl: https://docs.airbyte.io/integrations/sources/posthog - sourceDefinitionId: cd42861b-01fc-4658-a8ab-5d11d0510f01 name: Recurly @@ -113,7 +113,7 @@ - sourceDefinitionId: e094cb9a-26de-4645-8761-65c0c425d1de name: Stripe dockerRepository: airbyte/source-stripe - dockerImageTag: 0.1.16 + dockerImageTag: 0.1.17 documentationUrl: https://docs.airbyte.io/integrations/sources/stripe icon: stripe.svg - sourceDefinitionId: b03a9f3e-22a5-11eb-adc1-0242ac120002 @@ -137,7 +137,7 @@ - sourceDefinitionId: e7778cfc-e97c-4458-9ecb-b4f2bba8946c name: Facebook Marketing dockerRepository: airbyte/source-facebook-marketing - dockerImageTag: 0.2.14 + dockerImageTag: 0.2.17 documentationUrl: https://docs.airbyte.io/integrations/sources/facebook-marketing icon: facebook.svg - sourceDefinitionId: 010eb12f-837b-4685-892d-0a39f76a98f5 diff --git 
a/airbyte-config/models/src/main/java/io/airbyte/config/EnvConfigs.java b/airbyte-config/models/src/main/java/io/airbyte/config/EnvConfigs.java index fb121f05eab21..14ff1d4a1064a 100644 --- a/airbyte-config/models/src/main/java/io/airbyte/config/EnvConfigs.java +++ b/airbyte-config/models/src/main/java/io/airbyte/config/EnvConfigs.java @@ -424,7 +424,7 @@ private T getEnvOrDefault(final String key, final T defaultValue, final Func if (value != null && !value.isEmpty()) { return parser.apply(value); } else { - LOGGER.info("{} not found or empty, defaulting to {}", key, isSecret ? "*****" : defaultValue); + LOGGER.info("Using default value for environment variable {}: '{}'", key, isSecret ? "*****" : defaultValue); return defaultValue; } } diff --git a/airbyte-config/persistence/build.gradle b/airbyte-config/persistence/build.gradle index 35caaf9adb6d2..40bb4cce193dd 100644 --- a/airbyte-config/persistence/build.gradle +++ b/airbyte-config/persistence/build.gradle @@ -4,6 +4,7 @@ dependencies { implementation project(':airbyte-db:lib') implementation project(':airbyte-db:jooq') implementation project(':airbyte-config:models') + implementation project(':airbyte-protocol:models') implementation project(':airbyte-config:init') implementation project(':airbyte-json-validation') diff --git a/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/ConfigRepository.java b/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/ConfigRepository.java index 4b32d52784e68..5f60c4ebe6118 100644 --- a/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/ConfigRepository.java +++ b/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/ConfigRepository.java @@ -37,6 +37,8 @@ import io.airbyte.config.StandardSync; import io.airbyte.config.StandardSyncOperation; import io.airbyte.config.StandardWorkspace; +import io.airbyte.protocol.models.ConnectorSpecification; +import io.airbyte.validation.json.JsonSchemaValidator; import io.airbyte.validation.json.JsonValidationException; import java.io.IOException; import java.util.ArrayList; @@ -168,7 +170,12 @@ public SourceConnection getSourceConnection(final UUID sourceId) throws JsonVali return persistence.getConfig(ConfigSchema.SOURCE_CONNECTION, sourceId.toString(), SourceConnection.class); } - public void writeSourceConnection(final SourceConnection source) throws JsonValidationException, IOException { + public void writeSourceConnection(final SourceConnection source, final ConnectorSpecification connectorSpecification) + throws JsonValidationException, IOException { + // actual validation is only for sanity checking + final JsonSchemaValidator validator = new JsonSchemaValidator(); + validator.ensure(connectorSpecification.getConnectionSpecification(), source.getConfiguration()); + persistence.writeConfig(ConfigSchema.SOURCE_CONNECTION, source.getSourceId().toString(), source); } @@ -181,7 +188,12 @@ public DestinationConnection getDestinationConnection(final UUID destinationId) return persistence.getConfig(ConfigSchema.DESTINATION_CONNECTION, destinationId.toString(), DestinationConnection.class); } - public void writeDestinationConnection(final DestinationConnection destinationConnection) throws JsonValidationException, IOException { + public void writeDestinationConnection(final DestinationConnection destinationConnection, final ConnectorSpecification connectorSpecification) + throws JsonValidationException, IOException { + // actual validation is only for sanity checking + final 
JsonSchemaValidator validator = new JsonSchemaValidator(); + validator.ensure(connectorSpecification.getConnectionSpecification(), destinationConnection.getConfiguration()); + persistence.writeConfig(ConfigSchema.DESTINATION_CONNECTION, destinationConnection.getDestinationId().toString(), destinationConnection); } diff --git a/airbyte-db/lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_29_15_001__Add_temporalWorkflowId_col_to_Attempts.java b/airbyte-db/lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_29_15_001__Add_temporalWorkflowId_col_to_Attempts.java index 5cc5ef8828c65..10fee8d05d131 100644 --- a/airbyte-db/lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_29_15_001__Add_temporalWorkflowId_col_to_Attempts.java +++ b/airbyte-db/lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_29_15_001__Add_temporalWorkflowId_col_to_Attempts.java @@ -38,7 +38,8 @@ public void migrate(Context context) throws Exception { // As database schema changes, the generated jOOQ code can be deprecated. So // old migration may not compile if there is any generated code. DSLContext ctx = DSL.using(context.getConnection()); - ctx.alterTable("attempts").addColumn(DSL.field("temporal_workflow_id", SQLDataType.VARCHAR(256).nullable(true))) + ctx.alterTable("attempts") + .addColumnIfNotExists(DSL.field("temporal_workflow_id", SQLDataType.VARCHAR(256).nullable(true))) .execute(); } diff --git a/airbyte-db/lib/src/main/resources/jobs_database/Attempts.yaml b/airbyte-db/lib/src/main/resources/jobs_database/Attempts.yaml index dea43213d71d4..758f53c322f6f 100644 --- a/airbyte-db/lib/src/main/resources/jobs_database/Attempts.yaml +++ b/airbyte-db/lib/src/main/resources/jobs_database/Attempts.yaml @@ -11,7 +11,7 @@ required: - status - created_at - updated_at -additionalProperties: false +additionalProperties: true properties: id: type: number @@ -25,8 +25,6 @@ properties: type: ["null", object] status: type: string - temporal_workflow_id: - type: ["null", string] created_at: # todo should be datetime. 
type: string diff --git a/airbyte-e2e-testing/cypress/integration/onboarding.spec.js b/airbyte-e2e-testing/cypress/integration/onboarding.spec.js index 15a6a869d7201..4c1c14b9ae381 100644 --- a/airbyte-e2e-testing/cypress/integration/onboarding.spec.js +++ b/airbyte-e2e-testing/cypress/integration/onboarding.spec.js @@ -8,9 +8,6 @@ describe("Onboarding actions", () => { cy.submit(); - cy.url().should("include", `${Cypress.config().baseUrl}/onboarding`); - cy.get("button[data-id='skip-onboarding']").click(); - cy.url().should("equal", `${Cypress.config().baseUrl}/`); }); }); diff --git a/airbyte-integrations/builds.md b/airbyte-integrations/builds.md index 7403a0eab7ee4..7e9d6189948b3 100644 --- a/airbyte-integrations/builds.md +++ b/airbyte-integrations/builds.md @@ -90,6 +90,7 @@ | :--- | :--- | | Azure Blob Storage | [![destination-azure-blob-storage](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-azure-blob-storage%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-azure-blob-storage) | | BigQuery | [![destination-bigquery](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-bigquery%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-bigquery) | +| Databricks | (Temporarily Not Available) | | Google Cloud Storage (GCS) | [![destination-gcs](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-s3%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-gcs) | | Google PubSub | [![destination-pubsub](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-pubsub%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-pubsub) | | Kafka | [![destination-kafka](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-kafka%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-kafka) | diff --git a/airbyte-integrations/connector-templates/generator/package-lock.json b/airbyte-integrations/connector-templates/generator/package-lock.json index d794def106f08..cc35540821818 100644 --- a/airbyte-integrations/connector-templates/generator/package-lock.json +++ b/airbyte-integrations/connector-templates/generator/package-lock.json @@ -307,6 +307,29 @@ "to-object-path": "^0.3.0", "union-value": "^1.0.0", "unset-value": "^1.0.0" + }, + "dependencies": { + "extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", + "dev": true, + "requires": { + "is-extendable": "^0.1.0" + } + }, + "set-value": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/set-value/-/set-value-2.0.1.tgz", + "integrity": "sha512-JxHc1weCN68wRY0fhCoXpyK55m/XPHafOmK4UWD7m2CI14GMcFypt4w/0+NV5f/ZMby2F6S2wwA7fgynh9gWSw==", + "dev": true, + "requires": { + "extend-shallow": "^2.0.1", + "is-extendable": "^0.1.1", + "is-plain-object": "^2.0.3", + "split-string": "^3.0.1" + } + } } }, "camel-case": { @@ -1371,6 +1394,12 @@ "isobject": "^3.0.1" } }, + "is-primitive": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/is-primitive/-/is-primitive-3.0.1.tgz", + "integrity": "sha512-GljRxhWvlCNRfZyORiH77FwdFwGcMO620o37EOYC0ORWdq+WYNVqW0w2Juzew4M+L81l6/QS3t5gkkihyRqv9w==", + "dev": true + }, 
"is-relative": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/is-relative/-/is-relative-1.0.0.tgz", @@ -2186,26 +2215,13 @@ } }, "set-value": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/set-value/-/set-value-2.0.1.tgz", - "integrity": "sha512-JxHc1weCN68wRY0fhCoXpyK55m/XPHafOmK4UWD7m2CI14GMcFypt4w/0+NV5f/ZMby2F6S2wwA7fgynh9gWSw==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/set-value/-/set-value-4.1.0.tgz", + "integrity": "sha512-zTEg4HL0RwVrqcWs3ztF+x1vkxfm0lP+MQQFPiMJTKVceBwEV0A569Ou8l9IYQG8jOZdMVI1hGsc0tmeD2o/Lw==", "dev": true, "requires": { - "extend-shallow": "^2.0.1", - "is-extendable": "^0.1.1", - "is-plain-object": "^2.0.3", - "split-string": "^3.0.1" - }, - "dependencies": { - "extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", - "dev": true, - "requires": { - "is-extendable": "^0.1.0" - } - } + "is-plain-object": "^2.0.4", + "is-primitive": "^3.0.1" } }, "signal-exit": { @@ -2552,6 +2568,29 @@ "get-value": "^2.0.6", "is-extendable": "^0.1.1", "set-value": "^2.0.1" + }, + "dependencies": { + "extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", + "dev": true, + "requires": { + "is-extendable": "^0.1.0" + } + }, + "set-value": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/set-value/-/set-value-2.0.1.tgz", + "integrity": "sha512-JxHc1weCN68wRY0fhCoXpyK55m/XPHafOmK4UWD7m2CI14GMcFypt4w/0+NV5f/ZMby2F6S2wwA7fgynh9gWSw==", + "dev": true, + "requires": { + "extend-shallow": "^2.0.1", + "is-extendable": "^0.1.1", + "is-plain-object": "^2.0.3", + "split-string": "^3.0.1" + } + } } }, "unset-value": { diff --git a/airbyte-integrations/connector-templates/generator/package.json b/airbyte-integrations/connector-templates/generator/package.json index 1e66a0fb2e3ba..87b9a86512ae8 100644 --- a/airbyte-integrations/connector-templates/generator/package.json +++ b/airbyte-integrations/connector-templates/generator/package.json @@ -8,6 +8,7 @@ "devDependencies": { "handlebars": "^4.7.7", "plop": "^2.7.4", - "uuid": "^8.3.2" + "uuid": "^8.3.2", + "set-value": ">=4.0.1" } } diff --git a/airbyte-integrations/connectors/destination-databricks/.dockerignore b/airbyte-integrations/connectors/destination-databricks/.dockerignore new file mode 100644 index 0000000000000..65c7d0ad3e73c --- /dev/null +++ b/airbyte-integrations/connectors/destination-databricks/.dockerignore @@ -0,0 +1,3 @@ +* +!Dockerfile +!build diff --git a/airbyte-integrations/connectors/destination-databricks/.gitignore b/airbyte-integrations/connectors/destination-databricks/.gitignore new file mode 100644 index 0000000000000..c04f34fae1720 --- /dev/null +++ b/airbyte-integrations/connectors/destination-databricks/.gitignore @@ -0,0 +1,6 @@ +# The driver is not checked into the source code due to legal reasons. 
+# You can download the driver here: +# https://databricks.com/spark/jdbc-drivers-download +# By downloading this driver, you agree to the terms & conditions: +# https://databricks.com/jdbc-odbc-driver-license +lib/SparkJDBC42.jar diff --git a/airbyte-integrations/connectors/destination-databricks/BOOTSTRAP.md b/airbyte-integrations/connectors/destination-databricks/BOOTSTRAP.md new file mode 100644 index 0000000000000..85942c0e31758 --- /dev/null +++ b/airbyte-integrations/connectors/destination-databricks/BOOTSTRAP.md @@ -0,0 +1,6 @@ +# Databricks Destination Connector Bootstrap + +The Databricks Connector enables a developer to sync data into a Databricks cluster. It does so in two steps: + +1. Persist source data in S3 staging files in the Parquet format. +2. Create a delta table based on the Parquet staging files. diff --git a/airbyte-integrations/connectors/destination-databricks/Dockerfile b/airbyte-integrations/connectors/destination-databricks/Dockerfile new file mode 100644 index 0000000000000..4c3d7fc644ca0 --- /dev/null +++ b/airbyte-integrations/connectors/destination-databricks/Dockerfile @@ -0,0 +1,11 @@ +FROM airbyte/integration-base-java:dev + +WORKDIR /airbyte +ENV APPLICATION destination-databricks + +COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar + +RUN tar xf ${APPLICATION}.tar --strip-components=1 + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/destination-databricks diff --git a/airbyte-integrations/connectors/destination-databricks/README.md b/airbyte-integrations/connectors/destination-databricks/README.md new file mode 100644 index 0000000000000..5a9ab5bf1cb1e --- /dev/null +++ b/airbyte-integrations/connectors/destination-databricks/README.md @@ -0,0 +1,82 @@ +# Destination Databricks + +This is the repository for the Databricks destination connector in Java. +For information about how to use this connector within Airbyte, see [the User Documentation](https://docs.airbyte.io/integrations/destinations/databricks). + +## Databricks JDBC Driver +This connector requires a JDBC driver to connect to a Databricks cluster. The driver is developed by Simba. Before downloading and using this driver, you must agree to the [JDBC ODBC driver license](https://databricks.com/jdbc-odbc-driver-license). This means that you can only use this driver to connect third-party applications to Apache Spark SQL within a Databricks offering using the ODBC and/or JDBC protocols. The driver can be downloaded from [here](https://databricks.com/spark/jdbc-drivers-download). + +This is currently a private connector that is only available in Airbyte Cloud. To build and publish this connector, first download the driver and put it under the `lib` directory. Please do not publish this connector publicly. We are working on a solution to make it publicly available. + +## Local development + +#### Building via Gradle +From the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:destination-databricks:build +``` + +#### Create credentials +**If you are a community contributor**, you will need access to AWS S3 and a Databricks cluster to run the integration tests: + +- Create a Databricks cluster. See [documentation](https://docs.databricks.com/clusters/create.html). +- Create an S3 bucket. See [documentation](https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html#access-keys-and-secret-access-keys). +- Grant the Databricks cluster full access to the S3 bucket, or mount it as a Databricks File System (DBFS).
See [documentation](https://docs.databricks.com/data/data-sources/aws/amazon-s3.html). +- Place both Databricks and S3 credentials in `sample_secrets/config.json`, which conforms to the spec file in `src/main/resources/spec.json`. +- Rename the directory from `sample_secrets` to `secrets`. +- Note that the `secrets` directory is git-ignored by default, so there is no danger of accidentally checking in sensitive information. + +**If you are an Airbyte core member**: + +- Get the `destination databricks creds` secrets from LastPass and put them in `sample_secrets/config.json`. +- Rename the directory from `sample_secrets` to `secrets`. + +### Locally running the connector docker image + +#### Build +Build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:destination-databricks:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/destination-databricks:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-databricks:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/destination-databricks:dev write --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` + +## Testing +We use `JUnit` for Java tests. + +### Unit and Integration Tests +Place unit tests under `src/test/java/io/airbyte/integrations/destination/databricks`. + +#### Acceptance Tests +Airbyte has a standard test suite that all destination connectors must pass. Implement the `TODO`s in +`src/test-integration/java/io/airbyte/integrations/destination/databricks/DatabricksDestinationAcceptanceTest.java`. + +### Using gradle to run tests +All commands should be run from the Airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:destination-databricks:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:destination-databricks:integrationTest +``` + +## Dependency Management + +### Publishing a new version of the connector +You've checked out the repo, implemented a million-dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master.
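The docker commands above all route through the connector's standard entrypoint. For quick local debugging, the same entrypoint can also be invoked directly from the JVM; the sketch below is a hypothetical helper (not part of this change, and the class name is invented) that assumes the connector module and its dependencies are already on the classpath. It mirrors the connector's own `main` method, which appears later in this diff.

```java
package io.airbyte.integrations.destination.databricks;

import io.airbyte.integrations.base.IntegrationRunner;

// Hypothetical helper: runs the same entrypoint as `docker run ... spec`, assuming the
// destination-databricks module's runtime classpath is available locally.
public class LocalSpecRunner {

  public static void main(String[] args) throws Exception {
    // Swap in {"check", "--config", "secrets/config.json"} etc. to exercise other commands.
    new IntegrationRunner(new DatabricksDestination()).run(new String[] {"spec"});
  }

}
```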
diff --git a/airbyte-integrations/connectors/destination-databricks/build.gradle b/airbyte-integrations/connectors/destination-databricks/build.gradle new file mode 100644 index 0000000000000..24f6b9a9f062c --- /dev/null +++ b/airbyte-integrations/connectors/destination-databricks/build.gradle @@ -0,0 +1,31 @@ +plugins { + id 'application' + id 'airbyte-docker' + id 'airbyte-integration-test-java' +} + +application { + mainClass = 'io.airbyte.integrations.destination.databricks.DatabricksDestination' +} + +dependencies { + implementation project(':airbyte-db:lib') + implementation project(':airbyte-config:models') + implementation project(':airbyte-protocol:models') + implementation project(':airbyte-integrations:bases:base-java') + implementation files(project(':airbyte-integrations:bases:base-java').airbyteDocker.outputs) + implementation project(':airbyte-integrations:connectors:destination-jdbc') + implementation project(':airbyte-integrations:connectors:destination-s3') + // Spark JDBC is not checked into the repo for legal reason + implementation files("lib/SparkJDBC42.jar") + + // parquet + implementation group: 'org.apache.hadoop', name: 'hadoop-common', version: '3.3.0' + implementation group: 'org.apache.hadoop', name: 'hadoop-aws', version: '3.3.0' + implementation group: 'org.apache.hadoop', name: 'hadoop-mapreduce-client-core', version: '3.3.0' + implementation group: 'org.apache.parquet', name: 'parquet-avro', version: '1.12.0' + implementation group: 'tech.allegro.schema.json2avro', name: 'converter', version: '0.2.10' + + integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-destination-test') + integrationTestJavaImplementation project(':airbyte-integrations:connectors:destination-databricks') +} diff --git a/airbyte-integrations/connectors/destination-databricks/lib/.keep b/airbyte-integrations/connectors/destination-databricks/lib/.keep new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/airbyte-integrations/connectors/destination-databricks/sample_secrets/config.json b/airbyte-integrations/connectors/destination-databricks/sample_secrets/config.json new file mode 100644 index 0000000000000..930b87950f138 --- /dev/null +++ b/airbyte-integrations/connectors/destination-databricks/sample_secrets/config.json @@ -0,0 +1,15 @@ +{ + "databricks_server_hostname": "required", + "databricks_http_path": "required", + "databricks_port": "443", + "databricks_personal_access_token": "required", + "database_schema": "public", + "data_source": { + "data_source_type": "S3", + "s3_bucket_name": "required", + "s3_bucket_path": "required", + "s3_bucket_region": "required", + "s3_access_key_id": "required", + "s3_secret_access_key": "required" + } +} diff --git a/airbyte-integrations/connectors/destination-databricks/src/main/java/io/airbyte/integrations/destination/databricks/DatabricksConstants.java b/airbyte-integrations/connectors/destination-databricks/src/main/java/io/airbyte/integrations/destination/databricks/DatabricksConstants.java new file mode 100644 index 0000000000000..b0276391d997c --- /dev/null +++ b/airbyte-integrations/connectors/destination-databricks/src/main/java/io/airbyte/integrations/destination/databricks/DatabricksConstants.java @@ -0,0 +1,38 @@ +/* + * MIT License + * + * Copyright (c) 2020 Airbyte + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without 
limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package io.airbyte.integrations.destination.databricks; + +import java.util.Set; + +public class DatabricksConstants { + + public static final String DATABRICKS_USERNAME = "token"; + public static final String DATABRICKS_DRIVER_CLASS = "com.simba.spark.jdbc.Driver"; + + public static final Set DEFAULT_TBL_PROPERTIES = Set.of( + "delta.autoOptimize.optimizeWrite = true", + "delta.autoOptimize.autoCompact = true"); + +} diff --git a/airbyte-integrations/connectors/destination-databricks/src/main/java/io/airbyte/integrations/destination/databricks/DatabricksDestination.java b/airbyte-integrations/connectors/destination-databricks/src/main/java/io/airbyte/integrations/destination/databricks/DatabricksDestination.java new file mode 100644 index 0000000000000..8b118e5cd44f4 --- /dev/null +++ b/airbyte-integrations/connectors/destination-databricks/src/main/java/io/airbyte/integrations/destination/databricks/DatabricksDestination.java @@ -0,0 +1,101 @@ +/* + * MIT License + * + * Copyright (c) 2020 Airbyte + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ + +package io.airbyte.integrations.destination.databricks; + +import com.fasterxml.jackson.databind.JsonNode; +import io.airbyte.db.Databases; +import io.airbyte.db.jdbc.JdbcDatabase; +import io.airbyte.integrations.base.AirbyteMessageConsumer; +import io.airbyte.integrations.base.IntegrationRunner; +import io.airbyte.integrations.destination.ExtendedNameTransformer; +import io.airbyte.integrations.destination.jdbc.SqlOperations; +import io.airbyte.integrations.destination.jdbc.copy.CopyConsumerFactory; +import io.airbyte.integrations.destination.jdbc.copy.CopyDestination; +import io.airbyte.integrations.destination.jdbc.copy.s3.S3StreamCopier; +import io.airbyte.protocol.models.AirbyteMessage; +import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; +import java.util.function.Consumer; + +public class DatabricksDestination extends CopyDestination { + + public DatabricksDestination() { + super("database_schema"); + } + + public static void main(String[] args) throws Exception { + new IntegrationRunner(new DatabricksDestination()).run(args); + } + + @Override + public AirbyteMessageConsumer getConsumer(JsonNode config, ConfiguredAirbyteCatalog catalog, Consumer outputRecordCollector) { + DatabricksDestinationConfig databricksConfig = DatabricksDestinationConfig.get(config); + return CopyConsumerFactory.create( + outputRecordCollector, + getDatabase(config), + getSqlOperations(), + getNameTransformer(), + databricksConfig, + catalog, + new DatabricksStreamCopierFactory(), + databricksConfig.getDatabaseSchema()); + } + + @Override + public void checkPersistence(JsonNode config) { + DatabricksDestinationConfig databricksConfig = DatabricksDestinationConfig.get(config); + S3StreamCopier.attemptS3WriteAndDelete(databricksConfig.getS3DestinationConfig().getS3Config()); + } + + @Override + public ExtendedNameTransformer getNameTransformer() { + return new DatabricksNameTransformer(); + } + + @Override + public JdbcDatabase getDatabase(JsonNode jsonConfig) { + return getDatabase(DatabricksDestinationConfig.get(jsonConfig)); + } + + @Override + public SqlOperations getSqlOperations() { + return new DatabricksSqlOperations(); + } + + static String getDatabricksConnectionString(DatabricksDestinationConfig databricksConfig) { + return String.format("jdbc:spark://%s:%s/default;transportMode=http;ssl=1;httpPath=%s;UserAgentEntry=Airbyte", + databricksConfig.getDatabricksServerHostname(), + databricksConfig.getDatabricksPort(), + databricksConfig.getDatabricksHttpPath()); + } + + static JdbcDatabase getDatabase(DatabricksDestinationConfig databricksConfig) { + return Databases.createJdbcDatabase( + DatabricksConstants.DATABRICKS_USERNAME, + databricksConfig.getDatabricksPersonalAccessToken(), + getDatabricksConnectionString(databricksConfig), + DatabricksConstants.DATABRICKS_DRIVER_CLASS); + } + +} diff --git a/airbyte-integrations/connectors/destination-databricks/src/main/java/io/airbyte/integrations/destination/databricks/DatabricksDestinationConfig.java b/airbyte-integrations/connectors/destination-databricks/src/main/java/io/airbyte/integrations/destination/databricks/DatabricksDestinationConfig.java new file mode 100644 index 0000000000000..0c54c537cd826 --- /dev/null +++ b/airbyte-integrations/connectors/destination-databricks/src/main/java/io/airbyte/integrations/destination/databricks/DatabricksDestinationConfig.java @@ -0,0 +1,119 @@ +/* + * MIT License + * + * Copyright (c) 2020 Airbyte + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this 
software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package io.airbyte.integrations.destination.databricks; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import io.airbyte.integrations.destination.s3.S3DestinationConfig; +import io.airbyte.integrations.destination.s3.parquet.S3ParquetFormatConfig; + +/** + * Currently only S3 is supported. So the data source config is always {@link S3DestinationConfig}. + */ +public class DatabricksDestinationConfig { + + static final String DEFAULT_DATABRICKS_PORT = "443"; + static final String DEFAULT_DATABASE_SCHEMA = "public"; + static final boolean DEFAULT_PURGE_STAGING_DATA = true; + + private final String databricksServerHostname; + private final String databricksHttpPath; + private final String databricksPort; + private final String databricksPersonalAccessToken; + private final String databaseSchema; + private final boolean purgeStagingData; + private final S3DestinationConfig s3DestinationConfig; + + public DatabricksDestinationConfig(String databricksServerHostname, + String databricksHttpPath, + String databricksPort, + String databricksPersonalAccessToken, + String databaseSchema, + boolean purgeStagingData, + S3DestinationConfig s3DestinationConfig) { + this.databricksServerHostname = databricksServerHostname; + this.databricksHttpPath = databricksHttpPath; + this.databricksPort = databricksPort; + this.databricksPersonalAccessToken = databricksPersonalAccessToken; + this.databaseSchema = databaseSchema; + this.purgeStagingData = purgeStagingData; + this.s3DestinationConfig = s3DestinationConfig; + } + + public static DatabricksDestinationConfig get(JsonNode config) { + return new DatabricksDestinationConfig( + config.get("databricks_server_hostname").asText(), + config.get("databricks_http_path").asText(), + config.has("databricks_port") ? config.get("databricks_port").asText() : DEFAULT_DATABRICKS_PORT, + config.get("databricks_personal_access_token").asText(), + config.has("database_schema") ? config.get("database_schema").asText() : DEFAULT_DATABASE_SCHEMA, + config.has("purge_staging_data") ? 
config.get("purge_staging_data").asBoolean() : DEFAULT_PURGE_STAGING_DATA, + getDataSource(config.get("data_source"))); + } + + public static S3DestinationConfig getDataSource(JsonNode dataSource) { + return new S3DestinationConfig( + "", + dataSource.get("s3_bucket_name").asText(), + dataSource.get("s3_bucket_path").asText(), + dataSource.get("s3_bucket_region").asText(), + dataSource.get("s3_access_key_id").asText(), + dataSource.get("s3_secret_access_key").asText(), + getDefaultParquetConfig()); + } + + public String getDatabricksServerHostname() { + return databricksServerHostname; + } + + private static S3ParquetFormatConfig getDefaultParquetConfig() { + return new S3ParquetFormatConfig(new ObjectMapper().createObjectNode()); + } + + public String getDatabricksHttpPath() { + return databricksHttpPath; + } + + public String getDatabricksPort() { + return databricksPort; + } + + public String getDatabricksPersonalAccessToken() { + return databricksPersonalAccessToken; + } + + public String getDatabaseSchema() { + return databaseSchema; + } + + public boolean isPurgeStagingData() { + return purgeStagingData; + } + + public S3DestinationConfig getS3DestinationConfig() { + return s3DestinationConfig; + } + +} diff --git a/airbyte-integrations/connectors/destination-databricks/src/main/java/io/airbyte/integrations/destination/databricks/DatabricksNameTransformer.java b/airbyte-integrations/connectors/destination-databricks/src/main/java/io/airbyte/integrations/destination/databricks/DatabricksNameTransformer.java new file mode 100644 index 0000000000000..c0e81f5f2f0b3 --- /dev/null +++ b/airbyte-integrations/connectors/destination-databricks/src/main/java/io/airbyte/integrations/destination/databricks/DatabricksNameTransformer.java @@ -0,0 +1,56 @@ +/* + * MIT License + * + * Copyright (c) 2020 Airbyte + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
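As a concrete illustration of the JDBC wiring shown above, the sketch below builds a `DatabricksDestinationConfig` by hand and prints the URL assembled by `getDatabricksConnectionString`. The hostname, HTTP path, and token are made-up placeholders, and the class itself is illustrative only, not part of the connector; it sits in the connector's package because the helper method is package-private.

```java
package io.airbyte.integrations.destination.databricks;

// Sketch only: shows the shape of the Spark JDBC URL built from the connection settings.
public class ConnectionStringExample {

  public static void main(String[] args) {
    DatabricksDestinationConfig config = new DatabricksDestinationConfig(
        "example-workspace.cloud.databricks.com",      // databricks_server_hostname (placeholder)
        "sql/protocolv1/o/0123456789/0000-111111-example", // databricks_http_path (placeholder)
        "443",                                          // databricks_port (default)
        "dapi-fake-token",                              // databricks_personal_access_token (fake)
        "public",                                       // database_schema (default)
        true,                                           // purge_staging_data (default)
        null);                                          // S3 staging config is not needed for the URL

    System.out.println(DatabricksDestination.getDatabricksConnectionString(config));
    // Prints (wrapped here for readability):
    // jdbc:spark://example-workspace.cloud.databricks.com:443/default;transportMode=http;ssl=1;
    //   httpPath=sql/protocolv1/o/0123456789/0000-111111-example;UserAgentEntry=Airbyte
  }

}
```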
+ */ + +package io.airbyte.integrations.destination.databricks; + +import io.airbyte.integrations.destination.ExtendedNameTransformer; + +public class DatabricksNameTransformer extends ExtendedNameTransformer { + + @Override + public String convertStreamName(String input) { + return applyDefaultCase(super.convertStreamName(input)); + } + + @Override + public String getIdentifier(String name) { + return applyDefaultCase(super.getIdentifier(name)); + } + + @Override + public String getTmpTableName(String streamName) { + return applyDefaultCase(super.getTmpTableName(streamName)); + } + + @Override + public String getRawTableName(String streamName) { + return applyDefaultCase(super.getRawTableName(streamName)); + } + + @Override + protected String applyDefaultCase(String input) { + return input.toLowerCase(); + } + +} diff --git a/airbyte-integrations/connectors/destination-databricks/src/main/java/io/airbyte/integrations/destination/databricks/DatabricksSqlOperations.java b/airbyte-integrations/connectors/destination-databricks/src/main/java/io/airbyte/integrations/destination/databricks/DatabricksSqlOperations.java new file mode 100644 index 0000000000000..a1d2654627f01 --- /dev/null +++ b/airbyte-integrations/connectors/destination-databricks/src/main/java/io/airbyte/integrations/destination/databricks/DatabricksSqlOperations.java @@ -0,0 +1,70 @@ +/* + * MIT License + * + * Copyright (c) 2020 Airbyte + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package io.airbyte.integrations.destination.databricks; + +import io.airbyte.db.jdbc.JdbcDatabase; +import io.airbyte.integrations.base.JavaBaseConstants; +import io.airbyte.integrations.destination.jdbc.JdbcSqlOperations; +import io.airbyte.protocol.models.AirbyteRecordMessage; +import java.util.List; + +public class DatabricksSqlOperations extends JdbcSqlOperations { + + @Override + public void executeTransaction(JdbcDatabase database, List queries) throws Exception { + for (String query : queries) { + database.execute(query); + } + } + + /** + * Spark SQL does not support many of the data definition keywords and types as in Postgres. 
+ * Reference: https://spark.apache.org/docs/latest/sql-ref-datatypes.html + */ + @Override + public String createTableQuery(JdbcDatabase database, String schemaName, String tableName) { + return String.format( + "CREATE TABLE IF NOT EXISTS %s.%s (%s STRING, %s STRING, %s TIMESTAMP);", + schemaName, tableName, + JavaBaseConstants.COLUMN_NAME_AB_ID, + JavaBaseConstants.COLUMN_NAME_DATA, + JavaBaseConstants.COLUMN_NAME_EMITTED_AT); + } + + @Override + public void createSchemaIfNotExists(JdbcDatabase database, String schemaName) throws Exception { + database.execute(String.format("create database if not exists %s;", schemaName)); + } + + @Override + public void insertRecordsInternal(JdbcDatabase database, + List records, + String schemaName, + String tmpTableName) { + // Do nothing. The records are copied into the table directly from the staging parquet file. + // So no manual insertion is needed. + } + +} diff --git a/airbyte-integrations/connectors/destination-databricks/src/main/java/io/airbyte/integrations/destination/databricks/DatabricksStreamCopier.java b/airbyte-integrations/connectors/destination-databricks/src/main/java/io/airbyte/integrations/destination/databricks/DatabricksStreamCopier.java new file mode 100644 index 0000000000000..9844d684711cd --- /dev/null +++ b/airbyte-integrations/connectors/destination-databricks/src/main/java/io/airbyte/integrations/destination/databricks/DatabricksStreamCopier.java @@ -0,0 +1,221 @@ +/* + * MIT License + * + * Copyright (c) 2020 Airbyte + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
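The note above about Spark SQL's narrower DDL surface is why the raw table is declared with only STRING and TIMESTAMP columns. A rough, hypothetical illustration of the statement `createTableQuery` emits is sketched below; the schema and table names are invented, the class is not part of this change, and the `_airbyte_*` column names (which actually come from `JavaBaseConstants`) are written out here as an assumption.

```java
package io.airbyte.integrations.destination.databricks;

// Illustrative dry run of the raw-table DDL for a hypothetical stream.
public class RawTableDdlExample {

  public static void main(String[] args) {
    // The JdbcDatabase argument is unused by createTableQuery, so null is fine for a dry run.
    String ddl = new DatabricksSqlOperations().createTableQuery(null, "public", "_airbyte_raw_users");
    System.out.println(ddl);
    // Expected shape (column names assumed):
    // CREATE TABLE IF NOT EXISTS public._airbyte_raw_users
    //   (_airbyte_ab_id STRING, _airbyte_data STRING, _airbyte_emitted_at TIMESTAMP);
  }

}
```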
+ */ + +package io.airbyte.integrations.destination.databricks; + +import com.amazonaws.services.s3.AmazonS3; +import com.fasterxml.jackson.databind.ObjectMapper; +import io.airbyte.db.jdbc.JdbcDatabase; +import io.airbyte.integrations.destination.ExtendedNameTransformer; +import io.airbyte.integrations.destination.jdbc.SqlOperations; +import io.airbyte.integrations.destination.jdbc.copy.StreamCopier; +import io.airbyte.integrations.destination.s3.S3DestinationConfig; +import io.airbyte.integrations.destination.s3.parquet.S3ParquetFormatConfig; +import io.airbyte.integrations.destination.s3.parquet.S3ParquetWriter; +import io.airbyte.integrations.destination.s3.writer.S3WriterFactory; +import io.airbyte.protocol.models.AirbyteRecordMessage; +import io.airbyte.protocol.models.ConfiguredAirbyteStream; +import io.airbyte.protocol.models.DestinationSyncMode; +import java.sql.Timestamp; +import java.util.UUID; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * This implementation is similar to + * {@link io.airbyte.integrations.destination.jdbc.copy.s3.S3StreamCopier}. The difference is that + * this implementation creates Parquet staging files, instead of CSV ones. + *

+ * It does the following operations:
+ *
+ * 1. Parquet writer writes data stream into staging parquet file in
+ * s3://<bucket-name>/<bucket-path>/<staging-folder>.
+ * 2. Create a tmp delta table based on the staging parquet file.
+ * 3. Create the destination delta table based on the tmp delta table schema in
+ * s3://<bucket-name>/<bucket-path>/<schema-name>/<stream-name>.
+ * 4. Copy the staging parquet file into the destination delta table.
+ * 5. Delete the tmp delta table, and the staging parquet file.
  • + */ +public class DatabricksStreamCopier implements StreamCopier { + + private static final Logger LOGGER = LoggerFactory.getLogger(DatabricksStreamCopier.class); + private static final ObjectMapper MAPPER = new ObjectMapper(); + + private final String schemaName; + private final String streamName; + private final DestinationSyncMode destinationSyncMode; + private final AmazonS3 s3Client; + private final S3DestinationConfig s3Config; + private final boolean purgeStagingData; + private final JdbcDatabase database; + private final DatabricksSqlOperations sqlOperations; + + private final String tmpTableName; + private final String destTableName; + private final S3ParquetWriter parquetWriter; + private final String tmpTableLocation; + private final String destTableLocation; + + public DatabricksStreamCopier(String stagingFolder, + String schema, + ConfiguredAirbyteStream configuredStream, + AmazonS3 s3Client, + JdbcDatabase database, + DatabricksDestinationConfig databricksConfig, + ExtendedNameTransformer nameTransformer, + SqlOperations sqlOperations, + S3WriterFactory writerFactory, + Timestamp uploadTime) + throws Exception { + this.schemaName = schema; + this.streamName = configuredStream.getStream().getName(); + this.destinationSyncMode = configuredStream.getDestinationSyncMode(); + this.s3Client = s3Client; + this.s3Config = databricksConfig.getS3DestinationConfig(); + this.purgeStagingData = databricksConfig.isPurgeStagingData(); + this.database = database; + this.sqlOperations = (DatabricksSqlOperations) sqlOperations; + + this.tmpTableName = nameTransformer.getTmpTableName(streamName); + this.destTableName = nameTransformer.getIdentifier(streamName); + + S3DestinationConfig stagingS3Config = getStagingS3DestinationConfig(s3Config, stagingFolder); + this.parquetWriter = (S3ParquetWriter) writerFactory.create(stagingS3Config, s3Client, configuredStream, uploadTime); + + this.tmpTableLocation = String.format("s3://%s/%s", + s3Config.getBucketName(), parquetWriter.getOutputPrefix()); + this.destTableLocation = String.format("s3://%s/%s/%s/%s", + s3Config.getBucketName(), s3Config.getBucketPath(), databricksConfig.getDatabaseSchema(), streamName); + + LOGGER.info("[Stream {}] Database schema: {}", streamName, schemaName); + LOGGER.info("[Stream {}] Parquet schema: {}", streamName, parquetWriter.getParquetSchema()); + LOGGER.info("[Stream {}] Tmp table {} location: {}", streamName, tmpTableName, tmpTableLocation); + LOGGER.info("[Stream {}] Data table {} location: {}", streamName, destTableName, destTableLocation); + + parquetWriter.initialize(); + } + + @Override + public void write(UUID id, AirbyteRecordMessage recordMessage) throws Exception { + parquetWriter.write(id, recordMessage); + } + + @Override + public void closeStagingUploader(boolean hasFailed) throws Exception { + parquetWriter.close(hasFailed); + } + + @Override + public void createDestinationSchema() throws Exception { + LOGGER.info("[Stream {}] Creating database schema if it does not exist: {}", streamName, schemaName); + sqlOperations.createSchemaIfNotExists(database, schemaName); + } + + @Override + public void createTemporaryTable() throws Exception { + LOGGER.info("[Stream {}] Creating tmp table {} from staging file: {}", streamName, tmpTableName, tmpTableLocation); + + sqlOperations.dropTableIfExists(database, schemaName, tmpTableName); + String createTmpTable = String.format("CREATE TABLE %s.%s USING parquet LOCATION '%s';", schemaName, tmpTableName, tmpTableLocation); + LOGGER.info(createTmpTable); + 
database.execute(createTmpTable); + } + + @Override + public void copyStagingFileToTemporaryTable() { + // The tmp table is created directly based on the staging file. So no separate copying step is + // needed. + } + + @Override + public String createDestinationTable() throws Exception { + LOGGER.info("[Stream {}] Creating destination table if it does not exist: {}", streamName, destTableName); + + String createStatement = destinationSyncMode == DestinationSyncMode.OVERWRITE + // "create or replace" is the recommended way to replace existing table + ? "CREATE OR REPLACE TABLE" + : "CREATE TABLE IF NOT EXISTS"; + + String createTable = String.format( + "%s %s.%s " + + "USING delta " + + "LOCATION '%s' " + + "COMMENT 'Created from stream %s' " + + "TBLPROPERTIES ('airbyte.destinationSyncMode' = '%s', %s) " + + // create the table based on the schema of the tmp table + "AS SELECT * FROM %s.%s LIMIT 0", + createStatement, + schemaName, destTableName, + destTableLocation, + streamName, + destinationSyncMode.value(), + String.join(", ", DatabricksConstants.DEFAULT_TBL_PROPERTIES), + schemaName, tmpTableName); + LOGGER.info(createTable); + database.execute(createTable); + + return destTableName; + } + + @Override + public String generateMergeStatement(String destTableName) { + String copyData = String.format( + "COPY INTO %s.%s " + + "FROM '%s' " + + "FILEFORMAT = PARQUET " + + "PATTERN = '%s'", + schemaName, destTableName, + tmpTableLocation, + parquetWriter.getOutputFilename()); + LOGGER.info(copyData); + return copyData; + } + + @Override + public void removeFileAndDropTmpTable() throws Exception { + if (purgeStagingData) { + LOGGER.info("[Stream {}] Deleting tmp table: {}", streamName, tmpTableName); + sqlOperations.dropTableIfExists(database, schemaName, tmpTableName); + + LOGGER.info("[Stream {}] Deleting staging file: {}", streamName, parquetWriter.getOutputFilePath()); + s3Client.deleteObject(s3Config.getBucketName(), parquetWriter.getOutputFilePath()); + } + } + + /** + * The staging data location is s3:////. This method + * creates an {@link S3DestinationConfig} whose bucket path is /. 
+ */ + static S3DestinationConfig getStagingS3DestinationConfig(S3DestinationConfig config, String stagingFolder) { + return new S3DestinationConfig( + config.getEndpoint(), + config.getBucketName(), + String.join("/", config.getBucketPath(), stagingFolder), + config.getBucketRegion(), + config.getAccessKeyId(), + config.getSecretAccessKey(), + // use default parquet format config + new S3ParquetFormatConfig(MAPPER.createObjectNode())); + } + +} diff --git a/airbyte-integrations/connectors/destination-databricks/src/main/java/io/airbyte/integrations/destination/databricks/DatabricksStreamCopierFactory.java b/airbyte-integrations/connectors/destination-databricks/src/main/java/io/airbyte/integrations/destination/databricks/DatabricksStreamCopierFactory.java new file mode 100644 index 0000000000000..f1285f20e9dfc --- /dev/null +++ b/airbyte-integrations/connectors/destination-databricks/src/main/java/io/airbyte/integrations/destination/databricks/DatabricksStreamCopierFactory.java @@ -0,0 +1,64 @@ +/* + * MIT License + * + * Copyright (c) 2020 Airbyte + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
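To make the copier's flow concrete: for a hypothetical stream `users` synced in append mode into schema `public`, the statements assembled by `createTemporaryTable`, `createDestinationTable`, and `generateMergeStatement` would look roughly like the constants in the sketch below. Every identifier, bucket, and staging path is invented; only the statement shapes follow the format strings shown above.

```java
// Hypothetical walk-through of the SQL one stream produces; all names and S3 paths are made up.
final class DatabricksCopySqlExample {

  // Step 2: tmp table defined directly over the staging Parquet file.
  static final String CREATE_TMP_TABLE =
      "CREATE TABLE public._airbyte_tmp_abc_users "
          + "USING parquet LOCATION 's3://my-bucket/data_sync/<staging-folder>/public/users';";

  // Step 3: destination delta table, created from the tmp table's schema.
  // In overwrite mode the prefix becomes CREATE OR REPLACE TABLE instead.
  static final String CREATE_DEST_TABLE =
      "CREATE TABLE IF NOT EXISTS public.users "
          + "USING delta LOCATION 's3://my-bucket/data_sync/public/users' "
          + "COMMENT 'Created from stream users' "
          + "TBLPROPERTIES ('airbyte.destinationSyncMode' = 'append', "
          + "delta.autoOptimize.optimizeWrite = true, delta.autoOptimize.autoCompact = true) "
          + "AS SELECT * FROM public._airbyte_tmp_abc_users LIMIT 0";

  // Step 4: copy the staging Parquet file into the destination delta table.
  static final String COPY_INTO_DEST_TABLE =
      "COPY INTO public.users "
          + "FROM 's3://my-bucket/data_sync/<staging-folder>/public/users' "
          + "FILEFORMAT = PARQUET "
          + "PATTERN = '<staging-file>.parquet'";

  // Step 5 (when purge_staging_data is true): DROP TABLE IF EXISTS public._airbyte_tmp_abc_users,
  // then delete the staging Parquet file from S3.

}
```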
+ */ + +package io.airbyte.integrations.destination.databricks; + +import com.amazonaws.services.s3.AmazonS3; +import io.airbyte.db.jdbc.JdbcDatabase; +import io.airbyte.integrations.destination.ExtendedNameTransformer; +import io.airbyte.integrations.destination.jdbc.SqlOperations; +import io.airbyte.integrations.destination.jdbc.copy.StreamCopier; +import io.airbyte.integrations.destination.jdbc.copy.StreamCopierFactory; +import io.airbyte.integrations.destination.s3.writer.ProductionWriterFactory; +import io.airbyte.integrations.destination.s3.writer.S3WriterFactory; +import io.airbyte.protocol.models.AirbyteStream; +import io.airbyte.protocol.models.ConfiguredAirbyteStream; +import java.sql.Timestamp; + +public class DatabricksStreamCopierFactory implements StreamCopierFactory { + + @Override + public StreamCopier create(String configuredSchema, + DatabricksDestinationConfig databricksConfig, + String stagingFolder, + ConfiguredAirbyteStream configuredStream, + ExtendedNameTransformer nameTransformer, + JdbcDatabase database, + SqlOperations sqlOperations) { + try { + AirbyteStream stream = configuredStream.getStream(); + String schema = StreamCopierFactory.getSchema(stream.getNamespace(), configuredSchema, nameTransformer); + AmazonS3 s3Client = databricksConfig.getS3DestinationConfig().getS3Client(); + S3WriterFactory writerFactory = new ProductionWriterFactory(); + Timestamp uploadTimestamp = new Timestamp(System.currentTimeMillis()); + + return new DatabricksStreamCopier(stagingFolder, schema, configuredStream, s3Client, database, + databricksConfig, nameTransformer, sqlOperations, writerFactory, uploadTimestamp); + } catch (Exception e) { + throw new RuntimeException(e); + } + + } + +} diff --git a/airbyte-integrations/connectors/destination-databricks/src/main/resources/spec.json b/airbyte-integrations/connectors/destination-databricks/src/main/resources/spec.json new file mode 100644 index 0000000000000..f1dbb454d9f81 --- /dev/null +++ b/airbyte-integrations/connectors/destination-databricks/src/main/resources/spec.json @@ -0,0 +1,145 @@ +{ + "documentationUrl": "https://docs.airbyte.io/integrations/destinations/databricks", + "supportsIncremental": true, + "supportsNormalization": false, + "supportsDBT": false, + "supported_destination_sync_modes": ["overwrite", "append"], + "connectionSpecification": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Databricks Destination Spec", + "type": "object", + "required": [ + "databricks_server_hostname", + "databricks_http_path", + "databricks_personal_access_token", + "data_source" + ], + "additionalProperties": false, + "properties": { + "databricks_server_hostname": { + "title": "Databricks Cluster Server Hostname", + "type": "string", + "description": "", + "examples": ["abc-12345678-wxyz.cloud.databricks.com"] + }, + "databricks_http_path": { + "title": "Databricks Cluster HTTP Path", + "type": "string", + "description": "", + "examples": ["sql/protocolvx/o/1234567489/0000-1111111-abcd90"] + }, + "databricks_port": { + "title": "Databricks Cluster Port", + "type": "string", + "description": "", + "default": "443", + "examples": ["443"] + }, + "databricks_personal_access_token": { + "title": "Databricks Personal Access Token", + "type": "string", + "description": "", + "examples": ["dapi0123456789abcdefghij0123456789AB"], + "airbyte_secret": true + }, + "database_schema": { + "title": "Database Schema", + "type": "string", + "description": "The default schema tables are written to if the source does not specify 
a namespace. Unless specifically configured, the usual value for this field is \"public\".", + "default": "public", + "examples": ["public"] + }, + "data_source": { + "title": "Data Source", + "type": "object", + "description": "Storage on which the delta lake is built", + "oneOf": [ + { + "title": "Amazon S3", + "required": [ + "data_source_type", + "s3_bucket_name", + "s3_bucket_path", + "s3_bucket_region", + "s3_access_key_id", + "s3_secret_access_key" + ], + "properties": { + "data_source_type": { + "type": "string", + "enum": ["S3"], + "default": "S3" + }, + "s3_bucket_name": { + "title": "S3 Bucket Name", + "type": "string", + "description": "The name of the S3 bucket to use for intermediate staging of the data.", + "examples": ["airbyte.staging"] + }, + "s3_bucket_path": { + "title": "S3 Bucket Path", + "type": "string", + "description": "The directory under the S3 bucket where data will be written.", + "examples": ["data_sync/test"] + }, + "s3_bucket_region": { + "title": "S3 Bucket Region", + "type": "string", + "default": "", + "description": "The region of the S3 staging bucket to use if utilising a copy strategy.", + "enum": [ + "", + "us-east-1", + "us-east-2", + "us-west-1", + "us-west-2", + "af-south-1", + "ap-east-1", + "ap-south-1", + "ap-northeast-1", + "ap-northeast-2", + "ap-northeast-3", + "ap-southeast-1", + "ap-southeast-2", + "ca-central-1", + "cn-north-1", + "cn-northwest-1", + "eu-central-1", + "eu-north-1", + "eu-south-1", + "eu-west-1", + "eu-west-2", + "eu-west-3", + "sa-east-1", + "me-south-1", + "us-gov-east-1", + "us-gov-west-1" + ] + }, + "s3_access_key_id": { + "type": "string", + "description": "The Access Key ID that grants access to the above S3 staging bucket. Airbyte requires Read and Write permissions to the given bucket.", + "title": "S3 Key Id", + "examples": ["A012345678910EXAMPLE"], + "airbyte_secret": true + }, + "s3_secret_access_key": { + "title": "S3 Access Key", + "type": "string", + "description": "The secret corresponding to the above Access Key ID.", + "examples": ["a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY"], + "airbyte_secret": true + } + } + } + ] + }, + "purge_staging_data": { + "title": "Purge Staging Files and Tables", + "type": "boolean", + "description": "Defaults to 'true'.
Switch it to 'false' for debugging purpose.", + "default": true + } + } + } +} diff --git a/airbyte-integrations/connectors/destination-databricks/src/test-integration/java/io/airbyte/integrations/destination/databricks/DatabricksDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-databricks/src/test-integration/java/io/airbyte/integrations/destination/databricks/DatabricksDestinationAcceptanceTest.java new file mode 100644 index 0000000000000..acfc74bfd7b7b --- /dev/null +++ b/airbyte-integrations/connectors/destination-databricks/src/test-integration/java/io/airbyte/integrations/destination/databricks/DatabricksDestinationAcceptanceTest.java @@ -0,0 +1,170 @@ +/* + * MIT License + * + * Copyright (c) 2020 Airbyte + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ + +package io.airbyte.integrations.destination.databricks; + +import static org.jooq.impl.DSL.asterisk; +import static org.jooq.impl.DSL.field; + +import com.amazonaws.services.s3.AmazonS3; +import com.amazonaws.services.s3.model.DeleteObjectsRequest; +import com.amazonaws.services.s3.model.DeleteObjectsRequest.KeyVersion; +import com.amazonaws.services.s3.model.DeleteObjectsResult; +import com.amazonaws.services.s3.model.S3ObjectSummary; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.node.ObjectNode; +import io.airbyte.commons.io.IOs; +import io.airbyte.commons.json.Jsons; +import io.airbyte.db.Database; +import io.airbyte.db.Databases; +import io.airbyte.integrations.base.JavaBaseConstants; +import io.airbyte.integrations.destination.ExtendedNameTransformer; +import io.airbyte.integrations.destination.jdbc.copy.StreamCopierFactory; +import io.airbyte.integrations.destination.s3.S3DestinationConfig; +import io.airbyte.integrations.destination.s3.avro.JsonFieldNameUpdater; +import io.airbyte.integrations.destination.s3.util.AvroRecordHelper; +import io.airbyte.integrations.standardtest.destination.DestinationAcceptanceTest; +import java.nio.file.Path; +import java.sql.SQLException; +import java.util.LinkedList; +import java.util.List; +import java.util.stream.Collectors; +import org.apache.commons.lang3.RandomStringUtils; +import org.jooq.JSONFormat; +import org.jooq.JSONFormat.RecordFormat; +import org.jooq.SQLDialect; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class DatabricksDestinationAcceptanceTest extends DestinationAcceptanceTest { + + private static final Logger LOGGER = LoggerFactory.getLogger(DatabricksDestinationAcceptanceTest.class); + private static final String SECRETS_CONFIG_JSON = "secrets/config.json"; + private static final JSONFormat JSON_FORMAT = new JSONFormat().recordFormat(RecordFormat.OBJECT); + + private final ExtendedNameTransformer nameTransformer = new DatabricksNameTransformer(); + private JsonNode configJson; + private DatabricksDestinationConfig databricksConfig; + private S3DestinationConfig s3Config; + private AmazonS3 s3Client; + + @Override + protected String getImageName() { + return "airbyte/destination-databricks:dev"; + } + + @Override + protected JsonNode getConfig() { + return configJson; + } + + @Override + protected JsonNode getFailCheckConfig() { + JsonNode failCheckJson = Jsons.clone(configJson); + // set invalid credential + ((ObjectNode) failCheckJson.get("data_source")) + .put("s3_access_key_id", "fake-key") + .put("s3_secret_access_key", "fake-secret"); + return failCheckJson; + } + + @Override + protected List retrieveRecords(TestDestinationEnv testEnv, + String streamName, + String namespace, + JsonNode streamSchema) + throws SQLException { + String tableName = nameTransformer.getIdentifier(streamName); + String schemaName = StreamCopierFactory.getSchema(namespace, databricksConfig.getDatabaseSchema(), nameTransformer); + JsonFieldNameUpdater nameUpdater = AvroRecordHelper.getFieldNameUpdater(streamName, namespace, streamSchema); + + Database database = getDatabase(databricksConfig); + return database.query(ctx -> ctx.select(asterisk()) + .from(String.format("%s.%s", schemaName, tableName)) + .orderBy(field(JavaBaseConstants.COLUMN_NAME_EMITTED_AT).asc()) + .fetch().stream() + .map(record -> { + JsonNode json = Jsons.deserialize(record.formatJSON(JSON_FORMAT)); + JsonNode jsonWithOriginalFields = nameUpdater.getJsonWithOriginalFieldNames(json); + return 
AvroRecordHelper.pruneAirbyteJson(jsonWithOriginalFields); + }) + .collect(Collectors.toList())); + } + + @Override + protected void setup(TestDestinationEnv testEnv) { + JsonNode baseConfigJson = Jsons.deserialize(IOs.readFile(Path.of(SECRETS_CONFIG_JSON))); + + // Set a random s3 bucket path and database schema for each integration test + String randomString = RandomStringUtils.randomAlphanumeric(5); + JsonNode configJson = Jsons.clone(baseConfigJson); + ((ObjectNode) configJson).put("database_schema", "integration_test_" + randomString); + JsonNode dataSource = configJson.get("data_source"); + ((ObjectNode) dataSource).put("s3_bucket_path", "test_" + randomString); + + this.configJson = configJson; + this.databricksConfig = DatabricksDestinationConfig.get(configJson); + this.s3Config = databricksConfig.getS3DestinationConfig(); + LOGGER.info("Test full path: s3://{}/{}", s3Config.getBucketName(), s3Config.getBucketPath()); + + this.s3Client = s3Config.getS3Client(); + } + + @Override + protected void tearDown(TestDestinationEnv testEnv) throws SQLException { + // clean up s3 + List keysToDelete = new LinkedList<>(); + List objects = s3Client + .listObjects(s3Config.getBucketName(), s3Config.getBucketPath()) + .getObjectSummaries(); + for (S3ObjectSummary object : objects) { + keysToDelete.add(new KeyVersion(object.getKey())); + } + + if (keysToDelete.size() > 0) { + LOGGER.info("Tearing down test bucket path: {}/{}", s3Config.getBucketName(), + s3Config.getBucketPath()); + DeleteObjectsResult result = s3Client + .deleteObjects(new DeleteObjectsRequest(s3Config.getBucketName()).withKeys(keysToDelete)); + LOGGER.info("Deleted {} file(s).", result.getDeletedObjects().size()); + } + + // clean up database + LOGGER.info("Dropping database schema {}", databricksConfig.getDatabaseSchema()); + Database database = getDatabase(databricksConfig); + // we cannot use jooq dropSchemaIfExists method here because there is no proper dialect for + // Databricks, and it incorrectly quotes the schema name + database.query(ctx -> ctx.execute(String.format("DROP SCHEMA IF EXISTS %s CASCADE;", databricksConfig.getDatabaseSchema()))); + } + + private static Database getDatabase(DatabricksDestinationConfig databricksConfig) { + return Databases.createDatabase( + DatabricksConstants.DATABRICKS_USERNAME, + databricksConfig.getDatabricksPersonalAccessToken(), + DatabricksDestination.getDatabricksConnectionString(databricksConfig), + DatabricksConstants.DATABRICKS_DRIVER_CLASS, + SQLDialect.DEFAULT); + } + +} diff --git a/airbyte-integrations/connectors/destination-databricks/src/test/java/io/airbyte/integrations/destination/databricks/DatabricksDestinationConfigTest.java b/airbyte-integrations/connectors/destination-databricks/src/test/java/io/airbyte/integrations/destination/databricks/DatabricksDestinationConfigTest.java new file mode 100644 index 0000000000000..7bf3f05e253bd --- /dev/null +++ b/airbyte-integrations/connectors/destination-databricks/src/test/java/io/airbyte/integrations/destination/databricks/DatabricksDestinationConfigTest.java @@ -0,0 +1,63 @@ +/* + * MIT License + * + * Copyright (c) 2020 Airbyte + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * 
furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package io.airbyte.integrations.destination.databricks; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; +import org.junit.jupiter.api.Test; + +class DatabricksDestinationConfigTest { + + private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + + @Test + public void testConfigCreationFromJson() { + ObjectNode dataSourceConfig = OBJECT_MAPPER.createObjectNode() + .put("data_source_type", "S3") + .put("s3_bucket_name", "bucket_name") + .put("s3_bucket_path", "bucket_path") + .put("s3_bucket_region", "bucket_region") + .put("s3_access_key_id", "access_key_id") + .put("s3_secret_access_key", "secret_access_key"); + + ObjectNode databricksConfig = OBJECT_MAPPER.createObjectNode() + .put("databricks_server_hostname", "server_hostname") + .put("databricks_http_path", "http_path") + .put("databricks_personal_access_token", "pak") + .set("data_source", dataSourceConfig); + + DatabricksDestinationConfig config1 = DatabricksDestinationConfig.get(databricksConfig); + assertEquals(DatabricksDestinationConfig.DEFAULT_DATABRICKS_PORT, config1.getDatabricksPort()); + assertEquals(DatabricksDestinationConfig.DEFAULT_DATABASE_SCHEMA, config1.getDatabaseSchema()); + + databricksConfig.put("databricks_port", "1000").put("database_schema", "testing_schema"); + DatabricksDestinationConfig config2 = DatabricksDestinationConfig.get(databricksConfig); + assertEquals("1000", config2.getDatabricksPort()); + assertEquals("testing_schema", config2.getDatabaseSchema()); + } + +} diff --git a/airbyte-integrations/connectors/destination-databricks/src/test/java/io/airbyte/integrations/destination/databricks/DatabricksStreamCopierTest.java b/airbyte-integrations/connectors/destination-databricks/src/test/java/io/airbyte/integrations/destination/databricks/DatabricksStreamCopierTest.java new file mode 100644 index 0000000000000..ffbfc398f6edb --- /dev/null +++ b/airbyte-integrations/connectors/destination-databricks/src/test/java/io/airbyte/integrations/destination/databricks/DatabricksStreamCopierTest.java @@ -0,0 +1,44 @@ +/* + * MIT License + * + * Copyright (c) 2020 Airbyte + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. 
+ * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package io.airbyte.integrations.destination.databricks; + +import static org.junit.jupiter.api.Assertions.*; + +import io.airbyte.integrations.destination.s3.S3DestinationConfig; +import java.util.UUID; +import org.junit.jupiter.api.Test; + +class DatabricksStreamCopierTest { + + @Test + public void testGetStagingS3DestinationConfig() { + String bucketPath = UUID.randomUUID().toString(); + S3DestinationConfig config = new S3DestinationConfig("", "", bucketPath, "", "", "", null); + String stagingFolder = UUID.randomUUID().toString(); + S3DestinationConfig stagingConfig = DatabricksStreamCopier.getStagingS3DestinationConfig(config, stagingFolder); + assertEquals(String.format("%s/%s", bucketPath, stagingFolder), stagingConfig.getBucketPath()); + } + +} diff --git a/airbyte-integrations/connectors/destination-gcs/src/test-integration/java/io/airbyte/integrations/destination/gcs/GcsAvroDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-gcs/src/test-integration/java/io/airbyte/integrations/destination/gcs/GcsAvroDestinationAcceptanceTest.java index 230cd278c91cd..fa11d93a79bbc 100644 --- a/airbyte-integrations/connectors/destination-gcs/src/test-integration/java/io/airbyte/integrations/destination/gcs/GcsAvroDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-gcs/src/test-integration/java/io/airbyte/integrations/destination/gcs/GcsAvroDestinationAcceptanceTest.java @@ -31,6 +31,7 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.integrations.destination.s3.S3Format; import io.airbyte.integrations.destination.s3.avro.JsonFieldNameUpdater; +import io.airbyte.integrations.destination.s3.util.AvroRecordHelper; import java.util.LinkedList; import java.util.List; import org.apache.avro.file.DataFileReader; diff --git a/airbyte-integrations/connectors/destination-gcs/src/test-integration/java/io/airbyte/integrations/destination/gcs/GcsParquetDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-gcs/src/test-integration/java/io/airbyte/integrations/destination/gcs/GcsParquetDestinationAcceptanceTest.java index f5ddccd8d44e8..ef93d70f26db5 100644 --- a/airbyte-integrations/connectors/destination-gcs/src/test-integration/java/io/airbyte/integrations/destination/gcs/GcsParquetDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-gcs/src/test-integration/java/io/airbyte/integrations/destination/gcs/GcsParquetDestinationAcceptanceTest.java @@ -32,6 +32,7 @@ import io.airbyte.integrations.destination.gcs.parquet.GcsParquetWriter; import io.airbyte.integrations.destination.s3.S3Format; import io.airbyte.integrations.destination.s3.avro.JsonFieldNameUpdater; +import io.airbyte.integrations.destination.s3.util.AvroRecordHelper; import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; diff --git a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/CopyConsumerFactory.java 
b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/CopyConsumerFactory.java index 9dcb075575bb7..a11d341b9144d 100644 --- a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/CopyConsumerFactory.java +++ b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/CopyConsumerFactory.java @@ -24,7 +24,6 @@ package io.airbyte.integrations.destination.jdbc.copy; -import io.airbyte.commons.json.Jsons; import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.integrations.base.AirbyteMessageConsumer; import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; @@ -37,8 +36,6 @@ import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteRecordMessage; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; -import java.sql.Timestamp; -import java.time.Instant; import java.util.ArrayList; import java.util.HashMap; import java.util.List; @@ -94,8 +91,7 @@ private static Map createWrite for (var configuredStream : catalog.getStreams()) { var stream = configuredStream.getStream(); var pair = AirbyteStreamNameNamespacePair.fromAirbyteSteam(stream); - var syncMode = configuredStream.getDestinationSyncMode(); - var copier = streamCopierFactory.create(defaultSchema, config, stagingFolder, syncMode, stream, namingResolver, database, sqlOperations); + var copier = streamCopierFactory.create(defaultSchema, config, stagingFolder, configuredStream, namingResolver, database, sqlOperations); pairToCopier.put(pair, copier); } @@ -116,8 +112,7 @@ private static RecordWriter recordWriterFunction(Map { StreamCopier create(String configuredSchema, T config, String stagingFolder, - DestinationSyncMode syncMode, - AirbyteStream stream, + ConfiguredAirbyteStream configuredStream, ExtendedNameTransformer nameTransformer, JdbcDatabase db, SqlOperations sqlOperations); + static String getSchema(String namespace, String configuredSchema, ExtendedNameTransformer nameTransformer) { + if (namespace != null) { + return nameTransformer.convertStreamName(namespace); + } else { + return nameTransformer.convertStreamName(configuredSchema); + } + } + } diff --git a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/gcs/GcsStreamCopier.java b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/gcs/GcsStreamCopier.java index c74ee13e83890..69fe6dddff781 100644 --- a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/gcs/GcsStreamCopier.java +++ b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/gcs/GcsStreamCopier.java @@ -30,10 +30,12 @@ import com.google.cloud.storage.BlobInfo; import com.google.cloud.storage.Storage; import com.google.cloud.storage.StorageOptions; +import io.airbyte.commons.json.Jsons; import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.integrations.destination.ExtendedNameTransformer; import io.airbyte.integrations.destination.jdbc.SqlOperations; import io.airbyte.integrations.destination.jdbc.copy.StreamCopier; +import io.airbyte.protocol.models.AirbyteRecordMessage; import io.airbyte.protocol.models.DestinationSyncMode; import java.io.ByteArrayInputStream; import java.io.IOException; @@ -44,6 +46,7 @@ import java.nio.charset.StandardCharsets; import 
java.sql.SQLException; import java.sql.Timestamp; +import java.time.Instant; import java.util.UUID; import org.apache.commons.csv.CSVFormat; import org.apache.commons.csv.CSVPrinter; @@ -103,8 +106,10 @@ public GcsStreamCopier(String stagingFolder, } @Override - public void write(UUID id, String jsonDataString, Timestamp emittedAt) throws Exception { - csvPrinter.printRecord(id, jsonDataString, emittedAt); + public void write(UUID id, AirbyteRecordMessage recordMessage) throws Exception { + csvPrinter.printRecord(id, + Jsons.serialize(recordMessage.getData()), + Timestamp.from(Instant.ofEpochMilli(recordMessage.getEmittedAt()))); } @Override diff --git a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/gcs/GcsStreamCopierFactory.java b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/gcs/GcsStreamCopierFactory.java index 594eaf85285f1..358da1a644973 100644 --- a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/gcs/GcsStreamCopierFactory.java +++ b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/gcs/GcsStreamCopierFactory.java @@ -28,12 +28,12 @@ import com.google.cloud.storage.Storage; import com.google.cloud.storage.StorageOptions; import io.airbyte.db.jdbc.JdbcDatabase; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.destination.ExtendedNameTransformer; import io.airbyte.integrations.destination.jdbc.SqlOperations; import io.airbyte.integrations.destination.jdbc.copy.StreamCopier; import io.airbyte.integrations.destination.jdbc.copy.StreamCopierFactory; import io.airbyte.protocol.models.AirbyteStream; +import io.airbyte.protocol.models.ConfiguredAirbyteStream; import io.airbyte.protocol.models.DestinationSyncMode; import java.io.ByteArrayInputStream; import java.io.InputStream; @@ -47,14 +47,14 @@ public abstract class GcsStreamCopierFactory implements StreamCopierFactory { @@ -43,17 +43,17 @@ public abstract class S3StreamCopierFactory implements StreamCopierFactory buildKafkaProducer(JsonNode config) { .put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, config.get("bootstrap_servers").asText()) .putAll(propertiesByProtocol(config)) .put(ProducerConfig.CLIENT_ID_CONFIG, - config.has("client_id") ? config.get("client_id").asText() : null) + config.has("client_id") ? 
config.get("client_id").asText() : "") .put(ProducerConfig.ACKS_CONFIG, config.get("acks").asText()) - .put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, config.get("enable_idempotence").booleanValue()) + .put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, config.get("enable_idempotence").asBoolean()) .put(ProducerConfig.COMPRESSION_TYPE_CONFIG, config.get("compression_type").asText()) - .put(ProducerConfig.BATCH_SIZE_CONFIG, config.get("batch_size").intValue()) - .put(ProducerConfig.LINGER_MS_CONFIG, config.get("linger_ms").longValue()) + .put(ProducerConfig.BATCH_SIZE_CONFIG, config.get("batch_size").asInt()) + .put(ProducerConfig.LINGER_MS_CONFIG, config.get("linger_ms").asLong()) .put(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION, - config.get("max_in_flight_requests_per_connection").intValue()) + config.get("max_in_flight_requests_per_connection").asInt()) .put(ProducerConfig.CLIENT_DNS_LOOKUP_CONFIG, config.get("client_dns_lookup").asText()) - .put(ProducerConfig.BUFFER_MEMORY_CONFIG, config.get("buffer_memory").longValue()) - .put(ProducerConfig.MAX_REQUEST_SIZE_CONFIG, config.get("max_request_size").intValue()) - .put(ProducerConfig.RETRIES_CONFIG, config.get("retries").intValue()) + .put(ProducerConfig.BUFFER_MEMORY_CONFIG, config.get("buffer_memory").asLong()) + .put(ProducerConfig.MAX_REQUEST_SIZE_CONFIG, config.get("max_request_size").asInt()) + .put(ProducerConfig.RETRIES_CONFIG, config.get("retries").asInt()) .put(ProducerConfig.SOCKET_CONNECTION_SETUP_TIMEOUT_MS_CONFIG, - config.get("socket_connection_setup_timeout_ms").longValue()) + config.get("socket_connection_setup_timeout_ms").asLong()) .put(ProducerConfig.SOCKET_CONNECTION_SETUP_TIMEOUT_MAX_MS_CONFIG, - config.get("socket_connection_setup_timeout_max_ms").longValue()) - .put(ProducerConfig.MAX_BLOCK_MS_CONFIG, config.get("max_block_ms").longValue()) - .put(ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG, config.get("request_timeout_ms").intValue()) - .put(ProducerConfig.DELIVERY_TIMEOUT_MS_CONFIG, config.get("delivery_timeout_ms").intValue()) - .put(ProducerConfig.SEND_BUFFER_CONFIG, config.get("send_buffer_bytes").intValue()) - .put(ProducerConfig.RECEIVE_BUFFER_CONFIG, config.get("receive_buffer_bytes").intValue()) + config.get("socket_connection_setup_timeout_max_ms").asLong()) + .put(ProducerConfig.MAX_BLOCK_MS_CONFIG, config.get("max_block_ms").asInt()) + .put(ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG, config.get("request_timeout_ms").asInt()) + .put(ProducerConfig.DELIVERY_TIMEOUT_MS_CONFIG, config.get("delivery_timeout_ms").asInt()) + .put(ProducerConfig.SEND_BUFFER_CONFIG, config.get("send_buffer_bytes").asInt()) + .put(ProducerConfig.RECEIVE_BUFFER_CONFIG, config.get("receive_buffer_bytes").asInt()) .put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()) .put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class.getName()) .build(); diff --git a/airbyte-integrations/connectors/destination-kafka/src/main/resources/spec.json b/airbyte-integrations/connectors/destination-kafka/src/main/resources/spec.json index 0ffa9eefbaec2..2eb35a5b22cd9 100644 --- a/airbyte-integrations/connectors/destination-kafka/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/destination-kafka/src/main/resources/spec.json @@ -162,19 +162,19 @@ "title": "Batch size", "description": "The producer will attempt to batch records together into fewer requests whenever multiple records are being sent to the same partition.", "type": "integer", - "default": 16384 + "examples": [16384] }, "linger_ms": 
{ "title": "Linger ms", "description": "The producer groups together any records that arrive in between request transmissions into a single batched request.", - "type": "number", - "default": 0 + "type": "string", + "examples": [0] }, "max_in_flight_requests_per_connection": { "title": "Max in flight requests per connection", "description": "The maximum number of unacknowledged requests the client will send on a single connection before blocking.", "type": "integer", - "default": 5 + "examples": [5] }, "client_dns_lookup": { "title": "Client DNS lookup", @@ -191,62 +191,62 @@ "buffer_memory": { "title": "Buffer memory", "description": "The total bytes of memory the producer can use to buffer records waiting to be sent to the server.", - "type": "number", - "default": 33554432 + "type": "string", + "examples": 33554432 }, "max_request_size": { "title": "Max request size", "description": "The maximum size of a request in bytes.", "type": "integer", - "default": 1048576 + "examples": [1048576] }, "retries": { "title": "Retries", "description": "Setting a value greater than zero will cause the client to resend any record whose send fails with a potentially transient error.", "type": "integer", - "default": 2147483647 + "examples": [2147483647] }, "socket_connection_setup_timeout_ms": { "title": "Socket connection setup timeout", "description": "The amount of time the client will wait for the socket connection to be established.", - "type": "number", - "default": 10000 + "type": "string", + "examples": [10000] }, "socket_connection_setup_timeout_max_ms": { "title": "Socket connection setup max timeout", "description": "The maximum amount of time the client will wait for the socket connection to be established. The connection setup timeout will increase exponentially for each consecutive connection failure up to this maximum.", - "type": "number", - "default": 30000 + "type": "string", + "examples": [30000] }, "max_block_ms": { "title": "Max block ms", "description": "The configuration controls how long the KafkaProducer's send(), partitionsFor(), initTransactions(), sendOffsetsToTransaction(), commitTransaction() and abortTransaction() methods will block.", - "type": "number", - "default": 60000 + "type": "string", + "examples": [60000] }, "request_timeout_ms": { "title": "Request timeout", "description": "The configuration controls the maximum amount of time the client will wait for the response of a request. If the response is not received before the timeout elapses the client will resend the request if necessary or fail the request if retries are exhausted.", "type": "integer", - "default": 30000 + "examples": [30000] }, "delivery_timeout_ms": { "title": "Delivery timeout", "description": "An upper bound on the time to report success or failure after a call to 'send()' returns.", "type": "integer", - "default": 120000 + "examples": [120000] }, "send_buffer_bytes": { "title": "Send buffer bytes", "description": "The size of the TCP send buffer (SO_SNDBUF) to use when sending data. If the value is -1, the OS default will be used.", "type": "integer", - "default": 131072 + "examples": [131072] }, "receive_buffer_bytes": { "title": "Receive buffer bytes", "description": "The size of the TCP receive buffer (SO_RCVBUF) to use when reading data. 
If the value is -1, the OS default will be used.", "type": "integer", - "default": 32768 + "examples": [32768] } } } diff --git a/airbyte-integrations/connectors/destination-kafka/src/test-integration/java/io/airbyte/integrations/destination/kafka/KafkaDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-kafka/src/test-integration/java/io/airbyte/integrations/destination/kafka/KafkaDestinationAcceptanceTest.java index 0de8df3703cc9..12c46825a9f48 100644 --- a/airbyte-integrations/connectors/destination-kafka/src/test-integration/java/io/airbyte/integrations/destination/kafka/KafkaDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-kafka/src/test-integration/java/io/airbyte/integrations/destination/kafka/KafkaDestinationAcceptanceTest.java @@ -75,15 +75,15 @@ protected JsonNode getConfig() { .put("enable_idempotence", true) .put("compression_type", "none") .put("batch_size", 16384) - .put("linger_ms", 0) + .put("linger_ms", "0") .put("max_in_flight_requests_per_connection", 5) .put("client_dns_lookup", "use_all_dns_ips") - .put("buffer_memory", 33554432) + .put("buffer_memory", "33554432") .put("max_request_size", 1048576) .put("retries", 2147483647) - .put("socket_connection_setup_timeout_ms", 10000) - .put("socket_connection_setup_timeout_max_ms", 30000) - .put("max_block_ms", 60000) + .put("socket_connection_setup_timeout_ms", "10000") + .put("socket_connection_setup_timeout_max_ms", "30000") + .put("max_block_ms", "60000") .put("request_timeout_ms", 30000) .put("delivery_timeout_ms", 120000) .put("send_buffer_bytes", -1) diff --git a/airbyte-integrations/connectors/destination-kafka/src/test/java/io/airbyte/integrations/destination/kafka/KafkaRecordConsumerTest.java b/airbyte-integrations/connectors/destination-kafka/src/test/java/io/airbyte/integrations/destination/kafka/KafkaRecordConsumerTest.java index 1898d21ab9f7c..22867e782ba2e 100644 --- a/airbyte-integrations/connectors/destination-kafka/src/test/java/io/airbyte/integrations/destination/kafka/KafkaRecordConsumerTest.java +++ b/airbyte-integrations/connectors/destination-kafka/src/test/java/io/airbyte/integrations/destination/kafka/KafkaRecordConsumerTest.java @@ -122,16 +122,16 @@ private JsonNode getConfig(String topicPattern) { .put("transactional_id", "txn-id") .put("enable_idempotence", true) .put("compression_type", "none") - .put("batch_size", 16384) - .put("linger_ms", 0) - .put("max_in_flight_requests_per_connection", 5) + .put("batch_size", "16384") + .put("linger_ms", "0") + .put("max_in_flight_requests_per_connection", "5") .put("client_dns_lookup", "use_all_dns_ips") .put("buffer_memory", 33554432) .put("max_request_size", 1048576) .put("retries", 1) - .put("socket_connection_setup_timeout_ms", 10) - .put("socket_connection_setup_timeout_max_ms", 30) - .put("max_block_ms", 100) + .put("socket_connection_setup_timeout_ms", "10") + .put("socket_connection_setup_timeout_max_ms", "30") + .put("max_block_ms", "100") .put("request_timeout_ms", 100) .put("delivery_timeout_ms", 120) .put("send_buffer_bytes", -1) diff --git a/airbyte-integrations/connectors/destination-keen/src/main/java/io/airbyte/integrations/destination/keen/KeenTimestampService.java b/airbyte-integrations/connectors/destination-keen/src/main/java/io/airbyte/integrations/destination/keen/KeenTimestampService.java index 3b1146ec42da6..3c33a93dc4cfd 100644 --- a/airbyte-integrations/connectors/destination-keen/src/main/java/io/airbyte/integrations/destination/keen/KeenTimestampService.java +++ 
b/airbyte-integrations/connectors/destination-keen/src/main/java/io/airbyte/integrations/destination/keen/KeenTimestampService.java @@ -75,9 +75,9 @@ public KeenTimestampService(ConfiguredAirbyteCatalog catalog, boolean timestampI /** * Tries to inject keen.timestamp field to the given message data. If the stream contains cursor * field, it's value is tried to be parsed to timestamp. If this procedure fails, stream is removed - * from timestamp-parsable stream map, so parsing is not tried for future messages in the same stream. - * If parsing succeeds, keen.timestamp field is put as a JSON node to the message data and whole data - * is returned. Otherwise, keen.timestamp is set to emittedAt value + * from timestamp-parsable stream map, so parsing is not tried for future messages in the same + * stream. If parsing succeeds, keen.timestamp field is put as a JSON node to the message data and + * whole data is returned. Otherwise, keen.timestamp is set to emittedAt value * * @param message AirbyteRecordMessage containing record data * @return Record data together with keen.timestamp field diff --git a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3Consumer.java b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3Consumer.java index 5fedf2406c5bd..a552aa4cd1362 100644 --- a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3Consumer.java +++ b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3Consumer.java @@ -24,13 +24,7 @@ package io.airbyte.integrations.destination.s3; -import com.amazonaws.ClientConfiguration; -import com.amazonaws.auth.AWSCredentials; -import com.amazonaws.auth.AWSStaticCredentialsProvider; -import com.amazonaws.auth.BasicAWSCredentials; -import com.amazonaws.client.builder.AwsClientBuilder; import com.amazonaws.services.s3.AmazonS3; -import com.amazonaws.services.s3.AmazonS3ClientBuilder; import io.airbyte.commons.json.Jsons; import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.base.FailureTrackingAirbyteMessageConsumer; @@ -71,31 +65,7 @@ public S3Consumer(S3DestinationConfig s3DestinationConfig, @Override protected void startTracked() throws Exception { - - var endpoint = s3DestinationConfig.getEndpoint(); - - AWSCredentials awsCreds = new BasicAWSCredentials(s3DestinationConfig.getAccessKeyId(), s3DestinationConfig.getSecretAccessKey()); - AmazonS3 s3Client = null; - - if (endpoint.isEmpty()) { - s3Client = AmazonS3ClientBuilder.standard() - .withCredentials(new AWSStaticCredentialsProvider(awsCreds)) - .withRegion(s3DestinationConfig.getBucketRegion()) - .build(); - - } else { - ClientConfiguration clientConfiguration = new ClientConfiguration(); - clientConfiguration.setSignerOverride("AWSS3V4SignerType"); - - s3Client = AmazonS3ClientBuilder - .standard() - .withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(endpoint, s3DestinationConfig.getBucketRegion())) - .withPathStyleAccessEnabled(true) - .withClientConfiguration(clientConfiguration) - .withCredentials(new AWSStaticCredentialsProvider(awsCreds)) - .build(); - } - + AmazonS3 s3Client = s3DestinationConfig.getS3Client(); Timestamp uploadTimestamp = new Timestamp(System.currentTimeMillis()); for (ConfiguredAirbyteStream configuredStream : configuredCatalog.getStreams()) { diff --git 
a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3DestinationConfig.java b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3DestinationConfig.java index 5f055b3e6a3d4..26952552b4d40 100644 --- a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3DestinationConfig.java +++ b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3DestinationConfig.java @@ -24,8 +24,21 @@ package io.airbyte.integrations.destination.s3; +import com.amazonaws.ClientConfiguration; +import com.amazonaws.auth.AWSCredentials; +import com.amazonaws.auth.AWSStaticCredentialsProvider; +import com.amazonaws.auth.BasicAWSCredentials; +import com.amazonaws.client.builder.AwsClientBuilder; +import com.amazonaws.services.s3.AmazonS3; +import com.amazonaws.services.s3.AmazonS3ClientBuilder; import com.fasterxml.jackson.databind.JsonNode; +import io.airbyte.integrations.destination.jdbc.copy.s3.S3Config; +/** + * This class is similar to {@link io.airbyte.integrations.destination.jdbc.copy.s3.S3Config}. It + * has an extra {@code bucketPath} parameter, which is necessary for more delicate data syncing to + * S3. + */ public class S3DestinationConfig { private final String endpoint; @@ -92,4 +105,34 @@ public S3FormatConfig getFormatConfig() { return formatConfig; } + public AmazonS3 getS3Client() { + final AWSCredentials awsCreds = new BasicAWSCredentials(accessKeyId, secretAccessKey); + + if (endpoint == null || endpoint.isEmpty()) { + return AmazonS3ClientBuilder.standard() + .withCredentials(new AWSStaticCredentialsProvider(awsCreds)) + .withRegion(bucketRegion) + .build(); + } + + ClientConfiguration clientConfiguration = new ClientConfiguration(); + clientConfiguration.setSignerOverride("AWSS3V4SignerType"); + + return AmazonS3ClientBuilder + .standard() + .withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(endpoint, bucketRegion)) + .withPathStyleAccessEnabled(true) + .withClientConfiguration(clientConfiguration) + .withCredentials(new AWSStaticCredentialsProvider(awsCreds)) + .build(); + } + + /** + * @return {@link S3Config} for convenience. The part size should not matter in any use case that + * gets an {@link S3Config} from this class. So the default 10 MB is used. 
+ */ + public S3Config getS3Config() { + return new S3Config(endpoint, bucketName, accessKeyId, secretAccessKey, bucketRegion, 10); + } + } diff --git a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/avro/S3AvroWriter.java b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/avro/S3AvroWriter.java index 0eb575671031a..317f434caa306 100644 --- a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/avro/S3AvroWriter.java +++ b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/avro/S3AvroWriter.java @@ -66,7 +66,7 @@ public S3AvroWriter(S3DestinationConfig config, String outputFilename = BaseS3Writer.getOutputFilename(uploadTimestamp, S3Format.AVRO); String objectKey = String.join("/", outputPrefix, outputFilename); - LOGGER.info("Full S3 path for stream '{}': {}/{}", stream.getName(), config.getBucketName(), + LOGGER.info("Full S3 path for stream '{}': s3://{}/{}", stream.getName(), config.getBucketName(), objectKey); this.avroRecordFactory = new AvroRecordFactory(schema, nameUpdater); diff --git a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/csv/S3CsvWriter.java b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/csv/S3CsvWriter.java index de0c53ed922bc..5a477b92c0549 100644 --- a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/csv/S3CsvWriter.java +++ b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/csv/S3CsvWriter.java @@ -68,7 +68,7 @@ public S3CsvWriter(S3DestinationConfig config, String outputFilename = BaseS3Writer.getOutputFilename(uploadTimestamp, S3Format.CSV); String objectKey = String.join("/", outputPrefix, outputFilename); - LOGGER.info("Full S3 path for stream '{}': {}/{}", stream.getName(), config.getBucketName(), + LOGGER.info("Full S3 path for stream '{}': s3://{}/{}", stream.getName(), config.getBucketName(), objectKey); this.uploadManager = S3StreamTransferManagerHelper.getDefault(config.getBucketName(), objectKey, s3Client); diff --git a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/jsonl/S3JsonlWriter.java b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/jsonl/S3JsonlWriter.java index 6c28d0a590c2d..fa3c4d4cbfbeb 100644 --- a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/jsonl/S3JsonlWriter.java +++ b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/jsonl/S3JsonlWriter.java @@ -67,7 +67,7 @@ public S3JsonlWriter(S3DestinationConfig config, String outputFilename = BaseS3Writer.getOutputFilename(uploadTimestamp, S3Format.JSONL); String objectKey = String.join("/", outputPrefix, outputFilename); - LOGGER.info("Full S3 path for stream '{}': {}/{}", stream.getName(), config.getBucketName(), + LOGGER.info("Full S3 path for stream '{}': s3://{}/{}", stream.getName(), config.getBucketName(), objectKey); this.uploadManager = S3StreamTransferManagerHelper.getDefault(config.getBucketName(), objectKey, s3Client); diff --git a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/parquet/S3ParquetWriter.java 
b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/parquet/S3ParquetWriter.java index 806852411c920..0f99d5df1dddb 100644 --- a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/parquet/S3ParquetWriter.java +++ b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/parquet/S3ParquetWriter.java @@ -56,6 +56,8 @@ public class S3ParquetWriter extends BaseS3Writer implements S3Writer { private final ParquetWriter parquetWriter; private final AvroRecordFactory avroRecordFactory; + private final Schema parquetSchema; + private final String outputFilename; public S3ParquetWriter(S3DestinationConfig config, AmazonS3 s3Client, @@ -66,10 +68,10 @@ public S3ParquetWriter(S3DestinationConfig config, throws URISyntaxException, IOException { super(config, s3Client, configuredStream); - String outputFilename = BaseS3Writer.getOutputFilename(uploadTimestamp, S3Format.PARQUET); + this.outputFilename = BaseS3Writer.getOutputFilename(uploadTimestamp, S3Format.PARQUET); String objectKey = String.join("/", outputPrefix, outputFilename); - LOGGER.info("Full S3 path for stream '{}': {}/{}", stream.getName(), config.getBucketName(), + LOGGER.info("Full S3 path for stream '{}': s3://{}/{}", stream.getName(), config.getBucketName(), objectKey); URI uri = new URI( @@ -88,6 +90,7 @@ public S3ParquetWriter(S3DestinationConfig config, .withDictionaryEncoding(formatConfig.isDictionaryEncoding()) .build(); this.avroRecordFactory = new AvroRecordFactory(schema, nameUpdater); + this.parquetSchema = schema; } public static Configuration getHadoopConfig(S3DestinationConfig config) { @@ -105,6 +108,21 @@ public static Configuration getHadoopConfig(S3DestinationConfig config) { return hadoopConfig; } + public Schema getParquetSchema() { + return parquetSchema; + } + + /** + * The file path includes prefix and filename, but does not include the bucket name. + */ + public String getOutputFilePath() { + return outputPrefix + "/" + outputFilename; + } + + public String getOutputFilename() { + return outputFilename; + } + @Override public void write(UUID id, AirbyteRecordMessage recordMessage) throws IOException { parquetWriter.write(avroRecordFactory.getAvroRecord(id, recordMessage)); diff --git a/airbyte-integrations/connectors/destination-gcs/src/test-integration/java/io/airbyte/integrations/destination/gcs/AvroRecordHelper.java b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/util/AvroRecordHelper.java similarity index 94% rename from airbyte-integrations/connectors/destination-gcs/src/test-integration/java/io/airbyte/integrations/destination/gcs/AvroRecordHelper.java rename to airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/util/AvroRecordHelper.java index db7cf31e1d7fe..839fcce27dc3a 100644 --- a/airbyte-integrations/connectors/destination-gcs/src/test-integration/java/io/airbyte/integrations/destination/gcs/AvroRecordHelper.java +++ b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/util/AvroRecordHelper.java @@ -22,7 +22,7 @@ * SOFTWARE. 
*/ -package io.airbyte.integrations.destination.gcs; +package io.airbyte.integrations.destination.s3.util; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ObjectNode; @@ -31,6 +31,9 @@ import io.airbyte.integrations.destination.s3.avro.JsonFieldNameUpdater; import io.airbyte.integrations.destination.s3.avro.JsonToAvroSchemaConverter; +/** + * Helper methods for unit tests. This is needed by multiple modules, so it is in the src directory. + */ public class AvroRecordHelper { public static JsonFieldNameUpdater getFieldNameUpdater(String streamName, String namespace, JsonNode streamSchema) { diff --git a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/writer/BaseS3Writer.java b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/writer/BaseS3Writer.java index cf2b2aecb5b85..caba07b4ebc12 100644 --- a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/writer/BaseS3Writer.java +++ b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/writer/BaseS3Writer.java @@ -72,6 +72,10 @@ protected BaseS3Writer(S3DestinationConfig config, this.outputPrefix = S3OutputPathHelper.getOutputPrefix(config.getBucketPath(), stream); } + public String getOutputPrefix() { + return outputPrefix; + } + /** *
* <li>1. Create bucket if necessary.
* <li>2. Under OVERWRITE mode, delete all objects with the output prefix.
  • diff --git a/airbyte-integrations/connectors/destination-s3/src/test-integration/java/io/airbyte/integrations/destination/s3/AvroRecordHelper.java b/airbyte-integrations/connectors/destination-s3/src/test-integration/java/io/airbyte/integrations/destination/s3/AvroRecordHelper.java deleted file mode 100644 index 83b3d5134a97c..0000000000000 --- a/airbyte-integrations/connectors/destination-s3/src/test-integration/java/io/airbyte/integrations/destination/s3/AvroRecordHelper.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * MIT License - * - * Copyright (c) 2020 Airbyte - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in all - * copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - * SOFTWARE. - */ - -package io.airbyte.integrations.destination.s3; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import io.airbyte.commons.util.MoreIterators; -import io.airbyte.integrations.base.JavaBaseConstants; -import io.airbyte.integrations.destination.s3.avro.JsonFieldNameUpdater; -import io.airbyte.integrations.destination.s3.avro.JsonToAvroSchemaConverter; - -public class AvroRecordHelper { - - public static JsonFieldNameUpdater getFieldNameUpdater(String streamName, String namespace, JsonNode streamSchema) { - JsonToAvroSchemaConverter schemaConverter = new JsonToAvroSchemaConverter(); - schemaConverter.getAvroSchema(streamSchema, streamName, namespace, true); - return new JsonFieldNameUpdater(schemaConverter.getStandardizedNames()); - } - - /** - * Convert an Airbyte JsonNode from Avro / Parquet Record to a plain one. - *
- * <li>Remove the airbyte id and emission timestamp fields.
- * <li>Remove null fields that must exist in Parquet but does not in original Json.
  • This - * function mutates the input Json. - */ - public static JsonNode pruneAirbyteJson(JsonNode input) { - ObjectNode output = (ObjectNode) input; - - // Remove Airbyte columns. - output.remove(JavaBaseConstants.COLUMN_NAME_AB_ID); - output.remove(JavaBaseConstants.COLUMN_NAME_EMITTED_AT); - - // Fields with null values does not exist in the original Json but only in Parquet. - for (String field : MoreIterators.toList(output.fieldNames())) { - if (output.get(field) == null || output.get(field).isNull()) { - output.remove(field); - } - } - - return output; - } - -} diff --git a/airbyte-integrations/connectors/destination-s3/src/test-integration/java/io/airbyte/integrations/destination/s3/S3AvroDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-s3/src/test-integration/java/io/airbyte/integrations/destination/s3/S3AvroDestinationAcceptanceTest.java index db7ca2343784a..9d12d8cbd5cc1 100644 --- a/airbyte-integrations/connectors/destination-s3/src/test-integration/java/io/airbyte/integrations/destination/s3/S3AvroDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-s3/src/test-integration/java/io/airbyte/integrations/destination/s3/S3AvroDestinationAcceptanceTest.java @@ -30,6 +30,7 @@ import com.fasterxml.jackson.databind.ObjectReader; import io.airbyte.commons.json.Jsons; import io.airbyte.integrations.destination.s3.avro.JsonFieldNameUpdater; +import io.airbyte.integrations.destination.s3.util.AvroRecordHelper; import java.util.LinkedList; import java.util.List; import org.apache.avro.file.DataFileReader; diff --git a/airbyte-integrations/connectors/destination-s3/src/test-integration/java/io/airbyte/integrations/destination/s3/S3DestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-s3/src/test-integration/java/io/airbyte/integrations/destination/s3/S3DestinationAcceptanceTest.java index 9f0bae2af0fa7..daec38b94bbfc 100644 --- a/airbyte-integrations/connectors/destination-s3/src/test-integration/java/io/airbyte/integrations/destination/s3/S3DestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-s3/src/test-integration/java/io/airbyte/integrations/destination/s3/S3DestinationAcceptanceTest.java @@ -24,13 +24,9 @@ package io.airbyte.integrations.destination.s3; -import com.amazonaws.ClientConfiguration; -import com.amazonaws.auth.AWSCredentials; -import com.amazonaws.auth.AWSStaticCredentialsProvider; -import com.amazonaws.auth.BasicAWSCredentials; -import com.amazonaws.client.builder.AwsClientBuilder; +import static io.airbyte.integrations.destination.s3.S3DestinationConstants.NAME_TRANSFORMER; + import com.amazonaws.services.s3.AmazonS3; -import com.amazonaws.services.s3.AmazonS3ClientBuilder; import com.amazonaws.services.s3.model.DeleteObjectsRequest; import com.amazonaws.services.s3.model.DeleteObjectsRequest.KeyVersion; import com.amazonaws.services.s3.model.DeleteObjectsResult; @@ -53,8 +49,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static io.airbyte.integrations.destination.s3.S3DestinationConstants.NAME_TRANSFORMER; - /** * When adding a new S3 destination acceptance test, extend this class and do the following: *
* <li>Implement {@link #getFormatConfig} that returns a {@link S3FormatConfig}
  • @@ -146,27 +140,7 @@ protected void setup(TestDestinationEnv testEnv) { this.config = S3DestinationConfig.getS3DestinationConfig(configJson); LOGGER.info("Test full path: {}/{}", config.getBucketName(), config.getBucketPath()); - AWSCredentials awsCreds = new BasicAWSCredentials(config.getAccessKeyId(), - config.getSecretAccessKey()); - String endpoint = config.getEndpoint(); - - if (endpoint.isEmpty()) { - this.s3Client = AmazonS3ClientBuilder.standard() - .withCredentials(new AWSStaticCredentialsProvider(awsCreds)) - .withRegion(config.getBucketRegion()) - .build(); - } else { - ClientConfiguration clientConfiguration = new ClientConfiguration(); - clientConfiguration.setSignerOverride("AWSS3V4SignerType"); - - this.s3Client = AmazonS3ClientBuilder - .standard() - .withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(endpoint, config.getBucketRegion())) - .withPathStyleAccessEnabled(true) - .withClientConfiguration(clientConfiguration) - .withCredentials(new AWSStaticCredentialsProvider(awsCreds)) - .build(); - } + this.s3Client = config.getS3Client(); } /** diff --git a/airbyte-integrations/connectors/destination-s3/src/test-integration/java/io/airbyte/integrations/destination/s3/S3ParquetDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-s3/src/test-integration/java/io/airbyte/integrations/destination/s3/S3ParquetDestinationAcceptanceTest.java index 6166a8869bd64..07b0bba1ef825 100644 --- a/airbyte-integrations/connectors/destination-s3/src/test-integration/java/io/airbyte/integrations/destination/s3/S3ParquetDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-s3/src/test-integration/java/io/airbyte/integrations/destination/s3/S3ParquetDestinationAcceptanceTest.java @@ -31,6 +31,7 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.integrations.destination.s3.avro.JsonFieldNameUpdater; import io.airbyte.integrations.destination.s3.parquet.S3ParquetWriter; +import io.airbyte.integrations.destination.s3.util.AvroRecordHelper; import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; diff --git a/airbyte-integrations/connectors/source-close-com/README.md b/airbyte-integrations/connectors/source-close-com/README.md index 50beae86ad621..ecbd21be28127 100644 --- a/airbyte-integrations/connectors/source-close-com/README.md +++ b/airbyte-integrations/connectors/source-close-com/README.md @@ -95,7 +95,7 @@ Place custom tests inside `integration_tests/` folder, then, from the connector python -m pytest integration_tests ``` #### Acceptance Tests -Customize `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](source-acceptance-tests.md) for more information. +Customize `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. 
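For reference, `integration_tests/acceptance.py` is normally just a session-scoped pytest fixture plus the acceptance-test plugin registration. The sketch below is illustrative only (the plugin module name is taken from the connector template of this period, so double-check it against your template), with comments marking where per-connector setup and teardown would go:

```python
# integration_tests/acceptance.py -- illustrative sketch, not part of this change
import pytest

# Plugin registration as used by the Python connector template; verify against your template.
pytest_plugins = ("source_acceptance_test.plugin",)


@pytest.fixture(scope="session", autouse=True)
def connector_setup():
    """Create any external resources the acceptance tests need, then clean them up."""
    # e.g. create test records via the API here (hypothetical per-connector setup)
    yield
    # e.g. delete those test records here (hypothetical per-connector teardown)
```
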
To run your integration tests with acceptance tests, from the connector root, run ``` diff --git a/airbyte-integrations/connectors/source-close-com/acceptance-test-config.yml b/airbyte-integrations/connectors/source-close-com/acceptance-test-config.yml index 9030e6d18a9e4..c166de2f2c6a6 100644 --- a/airbyte-integrations/connectors/source-close-com/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-close-com/acceptance-test-config.yml @@ -1,4 +1,4 @@ -# See [Source Acceptance Tests](https://docs.airbyte.io/contributing-to-airbyte/building-new-connector/source-acceptance-tests.md) +# See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) # for more information about how to configure these tests connector_image: airbyte/source-close-com:dev tests: diff --git a/airbyte-integrations/connectors/source-facebook-marketing/Dockerfile b/airbyte-integrations/connectors/source-facebook-marketing/Dockerfile index 4d37727352262..2edff54bae844 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/Dockerfile +++ b/airbyte-integrations/connectors/source-facebook-marketing/Dockerfile @@ -12,5 +12,5 @@ RUN pip install . ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.2.15 +LABEL io.airbyte.version=0.2.17 LABEL io.airbyte.name=airbyte/source-facebook-marketing diff --git a/airbyte-integrations/connectors/source-facebook-marketing/acceptance-test-config.yml b/airbyte-integrations/connectors/source-facebook-marketing/acceptance-test-config.yml index 2e84915bed60d..ac45f955c0cba 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-facebook-marketing/acceptance-test-config.yml @@ -8,18 +8,19 @@ tests: - config_path: "secrets/config.json" status: "succeed" - config_path: "integration_tests/invalid_config.json" - status: "exception" + status: "failed" discovery: - config_path: "secrets/config.json" basic_read: - config_path: "secrets/config.json" configured_catalog_path: "integration_tests/configured_catalog.json" - # FB serializes numeric fields as strings - validate_schema: no + timeout_seconds: 600 incremental: - config_path: "secrets/config.json" configured_catalog_path: "integration_tests/configured_catalog_without_insights.json" - future_state_path: "integration_tests/abnormal_state.json" + future_state_path: "integration_tests/future_state.json" full_refresh: - config_path: "secrets/config.json" - configured_catalog_path: "integration_tests/configured_catalog.json" + # TODO Change below `configured_catalog_without_insights.json` to `configured_catalog.json` after October 7 2021 + # because all running campaigns should be finished by that time. 
+ configured_catalog_path: "integration_tests/configured_catalog_without_insights.json" diff --git a/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/future_state.json similarity index 91% rename from airbyte-integrations/connectors/source-facebook-marketing/integration_tests/abnormal_state.json rename to airbyte-integrations/connectors/source-facebook-marketing/integration_tests/future_state.json index 35ed87f2c13a9..f1dfc2e605e05 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/abnormal_state.json +++ b/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/future_state.json @@ -42,9 +42,5 @@ "ads_insights_action_types": { "date_start": "2121-07-25T13:34:26Z", "include_deleted": true - }, - "ads_insights_action_types": { - "date_start": "2021-07-25T13:34:26Z", - "include_deleted": true } } diff --git a/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/invalid_config.json index f655e2b17a218..f7b8210b9e5db 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/invalid_config.json +++ b/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/invalid_config.json @@ -2,5 +2,5 @@ "start_date": "2021-04-01T00:00:00Z", "account_id": "account", "access_token": "wrong_token", - "include_deleted": "true" + "include_deleted": true } diff --git a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/schemas/ad_sets.json b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/schemas/ad_sets.json index b5ad2fcae6f71..bb203911eb613 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/schemas/ad_sets.json +++ b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/schemas/ad_sets.json @@ -48,20 +48,10 @@ "format": "date-time" }, "daily_budget": { - "type": ["null", "number"], - "maximum": 100000000000000000000000000000000, - "minimum": -100000000000000000000000000000000, - "multipleOf": 0.000001, - "exclusiveMaximum": true, - "exclusiveMinimum": true + "type": ["null", "number"] }, "budget_remaining": { - "type": ["null", "number"], - "maximum": 100000000000000000000000000000000, - "minimum": -100000000000000000000000000000000, - "multipleOf": 0.000001, - "exclusiveMaximum": true, - "exclusiveMinimum": true + "type": ["null", "number"] }, "effective_status": { "type": ["null", "string"] @@ -78,12 +68,7 @@ "format": "date-time" }, "lifetime_budget": { - "type": ["null", "number"], - "maximum": 100000000000000000000000000000000, - "minimum": -100000000000000000000000000000000, - "multipleOf": 0.000001, - "exclusiveMaximum": true, - "exclusiveMinimum": true + "type": ["null", "number"] }, "targeting": { "$ref": "targeting.json" }, "bid_info": { diff --git a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/schemas/ads_insights.json b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/schemas/ads_insights.json index 3361f870bc9bd..1428e29633076 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/schemas/ads_insights.json +++ 
b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/schemas/ads_insights.json @@ -148,18 +148,18 @@ "type": ["null", "number"] }, "created_time": { - "format": "date-time", + "format": "date", "type": ["null", "string"] }, "ctr": { "type": ["null", "number"] }, "date_start": { - "format": "date-time", + "format": "date", "type": ["null", "string"] }, "date_stop": { - "format": "date-time", + "format": "date", "type": ["null", "string"] }, "engagement_rate_ranking": { @@ -214,7 +214,7 @@ "type": ["null", "number"] }, "instant_experience_outbound_clicks": { - "type": ["null", "integer"] + "$ref": "ads_action_stats.json" }, "labels": { "type": ["null", "string"] @@ -280,7 +280,7 @@ "$ref": "ads_action_stats.json" }, "updated_time": { - "format": "date-time", + "format": "date", "type": ["null", "string"] }, "video_15_sec_watched_actions": { @@ -311,7 +311,7 @@ "$ref": "ads_action_stats.json" }, "video_play_actions": { - "$ref": "ads_histogram_stats.json" + "$ref": "ads_action_stats.json" }, "video_play_curve_actions": { "$ref": "ads_histogram_stats.json" diff --git a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/schemas/shared/targeting.json b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/schemas/shared/targeting.json index 62137a1909196..b17fd1c819fc8 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/schemas/shared/targeting.json +++ b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/schemas/shared/targeting.json @@ -21,7 +21,7 @@ "$ref": "targeting.json#/definitions/id_name_pairs" }, "home_type": { - "$ref$": "targeting.json#/definitions/id_name_pairs" + "$ref": "targeting.json#/definitions/id_name_pairs" }, "friends_of_connections": { "$ref": "targeting.json#/definitions/id_name_pairs" diff --git a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams.py b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams.py index 5ea87e9564d50..794aaa6ddbec7 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams.py @@ -27,7 +27,7 @@ from abc import ABC from collections import deque from datetime import datetime -from typing import Any, Iterable, Iterator, List, Mapping, MutableMapping, Optional, Sequence +from typing import Any, Iterable, Iterator, List, Mapping, MutableMapping, Optional, Sequence, Union import backoff import pendulum @@ -46,7 +46,7 @@ backoff_policy = retry_pattern(backoff.expo, FacebookRequestError, max_tries=5, factor=5) -def remove_params_from_url(url: str, params: [str]) -> str: +def remove_params_from_url(url: str, params: List[str]) -> str: """ Parses a URL and removes the query parameters specified in params :param url: URL @@ -110,7 +110,63 @@ def read_records( ) -> Iterable[Mapping[str, Any]]: """Main read method used by CDK""" for record in self._read_records(params=self.request_params(stream_state=stream_state)): - yield self._extend_record(record, fields=self.fields) + yield self.transform(self._extend_record(record, fields=self.fields)) + + def transform(self, record: Mapping[str, Any]) -> Mapping[str, Any]: + """ + Use this method to remove update fields types in record according to schema. 
+ """ + schema = self.get_json_schema() + self.convert_to_schema_types(record, schema["properties"]) + return record + + def get_python_type(self, _types: Union[list, str]) -> tuple: + """Converts types from schema to python types. Examples: + - `["string", "null"]` will be converted to `(str,)` + - `["array", "string", "null"]` will be converted to `(list, str,)` + - `"boolean"` will be converted to `(bool,)` + """ + types_mapping = { + "string": str, + "number": float, + "integer": int, + "object": dict, + "array": list, + "boolean": bool, + } + + if isinstance(_types, list): + return tuple([types_mapping[t] for t in _types if t != "null"]) + + return (types_mapping[_types],) + + def convert_to_schema_types(self, record: Mapping[str, Any], schema: Mapping[str, Any]): + """ + Converts values' type from record to appropriate type from schema. For example, let's say we have `reach` value + and in schema it has `number` type because it's, well, a number, but from API we are getting `reach` as string. + This function fixes this and converts `reach` value from `string` to `number`. Same for all fields and all + types from schema. + """ + if not schema: + return + + for key, value in record.items(): + if key not in schema: + continue + + if isinstance(value, dict): + self.convert_to_schema_types(record=value, schema=schema[key].get("properties", {})) + elif isinstance(value, list) and "items" in schema[key]: + for record_list_item in value: + if list in self.get_python_type(schema[key]["items"]["type"]): + # TODO Currently we don't have support for list of lists. + pass + elif dict in self.get_python_type(schema[key]["items"]["type"]): + self.convert_to_schema_types(record=record_list_item, schema=schema[key]["items"]["properties"]) + elif not isinstance(record_list_item, self.get_python_type(schema[key]["items"]["type"])): + record[key] = self.get_python_type(schema[key]["items"]["type"])[0](record_list_item) + elif not isinstance(value, self.get_python_type(schema[key]["type"])): + record[key] = self.get_python_type(schema[key]["type"])[0](value) def _read_records(self, params: Mapping[str, Any]) -> Iterable: """Wrapper around query to backoff errors. @@ -298,7 +354,7 @@ class AdsInsights(FBMarketingIncrementalStream): MAX_WAIT_TO_START = pendulum.duration(minutes=5) MAX_WAIT_TO_FINISH = pendulum.duration(minutes=30) MAX_ASYNC_SLEEP = pendulum.duration(minutes=5) - MAX_ASYNC_JOBS = 3 + MAX_ASYNC_JOBS = 10 INSIGHTS_RETENTION_PERIOD = pendulum.duration(days=37 * 30) action_breakdowns = ALL_ACTION_BREAKDOWNS @@ -325,7 +381,7 @@ def read_records( # because we query `lookback_window` days before actual cursor we might get records older then cursor for obj in result.get_result(): - yield obj.export_all_data() + yield self.transform(obj.export_all_data()) def stream_slices(self, stream_state: Mapping[str, Any] = None, **kwargs) -> Iterable[Optional[Mapping[str, Any]]]: """Slice by date periods and schedule async job for each period, run at most MAX_ASYNC_JOBS jobs at the same time. 
@@ -356,7 +412,7 @@ def wait_for_job(self, job) -> AdReportRun: job = job.api_get() job_progress_pct = job["async_percent_completion"] job_id = job["report_run_id"] - self.logger.info(f"ReportRunId {job_id} is {job_progress_pct}% complete") + self.logger.info(f"ReportRunId {job_id} is {job_progress_pct}% complete ({job['async_status']})") runtime = pendulum.now() - start_time if job["async_status"] == "Job Completed": diff --git a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_streams.py index b41e09faeeb78..446c12cf316a1 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_streams.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_streams.py @@ -21,6 +21,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. # + from source_facebook_marketing.streams import remove_params_from_url diff --git a/airbyte-integrations/connectors/source-github/Dockerfile b/airbyte-integrations/connectors/source-github/Dockerfile index 60e4d1628891c..2a2a50789a9ca 100644 --- a/airbyte-integrations/connectors/source-github/Dockerfile +++ b/airbyte-integrations/connectors/source-github/Dockerfile @@ -12,5 +12,5 @@ RUN pip install . ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.10 +LABEL io.airbyte.version=0.1.11 LABEL io.airbyte.name=airbyte/source-github diff --git a/airbyte-integrations/connectors/source-github/source_github/streams.py b/airbyte-integrations/connectors/source-github/source_github/streams.py index 0bf9c41f0fec4..7a8c50e1550cd 100644 --- a/airbyte-integrations/connectors/source-github/source_github/streams.py +++ b/airbyte-integrations/connectors/source-github/source_github/streams.py @@ -56,6 +56,16 @@ class GithubStream(HttpStream, ABC): cache = request_cache() url_base = "https://api.github.com/" + # To prevent dangerous behavior, the `vcr` library prohibits the use of nested caching. + # Here's an example of dangerous behavior: + # cache = Cassette.use('whatever') + # with cache: + # with cache: + # pass + # + # Therefore, we will only use `cache` for the top-level stream, so as not to cause possible difficulties. + top_level_stream = True + primary_key = "id" # GitHub pagination could be from 1 to 100. 
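# Aside (illustrative only, not part of the change): the `top_level_stream` flag above boils down
# to "enter the shared cassette only for the stream that owns it". A generic way to express that
# guard is a conditional context manager; the cassette class below is a stand-in, since a real vcr
# cassette must not be entered while it is already active.
from contextlib import nullcontext

class _FakeCassette:

    def __enter__(self):
        return self

    def __exit__(self, *exc):
        return False

def _read(records, owns_cache, cache):
    with cache if owns_cache else nullcontext():
        yield from records

_cassette = _FakeCassette()
list(_read(["a", "b"], owns_cache=True, cache=_cassette))  # top-level stream: cassette entered once
list(_read(["c"], owns_cache=False, cache=_cassette))      # nested stream: cassette not re-entered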
@@ -110,7 +120,11 @@ def backoff_time(self, response: requests.Response) -> Union[int, float]: def read_records(self, stream_slice: Mapping[str, any] = None, **kwargs) -> Iterable[Mapping[str, Any]]: try: - yield from super().read_records(stream_slice=stream_slice, **kwargs) + if self.top_level_stream: + with self.cache: + yield from super().read_records(stream_slice=stream_slice, **kwargs) + else: + yield from super().read_records(stream_slice=stream_slice, **kwargs) except HTTPError as e: error_msg = str(e) @@ -310,6 +324,8 @@ class PullRequestStats(GithubStream): API docs: https://docs.github.com/en/rest/reference/pulls#get-a-pull-request """ + top_level_stream = False + @property def record_keys(self) -> List[str]: return list(self.get_json_schema()["properties"].keys()) @@ -338,6 +354,8 @@ class Reviews(GithubStream): API docs: https://docs.github.com/en/rest/reference/pulls#list-reviews-for-a-pull-request """ + top_level_stream = False + def path( self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None ) -> str: @@ -505,8 +523,7 @@ def read_records(self, stream_state: Mapping[str, Any] = None, **kwargs) -> Iter Decide if this a first read or not by the presence of the state object """ self._first_read = not bool(stream_state) - with self.cache: - yield from super().read_records(stream_state=stream_state, **kwargs) + yield from super().read_records(stream_state=stream_state, **kwargs) def path(self, stream_slice: Mapping[str, Any] = None, **kwargs) -> str: return f"repos/{stream_slice['repository']}/pulls" @@ -697,6 +714,7 @@ def path(self, stream_slice: Mapping[str, Any] = None, **kwargs) -> str: class ReactionStream(GithubStream, ABC): parent_key = "id" + top_level_stream = False def __init__(self, **kwargs): self._stream_kwargs = deepcopy(kwargs) diff --git a/airbyte-integrations/connectors/source-google-search-console/README.md b/airbyte-integrations/connectors/source-google-search-console/README.md index 0ef5cd71a9a44..5c280d3bc910b 100755 --- a/airbyte-integrations/connectors/source-google-search-console/README.md +++ b/airbyte-integrations/connectors/source-google-search-console/README.md @@ -95,7 +95,7 @@ Place custom tests inside `integration_tests/` folder, then, from the connector python -m pytest integration_tests ``` #### Acceptance Tests -Customize `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](source-acceptance-tests.md) for more information. +Customize `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. 
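A minimal `integration_tests/acceptance.py` for that purpose typically looks like the sketch below; this assumes the standard `source-acceptance-test` pytest plugin, and the fixture body is only a placeholder rather than this connector's actual setup:

```
# integration_tests/acceptance.py
import pytest

pytest_plugins = ("source_acceptance_test.plugin",)


@pytest.fixture(scope="session", autouse=True)
def connector_setup():
    """Create any external resources the acceptance tests need, then tear them down."""
    # placeholder: create test resources / seed data here
    yield
    # placeholder: clean up whatever was created above
```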
To run your integration tests with acceptance tests, from the connector root, run ``` diff --git a/airbyte-integrations/connectors/source-google-search-console/acceptance-test-config.yml b/airbyte-integrations/connectors/source-google-search-console/acceptance-test-config.yml index 231e24d86b0c7..aeb4e3f7c722c 100755 --- a/airbyte-integrations/connectors/source-google-search-console/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-google-search-console/acceptance-test-config.yml @@ -1,4 +1,4 @@ -# See [Source Acceptance Tests](https://docs.airbyte.io/contributing-to-airbyte/building-new-connector/source-acceptance-tests.md) +# See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) # for more information about how to configure these tests connector_image: airbyte/source-google-search-console:dev tests: diff --git a/airbyte-integrations/connectors/source-google-search-console/credentials/credentials.json b/airbyte-integrations/connectors/source-google-search-console/credentials/credentials.json index c24ccd87710b2..6b0d672141935 100644 --- a/airbyte-integrations/connectors/source-google-search-console/credentials/credentials.json +++ b/airbyte-integrations/connectors/source-google-search-console/credentials/credentials.json @@ -2,4 +2,4 @@ "client_id": "YOUR_CLIENT_ID", "client_secret": "YOUR_CLIENT_SECRET", "redirect_uri": "YOUR_REDIRECTED_URI" -} \ No newline at end of file +} diff --git a/airbyte-integrations/connectors/source-google-search-console/integration_tests/catalog.json b/airbyte-integrations/connectors/source-google-search-console/integration_tests/catalog.json index ada99aded37ea..71c9382b7384f 100755 --- a/airbyte-integrations/connectors/source-google-search-console/integration_tests/catalog.json +++ b/airbyte-integrations/connectors/source-google-search-console/integration_tests/catalog.json @@ -4,9 +4,7 @@ "stream": { "name": "sites", "json_schema": {}, - "supported_sync_modes": [ - "full_refresh" - ] + "supported_sync_modes": ["full_refresh"] }, "sync_mode": "full_refresh", "destination_sync_mode": "overwrite" @@ -15,9 +13,7 @@ "stream": { "name": "sitemaps", "json_schema": {}, - "supported_sync_modes": [ - "full_refresh" - ] + "supported_sync_modes": ["full_refresh"] }, "sync_mode": "full_refresh", "destination_sync_mode": "overwrite" @@ -26,10 +22,7 @@ "stream": { "name": "search_analytics_by_country", "json_schema": {}, - "supported_sync_modes": [ - "full_refresh", - "incremental" - ] + "supported_sync_modes": ["full_refresh", "incremental"] }, "sync_mode": "full_refresh", "destination_sync_mode": "overwrite" @@ -38,10 +31,7 @@ "stream": { "name": "search_analytics_by_date", "json_schema": {}, - "supported_sync_modes": [ - "full_refresh", - "incremental" - ] + "supported_sync_modes": ["full_refresh", "incremental"] }, "sync_mode": "full_refresh", "destination_sync_mode": "overwrite" @@ -50,10 +40,7 @@ "stream": { "name": "search_analytics_by_device", "json_schema": {}, - "supported_sync_modes": [ - "full_refresh", - "incremental" - ] + "supported_sync_modes": ["full_refresh", "incremental"] }, "sync_mode": "full_refresh", "destination_sync_mode": "overwrite" @@ -62,10 +49,7 @@ "stream": { "name": "search_analytics_by_page", "json_schema": {}, - "supported_sync_modes": [ - "full_refresh", - "incremental" - ] + "supported_sync_modes": ["full_refresh", "incremental"] }, "sync_mode": "full_refresh", "destination_sync_mode": "overwrite" @@ -74,10 +58,7 @@ "stream": { "name": 
"search_analytics_by_query", "json_schema": {}, - "supported_sync_modes": [ - "full_refresh", - "incremental" - ] + "supported_sync_modes": ["full_refresh", "incremental"] }, "sync_mode": "full_refresh", "destination_sync_mode": "overwrite" @@ -86,10 +67,7 @@ "stream": { "name": "search_analytics_all_fields", "json_schema": {}, - "supported_sync_modes": [ - "full_refresh", - "incremental" - ] + "supported_sync_modes": ["full_refresh", "incremental"] }, "sync_mode": "full_refresh", "destination_sync_mode": "overwrite" diff --git a/airbyte-integrations/connectors/source-google-search-console/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-google-search-console/integration_tests/configured_catalog.json index 501f4f2c844fd..4484f24c8e632 100755 --- a/airbyte-integrations/connectors/source-google-search-console/integration_tests/configured_catalog.json +++ b/airbyte-integrations/connectors/source-google-search-console/integration_tests/configured_catalog.json @@ -4,114 +4,72 @@ "stream": { "name": "search_analytics_by_date", "json_schema": {}, - "supported_sync_modes": [ - "full_refresh", - "incremental" - ], + "supported_sync_modes": ["full_refresh", "incremental"], "source_defined_cursor": true, - "default_cursor_field": [ - "date" - ] + "default_cursor_field": ["date"] }, "sync_mode": "incremental", - "cursor_field": [ - "date" - ], + "cursor_field": ["date"], "destination_sync_mode": "append" }, { "stream": { "name": "search_analytics_by_country", "json_schema": {}, - "supported_sync_modes": [ - "full_refresh", - "incremental" - ], + "supported_sync_modes": ["full_refresh", "incremental"], "source_defined_cursor": true, - "default_cursor_field": [ - "date" - ] + "default_cursor_field": ["date"] }, "sync_mode": "incremental", - "cursor_field": [ - "date" - ], + "cursor_field": ["date"], "destination_sync_mode": "append" }, { "stream": { "name": "search_analytics_by_device", "json_schema": {}, - "supported_sync_modes": [ - "full_refresh", - "incremental" - ], + "supported_sync_modes": ["full_refresh", "incremental"], "source_defined_cursor": true, - "default_cursor_field": [ - "date" - ] + "default_cursor_field": ["date"] }, "sync_mode": "incremental", - "cursor_field": [ - "date" - ], + "cursor_field": ["date"], "destination_sync_mode": "append" }, { "stream": { "name": "search_analytics_by_page", "json_schema": {}, - "supported_sync_modes": [ - "full_refresh", - "incremental" - ], + "supported_sync_modes": ["full_refresh", "incremental"], "source_defined_cursor": true, - "default_cursor_field": [ - "date" - ] + "default_cursor_field": ["date"] }, "sync_mode": "incremental", - "cursor_field": [ - "date" - ], + "cursor_field": ["date"], "destination_sync_mode": "append" }, { "stream": { "name": "search_analytics_by_query", "json_schema": {}, - "supported_sync_modes": [ - "full_refresh", - "incremental" - ], + "supported_sync_modes": ["full_refresh", "incremental"], "source_defined_cursor": true, - "default_cursor_field": [ - "date" - ] + "default_cursor_field": ["date"] }, "sync_mode": "incremental", - "cursor_field": [ - "date" - ], + "cursor_field": ["date"], "destination_sync_mode": "append" }, { "stream": { "name": "search_analytics_all_fields", "json_schema": {}, - "supported_sync_modes": [ - "full_refresh", - "incremental" - ], + "supported_sync_modes": ["full_refresh", "incremental"], "source_defined_cursor": true, - "default_cursor_field": [ - "date" - ] + "default_cursor_field": ["date"] }, "sync_mode": "incremental", - "cursor_field": [ - "date" - 
], + "cursor_field": ["date"], "destination_sync_mode": "append" } ] diff --git a/airbyte-integrations/connectors/source-google-search-console/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-google-search-console/integration_tests/invalid_config.json index 115161f4292a6..d7f3ac088bc24 100755 --- a/airbyte-integrations/connectors/source-google-search-console/integration_tests/invalid_config.json +++ b/airbyte-integrations/connectors/source-google-search-console/integration_tests/invalid_config.json @@ -1,8 +1,5 @@ { - "site_urls": [ - "https://example1.com", - "https://example2.com" - ], + "site_urls": ["https://example1.com", "https://example2.com"], "start_date": "2021-05-01", "end_date": "2021-05-31", "authorization": { diff --git a/airbyte-integrations/connectors/source-google-search-console/sample_files/sample_config.json b/airbyte-integrations/connectors/source-google-search-console/sample_files/sample_config.json index 21730b0fc88f3..689fb05a2e857 100755 --- a/airbyte-integrations/connectors/source-google-search-console/sample_files/sample_config.json +++ b/airbyte-integrations/connectors/source-google-search-console/sample_files/sample_config.json @@ -1,8 +1,5 @@ { - "site_urls": [ - "https://example1.com", - "https://example2.com" - ], + "site_urls": ["https://example1.com", "https://example2.com"], "start_date": "2021-05-01", "end_date": "2021-10-10", "authorization": { diff --git a/airbyte-integrations/connectors/source-google-search-console/source_google_search_console/schemas/search_analytics_all_fields.json b/airbyte-integrations/connectors/source-google-search-console/source_google_search_console/schemas/search_analytics_all_fields.json index ba9604297d6da..89fe0fe5e9a40 100755 --- a/airbyte-integrations/connectors/source-google-search-console/source_google_search_console/schemas/search_analytics_all_fields.json +++ b/airbyte-integrations/connectors/source-google-search-console/source_google_search_console/schemas/search_analytics_all_fields.json @@ -3,72 +3,39 @@ "type": "object", "properties": { "site_url": { - "type": [ - "null", - "string" - ] + "type": ["null", "string"] }, "search_type": { - "type": [ - "null", - "string" - ] + "type": ["null", "string"] }, "date": { - "type": [ - "null", - "string" - ], + "type": ["null", "string"], "format": "date" }, "country": { - "type": [ - "null", - "string" - ] + "type": ["null", "string"] }, "device": { - "type": [ - "null", - "string" - ] + "type": ["null", "string"] }, "page": { - "type": [ - "null", - "string" - ] + "type": ["null", "string"] }, "query": { - "type": [ - "null", - "string" - ] + "type": ["null", "string"] }, "clicks": { - "type": [ - "null", - "integer" - ] + "type": ["null", "integer"] }, "impressions": { - "type": [ - "null", - "integer" - ] + "type": ["null", "integer"] }, "ctr": { - "type": [ - "null", - "number" - ], + "type": ["null", "number"], "multipleOf": 1e-25 }, "position": { - "type": [ - "null", - "number" - ], + "type": ["null", "number"], "multipleOf": 1e-25 } } diff --git a/airbyte-integrations/connectors/source-google-search-console/source_google_search_console/schemas/search_analytics_by_country.json b/airbyte-integrations/connectors/source-google-search-console/source_google_search_console/schemas/search_analytics_by_country.json index 570bdc5b9599e..9e74ea044ec56 100755 --- a/airbyte-integrations/connectors/source-google-search-console/source_google_search_console/schemas/search_analytics_by_country.json +++ 
b/airbyte-integrations/connectors/source-google-search-console/source_google_search_console/schemas/search_analytics_by_country.json @@ -3,54 +3,30 @@ "type": "object", "properties": { "site_url": { - "type": [ - "null", - "string" - ] + "type": ["null", "string"] }, "search_type": { - "type": [ - "null", - "string" - ] + "type": ["null", "string"] }, "date": { - "type": [ - "null", - "string" - ], + "type": ["null", "string"], "format": "date" }, "country": { - "type": [ - "null", - "string" - ] + "type": ["null", "string"] }, "clicks": { - "type": [ - "null", - "integer" - ] + "type": ["null", "integer"] }, "impressions": { - "type": [ - "null", - "integer" - ] + "type": ["null", "integer"] }, "ctr": { - "type": [ - "null", - "number" - ], + "type": ["null", "number"], "multipleOf": 1e-25 }, "position": { - "type": [ - "null", - "number" - ], + "type": ["null", "number"], "multipleOf": 1e-25 } } diff --git a/airbyte-integrations/connectors/source-google-search-console/source_google_search_console/schemas/search_analytics_by_date.json b/airbyte-integrations/connectors/source-google-search-console/source_google_search_console/schemas/search_analytics_by_date.json index e0126de137cc9..76ffa918c9af4 100755 --- a/airbyte-integrations/connectors/source-google-search-console/source_google_search_console/schemas/search_analytics_by_date.json +++ b/airbyte-integrations/connectors/source-google-search-console/source_google_search_console/schemas/search_analytics_by_date.json @@ -3,48 +3,27 @@ "type": "object", "properties": { "site_url": { - "type": [ - "null", - "string" - ] + "type": ["null", "string"] }, "search_type": { - "type": [ - "null", - "string" - ] + "type": ["null", "string"] }, "date": { - "type": [ - "null", - "string" - ], + "type": ["null", "string"], "format": "date" }, "clicks": { - "type": [ - "null", - "integer" - ] + "type": ["null", "integer"] }, "impressions": { - "type": [ - "null", - "integer" - ] + "type": ["null", "integer"] }, "ctr": { - "type": [ - "null", - "number" - ], + "type": ["null", "number"], "multipleOf": 1e-25 }, "position": { - "type": [ - "null", - "number" - ], + "type": ["null", "number"], "multipleOf": 1e-25 } } diff --git a/airbyte-integrations/connectors/source-google-search-console/source_google_search_console/schemas/search_analytics_by_device.json b/airbyte-integrations/connectors/source-google-search-console/source_google_search_console/schemas/search_analytics_by_device.json index d49668002f582..4875135b7f07e 100755 --- a/airbyte-integrations/connectors/source-google-search-console/source_google_search_console/schemas/search_analytics_by_device.json +++ b/airbyte-integrations/connectors/source-google-search-console/source_google_search_console/schemas/search_analytics_by_device.json @@ -3,54 +3,30 @@ "type": "object", "properties": { "site_url": { - "type": [ - "null", - "string" - ] + "type": ["null", "string"] }, "search_type": { - "type": [ - "null", - "string" - ] + "type": ["null", "string"] }, "date": { - "type": [ - "null", - "string" - ], + "type": ["null", "string"], "format": "date" }, "device": { - "type": [ - "null", - "string" - ] + "type": ["null", "string"] }, "clicks": { - "type": [ - "null", - "integer" - ] + "type": ["null", "integer"] }, "impressions": { - "type": [ - "null", - "integer" - ] + "type": ["null", "integer"] }, "ctr": { - "type": [ - "null", - "number" - ], + "type": ["null", "number"], "multipleOf": 1e-25 }, "position": { - "type": [ - "null", - "number" - ], + "type": ["null", "number"], "multipleOf": 1e-25 } } 
diff --git a/airbyte-integrations/connectors/source-google-search-console/source_google_search_console/schemas/search_analytics_by_page.json b/airbyte-integrations/connectors/source-google-search-console/source_google_search_console/schemas/search_analytics_by_page.json index b5fea4a88f555..2a1a3d9af816f 100755 --- a/airbyte-integrations/connectors/source-google-search-console/source_google_search_console/schemas/search_analytics_by_page.json +++ b/airbyte-integrations/connectors/source-google-search-console/source_google_search_console/schemas/search_analytics_by_page.json @@ -3,54 +3,30 @@ "type": "object", "properties": { "site_url": { - "type": [ - "null", - "string" - ] + "type": ["null", "string"] }, "search_type": { - "type": [ - "null", - "string" - ] + "type": ["null", "string"] }, "date": { - "type": [ - "null", - "string" - ], + "type": ["null", "string"], "format": "date" }, "page": { - "type": [ - "null", - "string" - ] + "type": ["null", "string"] }, "clicks": { - "type": [ - "null", - "integer" - ] + "type": ["null", "integer"] }, "impressions": { - "type": [ - "null", - "integer" - ] + "type": ["null", "integer"] }, "ctr": { - "type": [ - "null", - "number" - ], + "type": ["null", "number"], "multipleOf": 1e-25 }, "position": { - "type": [ - "null", - "number" - ], + "type": ["null", "number"], "multipleOf": 1e-25 } } diff --git a/airbyte-integrations/connectors/source-google-search-console/source_google_search_console/schemas/search_analytics_by_query.json b/airbyte-integrations/connectors/source-google-search-console/source_google_search_console/schemas/search_analytics_by_query.json index e353d655f9231..8e84cbda814c4 100755 --- a/airbyte-integrations/connectors/source-google-search-console/source_google_search_console/schemas/search_analytics_by_query.json +++ b/airbyte-integrations/connectors/source-google-search-console/source_google_search_console/schemas/search_analytics_by_query.json @@ -3,54 +3,30 @@ "type": "object", "properties": { "site_url": { - "type": [ - "null", - "string" - ] + "type": ["null", "string"] }, "search_type": { - "type": [ - "null", - "string" - ] + "type": ["null", "string"] }, "date": { - "type": [ - "null", - "string" - ], + "type": ["null", "string"], "format": "date" }, "query": { - "type": [ - "null", - "string" - ] + "type": ["null", "string"] }, "clicks": { - "type": [ - "null", - "integer" - ] + "type": ["null", "integer"] }, "impressions": { - "type": [ - "null", - "integer" - ] + "type": ["null", "integer"] }, "ctr": { - "type": [ - "null", - "number" - ], + "type": ["null", "number"], "multipleOf": 1e-25 }, "position": { - "type": [ - "null", - "number" - ], + "type": ["null", "number"], "multipleOf": 1e-25 } } diff --git a/airbyte-integrations/connectors/source-google-search-console/source_google_search_console/schemas/sitemaps.json b/airbyte-integrations/connectors/source-google-search-console/source_google_search_console/schemas/sitemaps.json index d10f41e05dfdc..e84568e418793 100755 --- a/airbyte-integrations/connectors/source-google-search-console/source_google_search_console/schemas/sitemaps.json +++ b/airbyte-integrations/connectors/source-google-search-console/source_google_search_console/schemas/sitemaps.json @@ -3,54 +3,30 @@ "type": "object", "properties": { "path": { - "type": [ - "null", - "string" - ] + "type": ["null", "string"] }, "lastSubmitted": { - "type": [ - "null", - "string" - ], + "type": ["null", "string"], "format": "date-time" }, "isPending": { - "type": [ - "null", - "boolean" - ] + "type": ["null", 
"boolean"] }, "isSitemapsIndex": { - "type": [ - "null", - "boolean" - ] + "type": ["null", "boolean"] }, "type": { - "type": [ - "null", - "string" - ] + "type": ["null", "string"] }, "lastDownloaded": { - "type": [ - "null", - "string" - ], + "type": ["null", "string"], "format": "date-time" }, "warnings": { - "type": [ - "null", - "string" - ] + "type": ["null", "string"] }, "errors": { - "type": [ - "null", - "string" - ] + "type": ["null", "string"] }, "contents": { "type": "array", @@ -58,25 +34,16 @@ "type": "object", "properties": { "type": { - "type": [ - "null", - "string" - ] + "type": ["null", "string"] }, "submitted": { - "type": [ - "null", - "string" - ] + "type": ["null", "string"] }, "indexed": { - "type": [ - "null", - "string" - ] + "type": ["null", "string"] } } } } } -} \ No newline at end of file +} diff --git a/airbyte-integrations/connectors/source-google-search-console/source_google_search_console/schemas/sites.json b/airbyte-integrations/connectors/source-google-search-console/source_google_search_console/schemas/sites.json index 0d9ad638cf7ad..12b94a4dc0841 100755 --- a/airbyte-integrations/connectors/source-google-search-console/source_google_search_console/schemas/sites.json +++ b/airbyte-integrations/connectors/source-google-search-console/source_google_search_console/schemas/sites.json @@ -3,16 +3,10 @@ "type": "object", "properties": { "siteUrl": { - "type": [ - "null", - "string" - ] + "type": ["null", "string"] }, "permissionLevel": { - "type": [ - "null", - "string" - ] + "type": ["null", "string"] } } } diff --git a/airbyte-integrations/connectors/source-google-search-console/source_google_search_console/spec.json b/airbyte-integrations/connectors/source-google-search-console/source_google_search_console/spec.json index f6e14e322e1f9..e060196070b4b 100755 --- a/airbyte-integrations/connectors/source-google-search-console/source_google_search_console/spec.json +++ b/airbyte-integrations/connectors/source-google-search-console/source_google_search_console/spec.json @@ -5,34 +5,23 @@ "title": "Google Search Console Spec", "type": "object", "additionalProperties": false, - "required": [ - "site_urls", - "start_date", - "authorization" - ], + "required": ["site_urls", "start_date", "authorization"], "properties": { "site_urls": { "type": "array", "description": "Website URLs property; do not include the domain-level property in the list", - "examples": [ - "https://example1.com", - "https://example2.com" - ] + "examples": ["https://example1.com", "https://example2.com"] }, "start_date": { "type": "string", "description": "The date from which you'd like to replicate data in the format YYYY-MM-DD.", - "examples": [ - "2021-01-01" - ], + "examples": ["2021-01-01"], "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" }, "end_date": { "type": "string", "description": "The date from which you'd like to replicate data in the format YYYY-MM-DD. 
Must be greater or equal start_date field", - "examples": [ - "2021-12-12" - ], + "examples": ["2021-12-12"], "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" }, "authorization": { @@ -70,10 +59,7 @@ }, { "type": "object", - "required": [ - "auth_type", - "service_account_info" - ], + "required": ["auth_type", "service_account_info"], "properties": { "auth_type": { "type": "string", diff --git a/airbyte-integrations/connectors/source-lever-hiring/README.md b/airbyte-integrations/connectors/source-lever-hiring/README.md index 8d323f9563303..7198e6590ebd1 100644 --- a/airbyte-integrations/connectors/source-lever-hiring/README.md +++ b/airbyte-integrations/connectors/source-lever-hiring/README.md @@ -1,7 +1,6 @@ # Lever Hiring Source This is the repository for the Lever Hiring source connector, written in Python. -For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/lever-hiring). ## Local development @@ -38,8 +37,7 @@ To build using Gradle, from the Airbyte repository root, run: ``` #### Create credentials -**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/lever-hiring) -to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_lever_hiring/spec.json` file. +**If you are a community contributor**, get the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_lever_hiring/spec.json` file. Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. See `integration_tests/sample_config.json` for a sample config file. diff --git a/airbyte-integrations/connectors/source-posthog/Dockerfile b/airbyte-integrations/connectors/source-posthog/Dockerfile index d610222cd6aaf..bf17d72d4cdbe 100644 --- a/airbyte-integrations/connectors/source-posthog/Dockerfile +++ b/airbyte-integrations/connectors/source-posthog/Dockerfile @@ -12,5 +12,5 @@ RUN pip install . ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.3 +LABEL io.airbyte.version=0.1.4 LABEL io.airbyte.name=airbyte/source-posthog diff --git a/airbyte-integrations/connectors/source-posthog/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-posthog/acceptance-test-docker.sh old mode 100644 new mode 100755 index c522eebbd94e8..4ceedd9e7ba03 --- a/airbyte-integrations/connectors/source-posthog/acceptance-test-docker.sh +++ b/airbyte-integrations/connectors/source-posthog/acceptance-test-docker.sh @@ -1,7 +1,7 @@ #!/usr/bin/env sh # Build latest connector image -docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2) +docker build . 
-t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2):dev # Pull latest acctest image docker pull airbyte/source-acceptance-test:latest diff --git a/airbyte-integrations/connectors/source-posthog/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-posthog/integration_tests/invalid_config.json index 1b3435fb9a6d1..2428e75446a3b 100644 --- a/airbyte-integrations/connectors/source-posthog/integration_tests/invalid_config.json +++ b/airbyte-integrations/connectors/source-posthog/integration_tests/invalid_config.json @@ -1,4 +1,4 @@ { "api_key": "value1", - "start_date": "2021-01-01-T00:00:00.000000Z" + "start_date": "2021-01-01T00:00:00Z" } diff --git a/airbyte-integrations/connectors/source-posthog/source_posthog/source.py b/airbyte-integrations/connectors/source-posthog/source_posthog/source.py index 60e9255282257..1c29c63e818ca 100644 --- a/airbyte-integrations/connectors/source-posthog/source_posthog/source.py +++ b/airbyte-integrations/connectors/source-posthog/source_posthog/source.py @@ -46,13 +46,17 @@ Trends, ) +DEFAULT_BASE_URL = "https://app.posthog.com" + class SourcePosthog(AbstractSource): def check_connection(self, logger: AirbyteLogger, config: Mapping[str, Any]) -> Tuple[bool, Any]: try: _ = pendulum.parse(config["start_date"]) authenticator = TokenAuthenticator(token=config["api_key"]) - stream = PingMe(authenticator=authenticator) + base_url = config.get("base_url", DEFAULT_BASE_URL) + + stream = PingMe(authenticator=authenticator, base_url=base_url) records = stream.read_records(sync_mode=SyncMode.full_refresh) _ = next(records) return True, None @@ -69,15 +73,17 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: This stream was requested to be removed due to this reason. 
""" authenticator = TokenAuthenticator(token=config["api_key"]) + base_url = config.get("base_url", DEFAULT_BASE_URL) + return [ - Annotations(authenticator=authenticator, start_date=config["start_date"]), - Cohorts(authenticator=authenticator), - Events(authenticator=authenticator, start_date=config["start_date"]), - EventsSessions(authenticator=authenticator), - FeatureFlags(authenticator=authenticator), - Insights(authenticator=authenticator), - InsightsPath(authenticator=authenticator), - InsightsSessions(authenticator=authenticator), - Persons(authenticator=authenticator), - Trends(authenticator=authenticator), + Annotations(authenticator=authenticator, start_date=config["start_date"], base_url=base_url), + Cohorts(authenticator=authenticator, base_url=base_url), + Events(authenticator=authenticator, start_date=config["start_date"], base_url=base_url), + EventsSessions(authenticator=authenticator, base_url=base_url), + FeatureFlags(authenticator=authenticator, base_url=base_url), + Insights(authenticator=authenticator, base_url=base_url), + InsightsPath(authenticator=authenticator, base_url=base_url), + InsightsSessions(authenticator=authenticator, base_url=base_url), + Persons(authenticator=authenticator, base_url=base_url), + Trends(authenticator=authenticator, base_url=base_url), ] diff --git a/airbyte-integrations/connectors/source-posthog/source_posthog/spec.json b/airbyte-integrations/connectors/source-posthog/source_posthog/spec.json index 59e6afb5cb1f3..ae7e8beb9e04b 100644 --- a/airbyte-integrations/connectors/source-posthog/source_posthog/spec.json +++ b/airbyte-integrations/connectors/source-posthog/source_posthog/spec.json @@ -12,12 +12,18 @@ "type": "string", "description": "The date from which you'd like to replicate the data", "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$", - "examples": "2021-01-01T00:00:00.000000Z" + "examples": ["2021-01-01T00:00:00Z"] }, "api_key": { "type": "string", "airbyte_secret": true, "description": "API Key. See the docs for information on how to generate this key." + }, + "base_url": { + "type": "string", + "default": "https://app.posthog.com", + "description": "Base PostHog url. 
Defaults to PostHog Cloud (https://app.posthog.com).", + "examples": ["https://posthog.example.com"] } } } diff --git a/airbyte-integrations/connectors/source-posthog/source_posthog/streams.py b/airbyte-integrations/connectors/source-posthog/source_posthog/streams.py index aa783f03823f3..52f8303a161eb 100644 --- a/airbyte-integrations/connectors/source-posthog/source_posthog/streams.py +++ b/airbyte-integrations/connectors/source-posthog/source_posthog/streams.py @@ -34,10 +34,17 @@ class PosthogStream(HttpStream, ABC): - url_base = "https://app.posthog.com/api/" primary_key = "id" data_field = "results" + def __init__(self, base_url: str, **kwargs): + super().__init__(**kwargs) + self._url_base = f"{base_url}/api/" + + @property + def url_base(self) -> str: + return self._url_base + def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: resp_json = response.json() if resp_json.get("next"): @@ -76,8 +83,8 @@ class IncrementalPosthogStream(PosthogStream, ABC): state_checkpoint_interval = math.inf - def __init__(self, start_date: str, **kwargs): - super().__init__(**kwargs) + def __init__(self, base_url: str, start_date: str, **kwargs): + super().__init__(base_url=base_url, **kwargs) self._start_date = start_date self._initial_state = None # we need to keep it here because next_page_token doesn't accept state argument diff --git a/airbyte-integrations/connectors/source-stripe/Dockerfile b/airbyte-integrations/connectors/source-stripe/Dockerfile index b4eb1572a9a0c..c77a33e1f2eee 100644 --- a/airbyte-integrations/connectors/source-stripe/Dockerfile +++ b/airbyte-integrations/connectors/source-stripe/Dockerfile @@ -12,5 +12,5 @@ RUN pip install . ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.16 +LABEL io.airbyte.version=0.1.17 LABEL io.airbyte.name=airbyte/source-stripe diff --git a/airbyte-integrations/connectors/source-stripe/acceptance-test-config.yml b/airbyte-integrations/connectors/source-stripe/acceptance-test-config.yml index af28785c95487..5e7808e11384d 100644 --- a/airbyte-integrations/connectors/source-stripe/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-stripe/acceptance-test-config.yml @@ -14,13 +14,9 @@ tests: - config_path: "secrets/connected_account_config.json" basic_read: # TEST 1 - Reading catalog without invoice_line_items - # Along with this test we expect subscriptions with status in ["active","canceled"] - # If this test fails for some reason, please check the expected_subscriptions_records.json for valid subset of data. 
- config_path: "secrets/config.json" configured_catalog_path: "integration_tests/non_invoice_line_items_catalog.json" timeout_seconds: 3600 - expect_records: - path: "integration_tests/expected_subscriptions_records.txt" # TEST 2 - Reading data from account that has no records for stream Disputes - config_path: "secrets/connected_account_config.json" configured_catalog_path: "integration_tests/non_disputes_events_catalog.json" @@ -29,13 +25,9 @@ tests: - config_path: "secrets/config.json" configured_catalog_path: "integration_tests/non_invoice_line_items_catalog.json" future_state_path: "integration_tests/abnormal_state.json" - cursor_paths: - charges: ["created"] - config_path: "secrets/connected_account_config.json" configured_catalog_path: "integration_tests/non_disputes_events_catalog.json" future_state_path: "integration_tests/abnormal_state.json" - cursor_paths: - charges: ["created"] full_refresh: - config_path: "secrets/config.json" configured_catalog_path: "integration_tests/non_invoice_line_items_catalog.json" diff --git a/airbyte-integrations/connectors/source-stripe/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-stripe/integration_tests/abnormal_state.json index 065f544102790..1703284cdb4de 100644 --- a/airbyte-integrations/connectors/source-stripe/integration_tests/abnormal_state.json +++ b/airbyte-integrations/connectors/source-stripe/integration_tests/abnormal_state.json @@ -12,5 +12,6 @@ "payouts": { "created": 161706755600 }, "disputes": { "created": 161099630500 }, "products": { "created": 158551134100 }, - "refunds": { "created": 161959562900 } + "refunds": { "created": 161959562900 }, + "payment_intents": { "created": 161959562900 } } diff --git a/airbyte-integrations/connectors/source-stripe/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-stripe/integration_tests/configured_catalog.json index fed7e6a0b31a0..07f2b651b9585 100644 --- a/airbyte-integrations/connectors/source-stripe/integration_tests/configured_catalog.json +++ b/airbyte-integrations/connectors/source-stripe/integration_tests/configured_catalog.json @@ -12,6 +12,17 @@ "sync_mode": "incremental", "destination_sync_mode": "overwrite", "cursor_field": ["created"] + }, + { + "stream": { + "name": "payment_intents", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["created"] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append" } ] } diff --git a/airbyte-integrations/connectors/source-stripe/integration_tests/expected_subscriptions_records.txt b/airbyte-integrations/connectors/source-stripe/integration_tests/expected_subscriptions_records.txt deleted file mode 100644 index a786ff4eca896..0000000000000 --- a/airbyte-integrations/connectors/source-stripe/integration_tests/expected_subscriptions_records.txt +++ /dev/null @@ -1,25 +0,0 @@ -{"stream": "subscriptions", "data": {"id": "sub_HzZz2kXi3X5JeO", "object": "subscription", "application_fee_percent": null, "automatic_tax": {"enabled": false}, "billing_cycle_anchor": 1602278873, "billing_thresholds": null, "cancel_at": null, "cancel_at_period_end": false, "canceled_at": null, "collection_method": "charge_automatically", "created": 1599686873, "current_period_end": 1628544473, "current_period_start": 1625866073, "customer": "cus_HzZzA5Cm3Pb8Rk", "days_until_due": null, "default_payment_method": null, "default_source": null, "default_tax_rates": [], "discount": null, "ended_at": null, 
"items": {"object": "list", "data": [{"id": "si_HzZzkViznILf47", "object": "subscription_item", "billing_thresholds": null, "created": 1599686874, "metadata": {}, "plan": {"id": "plan_HDfijky2JcM0pm", "object": "plan", "active": true, "aggregate_usage": "sum", "amount": null, "amount_decimal": null, "billing_scheme": "tiered", "created": 1588637439, "currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, "metadata": {}, "nickname": "free-overage", "product": "prod_H07MQuAFARZ6K6", "tiers": [{"flat_amount": null, "flat_amount_decimal": null, "unit_amount": 0, "unit_amount_decimal": "0", "up_to": 5000}, {"flat_amount": null, "flat_amount_decimal": null, "unit_amount": 0, "unit_amount_decimal": "0", "up_to": null}], "tiers_mode": "graduated", "transform_usage": null, "trial_period_days": null, "usage_type": "metered"}, "price": {"id": "plan_HDfijky2JcM0pm", "object": "price", "active": true, "billing_scheme": "tiered", "created": 1588637439, "currency": "usd", "livemode": false, "lookup_key": null, "metadata": {}, "nickname": "free-overage", "product": "prod_H07MQuAFARZ6K6", "recurring": {"aggregate_usage": "sum", "interval": "month", "interval_count": 1, "trial_period_days": null, "usage_type": "metered"}, "tiers_mode": "graduated", "transform_quantity": null, "type": "recurring", "unit_amount": null, "unit_amount_decimal": null}, "subscription": "sub_HzZz2kXi3X5JeO", "tax_rates": []}], "has_more": false, "total_count": 1, "url": "/v1/subscription_items?subscription=sub_HzZz2kXi3X5JeO"}, "latest_invoice": "in_1JBRG2IEn5WyEQxn2Ic7V2RS", "livemode": false, "metadata": {}, "next_pending_invoice_item_invoice": null, "pause_collection": null, "payment_settings": {"payment_method_options": null, "payment_method_types": null}, "pending_invoice_item_interval": null, "pending_setup_intent": null, "pending_update": null, "plan": {"id": "plan_HDfijky2JcM0pm", "object": "plan", "active": true, "aggregate_usage": "sum", "amount": null, "amount_decimal": null, "billing_scheme": "tiered", "created": 1588637439, "currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, "metadata": {}, "nickname": "free-overage", "product": "prod_H07MQuAFARZ6K6", "tiers": [{"flat_amount": null, "flat_amount_decimal": null, "unit_amount": 0, "unit_amount_decimal": "0", "up_to": 5000}, {"flat_amount": null, "flat_amount_decimal": null, "unit_amount": 0, "unit_amount_decimal": "0", "up_to": null}], "tiers_mode": "graduated", "transform_usage": null, "trial_period_days": null, "usage_type": "metered"}, "quantity": 1, "schedule": null, "start_date": 1599686873, "status": "active", "tax_percent": null, "transfer_data": null, "trial_end": 1602278873, "trial_start": 1599686873}, "emitted_at": 1626172757000} -{"stream": "subscriptions", "data": {"id": "sub_HzZz9jG0XoSTzp", "object": "subscription", "application_fee_percent": null, "automatic_tax": {"enabled": false}, "billing_cycle_anchor": 1602278873, "billing_thresholds": null, "cancel_at": null, "cancel_at_period_end": false, "canceled_at": null, "collection_method": "charge_automatically", "created": 1599686873, "current_period_end": 1628544473, "current_period_start": 1625866073, "customer": "cus_HzZzA5Cm3Pb8Rk", "days_until_due": null, "default_payment_method": null, "default_source": null, "default_tax_rates": [], "discount": null, "ended_at": null, "items": {"object": "list", "data": [{"id": "si_HzZzdUREzTkBGP", "object": "subscription_item", "billing_thresholds": null, "created": 1599686873, "metadata": {}, "plan": {"id": 
"plan_HCaq3bqVvJH4sN", "object": "plan", "active": true, "aggregate_usage": null, "amount": 0, "amount_decimal": "0", "billing_scheme": "per_unit", "created": 1588388692, "currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, "metadata": {}, "nickname": "free", "product": "prod_H07MQuAFARZ6K6", "tiers": null, "tiers_mode": null, "transform_usage": null, "trial_period_days": null, "usage_type": "licensed"}, "price": {"id": "plan_HCaq3bqVvJH4sN", "object": "price", "active": true, "billing_scheme": "per_unit", "created": 1588388692, "currency": "usd", "livemode": false, "lookup_key": null, "metadata": {}, "nickname": "free", "product": "prod_H07MQuAFARZ6K6", "recurring": {"aggregate_usage": null, "interval": "month", "interval_count": 1, "trial_period_days": null, "usage_type": "licensed"}, "tiers_mode": null, "transform_quantity": null, "type": "recurring", "unit_amount": 0, "unit_amount_decimal": "0"}, "quantity": 1, "subscription": "sub_HzZz9jG0XoSTzp", "tax_rates": []}], "has_more": false, "total_count": 1, "url": "/v1/subscription_items?subscription=sub_HzZz9jG0XoSTzp"}, "latest_invoice": "in_1JBRG1IEn5WyEQxn1UZZ6j5W", "livemode": false, "metadata": {}, "next_pending_invoice_item_invoice": null, "pause_collection": null, "payment_settings": {"payment_method_options": null, "payment_method_types": null}, "pending_invoice_item_interval": null, "pending_setup_intent": null, "pending_update": null, "plan": {"id": "plan_HCaq3bqVvJH4sN", "object": "plan", "active": true, "aggregate_usage": null, "amount": 0, "amount_decimal": "0", "billing_scheme": "per_unit", "created": 1588388692, "currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, "metadata": {}, "nickname": "free", "product": "prod_H07MQuAFARZ6K6", "tiers": null, "tiers_mode": null, "transform_usage": null, "trial_period_days": null, "usage_type": "licensed"}, "quantity": 1, "schedule": null, "start_date": 1599686873, "status": "active", "tax_percent": null, "transfer_data": null, "trial_end": 1602278873, "trial_start": 1599686873}, "emitted_at": 1626172757000} -{"stream": "subscriptions", "data": {"id": "sub_Hm79IpijHbWG6Y", "object": "subscription", "application_fee_percent": null, "automatic_tax": {"enabled": false}, "billing_cycle_anchor": 1599173266, "billing_thresholds": null, "cancel_at": null, "cancel_at_period_end": false, "canceled_at": null, "collection_method": "charge_automatically", "created": 1596581266, "current_period_end": 1628030866, "current_period_start": 1625352466, "customer": "cus_Hm79aU31H8NCaS", "days_until_due": null, "default_payment_method": null, "default_source": null, "default_tax_rates": [], "discount": null, "ended_at": null, "items": {"object": "list", "data": [{"id": "si_Hm79NwhbKPTnMW", "object": "subscription_item", "billing_thresholds": null, "created": 1596581266, "metadata": {}, "plan": {"id": "plan_HDfijky2JcM0pm", "object": "plan", "active": true, "aggregate_usage": "sum", "amount": null, "amount_decimal": null, "billing_scheme": "tiered", "created": 1588637439, "currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, "metadata": {}, "nickname": "free-overage", "product": "prod_H07MQuAFARZ6K6", "tiers": [{"flat_amount": null, "flat_amount_decimal": null, "unit_amount": 0, "unit_amount_decimal": "0", "up_to": 5000}, {"flat_amount": null, "flat_amount_decimal": null, "unit_amount": 0, "unit_amount_decimal": "0", "up_to": null}], "tiers_mode": "graduated", "transform_usage": null, "trial_period_days": null, "usage_type": 
"metered"}, "price": {"id": "plan_HDfijky2JcM0pm", "object": "price", "active": true, "billing_scheme": "tiered", "created": 1588637439, "currency": "usd", "livemode": false, "lookup_key": null, "metadata": {}, "nickname": "free-overage", "product": "prod_H07MQuAFARZ6K6", "recurring": {"aggregate_usage": "sum", "interval": "month", "interval_count": 1, "trial_period_days": null, "usage_type": "metered"}, "tiers_mode": "graduated", "transform_quantity": null, "type": "recurring", "unit_amount": null, "unit_amount_decimal": null}, "subscription": "sub_Hm79IpijHbWG6Y", "tax_rates": []}], "has_more": false, "total_count": 1, "url": "/v1/subscription_items?subscription=sub_Hm79IpijHbWG6Y"}, "latest_invoice": "in_1J9Hf1IEn5WyEQxnRIsUHRNk", "livemode": false, "metadata": {}, "next_pending_invoice_item_invoice": null, "pause_collection": null, "payment_settings": {"payment_method_options": null, "payment_method_types": null}, "pending_invoice_item_interval": null, "pending_setup_intent": null, "pending_update": null, "plan": {"id": "plan_HDfijky2JcM0pm", "object": "plan", "active": true, "aggregate_usage": "sum", "amount": null, "amount_decimal": null, "billing_scheme": "tiered", "created": 1588637439, "currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, "metadata": {}, "nickname": "free-overage", "product": "prod_H07MQuAFARZ6K6", "tiers": [{"flat_amount": null, "flat_amount_decimal": null, "unit_amount": 0, "unit_amount_decimal": "0", "up_to": 5000}, {"flat_amount": null, "flat_amount_decimal": null, "unit_amount": 0, "unit_amount_decimal": "0", "up_to": null}], "tiers_mode": "graduated", "transform_usage": null, "trial_period_days": null, "usage_type": "metered"}, "quantity": 1, "schedule": null, "start_date": 1596581266, "status": "active", "tax_percent": null, "transfer_data": null, "trial_end": 1599173266, "trial_start": 1596581266}, "emitted_at": 1626172757000} -{"stream": "subscriptions", "data": {"id": "sub_Hm79H35Cga2xyM", "object": "subscription", "application_fee_percent": null, "automatic_tax": {"enabled": false}, "billing_cycle_anchor": 1599173265, "billing_thresholds": null, "cancel_at": null, "cancel_at_period_end": false, "canceled_at": null, "collection_method": "charge_automatically", "created": 1596581265, "current_period_end": 1628030865, "current_period_start": 1625352465, "customer": "cus_Hm79aU31H8NCaS", "days_until_due": null, "default_payment_method": null, "default_source": null, "default_tax_rates": [], "discount": null, "ended_at": null, "items": {"object": "list", "data": [{"id": "si_Hm79ez6Hthf2jP", "object": "subscription_item", "billing_thresholds": null, "created": 1596581266, "metadata": {}, "plan": {"id": "plan_HCaq3bqVvJH4sN", "object": "plan", "active": true, "aggregate_usage": null, "amount": 0, "amount_decimal": "0", "billing_scheme": "per_unit", "created": 1588388692, "currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, "metadata": {}, "nickname": "free", "product": "prod_H07MQuAFARZ6K6", "tiers": null, "tiers_mode": null, "transform_usage": null, "trial_period_days": null, "usage_type": "licensed"}, "price": {"id": "plan_HCaq3bqVvJH4sN", "object": "price", "active": true, "billing_scheme": "per_unit", "created": 1588388692, "currency": "usd", "livemode": false, "lookup_key": null, "metadata": {}, "nickname": "free", "product": "prod_H07MQuAFARZ6K6", "recurring": {"aggregate_usage": null, "interval": "month", "interval_count": 1, "trial_period_days": null, "usage_type": "licensed"}, "tiers_mode": null, 
"transform_quantity": null, "type": "recurring", "unit_amount": 0, "unit_amount_decimal": "0"}, "quantity": 1, "subscription": "sub_Hm79H35Cga2xyM", "tax_rates": []}], "has_more": false, "total_count": 1, "url": "/v1/subscription_items?subscription=sub_Hm79H35Cga2xyM"}, "latest_invoice": "in_1J9He9IEn5WyEQxnIHnR07pp", "livemode": false, "metadata": {}, "next_pending_invoice_item_invoice": null, "pause_collection": null, "payment_settings": {"payment_method_options": null, "payment_method_types": null}, "pending_invoice_item_interval": null, "pending_setup_intent": null, "pending_update": null, "plan": {"id": "plan_HCaq3bqVvJH4sN", "object": "plan", "active": true, "aggregate_usage": null, "amount": 0, "amount_decimal": "0", "billing_scheme": "per_unit", "created": 1588388692, "currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, "metadata": {}, "nickname": "free", "product": "prod_H07MQuAFARZ6K6", "tiers": null, "tiers_mode": null, "transform_usage": null, "trial_period_days": null, "usage_type": "licensed"}, "quantity": 1, "schedule": null, "start_date": 1596581265, "status": "active", "tax_percent": null, "transfer_data": null, "trial_end": 1599173265, "trial_start": 1596581265}, "emitted_at": 1626172757000} -{"stream": "subscriptions", "data": {"id": "sub_Hgqd9cnZ3U2zso", "object": "subscription", "application_fee_percent": null, "automatic_tax": {"enabled": false}, "billing_cycle_anchor": 1596283200, "billing_thresholds": null, "cancel_at": null, "cancel_at_period_end": false, "canceled_at": null, "collection_method": "charge_automatically", "created": 1595366620, "current_period_end": 1627819200, "current_period_start": 1625140800, "customer": "cus_HgqdUlT76RCID7", "days_until_due": null, "default_payment_method": null, "default_source": null, "default_tax_rates": [], "discount": null, "ended_at": null, "items": {"object": "list", "data": [{"id": "si_Hgqd3GbOq4nX96", "object": "subscription_item", "billing_thresholds": null, "created": 1595366621, "metadata": {}, "plan": {"id": "plan_HDfijky2JcM0pm", "object": "plan", "active": true, "aggregate_usage": "sum", "amount": null, "amount_decimal": null, "billing_scheme": "tiered", "created": 1588637439, "currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, "metadata": {}, "nickname": "free-overage", "product": "prod_H07MQuAFARZ6K6", "tiers": [{"flat_amount": null, "flat_amount_decimal": null, "unit_amount": 0, "unit_amount_decimal": "0", "up_to": 5000}, {"flat_amount": null, "flat_amount_decimal": null, "unit_amount": 0, "unit_amount_decimal": "0", "up_to": null}], "tiers_mode": "graduated", "transform_usage": null, "trial_period_days": null, "usage_type": "metered"}, "price": {"id": "plan_HDfijky2JcM0pm", "object": "price", "active": true, "billing_scheme": "tiered", "created": 1588637439, "currency": "usd", "livemode": false, "lookup_key": null, "metadata": {}, "nickname": "free-overage", "product": "prod_H07MQuAFARZ6K6", "recurring": {"aggregate_usage": "sum", "interval": "month", "interval_count": 1, "trial_period_days": null, "usage_type": "metered"}, "tiers_mode": "graduated", "transform_quantity": null, "type": "recurring", "unit_amount": null, "unit_amount_decimal": null}, "subscription": "sub_Hgqd9cnZ3U2zso", "tax_rates": []}], "has_more": false, "total_count": 1, "url": "/v1/subscription_items?subscription=sub_Hgqd9cnZ3U2zso"}, "latest_invoice": "in_1J8OZSIEn5WyEQxnB2v5sbjq", "livemode": false, "metadata": {"eligibleForTrial": "false"}, "next_pending_invoice_item_invoice": null, 
"pause_collection": null, "payment_settings": {"payment_method_options": null, "payment_method_types": null}, "pending_invoice_item_interval": null, "pending_setup_intent": null, "pending_update": null, "plan": {"id": "plan_HDfijky2JcM0pm", "object": "plan", "active": true, "aggregate_usage": "sum", "amount": null, "amount_decimal": null, "billing_scheme": "tiered", "created": 1588637439, "currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, "metadata": {}, "nickname": "free-overage", "product": "prod_H07MQuAFARZ6K6", "tiers": [{"flat_amount": null, "flat_amount_decimal": null, "unit_amount": 0, "unit_amount_decimal": "0", "up_to": 5000}, {"flat_amount": null, "flat_amount_decimal": null, "unit_amount": 0, "unit_amount_decimal": "0", "up_to": null}], "tiers_mode": "graduated", "transform_usage": null, "trial_period_days": null, "usage_type": "metered"}, "quantity": 1, "schedule": null, "start_date": 1595366620, "status": "active", "tax_percent": null, "transfer_data": null, "trial_end": 1596283200, "trial_start": 1595366620}, "emitted_at": 1626172757000} -{"stream": "subscriptions", "data": {"id": "sub_HgqdZo49rNc9yd", "object": "subscription", "application_fee_percent": null, "automatic_tax": {"enabled": false}, "billing_cycle_anchor": 1596283200, "billing_thresholds": null, "cancel_at": null, "cancel_at_period_end": false, "canceled_at": null, "collection_method": "charge_automatically", "created": 1595366619, "current_period_end": 1627819200, "current_period_start": 1625140800, "customer": "cus_HgqdUlT76RCID7", "days_until_due": null, "default_payment_method": null, "default_source": null, "default_tax_rates": [], "discount": null, "ended_at": null, "items": {"object": "list", "data": [{"id": "si_HgqdPFqB83zOkG", "object": "subscription_item", "billing_thresholds": null, "created": 1595366620, "metadata": {}, "plan": {"id": "plan_HCaq3bqVvJH4sN", "object": "plan", "active": true, "aggregate_usage": null, "amount": 0, "amount_decimal": "0", "billing_scheme": "per_unit", "created": 1588388692, "currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, "metadata": {}, "nickname": "free", "product": "prod_H07MQuAFARZ6K6", "tiers": null, "tiers_mode": null, "transform_usage": null, "trial_period_days": null, "usage_type": "licensed"}, "price": {"id": "plan_HCaq3bqVvJH4sN", "object": "price", "active": true, "billing_scheme": "per_unit", "created": 1588388692, "currency": "usd", "livemode": false, "lookup_key": null, "metadata": {}, "nickname": "free", "product": "prod_H07MQuAFARZ6K6", "recurring": {"aggregate_usage": null, "interval": "month", "interval_count": 1, "trial_period_days": null, "usage_type": "licensed"}, "tiers_mode": null, "transform_quantity": null, "type": "recurring", "unit_amount": 0, "unit_amount_decimal": "0"}, "quantity": 1, "subscription": "sub_HgqdZo49rNc9yd", "tax_rates": []}], "has_more": false, "total_count": 1, "url": "/v1/subscription_items?subscription=sub_HgqdZo49rNc9yd"}, "latest_invoice": "in_1J8OaGIEn5WyEQxntxV9SoiC", "livemode": false, "metadata": {"eligibleForTrial": "false"}, "next_pending_invoice_item_invoice": null, "pause_collection": null, "payment_settings": {"payment_method_options": null, "payment_method_types": null}, "pending_invoice_item_interval": null, "pending_setup_intent": null, "pending_update": null, "plan": {"id": "plan_HCaq3bqVvJH4sN", "object": "plan", "active": true, "aggregate_usage": null, "amount": 0, "amount_decimal": "0", "billing_scheme": "per_unit", "created": 1588388692, 
"currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, "metadata": {}, "nickname": "free", "product": "prod_H07MQuAFARZ6K6", "tiers": null, "tiers_mode": null, "transform_usage": null, "trial_period_days": null, "usage_type": "licensed"}, "quantity": 1, "schedule": null, "start_date": 1595366619, "status": "active", "tax_percent": null, "transfer_data": null, "trial_end": 1596283200, "trial_start": 1595366619}, "emitted_at": 1626172757000} -{"stream": "subscriptions", "data": {"id": "sub_Hf34IdAAxZ52R9", "object": "subscription", "application_fee_percent": null, "automatic_tax": {"enabled": false}, "billing_cycle_anchor": 1596283200, "billing_thresholds": null, "cancel_at": null, "cancel_at_period_end": false, "canceled_at": null, "collection_method": "charge_automatically", "created": 1594951622, "current_period_end": 1627819200, "current_period_start": 1625140800, "customer": "cus_Hf34W3ixOwxYl8", "days_until_due": null, "default_payment_method": null, "default_source": null, "default_tax_rates": [], "discount": null, "ended_at": null, "items": {"object": "list", "data": [{"id": "si_Hf34ZIE6Eghfhg", "object": "subscription_item", "billing_thresholds": null, "created": 1594951623, "metadata": {}, "plan": {"id": "plan_HDfijky2JcM0pm", "object": "plan", "active": true, "aggregate_usage": "sum", "amount": null, "amount_decimal": null, "billing_scheme": "tiered", "created": 1588637439, "currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, "metadata": {}, "nickname": "free-overage", "product": "prod_H07MQuAFARZ6K6", "tiers": [{"flat_amount": null, "flat_amount_decimal": null, "unit_amount": 0, "unit_amount_decimal": "0", "up_to": 5000}, {"flat_amount": null, "flat_amount_decimal": null, "unit_amount": 0, "unit_amount_decimal": "0", "up_to": null}], "tiers_mode": "graduated", "transform_usage": null, "trial_period_days": null, "usage_type": "metered"}, "price": {"id": "plan_HDfijky2JcM0pm", "object": "price", "active": true, "billing_scheme": "tiered", "created": 1588637439, "currency": "usd", "livemode": false, "lookup_key": null, "metadata": {}, "nickname": "free-overage", "product": "prod_H07MQuAFARZ6K6", "recurring": {"aggregate_usage": "sum", "interval": "month", "interval_count": 1, "trial_period_days": null, "usage_type": "metered"}, "tiers_mode": "graduated", "transform_quantity": null, "type": "recurring", "unit_amount": null, "unit_amount_decimal": null}, "subscription": "sub_Hf34IdAAxZ52R9", "tax_rates": []}], "has_more": false, "total_count": 1, "url": "/v1/subscription_items?subscription=sub_Hf34IdAAxZ52R9"}, "latest_invoice": "in_1J8OaNIEn5WyEQxnjVyxvHpB", "livemode": false, "metadata": {"eligibleForTrial": "false"}, "next_pending_invoice_item_invoice": null, "pause_collection": null, "payment_settings": {"payment_method_options": null, "payment_method_types": null}, "pending_invoice_item_interval": null, "pending_setup_intent": null, "pending_update": null, "plan": {"id": "plan_HDfijky2JcM0pm", "object": "plan", "active": true, "aggregate_usage": "sum", "amount": null, "amount_decimal": null, "billing_scheme": "tiered", "created": 1588637439, "currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, "metadata": {}, "nickname": "free-overage", "product": "prod_H07MQuAFARZ6K6", "tiers": [{"flat_amount": null, "flat_amount_decimal": null, "unit_amount": 0, "unit_amount_decimal": "0", "up_to": 5000}, {"flat_amount": null, "flat_amount_decimal": null, "unit_amount": 0, "unit_amount_decimal": "0", "up_to": null}], 
"tiers_mode": "graduated", "transform_usage": null, "trial_period_days": null, "usage_type": "metered"}, "quantity": 1, "schedule": null, "start_date": 1594951622, "status": "active", "tax_percent": null, "transfer_data": null, "trial_end": 1596283200, "trial_start": 1594951622}, "emitted_at": 1626172757000} -{"stream": "subscriptions", "data": {"id": "sub_Hf34Qc1KVVs8S6", "object": "subscription", "application_fee_percent": null, "automatic_tax": {"enabled": false}, "billing_cycle_anchor": 1596283200, "billing_thresholds": null, "cancel_at": null, "cancel_at_period_end": false, "canceled_at": null, "collection_method": "charge_automatically", "created": 1594951621, "current_period_end": 1627819200, "current_period_start": 1625140800, "customer": "cus_Hf34W3ixOwxYl8", "days_until_due": null, "default_payment_method": null, "default_source": null, "default_tax_rates": [], "discount": null, "ended_at": null, "items": {"object": "list", "data": [{"id": "si_Hf34nezuRvVozR", "object": "subscription_item", "billing_thresholds": null, "created": 1594951622, "metadata": {}, "plan": {"id": "plan_HCaq3bqVvJH4sN", "object": "plan", "active": true, "aggregate_usage": null, "amount": 0, "amount_decimal": "0", "billing_scheme": "per_unit", "created": 1588388692, "currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, "metadata": {}, "nickname": "free", "product": "prod_H07MQuAFARZ6K6", "tiers": null, "tiers_mode": null, "transform_usage": null, "trial_period_days": null, "usage_type": "licensed"}, "price": {"id": "plan_HCaq3bqVvJH4sN", "object": "price", "active": true, "billing_scheme": "per_unit", "created": 1588388692, "currency": "usd", "livemode": false, "lookup_key": null, "metadata": {}, "nickname": "free", "product": "prod_H07MQuAFARZ6K6", "recurring": {"aggregate_usage": null, "interval": "month", "interval_count": 1, "trial_period_days": null, "usage_type": "licensed"}, "tiers_mode": null, "transform_quantity": null, "type": "recurring", "unit_amount": 0, "unit_amount_decimal": "0"}, "quantity": 1, "subscription": "sub_Hf34Qc1KVVs8S6", "tax_rates": []}], "has_more": false, "total_count": 1, "url": "/v1/subscription_items?subscription=sub_Hf34Qc1KVVs8S6"}, "latest_invoice": "in_1J8OZTIEn5WyEQxny3CxoLoP", "livemode": false, "metadata": {"eligibleForTrial": "false"}, "next_pending_invoice_item_invoice": null, "pause_collection": null, "payment_settings": {"payment_method_options": null, "payment_method_types": null}, "pending_invoice_item_interval": null, "pending_setup_intent": null, "pending_update": null, "plan": {"id": "plan_HCaq3bqVvJH4sN", "object": "plan", "active": true, "aggregate_usage": null, "amount": 0, "amount_decimal": "0", "billing_scheme": "per_unit", "created": 1588388692, "currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, "metadata": {}, "nickname": "free", "product": "prod_H07MQuAFARZ6K6", "tiers": null, "tiers_mode": null, "transform_usage": null, "trial_period_days": null, "usage_type": "licensed"}, "quantity": 1, "schedule": null, "start_date": 1594951621, "status": "active", "tax_percent": null, "transfer_data": null, "trial_end": 1596283200, "trial_start": 1594951621}, "emitted_at": 1626172757000} -{"stream": "subscriptions", "data": {"id": "sub_Hf2qrsVuVGHXe3", "object": "subscription", "application_fee_percent": null, "automatic_tax": {"enabled": false}, "billing_cycle_anchor": 1594951250, "billing_thresholds": null, "cancel_at": null, "cancel_at_period_end": false, "canceled_at": null, "collection_method": 
"charge_automatically", "created": 1594950743, "current_period_end": 1629165650, "current_period_start": 1626487250, "customer": "cus_Hf2qOhXoDIiXiK", "days_until_due": null, "default_payment_method": null, "default_source": null, "default_tax_rates": [], "discount": null, "ended_at": null, "items": {"object": "list", "data": [{"id": "si_Hf2qsLwNq56OJG", "object": "subscription_item", "billing_thresholds": null, "created": 1594950744, "metadata": {}, "plan": {"id": "price_1H5DS3IEn5WyEQxnqW2x4pWu", "object": "plan", "active": true, "aggregate_usage": "sum", "amount": null, "amount_decimal": null, "billing_scheme": "tiered", "created": 1594830443, "currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, "metadata": {}, "nickname": "starter-overage-monthly-v2", "product": "prod_H07MQuAFARZ6K6", "tiers": [{"flat_amount": null, "flat_amount_decimal": null, "unit_amount": 0, "unit_amount_decimal": "0", "up_to": 50000}, {"flat_amount": null, "flat_amount_decimal": null, "unit_amount": null, "unit_amount_decimal": "0.5", "up_to": null}], "tiers_mode": "graduated", "transform_usage": null, "trial_period_days": null, "usage_type": "metered"}, "price": {"id": "price_1H5DS3IEn5WyEQxnqW2x4pWu", "object": "price", "active": true, "billing_scheme": "tiered", "created": 1594830443, "currency": "usd", "livemode": false, "lookup_key": null, "metadata": {}, "nickname": "starter-overage-monthly-v2", "product": "prod_H07MQuAFARZ6K6", "recurring": {"aggregate_usage": "sum", "interval": "month", "interval_count": 1, "trial_period_days": null, "usage_type": "metered"}, "tiers_mode": "graduated", "transform_quantity": null, "type": "recurring", "unit_amount": null, "unit_amount_decimal": null}, "subscription": "sub_Hf2qrsVuVGHXe3", "tax_rates": []}], "has_more": false, "total_count": 1, "url": "/v1/subscription_items?subscription=sub_Hf2qrsVuVGHXe3"}, "latest_invoice": "in_1JE2w0IEn5WyEQxnm556audH", "livemode": false, "metadata": {}, "next_pending_invoice_item_invoice": null, "pause_collection": null, "payment_settings": {"payment_method_options": null, "payment_method_types": null}, "pending_invoice_item_interval": null, "pending_setup_intent": null, "pending_update": null, "plan": {"id": "price_1H5DS3IEn5WyEQxnqW2x4pWu", "object": "plan", "active": true, "aggregate_usage": "sum", "amount": null, "amount_decimal": null, "billing_scheme": "tiered", "created": 1594830443, "currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, "metadata": {}, "nickname": "starter-overage-monthly-v2", "product": "prod_H07MQuAFARZ6K6", "tiers": [{"flat_amount": null, "flat_amount_decimal": null, "unit_amount": 0, "unit_amount_decimal": "0", "up_to": 50000}, {"flat_amount": null, "flat_amount_decimal": null, "unit_amount": null, "unit_amount_decimal": "0.5", "up_to": null}], "tiers_mode": "graduated", "transform_usage": null, "trial_period_days": null, "usage_type": "metered"}, "quantity": 1, "schedule": null, "start_date": 1594950743, "status": "active", "tax_percent": null, "transfer_data": null, "trial_end": 1594951249, "trial_start": 1594950743}, "emitted_at": 1626172757000} -{"stream": "subscriptions", "data": {"id": "sub_Hf2qWerNs8mYbh", "object": "subscription", "application_fee_percent": null, "automatic_tax": {"enabled": false}, "billing_cycle_anchor": 1594951270, "billing_thresholds": null, "cancel_at": null, "cancel_at_period_end": false, "canceled_at": null, "collection_method": "charge_automatically", "created": 1594950743, "current_period_end": 1629165670, 
"current_period_start": 1626487270, "customer": "cus_Hf2qOhXoDIiXiK", "days_until_due": null, "default_payment_method": null, "default_source": null, "default_tax_rates": [], "discount": null, "ended_at": null, "items": {"object": "list", "data": [{"id": "si_Hf2quA7b9Kin4A", "object": "subscription_item", "billing_thresholds": null, "created": 1594950743, "metadata": {}, "plan": {"id": "plan_HCV3N7NM9cyAm4", "object": "plan", "active": true, "aggregate_usage": null, "amount": 9900, "amount_decimal": "9900", "billing_scheme": "per_unit", "created": 1588367151, "currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, "metadata": {}, "nickname": "starter-monthly", "product": "prod_H07MQuAFARZ6K6", "tiers": null, "tiers_mode": null, "transform_usage": null, "trial_period_days": null, "usage_type": "licensed"}, "price": {"id": "plan_HCV3N7NM9cyAm4", "object": "price", "active": true, "billing_scheme": "per_unit", "created": 1588367151, "currency": "usd", "livemode": false, "lookup_key": null, "metadata": {}, "nickname": "starter-monthly", "product": "prod_H07MQuAFARZ6K6", "recurring": {"aggregate_usage": null, "interval": "month", "interval_count": 1, "trial_period_days": null, "usage_type": "licensed"}, "tiers_mode": null, "transform_quantity": null, "type": "recurring", "unit_amount": 9900, "unit_amount_decimal": "9900"}, "quantity": 1, "subscription": "sub_Hf2qWerNs8mYbh", "tax_rates": []}], "has_more": false, "total_count": 1, "url": "/v1/subscription_items?subscription=sub_Hf2qWerNs8mYbh"}, "latest_invoice": "in_1JE2vwIEn5WyEQxnkpZ7QEFP", "livemode": false, "metadata": {}, "next_pending_invoice_item_invoice": null, "pause_collection": null, "payment_settings": {"payment_method_options": null, "payment_method_types": null}, "pending_invoice_item_interval": null, "pending_setup_intent": null, "pending_update": null, "plan": {"id": "plan_HCV3N7NM9cyAm4", "object": "plan", "active": true, "aggregate_usage": null, "amount": 9900, "amount_decimal": "9900", "billing_scheme": "per_unit", "created": 1588367151, "currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, "metadata": {}, "nickname": "starter-monthly", "product": "prod_H07MQuAFARZ6K6", "tiers": null, "tiers_mode": null, "transform_usage": null, "trial_period_days": null, "usage_type": "licensed"}, "quantity": 1, "schedule": null, "start_date": 1594950743, "status": "active", "tax_percent": null, "transfer_data": null, "trial_end": 1594951269, "trial_start": 1594950743}, "emitted_at": 1626172757000} -{"stream": "subscriptions", "data": {"id": "sub_Hf2peKxCELwRbU", "object": "subscription", "application_fee_percent": null, "automatic_tax": {"enabled": false}, "billing_cycle_anchor": 1597542736, "billing_thresholds": null, "cancel_at": null, "cancel_at_period_end": false, "canceled_at": null, "collection_method": "charge_automatically", "created": 1594950736, "current_period_end": 1629078736, "current_period_start": 1626400336, "customer": "cus_Hf2pXyADLHSHAC", "days_until_due": null, "default_payment_method": null, "default_source": null, "default_tax_rates": [], "discount": null, "ended_at": null, "items": {"object": "list", "data": [{"id": "si_Hf2pd6RdGIRH5x", "object": "subscription_item", "billing_thresholds": null, "created": 1594950737, "metadata": {}, "plan": {"id": "price_1H5DS3IEn5WyEQxnqW2x4pWu", "object": "plan", "active": true, "aggregate_usage": "sum", "amount": null, "amount_decimal": null, "billing_scheme": "tiered", "created": 1594830443, "currency": "usd", "interval": "month", 
"interval_count": 1, "livemode": false, "metadata": {}, "nickname": "starter-overage-monthly-v2", "product": "prod_H07MQuAFARZ6K6", "tiers": [{"flat_amount": null, "flat_amount_decimal": null, "unit_amount": 0, "unit_amount_decimal": "0", "up_to": 50000}, {"flat_amount": null, "flat_amount_decimal": null, "unit_amount": null, "unit_amount_decimal": "0.5", "up_to": null}], "tiers_mode": "graduated", "transform_usage": null, "trial_period_days": null, "usage_type": "metered"}, "price": {"id": "price_1H5DS3IEn5WyEQxnqW2x4pWu", "object": "price", "active": true, "billing_scheme": "tiered", "created": 1594830443, "currency": "usd", "livemode": false, "lookup_key": null, "metadata": {}, "nickname": "starter-overage-monthly-v2", "product": "prod_H07MQuAFARZ6K6", "recurring": {"aggregate_usage": "sum", "interval": "month", "interval_count": 1, "trial_period_days": null, "usage_type": "metered"}, "tiers_mode": "graduated", "transform_quantity": null, "type": "recurring", "unit_amount": null, "unit_amount_decimal": null}, "subscription": "sub_Hf2peKxCELwRbU", "tax_rates": []}], "has_more": false, "total_count": 1, "url": "/v1/subscription_items?subscription=sub_Hf2peKxCELwRbU"}, "latest_invoice": "in_1JDgEqIEn5WyEQxn4Jlk4r2a", "livemode": false, "metadata": {}, "next_pending_invoice_item_invoice": null, "pause_collection": null, "payment_settings": {"payment_method_options": null, "payment_method_types": null}, "pending_invoice_item_interval": null, "pending_setup_intent": null, "pending_update": null, "plan": {"id": "price_1H5DS3IEn5WyEQxnqW2x4pWu", "object": "plan", "active": true, "aggregate_usage": "sum", "amount": null, "amount_decimal": null, "billing_scheme": "tiered", "created": 1594830443, "currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, "metadata": {}, "nickname": "starter-overage-monthly-v2", "product": "prod_H07MQuAFARZ6K6", "tiers": [{"flat_amount": null, "flat_amount_decimal": null, "unit_amount": 0, "unit_amount_decimal": "0", "up_to": 50000}, {"flat_amount": null, "flat_amount_decimal": null, "unit_amount": null, "unit_amount_decimal": "0.5", "up_to": null}], "tiers_mode": "graduated", "transform_usage": null, "trial_period_days": null, "usage_type": "metered"}, "quantity": 1, "schedule": null, "start_date": 1594950736, "status": "active", "tax_percent": null, "transfer_data": null, "trial_end": 1597542736, "trial_start": 1594950736}, "emitted_at": 1626172757000} -{"stream": "subscriptions", "data": {"id": "sub_Hf2pipLVFj6x1D", "object": "subscription", "application_fee_percent": null, "automatic_tax": {"enabled": false}, "billing_cycle_anchor": 1597542736, "billing_thresholds": null, "cancel_at": null, "cancel_at_period_end": false, "canceled_at": null, "collection_method": "charge_automatically", "created": 1594950736, "current_period_end": 1629078736, "current_period_start": 1626400336, "customer": "cus_Hf2pXyADLHSHAC", "days_until_due": null, "default_payment_method": null, "default_source": null, "default_tax_rates": [], "discount": null, "ended_at": null, "items": {"object": "list", "data": [{"id": "si_Hf2puA2lH18xJg", "object": "subscription_item", "billing_thresholds": null, "created": 1594950736, "metadata": {}, "plan": {"id": "plan_HCV3N7NM9cyAm4", "object": "plan", "active": true, "aggregate_usage": null, "amount": 9900, "amount_decimal": "9900", "billing_scheme": "per_unit", "created": 1588367151, "currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, "metadata": {}, "nickname": "starter-monthly", "product": 
"prod_H07MQuAFARZ6K6", "tiers": null, "tiers_mode": null, "transform_usage": null, "trial_period_days": null, "usage_type": "licensed"}, "price": {"id": "plan_HCV3N7NM9cyAm4", "object": "price", "active": true, "billing_scheme": "per_unit", "created": 1588367151, "currency": "usd", "livemode": false, "lookup_key": null, "metadata": {}, "nickname": "starter-monthly", "product": "prod_H07MQuAFARZ6K6", "recurring": {"aggregate_usage": null, "interval": "month", "interval_count": 1, "trial_period_days": null, "usage_type": "licensed"}, "tiers_mode": null, "transform_quantity": null, "type": "recurring", "unit_amount": 9900, "unit_amount_decimal": "9900"}, "quantity": 1, "subscription": "sub_Hf2pipLVFj6x1D", "tax_rates": []}], "has_more": false, "total_count": 1, "url": "/v1/subscription_items?subscription=sub_Hf2pipLVFj6x1D"}, "latest_invoice": "in_1JDgEoIEn5WyEQxnXkd7OHep", "livemode": false, "metadata": {}, "next_pending_invoice_item_invoice": null, "pause_collection": null, "payment_settings": {"payment_method_options": null, "payment_method_types": null}, "pending_invoice_item_interval": null, "pending_setup_intent": null, "pending_update": null, "plan": {"id": "plan_HCV3N7NM9cyAm4", "object": "plan", "active": true, "aggregate_usage": null, "amount": 9900, "amount_decimal": "9900", "billing_scheme": "per_unit", "created": 1588367151, "currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, "metadata": {}, "nickname": "starter-monthly", "product": "prod_H07MQuAFARZ6K6", "tiers": null, "tiers_mode": null, "transform_usage": null, "trial_period_days": null, "usage_type": "licensed"}, "quantity": 1, "schedule": null, "start_date": 1594950736, "status": "active", "tax_percent": null, "transfer_data": null, "trial_end": 1597542736, "trial_start": 1594950736}, "emitted_at": 1626172757000} -{"stream": "subscriptions", "data": {"id": "sub_Hf2pEMwdL9JHnw", "object": "subscription", "application_fee_percent": null, "automatic_tax": {"enabled": false}, "billing_cycle_anchor": 1596283200, "billing_thresholds": null, "cancel_at": null, "cancel_at_period_end": false, "canceled_at": null, "collection_method": "charge_automatically", "created": 1594950701, "current_period_end": 1627819200, "current_period_start": 1625140800, "customer": "cus_Hf2pYlKKcVgz1y", "days_until_due": null, "default_payment_method": null, "default_source": null, "default_tax_rates": [], "discount": null, "ended_at": null, "items": {"object": "list", "data": [{"id": "si_Hf2pGheURlXx9V", "object": "subscription_item", "billing_thresholds": null, "created": 1594950701, "metadata": {}, "plan": {"id": "plan_HDfijky2JcM0pm", "object": "plan", "active": true, "aggregate_usage": "sum", "amount": null, "amount_decimal": null, "billing_scheme": "tiered", "created": 1588637439, "currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, "metadata": {}, "nickname": "free-overage", "product": "prod_H07MQuAFARZ6K6", "tiers": [{"flat_amount": null, "flat_amount_decimal": null, "unit_amount": 0, "unit_amount_decimal": "0", "up_to": 5000}, {"flat_amount": null, "flat_amount_decimal": null, "unit_amount": 0, "unit_amount_decimal": "0", "up_to": null}], "tiers_mode": "graduated", "transform_usage": null, "trial_period_days": null, "usage_type": "metered"}, "price": {"id": "plan_HDfijky2JcM0pm", "object": "price", "active": true, "billing_scheme": "tiered", "created": 1588637439, "currency": "usd", "livemode": false, "lookup_key": null, "metadata": {}, "nickname": "free-overage", "product": 
"prod_H07MQuAFARZ6K6", "recurring": {"aggregate_usage": "sum", "interval": "month", "interval_count": 1, "trial_period_days": null, "usage_type": "metered"}, "tiers_mode": "graduated", "transform_quantity": null, "type": "recurring", "unit_amount": null, "unit_amount_decimal": null}, "subscription": "sub_Hf2pEMwdL9JHnw", "tax_rates": []}], "has_more": false, "total_count": 1, "url": "/v1/subscription_items?subscription=sub_Hf2pEMwdL9JHnw"}, "latest_invoice": "in_1J8OZUIEn5WyEQxnn0zOsxaI", "livemode": false, "metadata": {"eligibleForTrial": "false"}, "next_pending_invoice_item_invoice": null, "pause_collection": null, "payment_settings": {"payment_method_options": null, "payment_method_types": null}, "pending_invoice_item_interval": null, "pending_setup_intent": null, "pending_update": null, "plan": {"id": "plan_HDfijky2JcM0pm", "object": "plan", "active": true, "aggregate_usage": "sum", "amount": null, "amount_decimal": null, "billing_scheme": "tiered", "created": 1588637439, "currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, "metadata": {}, "nickname": "free-overage", "product": "prod_H07MQuAFARZ6K6", "tiers": [{"flat_amount": null, "flat_amount_decimal": null, "unit_amount": 0, "unit_amount_decimal": "0", "up_to": 5000}, {"flat_amount": null, "flat_amount_decimal": null, "unit_amount": 0, "unit_amount_decimal": "0", "up_to": null}], "tiers_mode": "graduated", "transform_usage": null, "trial_period_days": null, "usage_type": "metered"}, "quantity": 1, "schedule": null, "start_date": 1594950701, "status": "active", "tax_percent": null, "transfer_data": null, "trial_end": 1596283200, "trial_start": 1594950701}, "emitted_at": 1626172757000} -{"stream": "subscriptions", "data": {"id": "sub_Hf2pgopSeOlHIU", "object": "subscription", "application_fee_percent": null, "automatic_tax": {"enabled": false}, "billing_cycle_anchor": 1596283200, "billing_thresholds": null, "cancel_at": null, "cancel_at_period_end": false, "canceled_at": null, "collection_method": "charge_automatically", "created": 1594950700, "current_period_end": 1627819200, "current_period_start": 1625140800, "customer": "cus_Hf2pYlKKcVgz1y", "days_until_due": null, "default_payment_method": null, "default_source": null, "default_tax_rates": [], "discount": null, "ended_at": null, "items": {"object": "list", "data": [{"id": "si_Hf2pubkQAtbkuS", "object": "subscription_item", "billing_thresholds": null, "created": 1594950701, "metadata": {}, "plan": {"id": "plan_HCaq3bqVvJH4sN", "object": "plan", "active": true, "aggregate_usage": null, "amount": 0, "amount_decimal": "0", "billing_scheme": "per_unit", "created": 1588388692, "currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, "metadata": {}, "nickname": "free", "product": "prod_H07MQuAFARZ6K6", "tiers": null, "tiers_mode": null, "transform_usage": null, "trial_period_days": null, "usage_type": "licensed"}, "price": {"id": "plan_HCaq3bqVvJH4sN", "object": "price", "active": true, "billing_scheme": "per_unit", "created": 1588388692, "currency": "usd", "livemode": false, "lookup_key": null, "metadata": {}, "nickname": "free", "product": "prod_H07MQuAFARZ6K6", "recurring": {"aggregate_usage": null, "interval": "month", "interval_count": 1, "trial_period_days": null, "usage_type": "licensed"}, "tiers_mode": null, "transform_quantity": null, "type": "recurring", "unit_amount": 0, "unit_amount_decimal": "0"}, "quantity": 1, "subscription": "sub_Hf2pgopSeOlHIU", "tax_rates": []}], "has_more": false, "total_count": 1, "url": 
"/v1/subscription_items?subscription=sub_Hf2pgopSeOlHIU"}, "latest_invoice": "in_1J8OZSIEn5WyEQxnYYBrtSmw", "livemode": false, "metadata": {"eligibleForTrial": "false"}, "next_pending_invoice_item_invoice": null, "pause_collection": null, "payment_settings": {"payment_method_options": null, "payment_method_types": null}, "pending_invoice_item_interval": null, "pending_setup_intent": null, "pending_update": null, "plan": {"id": "plan_HCaq3bqVvJH4sN", "object": "plan", "active": true, "aggregate_usage": null, "amount": 0, "amount_decimal": "0", "billing_scheme": "per_unit", "created": 1588388692, "currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, "metadata": {}, "nickname": "free", "product": "prod_H07MQuAFARZ6K6", "tiers": null, "tiers_mode": null, "transform_usage": null, "trial_period_days": null, "usage_type": "licensed"}, "quantity": 1, "schedule": null, "start_date": 1594950700, "status": "active", "tax_percent": null, "transfer_data": null, "trial_end": 1596283200, "trial_start": 1594950700}, "emitted_at": 1626172757000} -{"stream": "subscriptions", "data": {"id": "sub_Hf2hJeXoqmk7wy", "object": "subscription", "application_fee_percent": null, "automatic_tax": {"enabled": false}, "billing_cycle_anchor": 1596283200, "billing_thresholds": null, "cancel_at": null, "cancel_at_period_end": false, "canceled_at": null, "collection_method": "charge_automatically", "created": 1594950255, "current_period_end": 1627819200, "current_period_start": 1625140800, "customer": "cus_Hf2hDkaO4agtlI", "days_until_due": null, "default_payment_method": null, "default_source": null, "default_tax_rates": [], "discount": null, "ended_at": null, "items": {"object": "list", "data": [{"id": "si_Hf2huD5EitmlDD", "object": "subscription_item", "billing_thresholds": null, "created": 1594950256, "metadata": {}, "plan": {"id": "plan_HDfijky2JcM0pm", "object": "plan", "active": true, "aggregate_usage": "sum", "amount": null, "amount_decimal": null, "billing_scheme": "tiered", "created": 1588637439, "currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, "metadata": {}, "nickname": "free-overage", "product": "prod_H07MQuAFARZ6K6", "tiers": [{"flat_amount": null, "flat_amount_decimal": null, "unit_amount": 0, "unit_amount_decimal": "0", "up_to": 5000}, {"flat_amount": null, "flat_amount_decimal": null, "unit_amount": 0, "unit_amount_decimal": "0", "up_to": null}], "tiers_mode": "graduated", "transform_usage": null, "trial_period_days": null, "usage_type": "metered"}, "price": {"id": "plan_HDfijky2JcM0pm", "object": "price", "active": true, "billing_scheme": "tiered", "created": 1588637439, "currency": "usd", "livemode": false, "lookup_key": null, "metadata": {}, "nickname": "free-overage", "product": "prod_H07MQuAFARZ6K6", "recurring": {"aggregate_usage": "sum", "interval": "month", "interval_count": 1, "trial_period_days": null, "usage_type": "metered"}, "tiers_mode": "graduated", "transform_quantity": null, "type": "recurring", "unit_amount": null, "unit_amount_decimal": null}, "subscription": "sub_Hf2hJeXoqmk7wy", "tax_rates": []}], "has_more": false, "total_count": 1, "url": "/v1/subscription_items?subscription=sub_Hf2hJeXoqmk7wy"}, "latest_invoice": "in_1J8OaDIEn5WyEQxn94AzxKel", "livemode": false, "metadata": {"eligibleForTrial": "false"}, "next_pending_invoice_item_invoice": null, "pause_collection": null, "payment_settings": {"payment_method_options": null, "payment_method_types": null}, "pending_invoice_item_interval": null, "pending_setup_intent": null, 
"pending_update": null, "plan": {"id": "plan_HDfijky2JcM0pm", "object": "plan", "active": true, "aggregate_usage": "sum", "amount": null, "amount_decimal": null, "billing_scheme": "tiered", "created": 1588637439, "currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, "metadata": {}, "nickname": "free-overage", "product": "prod_H07MQuAFARZ6K6", "tiers": [{"flat_amount": null, "flat_amount_decimal": null, "unit_amount": 0, "unit_amount_decimal": "0", "up_to": 5000}, {"flat_amount": null, "flat_amount_decimal": null, "unit_amount": 0, "unit_amount_decimal": "0", "up_to": null}], "tiers_mode": "graduated", "transform_usage": null, "trial_period_days": null, "usage_type": "metered"}, "quantity": 1, "schedule": null, "start_date": 1594950255, "status": "active", "tax_percent": null, "transfer_data": null, "trial_end": 1596283200, "trial_start": 1594950255}, "emitted_at": 1626172757000} -{"stream": "subscriptions", "data": {"id": "sub_Hf2hWoSfHPb1hL", "object": "subscription", "application_fee_percent": null, "automatic_tax": {"enabled": false}, "billing_cycle_anchor": 1596283200, "billing_thresholds": null, "cancel_at": null, "cancel_at_period_end": false, "canceled_at": null, "collection_method": "charge_automatically", "created": 1594950254, "current_period_end": 1627819200, "current_period_start": 1625140800, "customer": "cus_Hf2hDkaO4agtlI", "days_until_due": null, "default_payment_method": null, "default_source": null, "default_tax_rates": [], "discount": null, "ended_at": null, "items": {"object": "list", "data": [{"id": "si_Hf2h3C1NSDUp7O", "object": "subscription_item", "billing_thresholds": null, "created": 1594950255, "metadata": {}, "plan": {"id": "plan_HCaq3bqVvJH4sN", "object": "plan", "active": true, "aggregate_usage": null, "amount": 0, "amount_decimal": "0", "billing_scheme": "per_unit", "created": 1588388692, "currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, "metadata": {}, "nickname": "free", "product": "prod_H07MQuAFARZ6K6", "tiers": null, "tiers_mode": null, "transform_usage": null, "trial_period_days": null, "usage_type": "licensed"}, "price": {"id": "plan_HCaq3bqVvJH4sN", "object": "price", "active": true, "billing_scheme": "per_unit", "created": 1588388692, "currency": "usd", "livemode": false, "lookup_key": null, "metadata": {}, "nickname": "free", "product": "prod_H07MQuAFARZ6K6", "recurring": {"aggregate_usage": null, "interval": "month", "interval_count": 1, "trial_period_days": null, "usage_type": "licensed"}, "tiers_mode": null, "transform_quantity": null, "type": "recurring", "unit_amount": 0, "unit_amount_decimal": "0"}, "quantity": 1, "subscription": "sub_Hf2hWoSfHPb1hL", "tax_rates": []}], "has_more": false, "total_count": 1, "url": "/v1/subscription_items?subscription=sub_Hf2hWoSfHPb1hL"}, "latest_invoice": "in_1J8OZSIEn5WyEQxncf0Hg3Fr", "livemode": false, "metadata": {"eligibleForTrial": "false"}, "next_pending_invoice_item_invoice": null, "pause_collection": null, "payment_settings": {"payment_method_options": null, "payment_method_types": null}, "pending_invoice_item_interval": null, "pending_setup_intent": null, "pending_update": null, "plan": {"id": "plan_HCaq3bqVvJH4sN", "object": "plan", "active": true, "aggregate_usage": null, "amount": 0, "amount_decimal": "0", "billing_scheme": "per_unit", "created": 1588388692, "currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, "metadata": {}, "nickname": "free", "product": "prod_H07MQuAFARZ6K6", "tiers": null, "tiers_mode": null, 
"transform_usage": null, "trial_period_days": null, "usage_type": "licensed"}, "quantity": 1, "schedule": null, "start_date": 1594950254, "status": "active", "tax_percent": null, "transfer_data": null, "trial_end": 1596283200, "trial_start": 1594950254}, "emitted_at": 1626172757000} -{"stream": "customers", "data": {"id": "cus_I4ZQMq8s1YtPcY", "object": "customer", "address": null, "balance": 0, "created": 1600837969, "currency": null, "default_source": null, "delinquent": false, "description": "Customer 4", "discount": {"id": "di_1JFaGSIEn5WyEQxngHGp1kXZ", "object": "discount", "checkout_session": null, "coupon": {"id": "MMERwFsd", "object": "coupon", "amount_off": null, "created": 1626853922, "currency": null, "duration": "repeating", "duration_in_months": 3, "livemode": false, "max_redemptions": null, "metadata": {}, "name": null, "percent_off": 25.23, "redeem_by": null, "times_redeemed": 1, "valid": true}, "customer": "cus_I4ZQMq8s1YtPcY", "end": 1634802888, "invoice": null, "invoice_item": null, "promotion_code": null, "start": 1626854088, "subscription": null}, "email": "customer4@test.com", "invoice_prefix": "38428A86", "invoice_settings": {"custom_fields": null, "default_payment_method": null, "footer": null}, "livemode": false, "metadata": {"coupon": "MMERwFsd"}, "name": null, "next_invoice_sequence": 1, "phone": "444-444-4444", "preferred_locales": [], "shipping": null, "sources": {"object": "list", "data": [], "has_more": false, "total_count": 0, "url": "/v1/customers/cus_I4ZQMq8s1YtPcY/sources"}, "subscriptions": {"object": "list", "data": [], "has_more": false, "total_count": 0, "url": "/v1/customers/cus_I4ZQMq8s1YtPcY/subscriptions"}, "tax_exempt": "none", "tax_ids": {"object": "list", "data": [], "has_more": false, "total_count": 0, "url": "/v1/customers/cus_I4ZQMq8s1YtPcY/tax_ids"}}, "emitted_at": 1626172757000} -{"stream": "customers", "data": {"id": "cus_I4ZQpl4L3qtlAB", "object": "customer", "address": null, "balance": 0, "created": 1600837969, "currency": null, "default_source": null, "delinquent": false, "description": "Customer 3", "discount": null, "email": "customer3@test.com", "invoice_prefix": "96FF4CF2", "invoice_settings": {"custom_fields": null, "default_payment_method": null, "footer": null}, "livemode": false, "metadata": {}, "name": null, "next_invoice_sequence": 1, "phone": "333-333-3333", "preferred_locales": [], "shipping": null, "sources": {"object": "list", "data": [], "has_more": false, "total_count": 0, "url": "/v1/customers/cus_I4ZQpl4L3qtlAB/sources"}, "subscriptions": {"object": "list", "data": [], "has_more": false, "total_count": 0, "url": "/v1/customers/cus_I4ZQpl4L3qtlAB/subscriptions"}, "tax_exempt": "none", "tax_ids": {"object": "list", "data": [], "has_more": false, "total_count": 0, "url": "/v1/customers/cus_I4ZQpl4L3qtlAB/tax_ids"}}, "emitted_at": 1626172757000} -{"stream": "customers", "data": {"id": "cus_I4ZQEnCrue3FsN", "object": "customer", "address": null, "balance": 0, "created": 1600837969, "currency": null, "default_source": null, "delinquent": false, "description": "Customer 2", "discount": null, "email": "customer2@test.com", "invoice_prefix": "F7C912BE", "invoice_settings": {"custom_fields": null, "default_payment_method": null, "footer": null}, "livemode": false, "metadata": {}, "name": null, "next_invoice_sequence": 1, "phone": "222-222-2222", "preferred_locales": [], "shipping": null, "sources": {"object": "list", "data": [], "has_more": false, "total_count": 0, "url": "/v1/customers/cus_I4ZQEnCrue3FsN/sources"}, 
"subscriptions": {"object": "list", "data": [], "has_more": false, "total_count": 0, "url": "/v1/customers/cus_I4ZQEnCrue3FsN/subscriptions"}, "tax_exempt": "none", "tax_ids": {"object": "list", "data": [], "has_more": false, "total_count": 0, "url": "/v1/customers/cus_I4ZQEnCrue3FsN/tax_ids"}}, "emitted_at": 1626172757000} -{"stream": "customers", "data": {"id": "cus_I4ZQEfzwFq4fXO", "object": "customer", "address": null, "balance": 0, "created": 1600837969, "currency": null, "default_source": null, "delinquent": false, "description": "Customer 1", "discount": null, "email": "customer1@test.com", "invoice_prefix": "BDCCE7CD", "invoice_settings": {"custom_fields": null, "default_payment_method": null, "footer": null}, "livemode": false, "metadata": {}, "name": null, "next_invoice_sequence": 1, "phone": "111-111-1111", "preferred_locales": [], "shipping": null, "sources": {"object": "list", "data": [], "has_more": false, "total_count": 0, "url": "/v1/customers/cus_I4ZQEfzwFq4fXO/sources"}, "subscriptions": {"object": "list", "data": [], "has_more": false, "total_count": 0, "url": "/v1/customers/cus_I4ZQEfzwFq4fXO/subscriptions"}, "tax_exempt": "none", "tax_ids": {"object": "list", "data": [], "has_more": false, "total_count": 0, "url": "/v1/customers/cus_I4ZQEfzwFq4fXO/tax_ids"}}, "emitted_at": 1626172757000} -{"stream": "customers", "data": {"id": "cus_HzZzA5Cm3Pb8Rk", "object": "customer", "address": null, "balance": 0, "created": 1599686872, "currency": "usd", "default_source": null, "delinquent": false, "description": null, "discount": null, "email": "michel@dataline.io", "invoice_prefix": "BC8FADBE", "invoice_settings": {"custom_fields": null, "default_payment_method": null, "footer": null}, "livemode": false, "metadata": {"workspace_id": "b5d5596b-8802-464e-ab39-48e2d2ceaa4b", "env": "dev", "eligibleForTrial": "true"}, "name": "default", "next_invoice_sequence": 23, "phone": null, "preferred_locales": [], "shipping": null, "sources": {"object": "list", "data": [], "has_more": false, "total_count": 0, "url": "/v1/customers/cus_HzZzA5Cm3Pb8Rk/sources"}, "subscriptions": {"object": "list", "data": [{"id": "sub_HzZz2kXi3X5JeO", "object": "subscription", "application_fee_percent": null, "automatic_tax": {"enabled": false}, "billing_cycle_anchor": 1602278873, "billing_thresholds": null, "cancel_at": null, "cancel_at_period_end": false, "canceled_at": null, "collection_method": "charge_automatically", "created": 1599686873, "current_period_end": 1628544473, "current_period_start": 1625866073, "customer": "cus_HzZzA5Cm3Pb8Rk", "days_until_due": null, "default_payment_method": null, "default_source": null, "default_tax_rates": [], "discount": null, "ended_at": null, "items": {"object": "list", "data": [{"id": "si_HzZzkViznILf47", "object": "subscription_item", "billing_thresholds": null, "created": 1599686874, "metadata": {}, "plan": {"id": "plan_HDfijky2JcM0pm", "object": "plan", "active": true, "aggregate_usage": "sum", "amount": null, "amount_decimal": null, "billing_scheme": "tiered", "created": 1588637439, "currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, "metadata": {}, "nickname": "free-overage", "product": "prod_H07MQuAFARZ6K6", "tiers": [{"flat_amount": null, "flat_amount_decimal": null, "unit_amount": 0, "unit_amount_decimal": "0", "up_to": 5000}, {"flat_amount": null, "flat_amount_decimal": null, "unit_amount": 0, "unit_amount_decimal": "0", "up_to": null}], "tiers_mode": "graduated", "transform_usage": null, "trial_period_days": null, "usage_type": 
"metered"}, "price": {"id": "plan_HDfijky2JcM0pm", "object": "price", "active": true, "billing_scheme": "tiered", "created": 1588637439, "currency": "usd", "livemode": false, "lookup_key": null, "metadata": {}, "nickname": "free-overage", "product": "prod_H07MQuAFARZ6K6", "recurring": {"aggregate_usage": "sum", "interval": "month", "interval_count": 1, "trial_period_days": null, "usage_type": "metered"}, "tiers_mode": "graduated", "transform_quantity": null, "type": "recurring", "unit_amount": null, "unit_amount_decimal": null}, "subscription": "sub_HzZz2kXi3X5JeO", "tax_rates": []}], "has_more": false, "total_count": 1, "url": "/v1/subscription_items?subscription=sub_HzZz2kXi3X5JeO"}, "latest_invoice": "in_1JBRG2IEn5WyEQxn2Ic7V2RS", "livemode": false, "metadata": {}, "next_pending_invoice_item_invoice": null, "pause_collection": null, "payment_settings": {"payment_method_options": null, "payment_method_types": null}, "pending_invoice_item_interval": null, "pending_setup_intent": null, "pending_update": null, "plan": {"id": "plan_HDfijky2JcM0pm", "object": "plan", "active": true, "aggregate_usage": "sum", "amount": null, "amount_decimal": null, "billing_scheme": "tiered", "created": 1588637439, "currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, "metadata": {}, "nickname": "free-overage", "product": "prod_H07MQuAFARZ6K6", "tiers": [{"flat_amount": null, "flat_amount_decimal": null, "unit_amount": 0, "unit_amount_decimal": "0", "up_to": 5000}, {"flat_amount": null, "flat_amount_decimal": null, "unit_amount": 0, "unit_amount_decimal": "0", "up_to": null}], "tiers_mode": "graduated", "transform_usage": null, "trial_period_days": null, "usage_type": "metered"}, "quantity": 1, "schedule": null, "start_date": 1599686873, "status": "active", "tax_percent": null, "transfer_data": null, "trial_end": 1602278873, "trial_start": 1599686873}, {"id": "sub_HzZz9jG0XoSTzp", "object": "subscription", "application_fee_percent": null, "automatic_tax": {"enabled": false}, "billing_cycle_anchor": 1602278873, "billing_thresholds": null, "cancel_at": null, "cancel_at_period_end": false, "canceled_at": null, "collection_method": "charge_automatically", "created": 1599686873, "current_period_end": 1628544473, "current_period_start": 1625866073, "customer": "cus_HzZzA5Cm3Pb8Rk", "days_until_due": null, "default_payment_method": null, "default_source": null, "default_tax_rates": [], "discount": null, "ended_at": null, "items": {"object": "list", "data": [{"id": "si_HzZzdUREzTkBGP", "object": "subscription_item", "billing_thresholds": null, "created": 1599686873, "metadata": {}, "plan": {"id": "plan_HCaq3bqVvJH4sN", "object": "plan", "active": true, "aggregate_usage": null, "amount": 0, "amount_decimal": "0", "billing_scheme": "per_unit", "created": 1588388692, "currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, "metadata": {}, "nickname": "free", "product": "prod_H07MQuAFARZ6K6", "tiers": null, "tiers_mode": null, "transform_usage": null, "trial_period_days": null, "usage_type": "licensed"}, "price": {"id": "plan_HCaq3bqVvJH4sN", "object": "price", "active": true, "billing_scheme": "per_unit", "created": 1588388692, "currency": "usd", "livemode": false, "lookup_key": null, "metadata": {}, "nickname": "free", "product": "prod_H07MQuAFARZ6K6", "recurring": {"aggregate_usage": null, "interval": "month", "interval_count": 1, "trial_period_days": null, "usage_type": "licensed"}, "tiers_mode": null, "transform_quantity": null, "type": "recurring", "unit_amount": 0, 
"unit_amount_decimal": "0"}, "quantity": 1, "subscription": "sub_HzZz9jG0XoSTzp", "tax_rates": []}], "has_more": false, "total_count": 1, "url": "/v1/subscription_items?subscription=sub_HzZz9jG0XoSTzp"}, "latest_invoice": "in_1JBRG1IEn5WyEQxn1UZZ6j5W", "livemode": false, "metadata": {}, "next_pending_invoice_item_invoice": null, "pause_collection": null, "payment_settings": {"payment_method_options": null, "payment_method_types": null}, "pending_invoice_item_interval": null, "pending_setup_intent": null, "pending_update": null, "plan": {"id": "plan_HCaq3bqVvJH4sN", "object": "plan", "active": true, "aggregate_usage": null, "amount": 0, "amount_decimal": "0", "billing_scheme": "per_unit", "created": 1588388692, "currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, "metadata": {}, "nickname": "free", "product": "prod_H07MQuAFARZ6K6", "tiers": null, "tiers_mode": null, "transform_usage": null, "trial_period_days": null, "usage_type": "licensed"}, "quantity": 1, "schedule": null, "start_date": 1599686873, "status": "active", "tax_percent": null, "transfer_data": null, "trial_end": 1602278873, "trial_start": 1599686873}], "has_more": false, "total_count": 2, "url": "/v1/customers/cus_HzZzA5Cm3Pb8Rk/subscriptions"}, "tax_exempt": "none", "tax_ids": {"object": "list", "data": [], "has_more": false, "total_count": 0, "url": "/v1/customers/cus_HzZzA5Cm3Pb8Rk/tax_ids"}}, "emitted_at": 1626172757000} -{"stream": "customers", "data": {"id": "cus_Hm79aU31H8NCaS", "object": "customer", "address": null, "balance": 0, "created": 1596581264, "currency": "usd", "default_source": null, "delinquent": false, "description": null, "discount": null, "email": "jamakase54+10@gmail.com", "invoice_prefix": "F3BD4972", "invoice_settings": {"custom_fields": null, "default_payment_method": null, "footer": null}, "livemode": false, "metadata": {"workspace_id": "ad73703b-8397-4f79-a725-0e6f639f35a8", "env": "dev", "eligibleForTrial": "true"}, "name": "test2", "next_invoice_sequence": 25, "phone": null, "preferred_locales": [], "shipping": null, "sources": {"object": "list", "data": [], "has_more": false, "total_count": 0, "url": "/v1/customers/cus_Hm79aU31H8NCaS/sources"}, "subscriptions": {"object": "list", "data": [{"id": "sub_Hm79IpijHbWG6Y", "object": "subscription", "application_fee_percent": null, "automatic_tax": {"enabled": false}, "billing_cycle_anchor": 1599173266, "billing_thresholds": null, "cancel_at": null, "cancel_at_period_end": false, "canceled_at": null, "collection_method": "charge_automatically", "created": 1596581266, "current_period_end": 1628030866, "current_period_start": 1625352466, "customer": "cus_Hm79aU31H8NCaS", "days_until_due": null, "default_payment_method": null, "default_source": null, "default_tax_rates": [], "discount": null, "ended_at": null, "items": {"object": "list", "data": [{"id": "si_Hm79NwhbKPTnMW", "object": "subscription_item", "billing_thresholds": null, "created": 1596581266, "metadata": {}, "plan": {"id": "plan_HDfijky2JcM0pm", "object": "plan", "active": true, "aggregate_usage": "sum", "amount": null, "amount_decimal": null, "billing_scheme": "tiered", "created": 1588637439, "currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, "metadata": {}, "nickname": "free-overage", "product": "prod_H07MQuAFARZ6K6", "tiers": [{"flat_amount": null, "flat_amount_decimal": null, "unit_amount": 0, "unit_amount_decimal": "0", "up_to": 5000}, {"flat_amount": null, "flat_amount_decimal": null, "unit_amount": 0, "unit_amount_decimal": "0", 
"up_to": null}], "tiers_mode": "graduated", "transform_usage": null, "trial_period_days": null, "usage_type": "metered"}, "price": {"id": "plan_HDfijky2JcM0pm", "object": "price", "active": true, "billing_scheme": "tiered", "created": 1588637439, "currency": "usd", "livemode": false, "lookup_key": null, "metadata": {}, "nickname": "free-overage", "product": "prod_H07MQuAFARZ6K6", "recurring": {"aggregate_usage": "sum", "interval": "month", "interval_count": 1, "trial_period_days": null, "usage_type": "metered"}, "tiers_mode": "graduated", "transform_quantity": null, "type": "recurring", "unit_amount": null, "unit_amount_decimal": null}, "subscription": "sub_Hm79IpijHbWG6Y", "tax_rates": []}], "has_more": false, "total_count": 1, "url": "/v1/subscription_items?subscription=sub_Hm79IpijHbWG6Y"}, "latest_invoice": "in_1J9Hf1IEn5WyEQxnRIsUHRNk", "livemode": false, "metadata": {}, "next_pending_invoice_item_invoice": null, "pause_collection": null, "payment_settings": {"payment_method_options": null, "payment_method_types": null}, "pending_invoice_item_interval": null, "pending_setup_intent": null, "pending_update": null, "plan": {"id": "plan_HDfijky2JcM0pm", "object": "plan", "active": true, "aggregate_usage": "sum", "amount": null, "amount_decimal": null, "billing_scheme": "tiered", "created": 1588637439, "currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, "metadata": {}, "nickname": "free-overage", "product": "prod_H07MQuAFARZ6K6", "tiers": [{"flat_amount": null, "flat_amount_decimal": null, "unit_amount": 0, "unit_amount_decimal": "0", "up_to": 5000}, {"flat_amount": null, "flat_amount_decimal": null, "unit_amount": 0, "unit_amount_decimal": "0", "up_to": null}], "tiers_mode": "graduated", "transform_usage": null, "trial_period_days": null, "usage_type": "metered"}, "quantity": 1, "schedule": null, "start_date": 1596581266, "status": "active", "tax_percent": null, "transfer_data": null, "trial_end": 1599173266, "trial_start": 1596581266}, {"id": "sub_Hm79H35Cga2xyM", "object": "subscription", "application_fee_percent": null, "automatic_tax": {"enabled": false}, "billing_cycle_anchor": 1599173265, "billing_thresholds": null, "cancel_at": null, "cancel_at_period_end": false, "canceled_at": null, "collection_method": "charge_automatically", "created": 1596581265, "current_period_end": 1628030865, "current_period_start": 1625352465, "customer": "cus_Hm79aU31H8NCaS", "days_until_due": null, "default_payment_method": null, "default_source": null, "default_tax_rates": [], "discount": null, "ended_at": null, "items": {"object": "list", "data": [{"id": "si_Hm79ez6Hthf2jP", "object": "subscription_item", "billing_thresholds": null, "created": 1596581266, "metadata": {}, "plan": {"id": "plan_HCaq3bqVvJH4sN", "object": "plan", "active": true, "aggregate_usage": null, "amount": 0, "amount_decimal": "0", "billing_scheme": "per_unit", "created": 1588388692, "currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, "metadata": {}, "nickname": "free", "product": "prod_H07MQuAFARZ6K6", "tiers": null, "tiers_mode": null, "transform_usage": null, "trial_period_days": null, "usage_type": "licensed"}, "price": {"id": "plan_HCaq3bqVvJH4sN", "object": "price", "active": true, "billing_scheme": "per_unit", "created": 1588388692, "currency": "usd", "livemode": false, "lookup_key": null, "metadata": {}, "nickname": "free", "product": "prod_H07MQuAFARZ6K6", "recurring": {"aggregate_usage": null, "interval": "month", "interval_count": 1, "trial_period_days": null, 
"usage_type": "licensed"}, "tiers_mode": null, "transform_quantity": null, "type": "recurring", "unit_amount": 0, "unit_amount_decimal": "0"}, "quantity": 1, "subscription": "sub_Hm79H35Cga2xyM", "tax_rates": []}], "has_more": false, "total_count": 1, "url": "/v1/subscription_items?subscription=sub_Hm79H35Cga2xyM"}, "latest_invoice": "in_1J9He9IEn5WyEQxnIHnR07pp", "livemode": false, "metadata": {}, "next_pending_invoice_item_invoice": null, "pause_collection": null, "payment_settings": {"payment_method_options": null, "payment_method_types": null}, "pending_invoice_item_interval": null, "pending_setup_intent": null, "pending_update": null, "plan": {"id": "plan_HCaq3bqVvJH4sN", "object": "plan", "active": true, "aggregate_usage": null, "amount": 0, "amount_decimal": "0", "billing_scheme": "per_unit", "created": 1588388692, "currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, "metadata": {}, "nickname": "free", "product": "prod_H07MQuAFARZ6K6", "tiers": null, "tiers_mode": null, "transform_usage": null, "trial_period_days": null, "usage_type": "licensed"}, "quantity": 1, "schedule": null, "start_date": 1596581265, "status": "active", "tax_percent": null, "transfer_data": null, "trial_end": 1599173265, "trial_start": 1596581265}], "has_more": false, "total_count": 2, "url": "/v1/customers/cus_Hm79aU31H8NCaS/subscriptions"}, "tax_exempt": "none", "tax_ids": {"object": "list", "data": [], "has_more": false, "total_count": 0, "url": "/v1/customers/cus_Hm79aU31H8NCaS/tax_ids"}}, "emitted_at": 1626172757000} -{"stream": "customers", "data": {"id": "cus_HgqdUlT76RCID7", "object": "customer", "address": null, "balance": 0, "created": 1595366618, "currency": "usd", "default_source": null, "delinquent": false, "description": null, "discount": null, "email": "john@dataline.io", "invoice_prefix": "DA31D4FD", "invoice_settings": {"custom_fields": null, "default_payment_method": null, "footer": null}, "livemode": false, "metadata": {"workspace_id": "619717fd-c858-478a-8493-71953801e0d5", "env": "dev", "eligibleForTrial": "false"}, "name": "default", "next_invoice_sequence": 27, "phone": null, "preferred_locales": [], "shipping": null, "sources": {"object": "list", "data": [], "has_more": false, "total_count": 0, "url": "/v1/customers/cus_HgqdUlT76RCID7/sources"}, "subscriptions": {"object": "list", "data": [{"id": "sub_Hgqd9cnZ3U2zso", "object": "subscription", "application_fee_percent": null, "automatic_tax": {"enabled": false}, "billing_cycle_anchor": 1596283200, "billing_thresholds": null, "cancel_at": null, "cancel_at_period_end": false, "canceled_at": null, "collection_method": "charge_automatically", "created": 1595366620, "current_period_end": 1627819200, "current_period_start": 1625140800, "customer": "cus_HgqdUlT76RCID7", "days_until_due": null, "default_payment_method": null, "default_source": null, "default_tax_rates": [], "discount": null, "ended_at": null, "items": {"object": "list", "data": [{"id": "si_Hgqd3GbOq4nX96", "object": "subscription_item", "billing_thresholds": null, "created": 1595366621, "metadata": {}, "plan": {"id": "plan_HDfijky2JcM0pm", "object": "plan", "active": true, "aggregate_usage": "sum", "amount": null, "amount_decimal": null, "billing_scheme": "tiered", "created": 1588637439, "currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, "metadata": {}, "nickname": "free-overage", "product": "prod_H07MQuAFARZ6K6", "tiers": [{"flat_amount": null, "flat_amount_decimal": null, "unit_amount": 0, "unit_amount_decimal": "0", 
"up_to": 5000}, {"flat_amount": null, "flat_amount_decimal": null, "unit_amount": 0, "unit_amount_decimal": "0", "up_to": null}], "tiers_mode": "graduated", "transform_usage": null, "trial_period_days": null, "usage_type": "metered"}, "price": {"id": "plan_HDfijky2JcM0pm", "object": "price", "active": true, "billing_scheme": "tiered", "created": 1588637439, "currency": "usd", "livemode": false, "lookup_key": null, "metadata": {}, "nickname": "free-overage", "product": "prod_H07MQuAFARZ6K6", "recurring": {"aggregate_usage": "sum", "interval": "month", "interval_count": 1, "trial_period_days": null, "usage_type": "metered"}, "tiers_mode": "graduated", "transform_quantity": null, "type": "recurring", "unit_amount": null, "unit_amount_decimal": null}, "subscription": "sub_Hgqd9cnZ3U2zso", "tax_rates": []}], "has_more": false, "total_count": 1, "url": "/v1/subscription_items?subscription=sub_Hgqd9cnZ3U2zso"}, "latest_invoice": "in_1J8OZSIEn5WyEQxnB2v5sbjq", "livemode": false, "metadata": {"eligibleForTrial": "false"}, "next_pending_invoice_item_invoice": null, "pause_collection": null, "payment_settings": {"payment_method_options": null, "payment_method_types": null}, "pending_invoice_item_interval": null, "pending_setup_intent": null, "pending_update": null, "plan": {"id": "plan_HDfijky2JcM0pm", "object": "plan", "active": true, "aggregate_usage": "sum", "amount": null, "amount_decimal": null, "billing_scheme": "tiered", "created": 1588637439, "currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, "metadata": {}, "nickname": "free-overage", "product": "prod_H07MQuAFARZ6K6", "tiers": [{"flat_amount": null, "flat_amount_decimal": null, "unit_amount": 0, "unit_amount_decimal": "0", "up_to": 5000}, {"flat_amount": null, "flat_amount_decimal": null, "unit_amount": 0, "unit_amount_decimal": "0", "up_to": null}], "tiers_mode": "graduated", "transform_usage": null, "trial_period_days": null, "usage_type": "metered"}, "quantity": 1, "schedule": null, "start_date": 1595366620, "status": "active", "tax_percent": null, "transfer_data": null, "trial_end": 1596283200, "trial_start": 1595366620}, {"id": "sub_HgqdZo49rNc9yd", "object": "subscription", "application_fee_percent": null, "automatic_tax": {"enabled": false}, "billing_cycle_anchor": 1596283200, "billing_thresholds": null, "cancel_at": null, "cancel_at_period_end": false, "canceled_at": null, "collection_method": "charge_automatically", "created": 1595366619, "current_period_end": 1627819200, "current_period_start": 1625140800, "customer": "cus_HgqdUlT76RCID7", "days_until_due": null, "default_payment_method": null, "default_source": null, "default_tax_rates": [], "discount": null, "ended_at": null, "items": {"object": "list", "data": [{"id": "si_HgqdPFqB83zOkG", "object": "subscription_item", "billing_thresholds": null, "created": 1595366620, "metadata": {}, "plan": {"id": "plan_HCaq3bqVvJH4sN", "object": "plan", "active": true, "aggregate_usage": null, "amount": 0, "amount_decimal": "0", "billing_scheme": "per_unit", "created": 1588388692, "currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, "metadata": {}, "nickname": "free", "product": "prod_H07MQuAFARZ6K6", "tiers": null, "tiers_mode": null, "transform_usage": null, "trial_period_days": null, "usage_type": "licensed"}, "price": {"id": "plan_HCaq3bqVvJH4sN", "object": "price", "active": true, "billing_scheme": "per_unit", "created": 1588388692, "currency": "usd", "livemode": false, "lookup_key": null, "metadata": {}, "nickname": "free", 
"product": "prod_H07MQuAFARZ6K6", "recurring": {"aggregate_usage": null, "interval": "month", "interval_count": 1, "trial_period_days": null, "usage_type": "licensed"}, "tiers_mode": null, "transform_quantity": null, "type": "recurring", "unit_amount": 0, "unit_amount_decimal": "0"}, "quantity": 1, "subscription": "sub_HgqdZo49rNc9yd", "tax_rates": []}], "has_more": false, "total_count": 1, "url": "/v1/subscription_items?subscription=sub_HgqdZo49rNc9yd"}, "latest_invoice": "in_1J8OaGIEn5WyEQxntxV9SoiC", "livemode": false, "metadata": {"eligibleForTrial": "false"}, "next_pending_invoice_item_invoice": null, "pause_collection": null, "payment_settings": {"payment_method_options": null, "payment_method_types": null}, "pending_invoice_item_interval": null, "pending_setup_intent": null, "pending_update": null, "plan": {"id": "plan_HCaq3bqVvJH4sN", "object": "plan", "active": true, "aggregate_usage": null, "amount": 0, "amount_decimal": "0", "billing_scheme": "per_unit", "created": 1588388692, "currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, "metadata": {}, "nickname": "free", "product": "prod_H07MQuAFARZ6K6", "tiers": null, "tiers_mode": null, "transform_usage": null, "trial_period_days": null, "usage_type": "licensed"}, "quantity": 1, "schedule": null, "start_date": 1595366619, "status": "active", "tax_percent": null, "transfer_data": null, "trial_end": 1596283200, "trial_start": 1595366619}], "has_more": false, "total_count": 2, "url": "/v1/customers/cus_HgqdUlT76RCID7/subscriptions"}, "tax_exempt": "none", "tax_ids": {"object": "list", "data": [], "has_more": false, "total_count": 0, "url": "/v1/customers/cus_HgqdUlT76RCID7/tax_ids"}}, "emitted_at": 1626172757000} -{"stream": "customers", "data": {"id": "cus_Hf34W3ixOwxYl8", "object": "customer", "address": null, "balance": 0, "created": 1594951620, "currency": "usd", "default_source": null, "delinquent": false, "description": null, "discount": null, "email": "sherif+friends@dataline.io", "invoice_prefix": "A7936507", "invoice_settings": {"custom_fields": null, "default_payment_method": null, "footer": null}, "livemode": false, "metadata": {"workspace_id": "9f071356-1b6e-4daf-9032-8e99b3f29bb9", "env": "dev", "eligibleForTrial": "false"}, "name": "default", "next_invoice_sequence": 27, "phone": null, "preferred_locales": [], "shipping": null, "sources": {"object": "list", "data": [], "has_more": false, "total_count": 0, "url": "/v1/customers/cus_Hf34W3ixOwxYl8/sources"}, "subscriptions": {"object": "list", "data": [{"id": "sub_Hf34IdAAxZ52R9", "object": "subscription", "application_fee_percent": null, "automatic_tax": {"enabled": false}, "billing_cycle_anchor": 1596283200, "billing_thresholds": null, "cancel_at": null, "cancel_at_period_end": false, "canceled_at": null, "collection_method": "charge_automatically", "created": 1594951622, "current_period_end": 1627819200, "current_period_start": 1625140800, "customer": "cus_Hf34W3ixOwxYl8", "days_until_due": null, "default_payment_method": null, "default_source": null, "default_tax_rates": [], "discount": null, "ended_at": null, "items": {"object": "list", "data": [{"id": "si_Hf34ZIE6Eghfhg", "object": "subscription_item", "billing_thresholds": null, "created": 1594951623, "metadata": {}, "plan": {"id": "plan_HDfijky2JcM0pm", "object": "plan", "active": true, "aggregate_usage": "sum", "amount": null, "amount_decimal": null, "billing_scheme": "tiered", "created": 1588637439, "currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, 
"metadata": {}, "nickname": "free-overage", "product": "prod_H07MQuAFARZ6K6", "tiers": [{"flat_amount": null, "flat_amount_decimal": null, "unit_amount": 0, "unit_amount_decimal": "0", "up_to": 5000}, {"flat_amount": null, "flat_amount_decimal": null, "unit_amount": 0, "unit_amount_decimal": "0", "up_to": null}], "tiers_mode": "graduated", "transform_usage": null, "trial_period_days": null, "usage_type": "metered"}, "price": {"id": "plan_HDfijky2JcM0pm", "object": "price", "active": true, "billing_scheme": "tiered", "created": 1588637439, "currency": "usd", "livemode": false, "lookup_key": null, "metadata": {}, "nickname": "free-overage", "product": "prod_H07MQuAFARZ6K6", "recurring": {"aggregate_usage": "sum", "interval": "month", "interval_count": 1, "trial_period_days": null, "usage_type": "metered"}, "tiers_mode": "graduated", "transform_quantity": null, "type": "recurring", "unit_amount": null, "unit_amount_decimal": null}, "subscription": "sub_Hf34IdAAxZ52R9", "tax_rates": []}], "has_more": false, "total_count": 1, "url": "/v1/subscription_items?subscription=sub_Hf34IdAAxZ52R9"}, "latest_invoice": "in_1J8OaNIEn5WyEQxnjVyxvHpB", "livemode": false, "metadata": {"eligibleForTrial": "false"}, "next_pending_invoice_item_invoice": null, "pause_collection": null, "payment_settings": {"payment_method_options": null, "payment_method_types": null}, "pending_invoice_item_interval": null, "pending_setup_intent": null, "pending_update": null, "plan": {"id": "plan_HDfijky2JcM0pm", "object": "plan", "active": true, "aggregate_usage": "sum", "amount": null, "amount_decimal": null, "billing_scheme": "tiered", "created": 1588637439, "currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, "metadata": {}, "nickname": "free-overage", "product": "prod_H07MQuAFARZ6K6", "tiers": [{"flat_amount": null, "flat_amount_decimal": null, "unit_amount": 0, "unit_amount_decimal": "0", "up_to": 5000}, {"flat_amount": null, "flat_amount_decimal": null, "unit_amount": 0, "unit_amount_decimal": "0", "up_to": null}], "tiers_mode": "graduated", "transform_usage": null, "trial_period_days": null, "usage_type": "metered"}, "quantity": 1, "schedule": null, "start_date": 1594951622, "status": "active", "tax_percent": null, "transfer_data": null, "trial_end": 1596283200, "trial_start": 1594951622}, {"id": "sub_Hf34Qc1KVVs8S6", "object": "subscription", "application_fee_percent": null, "automatic_tax": {"enabled": false}, "billing_cycle_anchor": 1596283200, "billing_thresholds": null, "cancel_at": null, "cancel_at_period_end": false, "canceled_at": null, "collection_method": "charge_automatically", "created": 1594951621, "current_period_end": 1627819200, "current_period_start": 1625140800, "customer": "cus_Hf34W3ixOwxYl8", "days_until_due": null, "default_payment_method": null, "default_source": null, "default_tax_rates": [], "discount": null, "ended_at": null, "items": {"object": "list", "data": [{"id": "si_Hf34nezuRvVozR", "object": "subscription_item", "billing_thresholds": null, "created": 1594951622, "metadata": {}, "plan": {"id": "plan_HCaq3bqVvJH4sN", "object": "plan", "active": true, "aggregate_usage": null, "amount": 0, "amount_decimal": "0", "billing_scheme": "per_unit", "created": 1588388692, "currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, "metadata": {}, "nickname": "free", "product": "prod_H07MQuAFARZ6K6", "tiers": null, "tiers_mode": null, "transform_usage": null, "trial_period_days": null, "usage_type": "licensed"}, "price": {"id": "plan_HCaq3bqVvJH4sN", 
"object": "price", "active": true, "billing_scheme": "per_unit", "created": 1588388692, "currency": "usd", "livemode": false, "lookup_key": null, "metadata": {}, "nickname": "free", "product": "prod_H07MQuAFARZ6K6", "recurring": {"aggregate_usage": null, "interval": "month", "interval_count": 1, "trial_period_days": null, "usage_type": "licensed"}, "tiers_mode": null, "transform_quantity": null, "type": "recurring", "unit_amount": 0, "unit_amount_decimal": "0"}, "quantity": 1, "subscription": "sub_Hf34Qc1KVVs8S6", "tax_rates": []}], "has_more": false, "total_count": 1, "url": "/v1/subscription_items?subscription=sub_Hf34Qc1KVVs8S6"}, "latest_invoice": "in_1J8OZTIEn5WyEQxny3CxoLoP", "livemode": false, "metadata": {"eligibleForTrial": "false"}, "next_pending_invoice_item_invoice": null, "pause_collection": null, "payment_settings": {"payment_method_options": null, "payment_method_types": null}, "pending_invoice_item_interval": null, "pending_setup_intent": null, "pending_update": null, "plan": {"id": "plan_HCaq3bqVvJH4sN", "object": "plan", "active": true, "aggregate_usage": null, "amount": 0, "amount_decimal": "0", "billing_scheme": "per_unit", "created": 1588388692, "currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, "metadata": {}, "nickname": "free", "product": "prod_H07MQuAFARZ6K6", "tiers": null, "tiers_mode": null, "transform_usage": null, "trial_period_days": null, "usage_type": "licensed"}, "quantity": 1, "schedule": null, "start_date": 1594951621, "status": "active", "tax_percent": null, "transfer_data": null, "trial_end": 1596283200, "trial_start": 1594951621}], "has_more": false, "total_count": 2, "url": "/v1/customers/cus_Hf34W3ixOwxYl8/subscriptions"}, "tax_exempt": "none", "tax_ids": {"object": "list", "data": [], "has_more": false, "total_count": 0, "url": "/v1/customers/cus_Hf34W3ixOwxYl8/tax_ids"}}, "emitted_at": 1626172757000} -{"stream": "customers", "data": {"id": "cus_Hf2qOhXoDIiXiK", "object": "customer", "address": null, "balance": 0, "created": 1594950742, "currency": "usd", "default_source": null, "delinquent": false, "description": null, "discount": null, "email": "sherif@dataline.io", "invoice_prefix": "F763206E", "invoice_settings": {"custom_fields": null, "default_payment_method": "pm_1H5ipwIEn5WyEQxn2LlKZjqJ", "footer": null}, "livemode": false, "metadata": {"workspace_id": "23818f66-a538-48d3-8d50-48ace4a51555", "env": "dev", "eligibleForTrial": "false"}, "name": "paying-with-overage", "next_invoice_sequence": 31, "phone": null, "preferred_locales": [], "shipping": null, "sources": {"object": "list", "data": [], "has_more": false, "total_count": 0, "url": "/v1/customers/cus_Hf2qOhXoDIiXiK/sources"}, "subscriptions": {"object": "list", "data": [{"id": "sub_Hf2qrsVuVGHXe3", "object": "subscription", "application_fee_percent": null, "automatic_tax": {"enabled": false}, "billing_cycle_anchor": 1594951250, "billing_thresholds": null, "cancel_at": null, "cancel_at_period_end": false, "canceled_at": null, "collection_method": "charge_automatically", "created": 1594950743, "current_period_end": 1629165650, "current_period_start": 1626487250, "customer": "cus_Hf2qOhXoDIiXiK", "days_until_due": null, "default_payment_method": null, "default_source": null, "default_tax_rates": [], "discount": null, "ended_at": null, "items": {"object": "list", "data": [{"id": "si_Hf2qsLwNq56OJG", "object": "subscription_item", "billing_thresholds": null, "created": 1594950744, "metadata": {}, "plan": {"id": "price_1H5DS3IEn5WyEQxnqW2x4pWu", "object": "plan", 
"active": true, "aggregate_usage": "sum", "amount": null, "amount_decimal": null, "billing_scheme": "tiered", "created": 1594830443, "currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, "metadata": {}, "nickname": "starter-overage-monthly-v2", "product": "prod_H07MQuAFARZ6K6", "tiers": [{"flat_amount": null, "flat_amount_decimal": null, "unit_amount": 0, "unit_amount_decimal": "0", "up_to": 50000}, {"flat_amount": null, "flat_amount_decimal": null, "unit_amount": null, "unit_amount_decimal": "0.5", "up_to": null}], "tiers_mode": "graduated", "transform_usage": null, "trial_period_days": null, "usage_type": "metered"}, "price": {"id": "price_1H5DS3IEn5WyEQxnqW2x4pWu", "object": "price", "active": true, "billing_scheme": "tiered", "created": 1594830443, "currency": "usd", "livemode": false, "lookup_key": null, "metadata": {}, "nickname": "starter-overage-monthly-v2", "product": "prod_H07MQuAFARZ6K6", "recurring": {"aggregate_usage": "sum", "interval": "month", "interval_count": 1, "trial_period_days": null, "usage_type": "metered"}, "tiers_mode": "graduated", "transform_quantity": null, "type": "recurring", "unit_amount": null, "unit_amount_decimal": null}, "subscription": "sub_Hf2qrsVuVGHXe3", "tax_rates": []}], "has_more": false, "total_count": 1, "url": "/v1/subscription_items?subscription=sub_Hf2qrsVuVGHXe3"}, "latest_invoice": "in_1JE2w0IEn5WyEQxnm556audH", "livemode": false, "metadata": {}, "next_pending_invoice_item_invoice": null, "pause_collection": null, "payment_settings": {"payment_method_options": null, "payment_method_types": null}, "pending_invoice_item_interval": null, "pending_setup_intent": null, "pending_update": null, "plan": {"id": "price_1H5DS3IEn5WyEQxnqW2x4pWu", "object": "plan", "active": true, "aggregate_usage": "sum", "amount": null, "amount_decimal": null, "billing_scheme": "tiered", "created": 1594830443, "currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, "metadata": {}, "nickname": "starter-overage-monthly-v2", "product": "prod_H07MQuAFARZ6K6", "tiers": [{"flat_amount": null, "flat_amount_decimal": null, "unit_amount": 0, "unit_amount_decimal": "0", "up_to": 50000}, {"flat_amount": null, "flat_amount_decimal": null, "unit_amount": null, "unit_amount_decimal": "0.5", "up_to": null}], "tiers_mode": "graduated", "transform_usage": null, "trial_period_days": null, "usage_type": "metered"}, "quantity": 1, "schedule": null, "start_date": 1594950743, "status": "active", "tax_percent": null, "transfer_data": null, "trial_end": 1594951249, "trial_start": 1594950743}, {"id": "sub_Hf2qWerNs8mYbh", "object": "subscription", "application_fee_percent": null, "automatic_tax": {"enabled": false}, "billing_cycle_anchor": 1594951270, "billing_thresholds": null, "cancel_at": null, "cancel_at_period_end": false, "canceled_at": null, "collection_method": "charge_automatically", "created": 1594950743, "current_period_end": 1629165670, "current_period_start": 1626487270, "customer": "cus_Hf2qOhXoDIiXiK", "days_until_due": null, "default_payment_method": null, "default_source": null, "default_tax_rates": [], "discount": null, "ended_at": null, "items": {"object": "list", "data": [{"id": "si_Hf2quA7b9Kin4A", "object": "subscription_item", "billing_thresholds": null, "created": 1594950743, "metadata": {}, "plan": {"id": "plan_HCV3N7NM9cyAm4", "object": "plan", "active": true, "aggregate_usage": null, "amount": 9900, "amount_decimal": "9900", "billing_scheme": "per_unit", "created": 1588367151, "currency": "usd", "interval": "month", 
"interval_count": 1, "livemode": false, "metadata": {}, "nickname": "starter-monthly", "product": "prod_H07MQuAFARZ6K6", "tiers": null, "tiers_mode": null, "transform_usage": null, "trial_period_days": null, "usage_type": "licensed"}, "price": {"id": "plan_HCV3N7NM9cyAm4", "object": "price", "active": true, "billing_scheme": "per_unit", "created": 1588367151, "currency": "usd", "livemode": false, "lookup_key": null, "metadata": {}, "nickname": "starter-monthly", "product": "prod_H07MQuAFARZ6K6", "recurring": {"aggregate_usage": null, "interval": "month", "interval_count": 1, "trial_period_days": null, "usage_type": "licensed"}, "tiers_mode": null, "transform_quantity": null, "type": "recurring", "unit_amount": 9900, "unit_amount_decimal": "9900"}, "quantity": 1, "subscription": "sub_Hf2qWerNs8mYbh", "tax_rates": []}], "has_more": false, "total_count": 1, "url": "/v1/subscription_items?subscription=sub_Hf2qWerNs8mYbh"}, "latest_invoice": "in_1JE2vwIEn5WyEQxnkpZ7QEFP", "livemode": false, "metadata": {}, "next_pending_invoice_item_invoice": null, "pause_collection": null, "payment_settings": {"payment_method_options": null, "payment_method_types": null}, "pending_invoice_item_interval": null, "pending_setup_intent": null, "pending_update": null, "plan": {"id": "plan_HCV3N7NM9cyAm4", "object": "plan", "active": true, "aggregate_usage": null, "amount": 9900, "amount_decimal": "9900", "billing_scheme": "per_unit", "created": 1588367151, "currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, "metadata": {}, "nickname": "starter-monthly", "product": "prod_H07MQuAFARZ6K6", "tiers": null, "tiers_mode": null, "transform_usage": null, "trial_period_days": null, "usage_type": "licensed"}, "quantity": 1, "schedule": null, "start_date": 1594950743, "status": "active", "tax_percent": null, "transfer_data": null, "trial_end": 1594951269, "trial_start": 1594950743}], "has_more": false, "total_count": 2, "url": "/v1/customers/cus_Hf2qOhXoDIiXiK/subscriptions"}, "tax_exempt": "none", "tax_ids": {"object": "list", "data": [], "has_more": false, "total_count": 0, "url": "/v1/customers/cus_Hf2qOhXoDIiXiK/tax_ids"}}, "emitted_at": 1626172757000} diff --git a/airbyte-integrations/connectors/source-stripe/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-stripe/integration_tests/invalid_config.json index 51dcd08dde3e1..63da3f8de65fa 100644 --- a/airbyte-integrations/connectors/source-stripe/integration_tests/invalid_config.json +++ b/airbyte-integrations/connectors/source-stripe/integration_tests/invalid_config.json @@ -1,5 +1,5 @@ { - "client_secret": "wrong-client-secret", + "client_secret": "sk_test_wrongClientSecret", "account_id": "wrong-account-id", "start_date": "2020-05-01T00:00:00Z" } diff --git a/airbyte-integrations/connectors/source-stripe/integration_tests/non_disputes_events_catalog.json b/airbyte-integrations/connectors/source-stripe/integration_tests/non_disputes_events_catalog.json index b71e667190b65..eed53127e0631 100644 --- a/airbyte-integrations/connectors/source-stripe/integration_tests/non_disputes_events_catalog.json +++ b/airbyte-integrations/connectors/source-stripe/integration_tests/non_disputes_events_catalog.json @@ -25,6 +25,18 @@ "sync_mode": "incremental", "destination_sync_mode": "overwrite", "cursor_field": ["created"] + }, + { + "stream": { + "name": "payment_intents", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["created"], + 
"source_defined_primary_key": [["id"]] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append" } ] } diff --git a/airbyte-integrations/connectors/source-stripe/setup.py b/airbyte-integrations/connectors/source-stripe/setup.py index 470add42946c1..f9119c5e51008 100644 --- a/airbyte-integrations/connectors/source-stripe/setup.py +++ b/airbyte-integrations/connectors/source-stripe/setup.py @@ -25,7 +25,7 @@ from setuptools import find_packages, setup -MAIN_REQUIREMENTS = ["airbyte-cdk", "stripe"] +MAIN_REQUIREMENTS = ["airbyte-cdk~=0.1", "stripe==2.56.0"] TEST_REQUIREMENTS = [ "pytest~=6.1", diff --git a/airbyte-integrations/connectors/source-stripe/source_stripe/schemas/payment_intents.json b/airbyte-integrations/connectors/source-stripe/source_stripe/schemas/payment_intents.json new file mode 100644 index 0000000000000..e055503470717 --- /dev/null +++ b/airbyte-integrations/connectors/source-stripe/source_stripe/schemas/payment_intents.json @@ -0,0 +1,944 @@ +{ + "type": ["null", "object"], + "properties": { + "id": { + "type": ["null", "string"] + }, + "object": { + "type": ["null", "string"] + }, + "amount": { + "type": ["null", "integer"] + }, + "amount_capturable": { + "type": ["null", "integer"] + }, + "amount_received": { + "type": ["null", "integer"] + }, + "application": { + "type": ["null", "string"] + }, + "application_fee_amount": { + "type": ["null", "integer"] + }, + "canceled_at": { + "type": ["null", "integer"] + }, + "cancellation_reason": { + "type": ["null", "string"] + }, + "capture_method": { + "type": ["null", "string"], + "enum": ["automatic", "manual"] + }, + "charges": { + "type": ["null", "object"], + "properties": { + "object": { + "type": ["null", "string"] + }, + "data": { + "type": ["null", "array"] + }, + "has_more": { + "type": ["null", "boolean"] + }, + "total_count": { + "type": ["null", "integer"] + }, + "url": { + "type": ["null", "string"] + } + } + }, + "client_secret": { + "type": ["null", "string"] + }, + "confirmation_method": { + "type": ["null", "string"], + "enum": ["automatic", "manual"] + }, + "created": { + "type": ["null", "integer"] + }, + "currency": { + "type": ["null", "string"] + }, + "customer": { + "type": ["null", "string"] + }, + "description": { + "type": ["null", "string"] + }, + "invoice": { + "type": ["null", "string"] + }, + "last_payment_error": { + "type": ["null", "object"], + "properties": { + "charge": { + "type": ["null", "string"] + }, + "code": { + "type": ["null", "string"] + }, + "decline_code": { + "type": ["null", "string"] + }, + "doc_url": { + "type": ["null", "string"] + }, + "message": { + "type": ["null", "string"] + }, + "param": { + "type": ["null", "string"] + }, + "payment_method": { + "type": ["null", "object"], + "properties": { + "id": { + "type": ["null", "string"] + }, + "object": { + "type": ["null", "string"] + }, + "acss_debit": { + "type": ["null", "object"], + "properties": { + "bank_name": { + "type": ["null", "string"] + }, + "fingerprint": { + "type": ["null", "string"] + }, + "institution_number": { + "type": ["null", "string"] + }, + "last4": { + "type": ["null", "string"] + }, + "transit_number": { + "type": ["null", "string"] + } + } + }, + "afterpay_clearpay": {}, + "alipay": {}, + "au_becs_debit": { + "type": ["null", "object"], + "properties": { + "bsb_number": { + "type": ["null", "string"] + }, + "fingerprint": { + "type": ["null", "string"] + }, + "last4": { + "type": ["null", "string"] + } + } + }, + "bacs_debit": { + "type": ["null", "object"], + "properties": { + 
"fingerprint": { + "type": ["null", "string"] + }, + "last4": { + "type": ["null", "string"] + }, + "sort_code": { + "type": ["null", "string"] + } + } + }, + "bancontact": {}, + "billing_details": { + "type": ["null", "object"], + "properties": { + "address": { + "type": ["null", "object"], + "properties": { + "city": { + "type": ["null", "string"] + }, + "country": { + "type": ["null", "string"] + }, + "line1": { + "type": ["null", "string"] + }, + "line2": { + "type": ["null", "string"] + }, + "postal_code": { + "type": ["null", "string"] + }, + "state": { + "type": ["null", "string"] + } + } + }, + "email": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "phone": { + "type": ["null", "string"] + } + } + }, + "boleto": { + "type": ["null", "object"], + "properties": { + "tax_id\n": { + "type": ["null", "string"] + } + } + }, + "card": { + "type": ["null", "object"], + "properties": { + "brand": { + "type": ["null", "string"] + }, + "checks": { + "type": ["null", "object"], + "properties": { + "address_line1_check": { + "type": ["null", "string"] + }, + "address_postal_code_check": { + "type": ["null", "string"] + }, + "cvc_check": { + "type": ["null", "string"] + } + } + }, + "country": { + "type": ["null", "string"] + }, + "exp_month": { + "type": ["null", "integer"] + }, + "exp_year": { + "type": ["null", "integer"] + }, + "fingerprint": { + "type": ["null", "string"] + }, + "funding": { + "type": ["null", "string"] + }, + "generated_from": { + "type": ["null", "object"], + "properties": { + "charge": { + "type": ["null", "string"] + }, + "payment_method_details": { + "type": ["null", "object"], + "properties": { + "card_present": { + "type": ["null", "object"], + "properties": { + "brand": { + "type": ["null", "string"] + }, + "cardholder_name": { + "type": ["null", "string"] + }, + "country": { + "type": ["null", "string"] + }, + "emv_auth_data": { + "type": ["null", "string"] + }, + "exp_month": { + "type": ["null", "integer"] + }, + "exp_year": { + "type": ["null", "integer"] + }, + "fingerprint": { + "type": ["null", "string"] + }, + "funding": { + "type": ["null", "string"], + "enum": ["credit", "debit", "prepaid", "unknown"] + }, + "generated_card": { + "type": ["null", "string"] + }, + "lsat4": { + "type": ["null", "string"] + }, + "network": { + "type": ["null", "string"], + "enum": [ + "contact_emv", + "contactless_emv", + "magnetic_stripe_track2", + "magnetic_stripe_fallback", + "contactless_magstripe_mode" + ] + }, + "read_method": { + "type": ["null", "string"] + }, + "receipt": { + "type": ["null", "object"], + "properties": { + "account_type": { + "type": ["null", "string"], + "enum": [ + "credit", + "checking", + "prepaid", + "unknown" + ] + }, + "application_cryptogram": { + "type": ["null", "string"] + }, + "application_preferred_name": { + "type": ["null", "string"] + }, + "authorization_code": { + "type": ["null", "string"] + }, + "authorization_response_code": { + "type": ["null", "string"] + }, + "cardholder_verification_method": { + "type": ["null", "string"] + }, + "dedicated_file_name": { + "type": ["null", "string"] + }, + "terminal_verification_results": { + "type": ["null", "string"] + }, + "transaction_status_information": { + "type": ["null", "string"] + } + } + }, + "type": { + "type": ["null", "string"] + } + } + }, + "type": { + "type": ["null", "string"] + } + } + }, + "setup_attempt": { + "type": ["null", "string"] + } + } + }, + "last4": { + "type": ["null", "string"] + }, + "networks": { + "type": ["null", 
"object"], + "properties": { + "available": { + "type": ["null", "array"], + "items": { + "type": ["null", "string"] + } + }, + "preferred": { + "type": ["null", "string"] + } + } + }, + "three_d_secure_usage": { + "type": ["null", "object"], + "properties": { + "supported": { + "type": ["null", "boolean"] + } + } + }, + "wallet": { + "type": ["null", "object"], + "properties": { + "amex_express_checkout": {}, + "apple_pay": {}, + "dynamic_last4": { + "type": ["null", "string"] + }, + "google_pay": {}, + "masterpass": { + "type": ["null", "object"], + "properties": { + "billing_address": { + "type": ["null", "object"], + "properties": { + "city": { + "type": ["null", "string"] + }, + "country": { + "type": ["null", "string"] + }, + "line1": { + "type": ["null", "string"] + }, + "line2": { + "type": ["null", "string"] + }, + "postal_code": { + "type": ["null", "string"] + }, + "state": { + "type": ["null", "string"] + } + } + }, + "email": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "shipping_address": { + "type": ["null", "object"], + "properties": { + "city": { + "type": ["null", "string"] + }, + "country": { + "type": ["null", "string"] + }, + "line1": { + "type": ["null", "string"] + }, + "line2": { + "type": ["null", "string"] + }, + "postal_code": { + "type": ["null", "string"] + }, + "state": { + "type": ["null", "string"] + } + } + } + } + }, + "samsung_pay": {}, + "type": { + "type": ["null", "string"] + }, + "visa_checkout": { + "type": ["null", "object"], + "properties": { + "billing_address": { + "type": ["null", "object"], + "properties": { + "city": { + "type": ["null", "string"] + }, + "country": { + "type": ["null", "string"] + }, + "line1": { + "type": ["null", "string"] + }, + "line2": { + "type": ["null", "string"] + }, + "postal_code": { + "type": ["null", "string"] + }, + "state": { + "type": ["null", "string"] + } + } + }, + "email": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "shipping_address": { + "type": ["null", "object"], + "properties": { + "city": { + "type": ["null", "string"] + }, + "country": { + "type": ["null", "string"] + }, + "line1": { + "type": ["null", "string"] + }, + "line2": { + "type": ["null", "string"] + }, + "postal_code": { + "type": ["null", "string"] + }, + "state": { + "type": ["null", "string"] + } + } + } + } + } + } + } + } + }, + "card_present": { + "type": ["null", "object"], + "properties": {} + }, + "created": { + "type": ["null", "integer"] + }, + "customer": { + "type": ["null", "string"] + }, + "eps": { + "type": ["null", "object"], + "properties": { + "bank": { + "type": ["null", "string"], + "enum": [ + "arzte_und_apotheker_bank", + "austrian_anadi_bank_ag", + "bank_austria", + "bankhaus_carl_spangler", + "bankhaus_schelhammer_und_schattera_ag", + "bawag_psk_ag", + "bks_bank_ag", + "brull_kallmus_bank_ag", + "btv_vier_lander_bank", + "capital_bank_grawe_gruppe_ag", + "dolomitenbank", + "easybank_ag", + "erste_bank_und_sparkassen", + "hypo_alpeadriabank_international_ag", + "hypo_noe_lb_fur_niederosterreich_u_wien", + "hypo_oberosterreich_salzburg_steiermark", + "hypo_tirol_bank_ag", + "hypo_vorarlberg_bank_ag", + "hypo_bank_burgenland_aktiengesellschaft", + "marchfelder_bank", + "oberbank_ag", + "raiffeisen_bankengruppe_osterreich", + "schoellerbank_ag", + "sparda_bank_wien", + "volksbank_gruppe", + "volkskreditbank_ag", + "vr_bank_braunau" + ] + } + } + }, + "fpx": { + "type": ["null", "object"], + "properties": { + "bank": { + "type": ["null", 
"string"], + "enum": [ + "affin_bank", + "alliance_bank", + "ambank", + "bank_islam", + "bank_muamalat", + "bank_rakyat", + "bsn", + "cimb", + "hong_leong_bank", + "hsbc", + "kfh", + "maybank2u", + "ocbc", + "public_bank", + "rhb", + "standard_chartered", + "uob", + "deutsche_bank", + "maybank2e", + "pb_enterprise" + ] + } + } + }, + "giropay": { + "type": ["null", "object"], + "properties": {} + }, + "grabpay": { + "type": ["null", "object"], + "properties": {} + }, + "ideal": { + "type": ["null", "object"], + "properties": { + "bank": { + "type": ["null", "string"], + "enum": [ + "abn_amro", + "asn_bank", + "bunq", + "handelsbanken", + "ing", + "knab", + "moneyou", + "rabobank", + "regiobank", + "revolut", + "sns_bank", + "triodos_bank", + "van_lanschot" + ] + } + } + }, + "interac_present": { + "type": ["null", "object"], + "properties": {} + }, + "livemode": { + "type": ["null", "boolean"] + }, + "metadata": { + "type": ["null", "object"], + "properties": {} + }, + "oxxo": { + "type": ["null", "object"], + "properties": {} + }, + "p24": { + "type": ["null", "object"], + "properties": { + "bank": { + "type": ["null", "string"] + } + } + }, + "sepa_debit": { + "type": ["null", "object"], + "properties": { + "bank_code": { + "type": ["null", "string"] + }, + "branch_code": { + "type": ["null", "string"] + }, + "country": { + "type": ["null", "string"] + }, + "fingerprint": { + "type": ["null", "string"] + }, + "generated_from": { + "type": ["null", "object"], + "properties": { + "charge": { + "type": ["null", "string"] + }, + "setup_attempt": { + "type": ["null", "string"] + } + } + }, + "last4": { + "type": ["null", "string"] + } + } + }, + "sofort": { + "type": ["null", "object"], + "properties": { + "country": { + "type": ["null", "string"] + } + } + }, + "type": { + "type": ["null", "string"], + "enum": [ + "acss_debit", + "afterpay_clearpay", + "alipay", + "au_becs_debit", + "bacs_debit", + "bancontact", + "boleto", + "card", + "card_present", + "eps", + "fpx", + "giropay", + "grabpay", + "ideal", + "interac_present", + "oxxo", + "p24", + "sepa_debit", + "sofort", + "wechat_pay" + ] + }, + "wechat_pay": { + "type": ["null", "object"], + "properties": {} + } + } + }, + "payment_method_type": { + "type": ["null", "string"] + }, + "type": { + "type": ["null", "string"], + "enum": [ + "api_error", + "card_error", + "idempotency_error", + "invalid_request_error" + ] + } + } + }, + "livemode": { + "type": ["null", "boolean"] + }, + "metadata": { + "type": ["null", "object"], + "properties": {} + }, + "next_action": { + "type": ["null", "object"], + "properties": { + "alipay_handle_redirect": { + "type": ["null", "object"], + "properties": { + "native_data": { + "type": ["null", "string"] + }, + "native_url": { + "type": ["null", "string"] + }, + "return_url": { + "type": ["null", "string"] + }, + "url": { + "type": ["null", "string"] + } + } + }, + "boleto_display_details": { + "type": ["null", "object"], + "properties": { + "expires_at": { + "type": ["null", "integer"] + }, + "hosted_voucher_url": { + "type": ["null", "string"] + }, + "number": { + "type": ["null", "string"] + }, + "pdf": { + "type": ["null", "string"] + } + } + }, + "oxxo_display_details": { + "type": ["null", "object"], + "properties": { + "expires_after": { + "type": ["null", "integer"] + }, + "hosted_voucher_url": { + "type": ["null", "string"] + }, + "number": { + "type": ["null", "string"] + } + } + }, + "redirect_to_url": { + "type": ["null", "object"], + "properties": { + "return_url": { + "type": ["null", 
"string"] + }, + "url": { + "type": ["null", "string"] + } + } + }, + "type": { + "type": ["null", "string"] + }, + "use_stripe_sdk": { + "type": ["null", "object"], + "properties": {} + }, + "verify_with_microdeposits": { + "type": ["null", "object"], + "properties": { + "arrival_date": { + "type": ["null", "integer"] + }, + "hosted_verification_url": { + "type": ["null", "string"] + } + } + }, + "wechat_pay_display_qr_code": { + "type": ["null", "object"], + "properties": { + "data": { + "type": ["null", "string"] + }, + "image_data_url": { + "type": ["null", "string"] + } + } + }, + "wechat_pay_redirect_to_android_app": { + "type": ["null", "object"], + "properties": { + "app_id": { + "type": ["null", "string"] + }, + "nonce_str": { + "type": ["null", "string"] + }, + "package": { + "type": ["null", "string"] + }, + "partner_id": { + "type": ["null", "string"] + }, + "prepay_id": { + "type": ["null", "string"] + }, + "sign": { + "type": ["null", "string"] + }, + "timestamp": { + "type": ["null", "string"] + } + } + }, + "wechat_pay_redirect_to_ios_app": { + "type": ["null", "object"], + "properties": { + "native_url": { + "type": ["null", "string"] + } + } + } + } + }, + "on_behalf_of": { + "type": ["null", "string"] + }, + "payment_method": { + "type": ["null", "string"] + }, + "payment_method_options": { + "type": ["null", "object"], + "properties": {} + }, + "payment_method_types": { + "type": ["null", "array"], + "items": { + "type": ["null", "string"] + } + }, + "receipt_email": { + "type": ["null", "string"] + }, + "review": { + "type": ["null", "string"] + }, + "setup_future_usage": { + "type": ["null", "string"] + }, + "shipping": { + "type": ["null", "object"], + "properties": { + "address": { + "type": ["null", "object"], + "properties": { + "city": { + "type": ["null", "string"] + }, + "country": { + "type": ["null", "string"] + }, + "line1": { + "type": ["null", "string"] + }, + "line2": { + "type": ["null", "string"] + }, + "postal_code": { + "type": ["null", "string"] + }, + "state": { + "type": ["null", "string"] + } + } + }, + "carrier": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "phone": { + "type": ["null", "string"] + }, + "tracking_number": { + "type": ["null", "string"] + } + } + }, + "source": { + "type": ["null", "string"] + }, + "statement_description": { + "type": ["null", "string"] + }, + "statement_descriptor_suffix": { + "type": ["null", "string"] + }, + "status": { + "type": ["null", "string"] + }, + "transfer_data": { + "type": ["null", "object"], + "properties": { + "amount": { + "type": ["null", "integer"] + }, + "destination": { + "type": ["null", "string"] + } + } + }, + "transfer_group": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-stripe/source_stripe/source.py b/airbyte-integrations/connectors/source-stripe/source_stripe/source.py index 711d58953ae6f..3d7076e1aed2e 100644 --- a/airbyte-integrations/connectors/source-stripe/source_stripe/source.py +++ b/airbyte-integrations/connectors/source-stripe/source_stripe/source.py @@ -23,253 +23,34 @@ # -import math -from abc import ABC, abstractmethod -from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, Tuple +from typing import Any, List, Mapping, Tuple -import requests import stripe from airbyte_cdk import AirbyteLogger -from airbyte_cdk.models import SyncMode from airbyte_cdk.sources import AbstractSource from airbyte_cdk.sources.streams import Stream -from airbyte_cdk.sources.streams.http 
import HttpStream from airbyte_cdk.sources.streams.http.auth import TokenAuthenticator - - -class StripeStream(HttpStream, ABC): - url_base = "https://api.stripe.com/v1/" - primary_key = "id" - - def __init__(self, account_id: str, **kwargs): - super().__init__(**kwargs) - self.account_id = account_id - - def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: - decoded_response = response.json() - if bool(decoded_response.get("has_more", "False")) and decoded_response.get("data", []): - last_object_id = decoded_response["data"][-1]["id"] - return {"starting_after": last_object_id} - - def request_params( - self, - stream_state: Mapping[str, Any], - stream_slice: Mapping[str, Any] = None, - next_page_token: Mapping[str, Any] = None, - ) -> MutableMapping[str, Any]: - - # Stripe default pagination is 10, max is 100 - params = {"limit": 100} - - # Handle pagination by inserting the next page's token in the request parameters - if next_page_token: - params.update(next_page_token) - - return params - - def request_headers(self, **kwargs) -> Mapping[str, Any]: - if self.account_id: - return {"Stripe-Account": self.account_id} - - return {} - - def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: - response_json = response.json() - yield from response_json.get("data", []) # Stripe puts records in a container array "data" - - -class IncrementalStripeStream(StripeStream, ABC): - # Stripe returns most recently created objects first, so we don't want to persist state until the entire stream has been read - state_checkpoint_interval = math.inf - - @property - @abstractmethod - def cursor_field(self) -> str: - """ - Defining a cursor field indicates that a stream is incremental, so any incremental stream must extend this class - and define a cursor field. - """ - pass - - def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]: - """ - Return the latest state by comparing the cursor value in the latest record with the stream's most recent state object - and returning an updated state object. 
- """ - return {self.cursor_field: max(latest_record.get(self.cursor_field), current_stream_state.get(self.cursor_field, 0))} - - def request_params(self, stream_state=None, **kwargs): - stream_state = stream_state or {} - params = super().request_params(stream_state=stream_state, **kwargs) - params["created[gte]"] = stream_state.get(self.cursor_field) - return params - - -class Customers(IncrementalStripeStream): - cursor_field = "created" - - def path(self, **kwargs) -> str: - return "customers" - - -class BalanceTransactions(IncrementalStripeStream): - cursor_field = "created" - name = "balance_transactions" - - def path(self, **kwargs) -> str: - return "balance_transactions" - - -class Charges(IncrementalStripeStream): - cursor_field = "created" - - def path(self, **kwargs) -> str: - return "charges" - - -class CustomerBalanceTransactions(StripeStream): - name = "customer_balance_transactions" - - def path(self, stream_slice: Mapping[str, Any] = None, **kwargs): - customer_id = stream_slice["customer_id"] - return f"customers/{customer_id}/balance_transactions" - - def read_records(self, stream_slice: Optional[Mapping[str, Any]] = None, **kwargs) -> Iterable[Mapping[str, Any]]: - customers_stream = Customers(authenticator=self.authenticator, account_id=self.account_id) - for customer in customers_stream.read_records(sync_mode=SyncMode.full_refresh): - yield from super().read_records(stream_slice={"customer_id": customer["id"]}, **kwargs) - - -class Coupons(IncrementalStripeStream): - cursor_field = "created" - - def path(self, **kwargs): - return "coupons" - - -class Disputes(IncrementalStripeStream): - cursor_field = "created" - - def path(self, **kwargs): - return "disputes" - - -class Events(IncrementalStripeStream): - cursor_field = "created" - - def path(self, **kwargs): - return "events" - - -class Invoices(IncrementalStripeStream): - cursor_field = "created" - - def path(self, **kwargs): - return "invoices" - - -class InvoiceLineItems(StripeStream): - name = "invoice_line_items" - - def path(self, stream_slice: Mapping[str, Any] = None, **kwargs): - return f"invoices/{stream_slice['invoice_id']}/lines" - - def read_records(self, stream_slice: Optional[Mapping[str, Any]] = None, **kwargs) -> Iterable[Mapping[str, Any]]: - invoices_stream = Invoices(authenticator=self.authenticator, account_id=self.account_id) - for invoice in invoices_stream.read_records(sync_mode=SyncMode.full_refresh): - yield from super().read_records(stream_slice={"invoice_id": invoice["id"]}, **kwargs) - - -class InvoiceItems(IncrementalStripeStream): - cursor_field = "date" - name = "invoice_items" - - def path(self, **kwargs): - return "invoiceitems" - - -class Payouts(IncrementalStripeStream): - cursor_field = "created" - - def path(self, **kwargs): - return "payouts" - - -class Plans(IncrementalStripeStream): - cursor_field = "created" - - def path(self, **kwargs): - return "plans" - - -class Products(IncrementalStripeStream): - cursor_field = "created" - - def path(self, **kwargs): - return "products" - - -class Subscriptions(IncrementalStripeStream): - cursor_field = "created" - status = "all" - - def path(self, **kwargs): - return "subscriptions" - - def request_params(self, stream_state=None, **kwargs): - stream_state = stream_state or {} - params = super().request_params(stream_state=stream_state, **kwargs) - params["status"] = self.status - return params - - -class SubscriptionItems(StripeStream): - name = "subscription_items" - - def path(self, **kwargs): - return "subscription_items" - - def 
request_params(self, stream_slice: Mapping[str, Any] = None, **kwargs): - params = super().request_params(stream_slice=stream_slice, **kwargs) - params["subscription"] = stream_slice["subscription_id"] - return params - - def read_records(self, stream_slice: Optional[Mapping[str, Any]] = None, **kwargs) -> Iterable[Mapping[str, Any]]: - subscriptions_stream = Subscriptions(authenticator=self.authenticator, account_id=self.account_id) - for subscriptions in subscriptions_stream.read_records(sync_mode=SyncMode.full_refresh): - yield from super().read_records(stream_slice={"subscription_id": subscriptions["id"]}, **kwargs) - - -class Transfers(IncrementalStripeStream): - cursor_field = "created" - - def path(self, **kwargs): - return "transfers" - - -class Refunds(IncrementalStripeStream): - cursor_field = "created" - - def path(self, **kwargs): - return "refunds" - - -class BankAccounts(StripeStream): - name = "bank_accounts" - - def path(self, stream_slice: Mapping[str, Any] = None, **kwargs): - customer_id = stream_slice["customer_id"] - return f"customers/{customer_id}/sources" - - def request_params(self, **kwargs) -> MutableMapping[str, Any]: - params = super().request_params(**kwargs) - params["object"] = "bank_account" - return params - - def read_records(self, stream_slice: Optional[Mapping[str, Any]] = None, **kwargs) -> Iterable[Mapping[str, Any]]: - customers_stream = Customers(authenticator=self.authenticator, account_id=self.account_id) - for customer in customers_stream.read_records(sync_mode=SyncMode.full_refresh): - yield from super().read_records(stream_slice={"customer_id": customer["id"]}, **kwargs) +from source_stripe.streams import ( + BalanceTransactions, + BankAccounts, + Charges, + Coupons, + CustomerBalanceTransactions, + Customers, + Disputes, + Events, + InvoiceItems, + InvoiceLineItems, + Invoices, + PaymentIntents, + Payouts, + Plans, + Products, + Refunds, + SubscriptionItems, + Subscriptions, + Transfers, +) class SourceStripe(AbstractSource): @@ -285,22 +66,23 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: authenticator = TokenAuthenticator(config["client_secret"]) args = {"authenticator": authenticator, "account_id": config["account_id"]} return [ - BankAccounts(**args), BalanceTransactions(**args), + BankAccounts(**args), Charges(**args), Coupons(**args), - Customers(**args), CustomerBalanceTransactions(**args), + Customers(**args), Disputes(**args), Events(**args), InvoiceItems(**args), InvoiceLineItems(**args), Invoices(**args), - Plans(**args), + PaymentIntents(**args), Payouts(**args), + Plans(**args), Products(**args), - Subscriptions(**args), - SubscriptionItems(**args), Refunds(**args), + SubscriptionItems(**args), + Subscriptions(**args), Transfers(**args), ] diff --git a/airbyte-integrations/connectors/source-stripe/source_stripe/streams.py b/airbyte-integrations/connectors/source-stripe/source_stripe/streams.py new file mode 100644 index 0000000000000..896c61b0aaec6 --- /dev/null +++ b/airbyte-integrations/connectors/source-stripe/source_stripe/streams.py @@ -0,0 +1,351 @@ +# +# MIT License +# +# Copyright (c) 2020 Airbyte +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do 
so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# + + +import math +from abc import ABC, abstractmethod +from typing import Any, Iterable, Mapping, MutableMapping, Optional + +import requests +from airbyte_cdk.models import SyncMode +from airbyte_cdk.sources.streams.http import HttpStream + + +class StripeStream(HttpStream, ABC): + url_base = "https://api.stripe.com/v1/" + primary_key = "id" + + def __init__(self, account_id: str, **kwargs): + super().__init__(**kwargs) + self.account_id = account_id + + def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: + decoded_response = response.json() + if bool(decoded_response.get("has_more", "False")) and decoded_response.get("data", []): + last_object_id = decoded_response["data"][-1]["id"] + return {"starting_after": last_object_id} + + def request_params( + self, + stream_state: Mapping[str, Any], + stream_slice: Mapping[str, Any] = None, + next_page_token: Mapping[str, Any] = None, + ) -> MutableMapping[str, Any]: + + # Stripe default pagination is 10, max is 100 + params = {"limit": 100} + + # Handle pagination by inserting the next page's token in the request parameters + if next_page_token: + params.update(next_page_token) + + return params + + def request_headers(self, **kwargs) -> Mapping[str, Any]: + if self.account_id: + return {"Stripe-Account": self.account_id} + + return {} + + def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: + response_json = response.json() + yield from response_json.get("data", []) # Stripe puts records in a container array "data" + + +class IncrementalStripeStream(StripeStream, ABC): + # Stripe returns most recently created objects first, so we don't want to persist state until the entire stream has been read + state_checkpoint_interval = math.inf + + @property + @abstractmethod + def cursor_field(self) -> str: + """ + Defining a cursor field indicates that a stream is incremental, so any incremental stream must extend this class + and define a cursor field. + """ + pass + + def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]: + """ + Return the latest state by comparing the cursor value in the latest record with the stream's most recent state object + and returning an updated state object. 
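As a quick aside on the two mechanisms the StripeStream / IncrementalStripeStream code in this diff relies on, the sketch below shows the same ideas in plain Python: cursor pagination via `starting_after`, and an incremental pass that only checkpoints the largest `created` value once the whole stream has been read. This is a rough standalone illustration, not connector code: the `requests` dependency, the `sk_test_placeholder` key, and the `read_created_since` helper name are all assumptions made for the example.

# Standalone sketch of Stripe cursor pagination plus an incremental `created[gte]` filter.
# The API key is a placeholder; this is not part of the connector.
import requests

API_KEY = "sk_test_placeholder"  # hypothetical test key, assumption for this sketch
BASE_URL = "https://api.stripe.com/v1/"


def read_created_since(endpoint: str, state: dict) -> dict:
    """Read every record created at/after the stored cursor and return the new state."""
    params = {"limit": 100}  # Stripe defaults to 10 records per page, 100 is the maximum
    if "created" in state:
        params["created[gte]"] = state["created"]

    max_created = state.get("created", 0)
    while True:
        response = requests.get(
            BASE_URL + endpoint,
            params=params,
            headers={"Authorization": f"Bearer {API_KEY}"},
        )
        response.raise_for_status()
        payload = response.json()
        records = payload.get("data", [])
        for record in records:
            max_created = max(max_created, record.get("created", 0))
        # Stripe signals further pages with `has_more`; the cursor is the last object id.
        if payload.get("has_more") and records:
            params["starting_after"] = records[-1]["id"]
        else:
            break

    # Only persist state after the entire stream has been read, mirroring
    # state_checkpoint_interval = math.inf on IncrementalStripeStream.
    return {"created": max_created}


if __name__ == "__main__":
    print(read_created_since("charges", state={}))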
+ """ + return {self.cursor_field: max(latest_record.get(self.cursor_field), current_stream_state.get(self.cursor_field, 0))} + + def request_params(self, stream_state: Mapping[str, Any] = None, **kwargs): + stream_state = stream_state or {} + params = super().request_params(stream_state=stream_state, **kwargs) + if stream_state and self.cursor_field in stream_state: + params["created[gte]"] = stream_state.get(self.cursor_field) + return params + + +class Customers(IncrementalStripeStream): + """ + API docs: https://stripe.com/docs/api/customers/list + """ + + cursor_field = "created" + + def path(self, **kwargs) -> str: + return "customers" + + +class BalanceTransactions(IncrementalStripeStream): + """ + API docs: https://stripe.com/docs/api/balance_transactions/list + """ + + cursor_field = "created" + name = "balance_transactions" + + def path(self, **kwargs) -> str: + return "balance_transactions" + + +class Charges(IncrementalStripeStream): + """ + API docs: https://stripe.com/docs/api/charges/list + """ + + cursor_field = "created" + + def path(self, **kwargs) -> str: + return "charges" + + +class CustomerBalanceTransactions(StripeStream): + """ + API docs: https://stripe.com/docs/api/customer_balance_transactions/list + """ + + name = "customer_balance_transactions" + + def path(self, stream_slice: Mapping[str, Any] = None, **kwargs): + customer_id = stream_slice["customer_id"] + return f"customers/{customer_id}/balance_transactions" + + def read_records(self, stream_slice: Optional[Mapping[str, Any]] = None, **kwargs) -> Iterable[Mapping[str, Any]]: + customers_stream = Customers(authenticator=self.authenticator, account_id=self.account_id) + for customer in customers_stream.read_records(sync_mode=SyncMode.full_refresh): + yield from super().read_records(stream_slice={"customer_id": customer["id"]}, **kwargs) + + +class Coupons(IncrementalStripeStream): + """ + API docs: https://stripe.com/docs/api/coupons/list + """ + + cursor_field = "created" + + def path(self, **kwargs): + return "coupons" + + +class Disputes(IncrementalStripeStream): + """ + API docs: https://stripe.com/docs/api/disputes/list + """ + + cursor_field = "created" + + def path(self, **kwargs): + return "disputes" + + +class Events(IncrementalStripeStream): + """ + API docs: https://stripe.com/docs/api/events/list + """ + + cursor_field = "created" + + def path(self, **kwargs): + return "events" + + +class Invoices(IncrementalStripeStream): + """ + API docs: https://stripe.com/docs/api/invoices/list + """ + + cursor_field = "created" + + def path(self, **kwargs): + return "invoices" + + +class InvoiceLineItems(StripeStream): + """ + API docs: https://stripe.com/docs/api/invoices/invoice_lines + """ + + name = "invoice_line_items" + + def path(self, stream_slice: Mapping[str, Any] = None, **kwargs): + return f"invoices/{stream_slice['invoice_id']}/lines" + + def read_records(self, stream_slice: Optional[Mapping[str, Any]] = None, **kwargs) -> Iterable[Mapping[str, Any]]: + invoices_stream = Invoices(authenticator=self.authenticator, account_id=self.account_id) + for invoice in invoices_stream.read_records(sync_mode=SyncMode.full_refresh): + yield from super().read_records(stream_slice={"invoice_id": invoice["id"]}, **kwargs) + + +class InvoiceItems(IncrementalStripeStream): + """ + API docs: https://stripe.com/docs/api/invoiceitems/list + """ + + cursor_field = "date" + name = "invoice_items" + + def path(self, **kwargs): + return "invoiceitems" + + +class Payouts(IncrementalStripeStream): + """ + API docs: 
https://stripe.com/docs/api/payouts/list + """ + + cursor_field = "created" + + def path(self, **kwargs): + return "payouts" + + +class Plans(IncrementalStripeStream): + """ + API docs: https://stripe.com/docs/api/plans/list + """ + + cursor_field = "created" + + def path(self, **kwargs): + return "plans" + + +class Products(IncrementalStripeStream): + """ + API docs: https://stripe.com/docs/api/products/list + """ + + cursor_field = "created" + + def path(self, **kwargs): + return "products" + + +class Subscriptions(IncrementalStripeStream): + """ + API docs: https://stripe.com/docs/api/subscriptions/list + """ + + cursor_field = "created" + status = "all" + + def path(self, **kwargs): + return "subscriptions" + + def request_params(self, stream_state=None, **kwargs): + stream_state = stream_state or {} + params = super().request_params(stream_state=stream_state, **kwargs) + params["status"] = self.status + return params + + +class SubscriptionItems(StripeStream): + """ + API docs: https://stripe.com/docs/api/subscription_items/list + """ + + name = "subscription_items" + + def path(self, **kwargs): + return "subscription_items" + + def request_params(self, stream_slice: Mapping[str, Any] = None, **kwargs): + params = super().request_params(stream_slice=stream_slice, **kwargs) + params["subscription"] = stream_slice["subscription_id"] + return params + + def read_records(self, stream_slice: Optional[Mapping[str, Any]] = None, **kwargs) -> Iterable[Mapping[str, Any]]: + subscriptions_stream = Subscriptions(authenticator=self.authenticator, account_id=self.account_id) + for subscriptions in subscriptions_stream.read_records(sync_mode=SyncMode.full_refresh): + yield from super().read_records(stream_slice={"subscription_id": subscriptions["id"]}, **kwargs) + + +class Transfers(IncrementalStripeStream): + """ + API docs: https://stripe.com/docs/api/transfers/list + """ + + cursor_field = "created" + + def path(self, **kwargs): + return "transfers" + + +class Refunds(IncrementalStripeStream): + """ + API docs: https://stripe.com/docs/api/refunds/list + """ + + cursor_field = "created" + + def path(self, **kwargs): + return "refunds" + + +class PaymentIntents(IncrementalStripeStream): + """ + API docs: https://stripe.com/docs/api/payment_intents/list + """ + + cursor_field = "created" + + def path(self, **kwargs): + return "payment_intents" + + +class BankAccounts(StripeStream): + """ + API docs: https://stripe.com/docs/api/customer_bank_accounts/list + """ + + name = "bank_accounts" + + def path(self, stream_slice: Mapping[str, Any] = None, **kwargs): + customer_id = stream_slice["customer_id"] + return f"customers/{customer_id}/sources" + + def request_params(self, **kwargs) -> MutableMapping[str, Any]: + params = super().request_params(**kwargs) + params["object"] = "bank_account" + return params + + def read_records(self, stream_slice: Optional[Mapping[str, Any]] = None, **kwargs) -> Iterable[Mapping[str, Any]]: + customers_stream = Customers(authenticator=self.authenticator, account_id=self.account_id) + for customer in customers_stream.read_records(sync_mode=SyncMode.full_refresh): + yield from super().read_records(stream_slice={"customer_id": customer["id"]}, **kwargs) diff --git a/airbyte-migration/src/main/resources/migrations/migrationV0_14_0/airbyte_db/Attempts.yaml b/airbyte-migration/src/main/resources/migrations/migrationV0_14_0/airbyte_db/Attempts.yaml index e978588d8a938..758f53c322f6f 100644 --- 
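The child streams in streams.py (CustomerBalanceTransactions, InvoiceLineItems, SubscriptionItems and BankAccounts) all follow the same parent/child pattern: read the parent stream in full refresh, then fetch child records once per parent id. The sketch below illustrates that pattern standalone; the `list_all` helper, the placeholder key and the direct `requests` calls are assumptions for the example, not the connector's actual HttpStream machinery.

# Standalone sketch of the parent/child (sub-stream) pattern used by the classes above.
# The API key is a placeholder; this is not part of the connector.
import requests

API_KEY = "sk_test_placeholder"  # hypothetical test key, assumption for this sketch
BASE_URL = "https://api.stripe.com/v1/"
HEADERS = {"Authorization": f"Bearer {API_KEY}"}


def list_all(endpoint: str, **params) -> list:
    """Exhaust one Stripe list endpoint using `starting_after` pagination."""
    records, params = [], {"limit": 100, **params}
    while True:
        page = requests.get(BASE_URL + endpoint, params=params, headers=HEADERS).json()
        records.extend(page.get("data", []))
        if page.get("has_more") and page.get("data"):
            params["starting_after"] = page["data"][-1]["id"]
        else:
            return records


def customer_balance_transactions() -> list:
    """Read the parent `customers` stream, then fetch child records per customer id."""
    results = []
    for customer in list_all("customers"):
        results.extend(list_all(f"customers/{customer['id']}/balance_transactions"))
    return results


if __name__ == "__main__":
    print(len(customer_balance_transactions()))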
a/airbyte-migration/src/main/resources/migrations/migrationV0_14_0/airbyte_db/Attempts.yaml +++ b/airbyte-migration/src/main/resources/migrations/migrationV0_14_0/airbyte_db/Attempts.yaml @@ -11,7 +11,7 @@ required: - status - created_at - updated_at -additionalProperties: false +additionalProperties: true properties: id: type: number diff --git a/airbyte-migration/src/test/java/io/airbyte/migrate/MigrationCurrentSchemaTest.java b/airbyte-migration/src/test/java/io/airbyte/migrate/MigrationCurrentSchemaTest.java index f330df5c3b002..685f98de6c10f 100644 --- a/airbyte-migration/src/test/java/io/airbyte/migrate/MigrationCurrentSchemaTest.java +++ b/airbyte-migration/src/test/java/io/airbyte/migrate/MigrationCurrentSchemaTest.java @@ -30,14 +30,11 @@ public class MigrationCurrentSchemaTest { - /** - * The file-based migration is deprecated. We need to ensure that v0.29.0 is the last one. All new - * migrations should be written in Flyway. - */ @Test public void testLastMigration() { final Migration lastMigration = Migrations.MIGRATIONS.get(Migrations.MIGRATIONS.size() - 1); - assertEquals(Migrations.MIGRATION_V_0_29_0.getVersion(), lastMigration.getVersion()); + assertEquals(Migrations.MIGRATION_V_0_29_0.getVersion(), lastMigration.getVersion(), + "The file-based migration is deprecated. Please do not write a new migration this way. Use Flyway instead."); } } diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/google/GoogleAdsOauthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/google/GoogleAdsOauthFlow.java index 28c79a3f8f70f..8a26b6e2ecb53 100644 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/google/GoogleAdsOauthFlow.java +++ b/airbyte-oauth/src/main/java/io/airbyte/oauth/google/GoogleAdsOauthFlow.java @@ -25,24 +25,45 @@ package io.airbyte.oauth.google; import com.fasterxml.jackson.databind.JsonNode; +import com.google.common.annotations.VisibleForTesting; +import com.google.common.base.Preconditions; import io.airbyte.config.persistence.ConfigRepository; +import java.io.IOException; +import java.net.http.HttpClient; +import java.util.Map; public class GoogleAdsOauthFlow extends GoogleOAuthFlow { + @VisibleForTesting + static final String SCOPE = "https://www.googleapis.com/auth/adwords"; + public GoogleAdsOauthFlow(ConfigRepository configRepository) { - super(configRepository, "https://www.googleapis.com/auth/adwords"); + super(configRepository, SCOPE); + } + + @VisibleForTesting + GoogleAdsOauthFlow(ConfigRepository configRepository, HttpClient client) { + super(configRepository, SCOPE, client); } @Override protected String getClientIdUnsafe(JsonNode config) { // the config object containing client ID and secret is nested inside the "credentials" object + Preconditions.checkArgument(config.hasNonNull("credentials")); return super.getClientIdUnsafe(config.get("credentials")); } @Override protected String getClientSecretUnsafe(JsonNode config) { // the config object containing client ID and secret is nested inside the "credentials" object + Preconditions.checkArgument(config.hasNonNull("credentials")); return super.getClientSecretUnsafe(config.get("credentials")); } + @Override + protected Map completeOAuthFlow(String clientId, String clientSecret, String code, String redirectUrl) throws IOException { + // the config object containing refresh token is nested inside the "credentials" object + return Map.of("credentials", super.completeOAuthFlow(clientId, clientSecret, code, redirectUrl)); + } + } diff --git 
a/airbyte-oauth/src/main/java/io/airbyte/oauth/google/GoogleOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/google/GoogleOAuthFlow.java index 2613408a23ce6..94a6c3c296e10 100644 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/google/GoogleOAuthFlow.java +++ b/airbyte-oauth/src/main/java/io/airbyte/oauth/google/GoogleOAuthFlow.java @@ -144,7 +144,7 @@ public Map completeDestinationOAuth(UUID workspaceId, } } - private Map completeOAuthFlow(String clientId, String clientSecret, String code, String redirectUrl) throws IOException { + protected Map completeOAuthFlow(String clientId, String clientSecret, String code, String redirectUrl) throws IOException { final ImmutableMap body = new Builder() .put("client_id", clientId) .put("client_secret", clientSecret) diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/google/GoogleAdsOauthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/google/GoogleAdsOauthFlowTest.java new file mode 100644 index 0000000000000..45807d7d28e86 --- /dev/null +++ b/airbyte-oauth/src/test/java/io/airbyte/oauth/google/GoogleAdsOauthFlowTest.java @@ -0,0 +1,133 @@ +/* + * MIT License + * + * Copyright (c) 2020 Airbyte + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ + +package io.airbyte.oauth.google; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import com.google.common.collect.ImmutableMap; +import io.airbyte.commons.json.Jsons; +import io.airbyte.config.DestinationOAuthParameter; +import io.airbyte.config.SourceOAuthParameter; +import io.airbyte.config.persistence.ConfigNotFoundException; +import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.validation.json.JsonValidationException; +import java.io.IOException; +import java.net.http.HttpClient; +import java.net.http.HttpResponse; +import java.util.List; +import java.util.Map; +import java.util.UUID; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +public class GoogleAdsOauthFlowTest { + + private static final String SCOPE = "https%3A//www.googleapis.com/auth/analytics.readonly"; + private static final String REDIRECT_URL = "https://airbyte.io"; + + private HttpClient httpClient; + private ConfigRepository configRepository; + private GoogleAdsOauthFlow googleAdsOauthFlow; + + private UUID workspaceId; + private UUID definitionId; + + @BeforeEach + public void setup() { + httpClient = mock(HttpClient.class); + configRepository = mock(ConfigRepository.class); + googleAdsOauthFlow = new GoogleAdsOauthFlow(configRepository, httpClient); + + workspaceId = UUID.randomUUID(); + definitionId = UUID.randomUUID(); + } + + @Test + public void testCompleteSourceOAuth() throws IOException, ConfigNotFoundException, JsonValidationException, InterruptedException { + when(configRepository.listSourceOAuthParam()).thenReturn(List.of(new SourceOAuthParameter() + .withOauthParameterId(UUID.randomUUID()) + .withSourceDefinitionId(definitionId) + .withWorkspaceId(workspaceId) + .withConfiguration(Jsons.jsonNode(Map.of("credentials", ImmutableMap.builder() + .put("client_id", "test_client_id") + .put("client_secret", "test_client_secret") + .build()))))); + + Map returnedCredentials = Map.of("refresh_token", "refresh_token_response"); + final HttpResponse response = mock(HttpResponse.class); + when(response.body()).thenReturn(Jsons.serialize(returnedCredentials)); + when(httpClient.send(any(), any())).thenReturn(response); + final Map queryParams = Map.of("code", "test_code"); + final Map actualQueryParams = googleAdsOauthFlow.completeSourceOAuth(workspaceId, definitionId, queryParams, REDIRECT_URL); + + assertEquals(Jsons.serialize(Map.of("credentials", returnedCredentials)), Jsons.serialize(actualQueryParams)); + } + + @Test + public void testCompleteDestinationOAuth() throws IOException, ConfigNotFoundException, JsonValidationException, InterruptedException { + when(configRepository.listDestinationOAuthParam()).thenReturn(List.of(new DestinationOAuthParameter() + .withOauthParameterId(UUID.randomUUID()) + .withDestinationDefinitionId(definitionId) + .withWorkspaceId(workspaceId) + .withConfiguration(Jsons.jsonNode(Map.of("credentials", ImmutableMap.builder() + .put("client_id", "test_client_id") + .put("client_secret", "test_client_secret") + .build()))))); + + Map returnedCredentials = Map.of("refresh_token", "refresh_token_response"); + final HttpResponse response = mock(HttpResponse.class); + when(response.body()).thenReturn(Jsons.serialize(returnedCredentials)); + when(httpClient.send(any(), any())).thenReturn(response); + final Map queryParams = 
Map.of("code", "test_code"); + final Map actualQueryParams = googleAdsOauthFlow.completeDestinationOAuth(workspaceId, definitionId, queryParams, REDIRECT_URL); + + assertEquals(Jsons.serialize(Map.of("credentials", returnedCredentials)), Jsons.serialize(actualQueryParams)); + } + + @Test + public void testGetClientIdUnsafe() { + String clientId = "123"; + Map clientIdMap = Map.of("client_id", clientId); + Map> nestedConfig = Map.of("credentials", clientIdMap); + + assertThrows(IllegalArgumentException.class, () -> googleAdsOauthFlow.getClientIdUnsafe(Jsons.jsonNode(clientIdMap))); + assertEquals(clientId, googleAdsOauthFlow.getClientIdUnsafe(Jsons.jsonNode(nestedConfig))); + } + + @Test + public void testGetClientSecretUnsafe() { + String clientSecret = "secret"; + Map clientIdMap = Map.of("client_secret", clientSecret); + Map> nestedConfig = Map.of("credentials", clientIdMap); + + assertThrows(IllegalArgumentException.class, () -> googleAdsOauthFlow.getClientSecretUnsafe(Jsons.jsonNode(clientIdMap))); + assertEquals(clientSecret, googleAdsOauthFlow.getClientSecretUnsafe(Jsons.jsonNode(nestedConfig))); + } + +} diff --git a/airbyte-scheduler/app/src/main/java/io/airbyte/scheduler/app/SchedulerApp.java b/airbyte-scheduler/app/src/main/java/io/airbyte/scheduler/app/SchedulerApp.java index d77912d5abcf9..1aa0ece764752 100644 --- a/airbyte-scheduler/app/src/main/java/io/airbyte/scheduler/app/SchedulerApp.java +++ b/airbyte-scheduler/app/src/main/java/io/airbyte/scheduler/app/SchedulerApp.java @@ -27,6 +27,10 @@ import com.google.common.util.concurrent.ThreadFactoryBuilder; import io.airbyte.analytics.Deployment; import io.airbyte.analytics.TrackingClientSingleton; +import io.airbyte.api.client.AirbyteApiClient; +import io.airbyte.api.client.invoker.ApiClient; +import io.airbyte.api.client.invoker.ApiException; +import io.airbyte.api.client.model.HealthCheckRead; import io.airbyte.commons.concurrency.GracefulShutdownHandler; import io.airbyte.commons.version.AirbyteVersion; import io.airbyte.config.Configs; @@ -46,7 +50,6 @@ import io.airbyte.scheduler.persistence.JobPersistence; import io.airbyte.scheduler.persistence.WorkspaceHelper; import io.airbyte.scheduler.persistence.job_tracker.JobTracker; -import io.airbyte.workers.WorkerApp; import io.airbyte.workers.temporal.TemporalClient; import java.io.IOException; import java.nio.file.Path; @@ -153,6 +156,25 @@ private void cleanupZombies(JobPersistence jobPersistence, JobNotifier jobNotifi } } + public static void waitForServer(Configs configs) throws InterruptedException { + final AirbyteApiClient apiClient = new AirbyteApiClient( + new ApiClient().setScheme("http") + .setHost(configs.getAirbyteApiHost()) + .setPort(configs.getAirbyteApiPort()) + .setBasePath("/api")); + + boolean isHealthy = false; + while (!isHealthy) { + try { + HealthCheckRead healthCheck = apiClient.getHealthApi().getHealthCheck(); + isHealthy = healthCheck.getDb(); + } catch (ApiException e) { + LOGGER.info("Waiting for server to become available..."); + Thread.sleep(2000); + } + } + } + public static void main(String[] args) throws IOException, InterruptedException { final Configs configs = new EnvConfigs(); @@ -166,7 +188,7 @@ public static void main(String[] args) throws IOException, InterruptedException LOGGER.info("temporalHost = " + temporalHost); // Wait for the server to initialize the database and run migration - WorkerApp.waitForServer(configs); + waitForServer(configs); LOGGER.info("Creating Job DB connection pool..."); final Database jobDatabase = new 
JobsDatabaseInstance( diff --git a/airbyte-scheduler/persistence/src/test/java/io/airbyte/scheduler/persistence/WorkspaceHelperTest.java b/airbyte-scheduler/persistence/src/test/java/io/airbyte/scheduler/persistence/WorkspaceHelperTest.java index 2dda3e9089c26..67bdbcc438779 100644 --- a/airbyte-scheduler/persistence/src/test/java/io/airbyte/scheduler/persistence/WorkspaceHelperTest.java +++ b/airbyte-scheduler/persistence/src/test/java/io/airbyte/scheduler/persistence/WorkspaceHelperTest.java @@ -43,6 +43,7 @@ import io.airbyte.config.persistence.ConfigRepository; import io.airbyte.config.persistence.FileSystemConfigPersistence; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; +import io.airbyte.protocol.models.ConnectorSpecification; import io.airbyte.scheduler.models.Job; import io.airbyte.scheduler.models.JobStatus; import io.airbyte.validation.json.JsonValidationException; @@ -96,6 +97,7 @@ class WorkspaceHelperTest { ConfigRepository configRepository; JobPersistence jobPersistence; WorkspaceHelper workspaceHelper; + ConnectorSpecification emptyConnectorSpec; @BeforeEach public void setup() throws IOException { @@ -105,6 +107,9 @@ public void setup() throws IOException { jobPersistence = mock(JobPersistence.class); workspaceHelper = new WorkspaceHelper(configRepository, jobPersistence); + + emptyConnectorSpec = mock(ConnectorSpecification.class); + when(emptyConnectorSpec.getConnectionSpecification()).thenReturn(Jsons.emptyObject()); } @Test @@ -130,13 +135,13 @@ public void testMissingObjectsProperException() { @Test public void testSource() throws IOException, JsonValidationException { configRepository.writeStandardSource(SOURCE_DEF); - configRepository.writeSourceConnection(SOURCE); + configRepository.writeSourceConnection(SOURCE, emptyConnectorSpec); final UUID retrievedWorkspace = workspaceHelper.getWorkspaceForSourceIdIgnoreExceptions(SOURCE_ID); assertEquals(WORKSPACE_ID, retrievedWorkspace); // check that caching is working - configRepository.writeSourceConnection(Jsons.clone(SOURCE).withWorkspaceId(UUID.randomUUID())); + configRepository.writeSourceConnection(Jsons.clone(SOURCE).withWorkspaceId(UUID.randomUUID()), emptyConnectorSpec); final UUID retrievedWorkspaceAfterUpdate = workspaceHelper.getWorkspaceForSourceIdIgnoreExceptions(SOURCE_ID); assertEquals(WORKSPACE_ID, retrievedWorkspaceAfterUpdate); } @@ -144,13 +149,13 @@ public void testSource() throws IOException, JsonValidationException { @Test public void testDestination() throws IOException, JsonValidationException { configRepository.writeStandardDestinationDefinition(DEST_DEF); - configRepository.writeDestinationConnection(DEST); + configRepository.writeDestinationConnection(DEST, emptyConnectorSpec); final UUID retrievedWorkspace = workspaceHelper.getWorkspaceForDestinationIdIgnoreExceptions(DEST_ID); assertEquals(WORKSPACE_ID, retrievedWorkspace); // check that caching is working - configRepository.writeDestinationConnection(Jsons.clone(DEST).withWorkspaceId(UUID.randomUUID())); + configRepository.writeDestinationConnection(Jsons.clone(DEST).withWorkspaceId(UUID.randomUUID()), emptyConnectorSpec); final UUID retrievedWorkspaceAfterUpdate = workspaceHelper.getWorkspaceForDestinationIdIgnoreExceptions(DEST_ID); assertEquals(WORKSPACE_ID, retrievedWorkspaceAfterUpdate); } @@ -158,9 +163,9 @@ public void testDestination() throws IOException, JsonValidationException { @Test public void testConnection() throws IOException, JsonValidationException { configRepository.writeStandardSource(SOURCE_DEF); - 
configRepository.writeSourceConnection(SOURCE); + configRepository.writeSourceConnection(SOURCE, emptyConnectorSpec); configRepository.writeStandardDestinationDefinition(DEST_DEF); - configRepository.writeDestinationConnection(DEST); + configRepository.writeDestinationConnection(DEST, emptyConnectorSpec); // set up connection configRepository.writeStandardSync(CONNECTION); @@ -175,8 +180,8 @@ public void testConnection() throws IOException, JsonValidationException { // check that caching is working final UUID newWorkspace = UUID.randomUUID(); - configRepository.writeSourceConnection(Jsons.clone(SOURCE).withWorkspaceId(newWorkspace)); - configRepository.writeDestinationConnection(Jsons.clone(DEST).withWorkspaceId(newWorkspace)); + configRepository.writeSourceConnection(Jsons.clone(SOURCE).withWorkspaceId(newWorkspace), emptyConnectorSpec); + configRepository.writeDestinationConnection(Jsons.clone(DEST).withWorkspaceId(newWorkspace), emptyConnectorSpec); final UUID retrievedWorkspaceAfterUpdate = workspaceHelper.getWorkspaceForDestinationIdIgnoreExceptions(DEST_ID); assertEquals(WORKSPACE_ID, retrievedWorkspaceAfterUpdate); } @@ -198,9 +203,9 @@ public void testOperation() throws IOException, JsonValidationException { @Test public void testConnectionAndJobs() throws IOException, JsonValidationException { configRepository.writeStandardSource(SOURCE_DEF); - configRepository.writeSourceConnection(SOURCE); + configRepository.writeSourceConnection(SOURCE, emptyConnectorSpec); configRepository.writeStandardDestinationDefinition(DEST_DEF); - configRepository.writeDestinationConnection(DEST); + configRepository.writeDestinationConnection(DEST, emptyConnectorSpec); configRepository.writeStandardSync(CONNECTION); // test jobs diff --git a/airbyte-server/src/main/java/io/airbyte/server/ConfigDumpImporter.java b/airbyte-server/src/main/java/io/airbyte/server/ConfigDumpImporter.java index e6da55223c9e1..3039b399d55e8 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/ConfigDumpImporter.java +++ b/airbyte-server/src/main/java/io/airbyte/server/ConfigDumpImporter.java @@ -52,7 +52,10 @@ import io.airbyte.scheduler.persistence.DefaultJobPersistence; import io.airbyte.scheduler.persistence.JobPersistence; import io.airbyte.scheduler.persistence.WorkspaceHelper; +import io.airbyte.server.converters.SpecFetcher; import io.airbyte.server.errors.IdNotFoundKnownException; +import io.airbyte.server.handlers.DestinationHandler; +import io.airbyte.server.handlers.SourceHandler; import io.airbyte.validation.json.JsonSchemaValidator; import io.airbyte.validation.json.JsonValidationException; import java.io.File; @@ -90,23 +93,29 @@ public class ConfigDumpImporter { private final ConfigRepository configRepository; private final WorkspaceHelper workspaceHelper; + private final SpecFetcher specFetcher; private final JsonSchemaValidator jsonSchemaValidator; private final JobPersistence jobPersistence; private final Path stagedResourceRoot; - public ConfigDumpImporter(ConfigRepository configRepository, JobPersistence jobPersistence, WorkspaceHelper workspaceHelper) { - this(configRepository, jobPersistence, workspaceHelper, new JsonSchemaValidator()); + public ConfigDumpImporter(ConfigRepository configRepository, + JobPersistence jobPersistence, + WorkspaceHelper workspaceHelper, + SpecFetcher specFetcher) { + this(configRepository, jobPersistence, workspaceHelper, new JsonSchemaValidator(), specFetcher); } @VisibleForTesting public ConfigDumpImporter(ConfigRepository configRepository, JobPersistence 
jobPersistence, WorkspaceHelper workspaceHelper, - JsonSchemaValidator jsonSchemaValidator) { + JsonSchemaValidator jsonSchemaValidator, + SpecFetcher specFetcher) { this.jsonSchemaValidator = jsonSchemaValidator; this.jobPersistence = jobPersistence; this.configRepository = configRepository; this.workspaceHelper = workspaceHelper; + this.specFetcher = specFetcher; try { this.stagedResourceRoot = Path.of(TMP_AIRBYTE_STAGED_RESOURCES); if (stagedResourceRoot.toFile().exists()) { @@ -407,13 +416,15 @@ private void importConfigsIntoWorkspace(Path sourceRoot, UUID workspaceId, b (sourceConnection) -> { // make sure connector definition exists try { - if (configRepository.getStandardSourceDefinition(sourceConnection.getSourceDefinitionId()) == null) { + final StandardSourceDefinition sourceDefinition = + configRepository.getStandardSourceDefinition(sourceConnection.getSourceDefinitionId()); + if (sourceDefinition == null) { return; } + configRepository.writeSourceConnection(sourceConnection, SourceHandler.getSpecFromSourceDefinitionId(specFetcher, sourceDefinition)); } catch (ConfigNotFoundException e) { return; } - configRepository.writeSourceConnection(sourceConnection); })); case STANDARD_DESTINATION_DEFINITION -> importDestinationDefinitionIntoWorkspace(configs); case DESTINATION_CONNECTION -> destinationIdMap.putAll(importIntoWorkspace( @@ -429,13 +440,15 @@ private void importConfigsIntoWorkspace(Path sourceRoot, UUID workspaceId, b (destinationConnection) -> { // make sure connector definition exists try { - if (configRepository.getStandardDestinationDefinition(destinationConnection.getDestinationDefinitionId()) == null) { + StandardDestinationDefinition destinationDefinition = configRepository.getStandardDestinationDefinition( + destinationConnection.getDestinationDefinitionId()); + if (destinationDefinition == null) { return; } + configRepository.writeDestinationConnection(destinationConnection, DestinationHandler.getSpec(specFetcher, destinationDefinition)); } catch (ConfigNotFoundException e) { return; } - configRepository.writeDestinationConnection(destinationConnection); })); case STANDARD_SYNC -> standardSyncs = configs; case STANDARD_SYNC_OPERATION -> operationIdMap.putAll(importIntoWorkspace( diff --git a/airbyte-server/src/main/java/io/airbyte/server/RunMigration.java b/airbyte-server/src/main/java/io/airbyte/server/RunMigration.java index 983e57deb1931..a1c7d79ed77db 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/RunMigration.java +++ b/airbyte-server/src/main/java/io/airbyte/server/RunMigration.java @@ -29,6 +29,7 @@ import io.airbyte.migrate.MigrateConfig; import io.airbyte.migrate.MigrationRunner; import io.airbyte.scheduler.persistence.JobPersistence; +import io.airbyte.server.converters.SpecFetcher; import io.airbyte.validation.json.JsonValidationException; import java.io.File; import java.io.IOException; @@ -52,11 +53,12 @@ public class RunMigration implements Runnable, AutoCloseable { public RunMigration(JobPersistence jobPersistence, ConfigRepository configRepository, String targetVersion, - ConfigPersistence seedPersistence) { + ConfigPersistence seedPersistence, + SpecFetcher specFetcher) { this.targetVersion = targetVersion; this.seedPersistence = seedPersistence; this.configDumpExporter = new ConfigDumpExporter(configRepository, jobPersistence, null); - this.configDumpImporter = new ConfigDumpImporter(configRepository, jobPersistence, null); + this.configDumpImporter = new ConfigDumpImporter(configRepository, jobPersistence, null, specFetcher); } 
@Override diff --git a/airbyte-server/src/main/java/io/airbyte/server/ServerApp.java b/airbyte-server/src/main/java/io/airbyte/server/ServerApp.java index ea91deae20745..997e872103653 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/ServerApp.java +++ b/airbyte-server/src/main/java/io/airbyte/server/ServerApp.java @@ -54,6 +54,7 @@ import io.airbyte.scheduler.persistence.JobPersistence; import io.airbyte.scheduler.persistence.job_factory.OAuthConfigSupplier; import io.airbyte.scheduler.persistence.job_tracker.JobTracker; +import io.airbyte.server.converters.SpecFetcher; import io.airbyte.server.errors.InvalidInputExceptionMapper; import io.airbyte.server.errors.InvalidJsonExceptionMapper; import io.airbyte.server.errors.InvalidJsonInputExceptionMapper; @@ -212,13 +213,25 @@ public static ServerRunnable getServer(final ServerFactory apiFactory) throws Ex jobPersistence.setVersion(airbyteVersion); } + final JobTracker jobTracker = new JobTracker(configRepository, jobPersistence); + final WorkflowServiceStubs temporalService = TemporalUtils.createTemporalService(configs.getTemporalHost()); + final TemporalClient temporalClient = TemporalClient.production(configs.getTemporalHost(), configs.getWorkspaceRoot()); + final OAuthConfigSupplier oAuthConfigSupplier = new OAuthConfigSupplier(configRepository, false); + final SchedulerJobClient schedulerJobClient = new DefaultSchedulerJobClient(jobPersistence, new DefaultJobCreator(jobPersistence)); + final DefaultSynchronousSchedulerClient syncSchedulerClient = + new DefaultSynchronousSchedulerClient(temporalClient, jobTracker, oAuthConfigSupplier); + final SynchronousSchedulerClient bucketSpecCacheSchedulerClient = + new BucketSpecCacheSchedulerClient(syncSchedulerClient, configs.getSpecCacheBucket()); + final SpecCachingSynchronousSchedulerClient cachingSchedulerClient = new SpecCachingSynchronousSchedulerClient(bucketSpecCacheSchedulerClient); + final SpecFetcher specFetcher = new SpecFetcher(cachingSchedulerClient); + Optional airbyteDatabaseVersion = jobPersistence.getVersion(); if (airbyteDatabaseVersion.isPresent() && isDatabaseVersionBehindAppVersion(airbyteVersion, airbyteDatabaseVersion.get())) { final boolean isKubernetes = configs.getWorkerEnvironment() == WorkerEnvironment.KUBERNETES; final boolean versionSupportsAutoMigrate = new AirbyteVersion(airbyteDatabaseVersion.get()).patchVersionCompareTo(KUBE_SUPPORT_FOR_AUTOMATIC_MIGRATION) >= 0; if (!isKubernetes || versionSupportsAutoMigrate) { - runAutomaticMigration(configRepository, jobPersistence, airbyteVersion, airbyteDatabaseVersion.get()); + runAutomaticMigration(configRepository, jobPersistence, specFetcher, airbyteVersion, airbyteDatabaseVersion.get()); // After migration, upgrade the DB version airbyteDatabaseVersion = jobPersistence.getVersion(); } else { @@ -231,17 +244,6 @@ public static ServerRunnable getServer(final ServerFactory apiFactory) throws Ex if (airbyteDatabaseVersion.isPresent() && AirbyteVersion.isCompatible(airbyteVersion, airbyteDatabaseVersion.get())) { LOGGER.info("Starting server..."); - final JobTracker jobTracker = new JobTracker(configRepository, jobPersistence); - final WorkflowServiceStubs temporalService = TemporalUtils.createTemporalService(configs.getTemporalHost()); - final TemporalClient temporalClient = TemporalClient.production(configs.getTemporalHost(), configs.getWorkspaceRoot()); - final OAuthConfigSupplier oAuthConfigSupplier = new OAuthConfigSupplier(configRepository, false); - final SchedulerJobClient schedulerJobClient = new 
DefaultSchedulerJobClient(jobPersistence, new DefaultJobCreator(jobPersistence)); - final DefaultSynchronousSchedulerClient syncSchedulerClient = - new DefaultSynchronousSchedulerClient(temporalClient, jobTracker, oAuthConfigSupplier); - final SynchronousSchedulerClient bucketSpecCacheSchedulerClient = - new BucketSpecCacheSchedulerClient(syncSchedulerClient, configs.getSpecCacheBucket()); - final SpecCachingSynchronousSchedulerClient cachingSchedulerClient = new SpecCachingSynchronousSchedulerClient(bucketSpecCacheSchedulerClient); - return apiFactory.create( schedulerJobClient, cachingSchedulerClient, @@ -267,6 +269,7 @@ public static void main(final String[] args) throws Exception { */ private static void runAutomaticMigration(final ConfigRepository configRepository, final JobPersistence jobPersistence, + final SpecFetcher specFetcher, final String airbyteVersion, final String airbyteDatabaseVersion) { LOGGER.info("Running Automatic Migration from version : " + airbyteDatabaseVersion + " to version : " + airbyteVersion); @@ -274,7 +277,8 @@ private static void runAutomaticMigration(final ConfigRepository configRepositor jobPersistence, configRepository, airbyteVersion, - YamlSeedConfigPersistence.get())) { + YamlSeedConfigPersistence.get(), + specFetcher)) { runMigration.run(); } catch (final Exception e) { LOGGER.error("Automatic Migration failed ", e); diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/ConfigurationApi.java b/airbyte-server/src/main/java/io/airbyte/server/apis/ConfigurationApi.java index dfcc65ad10307..295b7a59e5206 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/ConfigurationApi.java +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/ConfigurationApi.java @@ -197,7 +197,8 @@ public ConfigurationApi(final ConfigRepository configRepository, webBackendSourcesHandler = new WebBackendSourcesHandler(sourceHandler, configRepository); webBackendDestinationsHandler = new WebBackendDestinationsHandler(destinationHandler, configRepository); healthCheckHandler = new HealthCheckHandler(configRepository); - archiveHandler = new ArchiveHandler(configs.getAirbyteVersion(), configRepository, jobPersistence, workspaceHelper, archiveTtlManager); + archiveHandler = + new ArchiveHandler(configs.getAirbyteVersion(), configRepository, jobPersistence, workspaceHelper, archiveTtlManager, specFetcher); logsHandler = new LogsHandler(); openApiConfigHandler = new OpenApiConfigHandler(); dbMigrationHandler = new DbMigrationHandler(configsDatabase, jobsDatabase); diff --git a/airbyte-server/src/main/java/io/airbyte/server/converters/OauthModelConverter.java b/airbyte-server/src/main/java/io/airbyte/server/converters/OauthModelConverter.java new file mode 100644 index 0000000000000..b86ee4e1f45f6 --- /dev/null +++ b/airbyte-server/src/main/java/io/airbyte/server/converters/OauthModelConverter.java @@ -0,0 +1,50 @@ +/* + * MIT License + * + * Copyright (c) 2020 Airbyte + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. 
+ * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package io.airbyte.server.converters; + +import io.airbyte.api.model.AuthSpecification; +import io.airbyte.api.model.OAuth2Specification; +import io.airbyte.protocol.models.ConnectorSpecification; +import java.util.Optional; + +public class OauthModelConverter { + + public static Optional getAuthSpec(ConnectorSpecification spec) { + if (spec.getAuthSpecification() == null) { + return Optional.empty(); + } + io.airbyte.protocol.models.AuthSpecification incomingAuthSpec = spec.getAuthSpecification(); + + AuthSpecification authSpecification = new AuthSpecification(); + if (incomingAuthSpec.getAuthType() == io.airbyte.protocol.models.AuthSpecification.AuthType.OAUTH_2_0) { + authSpecification.authType(AuthSpecification.AuthTypeEnum.OAUTH2_0) + .oauth2Specification(new OAuth2Specification() + .oauthFlowInitParameters(incomingAuthSpec.getOauth2Specification().getOauthFlowInitParameters())); + } + + return Optional.ofNullable(authSpecification); + } + +} diff --git a/airbyte-server/src/main/java/io/airbyte/server/handlers/ArchiveHandler.java b/airbyte-server/src/main/java/io/airbyte/server/handlers/ArchiveHandler.java index cda53b09eb84c..0030496ffc935 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/handlers/ArchiveHandler.java +++ b/airbyte-server/src/main/java/io/airbyte/server/handlers/ArchiveHandler.java @@ -37,6 +37,7 @@ import io.airbyte.scheduler.persistence.WorkspaceHelper; import io.airbyte.server.ConfigDumpExporter; import io.airbyte.server.ConfigDumpImporter; +import io.airbyte.server.converters.SpecFetcher; import io.airbyte.server.errors.InternalServerKnownException; import io.airbyte.validation.json.JsonValidationException; import java.io.File; @@ -58,12 +59,13 @@ public ArchiveHandler(final String version, final ConfigRepository configRepository, final JobPersistence jobPersistence, final WorkspaceHelper workspaceHelper, - final FileTtlManager fileTtlManager) { + final FileTtlManager fileTtlManager, + final SpecFetcher specFetcher) { this( version, fileTtlManager, new ConfigDumpExporter(configRepository, jobPersistence, workspaceHelper), - new ConfigDumpImporter(configRepository, jobPersistence, workspaceHelper)); + new ConfigDumpImporter(configRepository, jobPersistence, workspaceHelper, specFetcher)); } public ArchiveHandler(final String version, diff --git a/airbyte-server/src/main/java/io/airbyte/server/handlers/DestinationHandler.java b/airbyte-server/src/main/java/io/airbyte/server/handlers/DestinationHandler.java index 8b1dad257be2d..e2a1cf73261b3 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/handlers/DestinationHandler.java +++ b/airbyte-server/src/main/java/io/airbyte/server/handlers/DestinationHandler.java @@ -205,11 +205,14 @@ private void validateDestination(final ConnectorSpecification spec, final JsonNo validator.ensure(spec.getConnectionSpecification(), configuration); } - private ConnectorSpecification getSpec(UUID destinationDefinitionId) + public ConnectorSpecification getSpec(UUID destinationDefinitionId) throws JsonValidationException, 
IOException, ConfigNotFoundException { - final StandardDestinationDefinition destinationDef = configRepository.getStandardDestinationDefinition(destinationDefinitionId); - final String imageName = DockerUtils.getTaggedImageName(destinationDef.getDockerRepository(), destinationDef.getDockerImageTag()); - return specFetcher.execute(imageName); + return getSpec(specFetcher, configRepository.getStandardDestinationDefinition(destinationDefinitionId)); + } + + public static ConnectorSpecification getSpec(SpecFetcher specFetcher, StandardDestinationDefinition destinationDef) + throws JsonValidationException, IOException, ConfigNotFoundException { + return specFetcher.execute(DockerUtils.getTaggedImageName(destinationDef.getDockerRepository(), destinationDef.getDockerImageTag())); } private void persistDestinationConnection(final String name, @@ -218,7 +221,7 @@ private void persistDestinationConnection(final String name, final UUID destinationId, final JsonNode configurationJson, final boolean tombstone) - throws JsonValidationException, IOException { + throws JsonValidationException, IOException, ConfigNotFoundException { final DestinationConnection destinationConnection = new DestinationConnection() .withName(name) .withDestinationDefinitionId(destinationDefinitionId) @@ -226,8 +229,7 @@ private void persistDestinationConnection(final String name, .withDestinationId(destinationId) .withConfiguration(configurationJson) .withTombstone(tombstone); - - configRepository.writeDestinationConnection(destinationConnection); + configRepository.writeDestinationConnection(destinationConnection, getSpec(destinationDefinitionId)); } private DestinationRead buildDestinationRead(final UUID destinationId) throws JsonValidationException, IOException, ConfigNotFoundException { diff --git a/airbyte-server/src/main/java/io/airbyte/server/handlers/SchedulerHandler.java b/airbyte-server/src/main/java/io/airbyte/server/handlers/SchedulerHandler.java index 8299d8c59dde8..d6a46858b754b 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/handlers/SchedulerHandler.java +++ b/airbyte-server/src/main/java/io/airbyte/server/handlers/SchedulerHandler.java @@ -26,6 +26,7 @@ import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.Lists; +import io.airbyte.api.model.AuthSpecification; import io.airbyte.api.model.CheckConnectionRead; import io.airbyte.api.model.CheckConnectionRead.StatusEnum; import io.airbyte.api.model.ConnectionIdRequestBody; @@ -67,6 +68,7 @@ import io.airbyte.server.converters.CatalogConverter; import io.airbyte.server.converters.ConfigurationUpdate; import io.airbyte.server.converters.JobConverter; +import io.airbyte.server.converters.OauthModelConverter; import io.airbyte.server.converters.SpecFetcher; import io.airbyte.validation.json.JsonSchemaValidator; import io.airbyte.validation.json.JsonValidationException; @@ -246,11 +248,18 @@ public SourceDefinitionSpecificationRead getSourceDefinitionSpecification(Source final String imageName = DockerUtils.getTaggedImageName(source.getDockerRepository(), source.getDockerImageTag()); final SynchronousResponse response = getConnectorSpecification(imageName); final ConnectorSpecification spec = response.getOutput(); - return new SourceDefinitionSpecificationRead() + SourceDefinitionSpecificationRead specRead = new SourceDefinitionSpecificationRead() .jobInfo(JobConverter.getSynchronousJobRead(response)) .connectionSpecification(spec.getConnectionSpecification()) .documentationUrl(spec.getDocumentationUrl().toString()) 
.sourceDefinitionId(sourceDefinitionId); + + Optional authSpec = OauthModelConverter.getAuthSpec(spec); + if (authSpec.isPresent()) { + specRead.setAuthSpecification(authSpec.get()); + } + + return specRead; } public DestinationDefinitionSpecificationRead getDestinationSpecification(DestinationDefinitionIdRequestBody destinationDefinitionIdRequestBody) @@ -260,7 +269,8 @@ public DestinationDefinitionSpecificationRead getDestinationSpecification(Destin final String imageName = DockerUtils.getTaggedImageName(destination.getDockerRepository(), destination.getDockerImageTag()); final SynchronousResponse response = getConnectorSpecification(imageName); final ConnectorSpecification spec = response.getOutput(); - return new DestinationDefinitionSpecificationRead() + + DestinationDefinitionSpecificationRead specRead = new DestinationDefinitionSpecificationRead() .jobInfo(JobConverter.getSynchronousJobRead(response)) .supportedDestinationSyncModes(Enums.convertListTo(spec.getSupportedDestinationSyncModes(), DestinationSyncMode.class)) .connectionSpecification(spec.getConnectionSpecification()) @@ -268,6 +278,13 @@ public DestinationDefinitionSpecificationRead getDestinationSpecification(Destin .supportsNormalization(spec.getSupportsNormalization()) .supportsDbt(spec.getSupportsDBT()) .destinationDefinitionId(destinationDefinitionId); + + Optional authSpec = OauthModelConverter.getAuthSpec(spec); + if (authSpec.isPresent()) { + specRead.setAuthSpecification(authSpec.get()); + } + + return specRead; } public SynchronousResponse getConnectorSpecification(String dockerImage) throws IOException { @@ -352,7 +369,7 @@ public JobInfoRead cancelJob(JobIdRequestBody jobIdRequestBody) throws IOExcepti private void cancelTemporalWorkflowIfPresent(long jobId) throws IOException { var latestAttemptId = jobPersistence.getJob(jobId).getAttempts().size() - 1; // attempts ids are monotonically increasing starting from 0 and - // specific to a job id, allowing us to do this. + // specific to a job id, allowing us to do this. 
var workflowId = jobPersistence.getAttemptTemporalWorkflowId(jobId, latestAttemptId); if (workflowId.isPresent()) { diff --git a/airbyte-server/src/main/java/io/airbyte/server/handlers/SourceHandler.java b/airbyte-server/src/main/java/io/airbyte/server/handlers/SourceHandler.java index 73afb68e86583..8ec2e15cae14b 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/handlers/SourceHandler.java +++ b/airbyte-server/src/main/java/io/airbyte/server/handlers/SourceHandler.java @@ -93,7 +93,7 @@ public SourceHandler(final ConfigRepository configRepository, public SourceRead createSource(SourceCreate sourceCreate) throws ConfigNotFoundException, IOException, JsonValidationException { // validate configuration - ConnectorSpecification spec = getSpecFromSourceDefinitionId( + final ConnectorSpecification spec = getSpecFromSourceDefinitionId( sourceCreate.getSourceDefinitionId()); validateSource(spec, sourceCreate.getConnectionConfiguration()); @@ -105,7 +105,8 @@ public SourceRead createSource(SourceCreate sourceCreate) sourceCreate.getWorkspaceId(), sourceId, false, - sourceCreate.getConnectionConfiguration()); + sourceCreate.getConnectionConfiguration(), + spec); // read configuration from db return buildSourceRead(sourceId, spec); @@ -117,7 +118,7 @@ public SourceRead updateSource(SourceUpdate sourceUpdate) final SourceConnection updatedSource = configurationUpdate .source(sourceUpdate.getSourceId(), sourceUpdate.getName(), sourceUpdate.getConnectionConfiguration()); - ConnectorSpecification spec = getSpecFromSourceId(updatedSource.getSourceId()); + final ConnectorSpecification spec = getSpecFromSourceId(updatedSource.getSourceId()); validateSource(spec, sourceUpdate.getConnectionConfiguration()); // persist @@ -127,7 +128,8 @@ public SourceRead updateSource(SourceUpdate sourceUpdate) updatedSource.getWorkspaceId(), updatedSource.getSourceId(), updatedSource.getTombstone(), - updatedSource.getConfiguration()); + updatedSource.getConfiguration(), + spec); // read configuration from db return buildSourceRead(sourceUpdate.getSourceId(), spec); @@ -185,6 +187,9 @@ public void deleteSource(SourceRead source) connectionsHandler.deleteConnection(connectionRead); } + final ConnectorSpecification spec = getSpecFromSourceId(source.getSourceId()); + validateSource(spec, source.getConnectionConfiguration()); + // persist persistSourceConnection( source.getName(), @@ -192,7 +197,8 @@ public void deleteSource(SourceRead source) source.getWorkspaceId(), source.getSourceId(), true, - source.getConnectionConfiguration()); + source.getConnectionConfiguration(), + spec); } private SourceRead buildSourceRead(UUID sourceId) @@ -231,10 +237,14 @@ private ConnectorSpecification getSpecFromSourceId(UUID sourceId) private ConnectorSpecification getSpecFromSourceDefinitionId(UUID sourceDefId) throws IOException, JsonValidationException, ConfigNotFoundException { - final StandardSourceDefinition sourceDef = configRepository - .getStandardSourceDefinition(sourceDefId); + final StandardSourceDefinition sourceDef = configRepository.getStandardSourceDefinition(sourceDefId); + return getSpecFromSourceDefinitionId(specFetcher, sourceDef); + } + + public static ConnectorSpecification getSpecFromSourceDefinitionId(SpecFetcher specFetcher, StandardSourceDefinition sourceDefinition) + throws IOException, ConfigNotFoundException { final String imageName = DockerUtils - .getTaggedImageName(sourceDef.getDockerRepository(), sourceDef.getDockerImageTag()); + .getTaggedImageName(sourceDefinition.getDockerRepository(), 
sourceDefinition.getDockerImageTag()); return specFetcher.execute(imageName); } @@ -243,7 +253,8 @@ private void persistSourceConnection(final String name, final UUID workspaceId, final UUID sourceId, final boolean tombstone, - final JsonNode configurationJson) + final JsonNode configurationJson, + final ConnectorSpecification spec) throws JsonValidationException, IOException { final SourceConnection sourceConnection = new SourceConnection() .withName(name) @@ -253,7 +264,7 @@ private void persistSourceConnection(final String name, .withTombstone(tombstone) .withConfiguration(configurationJson); - configRepository.writeSourceConnection(sourceConnection); + configRepository.writeSourceConnection(sourceConnection, spec); } private SourceRead toSourceRead(final SourceConnection sourceConnection, diff --git a/airbyte-server/src/test/java/io/airbyte/server/ConfigDumpImporterTest.java b/airbyte-server/src/test/java/io/airbyte/server/ConfigDumpImporterTest.java index d112e22d7c2dc..73c0bfae022a9 100644 --- a/airbyte-server/src/test/java/io/airbyte/server/ConfigDumpImporterTest.java +++ b/airbyte-server/src/test/java/io/airbyte/server/ConfigDumpImporterTest.java @@ -44,9 +44,11 @@ import io.airbyte.config.StandardSyncOperation.OperatorType; import io.airbyte.config.persistence.ConfigNotFoundException; import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.protocol.models.ConnectorSpecification; import io.airbyte.scheduler.persistence.DefaultJobPersistence; import io.airbyte.scheduler.persistence.JobPersistence; import io.airbyte.scheduler.persistence.WorkspaceHelper; +import io.airbyte.server.converters.SpecFetcher; import io.airbyte.validation.json.JsonSchemaValidator; import io.airbyte.validation.json.JsonValidationException; import java.io.File; @@ -76,13 +78,21 @@ class ConfigDumpImporterTest { private DestinationConnection destinationConnection; private StandardSyncOperation operation; private StandardSync connection; + private ConnectorSpecification emptyConnectorSpec; + private SpecFetcher specFetcher; @BeforeEach public void setup() throws IOException, JsonValidationException, ConfigNotFoundException { configRepository = mock(ConfigRepository.class); jobPersistence = mock(JobPersistence.class); workspaceHelper = mock(WorkspaceHelper.class); - configDumpImporter = new ConfigDumpImporter(configRepository, jobPersistence, workspaceHelper, mock(JsonSchemaValidator.class)); + + specFetcher = mock(SpecFetcher.class); + emptyConnectorSpec = mock(ConnectorSpecification.class); + when(emptyConnectorSpec.getConnectionSpecification()).thenReturn(Jsons.emptyObject()); + when(specFetcher.execute(any())).thenReturn(emptyConnectorSpec); + + configDumpImporter = new ConfigDumpImporter(configRepository, jobPersistence, workspaceHelper, mock(JsonSchemaValidator.class), specFetcher); configDumpExporter = new ConfigDumpExporter(configRepository, jobPersistence, workspaceHelper); workspaceId = UUID.randomUUID(); @@ -176,9 +186,12 @@ public void testImportIntoWorkspaceWithConflicts() throws JsonValidationExceptio configDumpImporter.importIntoWorkspace(TEST_VERSION, newWorkspaceId, archive); verify(configRepository) - .writeSourceConnection(Jsons.clone(sourceConnection).withWorkspaceId(newWorkspaceId).withSourceId(not(eq(sourceConnection.getSourceId())))); + .writeSourceConnection( + Jsons.clone(sourceConnection).withWorkspaceId(newWorkspaceId).withSourceId(not(eq(sourceConnection.getSourceId()))), + eq(emptyConnectorSpec)); verify(configRepository).writeDestinationConnection( - 
Jsons.clone(destinationConnection).withWorkspaceId(newWorkspaceId).withDestinationId(not(eq(destinationConnection.getDestinationId())))); + Jsons.clone(destinationConnection).withWorkspaceId(newWorkspaceId).withDestinationId(not(eq(destinationConnection.getDestinationId()))), + eq(emptyConnectorSpec)); verify(configRepository) .writeStandardSyncOperation(Jsons.clone(operation).withWorkspaceId(newWorkspaceId).withOperationId(not(eq(operation.getOperationId())))); verify(configRepository).writeStandardSync(Jsons.clone(connection).withConnectionId(not(eq(connection.getConnectionId())))); @@ -226,8 +239,10 @@ public void testImportIntoWorkspaceWithoutConflicts() throws JsonValidationExcep final UUID newWorkspaceId = UUID.randomUUID(); configDumpImporter.importIntoWorkspace(TEST_VERSION, newWorkspaceId, archive); - verify(configRepository).writeSourceConnection(Jsons.clone(sourceConnection).withWorkspaceId(newWorkspaceId)); - verify(configRepository).writeDestinationConnection(Jsons.clone(destinationConnection).withWorkspaceId(newWorkspaceId)); + verify(configRepository).writeSourceConnection( + Jsons.clone(sourceConnection).withWorkspaceId(newWorkspaceId), + emptyConnectorSpec); + verify(configRepository).writeDestinationConnection(Jsons.clone(destinationConnection).withWorkspaceId(newWorkspaceId), emptyConnectorSpec); verify(configRepository).writeStandardSyncOperation(Jsons.clone(operation).withWorkspaceId(newWorkspaceId)); verify(configRepository).writeStandardSync(connection); } diff --git a/airbyte-server/src/test/java/io/airbyte/server/handlers/ArchiveHandlerTest.java b/airbyte-server/src/test/java/io/airbyte/server/handlers/ArchiveHandlerTest.java index 807047c86f8ed..500c369dbe784 100644 --- a/airbyte-server/src/test/java/io/airbyte/server/handlers/ArchiveHandlerTest.java +++ b/airbyte-server/src/test/java/io/airbyte/server/handlers/ArchiveHandlerTest.java @@ -27,6 +27,9 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; import com.fasterxml.jackson.databind.JsonNode; import io.airbyte.api.model.ImportRead; @@ -49,9 +52,11 @@ import io.airbyte.db.Database; import io.airbyte.db.instance.configs.ConfigsDatabaseInstance; import io.airbyte.db.instance.jobs.JobsDatabaseInstance; +import io.airbyte.protocol.models.ConnectorSpecification; import io.airbyte.scheduler.persistence.DefaultJobPersistence; import io.airbyte.scheduler.persistence.JobPersistence; import io.airbyte.scheduler.persistence.WorkspaceHelper; +import io.airbyte.server.converters.SpecFetcher; import io.airbyte.validation.json.JsonValidationException; import java.io.File; import java.io.IOException; @@ -126,12 +131,18 @@ public void setup() throws Exception { jobPersistence.setVersion(VERSION); + final SpecFetcher specFetcher = mock(SpecFetcher.class); + final ConnectorSpecification emptyConnectorSpec = mock(ConnectorSpecification.class); + when(emptyConnectorSpec.getConnectionSpecification()).thenReturn(Jsons.emptyObject()); + when(specFetcher.execute(any())).thenReturn(emptyConnectorSpec); + archiveHandler = new ArchiveHandler( VERSION, configRepository, jobPersistence, new WorkspaceHelper(configRepository, jobPersistence), - new NoOpFileTtlManager()); + new NoOpFileTtlManager(), + specFetcher); } @AfterEach @@ -252,13 +263,19 @@ void 
testLightWeightExportImportRoundTrip() throws Exception { .filter(sourceConnection -> secondWorkspaceId.equals(sourceConnection.getWorkspaceId())) .map(SourceConnection::getSourceId) .collect(Collectors.toList()).get(0); - configRepository.writeSourceConnection(new SourceConnection() + + final SourceConnection sourceConnection = new SourceConnection() .withWorkspaceId(secondWorkspaceId) .withSourceId(secondSourceId) .withName("Some new names") .withSourceDefinitionId(UUID.randomUUID()) .withTombstone(false) - .withConfiguration(Jsons.emptyObject())); + .withConfiguration(Jsons.emptyObject()); + + ConnectorSpecification emptyConnectorSpec = mock(ConnectorSpecification.class); + when(emptyConnectorSpec.getConnectionSpecification()).thenReturn(Jsons.emptyObject()); + + configRepository.writeSourceConnection(sourceConnection, emptyConnectorSpec); // check that first workspace is unchanged even though modifications were made to second workspace // (that contains similar connections from importing the same archive) diff --git a/airbyte-server/src/test/java/io/airbyte/server/handlers/DestinationHandlerTest.java b/airbyte-server/src/test/java/io/airbyte/server/handlers/DestinationHandlerTest.java index 837b3c49f8cbf..fb1c3673366b4 100644 --- a/airbyte-server/src/test/java/io/airbyte/server/handlers/DestinationHandlerTest.java +++ b/airbyte-server/src/test/java/io/airbyte/server/handlers/DestinationHandlerTest.java @@ -71,7 +71,6 @@ class DestinationHandlerTest { private ConfigRepository configRepository; private StandardDestinationDefinition standardDestinationDefinition; private DestinationDefinitionSpecificationRead destinationDefinitionSpecificationRead; - private DestinationDefinitionIdRequestBody destinationDefinitionIdRequestBody; private DestinationConnection destinationConnection; private DestinationHandler destinationHandler; private ConnectionsHandler connectionsHandler; @@ -104,8 +103,8 @@ void setUp() throws IOException { imageName = DockerUtils.getTaggedImageName(standardDestinationDefinition.getDockerRepository(), standardDestinationDefinition.getDockerImageTag()); - destinationDefinitionIdRequestBody = - new DestinationDefinitionIdRequestBody().destinationDefinitionId(standardDestinationDefinition.getDestinationDefinitionId()); + DestinationDefinitionIdRequestBody destinationDefinitionIdRequestBody = new DestinationDefinitionIdRequestBody().destinationDefinitionId( + standardDestinationDefinition.getDestinationDefinitionId()); connectorSpecification = ConnectorSpecificationHelpers.generateConnectorSpecification(); @@ -154,7 +153,7 @@ void testCreateDestination() throws JsonValidationException, ConfigNotFoundExcep assertEquals(expectedDestinationRead, actualDestinationRead); verify(validator).ensure(destinationDefinitionSpecificationRead.getConnectionSpecification(), destinationConnection.getConfiguration()); - verify(configRepository).writeDestinationConnection(destinationConnection); + verify(configRepository).writeDestinationConnection(destinationConnection, connectorSpecification); verify(secretsProcessor) .maskSecrets(destinationConnection.getConfiguration(), destinationDefinitionSpecificationRead.getConnectionSpecification()); } @@ -181,7 +180,7 @@ void testDeleteDestination() throws JsonValidationException, ConfigNotFoundExcep destinationHandler.deleteDestination(destinationId); - verify(configRepository).writeDestinationConnection(expectedDestinationConnection); + verify(configRepository).writeDestinationConnection(expectedDestinationConnection, connectorSpecification); 
verify(connectionsHandler).listConnectionsForWorkspace(workspaceIdRequestBody); verify(connectionsHandler).deleteConnection(connectionRead); } @@ -225,7 +224,7 @@ void testUpdateDestination() throws JsonValidationException, ConfigNotFoundExcep assertEquals(expectedDestinationRead, actualDestinationRead); verify(secretsProcessor).maskSecrets(newConfiguration, destinationDefinitionSpecificationRead.getConnectionSpecification()); - verify(configRepository).writeDestinationConnection(expectedDestinationConnection); + verify(configRepository).writeDestinationConnection(expectedDestinationConnection, connectorSpecification); verify(validator).ensure(destinationDefinitionSpecificationRead.getConnectionSpecification(), newConfiguration); } diff --git a/airbyte-server/src/test/java/io/airbyte/server/handlers/SourceHandlerTest.java b/airbyte-server/src/test/java/io/airbyte/server/handlers/SourceHandlerTest.java index 9e777a6345f11..4c4e67f07a83b 100644 --- a/airbyte-server/src/test/java/io/airbyte/server/handlers/SourceHandlerTest.java +++ b/airbyte-server/src/test/java/io/airbyte/server/handlers/SourceHandlerTest.java @@ -138,7 +138,7 @@ void testCreateSource() throws JsonValidationException, ConfigNotFoundException, assertEquals(expectedSourceRead, actualSourceRead); verify(secretsProcessor).maskSecrets(sourceCreate.getConnectionConfiguration(), sourceDefinitionSpecificationRead.getConnectionSpecification()); - verify(configRepository).writeSourceConnection(sourceConnection); + verify(configRepository).writeSourceConnection(sourceConnection, connectorSpecification); verify(validator).ensure(sourceDefinitionSpecificationRead.getConnectionSpecification(), sourceConnection.getConfiguration()); } @@ -180,7 +180,7 @@ void testUpdateSource() throws JsonValidationException, ConfigNotFoundException, assertEquals(expectedSourceRead, actualSourceRead); verify(secretsProcessor).maskSecrets(newConfiguration, sourceDefinitionSpecificationRead.getConnectionSpecification()); - verify(configRepository).writeSourceConnection(expectedSourceConnection); + verify(configRepository).writeSourceConnection(expectedSourceConnection, connectorSpecification); verify(validator).ensure(sourceDefinitionSpecificationRead.getConnectionSpecification(), newConfiguration); } @@ -261,7 +261,7 @@ void testDeleteSource() throws JsonValidationException, ConfigNotFoundException, sourceHandler.deleteSource(sourceIdRequestBody); - verify(configRepository).writeSourceConnection(expectedSourceConnection); + verify(configRepository).writeSourceConnection(expectedSourceConnection, connectorSpecification); verify(connectionsHandler).listConnectionsForWorkspace(workspaceIdRequestBody); verify(connectionsHandler).deleteConnection(connectionRead); } diff --git a/airbyte-server/src/test/java/io/airbyte/server/migration/RunMigrationTest.java b/airbyte-server/src/test/java/io/airbyte/server/migration/RunMigrationTest.java index 2b082a96e0c06..be961d3c98a59 100644 --- a/airbyte-server/src/test/java/io/airbyte/server/migration/RunMigrationTest.java +++ b/airbyte-server/src/test/java/io/airbyte/server/migration/RunMigrationTest.java @@ -30,6 +30,7 @@ import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; +import static org.mockito.Mockito.mock; import com.google.common.io.Resources; import io.airbyte.commons.io.Archives; @@ -51,6 +52,7 @@ import io.airbyte.scheduler.persistence.DefaultJobPersistence; import 
io.airbyte.scheduler.persistence.JobPersistence; import io.airbyte.server.RunMigration; +import io.airbyte.server.converters.SpecFetcher; import io.airbyte.validation.json.JsonValidationException; import java.io.File; import java.io.IOException; @@ -308,7 +310,10 @@ private void runMigration(JobPersistence jobPersistence, Path configRoot) throws jobPersistence, new ConfigRepository(FileSystemConfigPersistence.createWithValidation(configRoot)), TARGET_VERSION, - YamlSeedConfigPersistence.get())) { + YamlSeedConfigPersistence.get(), + mock(SpecFetcher.class) // this test was disabled/broken when this fetcher mock was added. apologies if you have to fix this + // in the future. + )) { runMigration.run(); } } diff --git a/airbyte-webapp/package-lock.json b/airbyte-webapp/package-lock.json index afbb815221bcc..8393a4bba3a6c 100644 --- a/airbyte-webapp/package-lock.json +++ b/airbyte-webapp/package-lock.json @@ -1,6 +1,6 @@ { "name": "airbyte-webapp", - "version": "0.29.17-alpha", + "version": "0.29.19-alpha", "lockfileVersion": 1, "requires": true, "dependencies": { diff --git a/airbyte-webapp/package.json b/airbyte-webapp/package.json index 42eda350ee3e4..f175e0e9d0e01 100644 --- a/airbyte-webapp/package.json +++ b/airbyte-webapp/package.json @@ -1,6 +1,6 @@ { "name": "airbyte-webapp", - "version": "0.29.17-alpha", + "version": "0.29.19-alpha", "private": true, "scripts": { "start": "react-scripts start", diff --git a/airbyte-webapp/public/newsletter.png b/airbyte-webapp/public/newsletter.png index f265def92e113..f49f174b00f45 100644 Binary files a/airbyte-webapp/public/newsletter.png and b/airbyte-webapp/public/newsletter.png differ diff --git a/airbyte-webapp/public/play.svg b/airbyte-webapp/public/play.svg new file mode 100644 index 0000000000000..a6976352ad286 --- /dev/null +++ b/airbyte-webapp/public/play.svg @@ -0,0 +1,3 @@ + + + \ No newline at end of file diff --git a/airbyte-webapp/public/process-arrow.svg b/airbyte-webapp/public/process-arrow.svg new file mode 100644 index 0000000000000..1258bc739c8a4 --- /dev/null +++ b/airbyte-webapp/public/process-arrow.svg @@ -0,0 +1,3 @@ + + + diff --git a/airbyte-webapp/public/rectangle.svg b/airbyte-webapp/public/rectangle.svg new file mode 100644 index 0000000000000..66aa72f35d110 --- /dev/null +++ b/airbyte-webapp/public/rectangle.svg @@ -0,0 +1,3 @@ + + + diff --git a/airbyte-webapp/public/rocket.png b/airbyte-webapp/public/rocket.png new file mode 100644 index 0000000000000..c5cf200f9d460 Binary files /dev/null and b/airbyte-webapp/public/rocket.png differ diff --git a/airbyte-webapp/public/stars-background.svg b/airbyte-webapp/public/stars-background.svg new file mode 100644 index 0000000000000..e90bb65949d09 --- /dev/null +++ b/airbyte-webapp/public/stars-background.svg @@ -0,0 +1,11 @@ + + + + + + + + + + + diff --git a/airbyte-webapp/public/video-background.svg b/airbyte-webapp/public/video-background.svg new file mode 100644 index 0000000000000..be3171af5585b --- /dev/null +++ b/airbyte-webapp/public/video-background.svg @@ -0,0 +1,3 @@ + + + diff --git a/airbyte-webapp/public/videoCover.png b/airbyte-webapp/public/videoCover.png new file mode 100644 index 0000000000000..402cdfb7e16da Binary files /dev/null and b/airbyte-webapp/public/videoCover.png differ diff --git a/airbyte-webapp/src/App.tsx b/airbyte-webapp/src/App.tsx index 46503702534a4..74f1740edd081 100644 --- a/airbyte-webapp/src/App.tsx +++ b/airbyte-webapp/src/App.tsx @@ -17,6 +17,7 @@ import { usePickFirstWorkspace, } from "hooks/services/useWorkspace"; import { Feature, 
FeatureService } from "hooks/services/Feature"; +import { OnboardingServiceProvider } from "hooks/services/Onboarding"; import { ServicesProvider } from "core/servicesProvider"; import { useApiServices } from "core/defaultServices"; import { envConfigProvider, windowConfigProvider } from "./config"; @@ -97,7 +98,9 @@ const App: React.FC = () => { - + + + diff --git a/airbyte-webapp/src/components/CenteredPageComponents/BigButton.tsx b/airbyte-webapp/src/components/CenteredPageComponents/BigButton.tsx index 570c953f17fb3..1087e426b119c 100644 --- a/airbyte-webapp/src/components/CenteredPageComponents/BigButton.tsx +++ b/airbyte-webapp/src/components/CenteredPageComponents/BigButton.tsx @@ -1,11 +1,13 @@ import styled from "styled-components"; import { Button } from "components"; -const BigButton = styled(Button)` +const BigButton = styled(Button)<{ shadow?: boolean }>` font-size: 16px; line-height: 19px; padding: 10px 27px; font-weight: 500; + box-shadow: ${({ shadow }) => + shadow ? "0 8px 5px -5px rgba(0, 0, 0, 0.2)" : "none"}; `; export default BigButton; diff --git a/airbyte-webapp/src/components/ContentCard/ContentCard.tsx b/airbyte-webapp/src/components/ContentCard/ContentCard.tsx index 873fe5a7efef1..e731d0a54474f 100644 --- a/airbyte-webapp/src/components/ContentCard/ContentCard.tsx +++ b/airbyte-webapp/src/components/ContentCard/ContentCard.tsx @@ -7,6 +7,7 @@ type IProps = { title?: string | React.ReactNode; className?: string; onClick?: () => void; + full?: boolean; }; const Title = styled(H5)` @@ -19,7 +20,7 @@ const Title = styled(H5)` `; const ContentCard: React.FC = (props) => ( - + {props.title ? {props.title} : null} {props.children} diff --git a/airbyte-webapp/src/components/CreateConnectionContent/CreateConnectionContent.tsx b/airbyte-webapp/src/components/CreateConnectionContent/CreateConnectionContent.tsx index ae181e28dbe72..531f96cd230dc 100644 --- a/airbyte-webapp/src/components/CreateConnectionContent/CreateConnectionContent.tsx +++ b/airbyte-webapp/src/components/CreateConnectionContent/CreateConnectionContent.tsx @@ -40,6 +40,7 @@ type IProps = { source: Source; destination: Destination; afterSubmitConnection?: () => void; + noTitles?: boolean; }; const CreateConnectionContent: React.FC = ({ @@ -47,6 +48,7 @@ const CreateConnectionContent: React.FC = ({ destination, afterSubmitConnection, additionBottomControls, + noTitles, }) => { const { createConnection } = useConnection(); const analyticsService = useAnalytics(); @@ -80,7 +82,11 @@ const CreateConnectionContent: React.FC = ({ if (isLoading) { return ( - }> + + } + > ); @@ -88,7 +94,11 @@ const CreateConnectionContent: React.FC = ({ if (schemaErrorStatus) { return ( - }> + + } + > {additionBottomControls}} @@ -130,7 +140,11 @@ const CreateConnectionContent: React.FC = ({ }; return ( - }> + + } + > }> void; + clear?: boolean; + closeOnBackground?: boolean; }; const fadeIn = keyframes` @@ -27,7 +29,13 @@ const Overlay = styled.div` z-index: 10; `; -const Modal: React.FC = ({ children, title, onClose }) => { +const Modal: React.FC = ({ + children, + title, + onClose, + clear, + closeOnBackground, +}) => { const handleUserKeyPress = useCallback((event, closeModal) => { const { keyCode } = event; if (keyCode === 27) { @@ -50,8 +58,8 @@ const Modal: React.FC = ({ children, title, onClose }) => { }, [handleUserKeyPress, onClose]); return createPortal( - - {children} + (closeOnBackground && onClose ? onClose() : null)}> + {clear ? 
children : {children}} , document.body ); diff --git a/airbyte-webapp/src/components/base/Card/Card.tsx b/airbyte-webapp/src/components/base/Card/Card.tsx index 22aae4ba08266..783563ad89bb8 100644 --- a/airbyte-webapp/src/components/base/Card/Card.tsx +++ b/airbyte-webapp/src/components/base/Card/Card.tsx @@ -1,6 +1,7 @@ import styled from "styled-components"; -export const Card = styled.div` +export const Card = styled.div<{ full?: boolean }>` + width: ${({ full }) => (full ? "100%" : "auto")}; background: ${({ theme }) => theme.whiteColor}; border-radius: 10px; box-shadow: 0 2px 4px ${({ theme }) => theme.cardShadowColor}; diff --git a/airbyte-webapp/src/components/base/Titles/Titles.tsx b/airbyte-webapp/src/components/base/Titles/Titles.tsx index c3781261e6fc9..4ba492bf272b0 100644 --- a/airbyte-webapp/src/components/base/Titles/Titles.tsx +++ b/airbyte-webapp/src/components/base/Titles/Titles.tsx @@ -3,6 +3,7 @@ import styled from "styled-components"; type IProps = { center?: boolean; bold?: boolean; + parentColor?: boolean; }; export const H1 = styled.h1` @@ -12,7 +13,8 @@ export const H1 = styled.h1` font-weight: ${(props) => (props.bold ? 600 : 500)}; display: block; text-align: ${(props) => (props.center ? "center" : "left")}; - color: ${({ theme }) => theme.textColor}; + color: ${({ theme, parentColor }) => + parentColor ? "inherit" : theme.textColor}; margin: 0; `; diff --git a/airbyte-webapp/src/config/casesConfig.json b/airbyte-webapp/src/config/casesConfig.json new file mode 100644 index 0000000000000..66c30786c5c89 --- /dev/null +++ b/airbyte-webapp/src/config/casesConfig.json @@ -0,0 +1,7 @@ +[ + "replicateMySQL", + "consolidateMarketing", + "consolidatePayment", + "buildDashboard", + "zoomCalls" +] diff --git a/airbyte-webapp/src/core/domain/catalog/fieldUtil.ts b/airbyte-webapp/src/core/domain/catalog/fieldUtil.ts index f63cb20648a90..cb352d8b266aa 100644 --- a/airbyte-webapp/src/core/domain/catalog/fieldUtil.ts +++ b/airbyte-webapp/src/core/domain/catalog/fieldUtil.ts @@ -34,8 +34,7 @@ const traverseSchemaToField = ( const traverseJsonSchemaProperties = ( jsonSchema: JSONSchema7Definition, key: string, - path: string = key, - depth = 0 + path: string[] = [] ): SyncSchemaField[] => { if (typeof jsonSchema === "boolean") { return []; @@ -45,12 +44,7 @@ const traverseJsonSchemaProperties = ( if (jsonSchema.properties) { fields = Object.entries(jsonSchema.properties) .flatMap(([k, schema]) => - traverseJsonSchemaProperties( - schema, - k, - depth === 0 ? 
k : `${path}.${k}`, - depth + 1 - ) + traverseJsonSchemaProperties(schema, k, [...path, k]) ) .flat(2); } @@ -58,7 +52,7 @@ const traverseJsonSchemaProperties = ( return [ { cleanedName: key, - name: path, + path, key, fields, type: diff --git a/airbyte-webapp/src/core/domain/catalog/models.ts b/airbyte-webapp/src/core/domain/catalog/models.ts index cb99d68797dda..965b4f3bccc23 100644 --- a/airbyte-webapp/src/core/domain/catalog/models.ts +++ b/airbyte-webapp/src/core/domain/catalog/models.ts @@ -1,8 +1,8 @@ export type SyncSchemaField = { - name: string; cleanedName: string; type: string; key: string; + path: string[]; fields?: SyncSchemaField[]; }; diff --git a/airbyte-webapp/src/hooks/services/Onboarding/OnboardingService.tsx b/airbyte-webapp/src/hooks/services/Onboarding/OnboardingService.tsx new file mode 100644 index 0000000000000..951eef8fcf02d --- /dev/null +++ b/airbyte-webapp/src/hooks/services/Onboarding/OnboardingService.tsx @@ -0,0 +1,55 @@ +import React, { useContext, useMemo } from "react"; +import { useLocalStorage } from "react-use"; +import useWorkspace from "hooks/services/useWorkspace"; +import casesConfig from "config/casesConfig.json"; + +type Context = { + feedbackPassed?: boolean; + passFeedback: () => void; + useCases?: string[]; + skipCase: (skipId: string) => void; +}; + +export const OnboardingServiceContext = React.createContext( + null +); + +export const OnboardingServiceProvider: React.FC = ({ children }) => { + const { workspace } = useWorkspace(); + const [feedbackPassed, setFeedbackPassed] = useLocalStorage( + `${workspace.workspaceId}/passFeedback`, + false + ); + const [useCases, setUseCases] = useLocalStorage( + `${workspace.workspaceId}/useCases`, + casesConfig + ); + + const ctx = useMemo( + () => ({ + feedbackPassed, + passFeedback: () => setFeedbackPassed(true), + useCases, + skipCase: (skipId: string) => + setUseCases(useCases?.filter((item) => item !== skipId)), + }), + [feedbackPassed, useCases] + ); + + return ( + + {children} + + ); +}; + +export const useOnboardingService = (): Context => { + const onboardingService = useContext(OnboardingServiceContext); + if (!onboardingService) { + throw new Error( + "useOnboardingService must be used within a OnboardingServiceProvider." 
+ ); + } + + return onboardingService; +}; diff --git a/airbyte-webapp/src/hooks/services/Onboarding/index.tsx b/airbyte-webapp/src/hooks/services/Onboarding/index.tsx new file mode 100644 index 0000000000000..305b4ce97d088 --- /dev/null +++ b/airbyte-webapp/src/hooks/services/Onboarding/index.tsx @@ -0,0 +1 @@ +export * from "./OnboardingService"; diff --git a/airbyte-webapp/src/hooks/services/useConnectionHook.tsx b/airbyte-webapp/src/hooks/services/useConnectionHook.tsx index d3c929a69448d..78fd1ea8a3962 100644 --- a/airbyte-webapp/src/hooks/services/useConnectionHook.tsx +++ b/airbyte-webapp/src/hooks/services/useConnectionHook.tsx @@ -17,12 +17,15 @@ import { SyncSchema } from "core/domain/catalog"; import { SourceDefinition } from "core/resources/SourceDefinition"; import { Source } from "core/resources/Source"; import { Routes } from "pages/routes"; -import useRouter from "../useRouter"; import { Destination } from "core/resources/Destination"; import useWorkspace from "./useWorkspace"; import { Operation } from "core/domain/connection/operation"; -import { equal } from "utils/objects"; import { useAnalytics } from "hooks/useAnalytics"; +import useRouter from "hooks/useRouter"; +import { useGetService } from "core/servicesProvider"; +import { RequestMiddleware } from "core/request/RequestMiddleware"; + +import { equal } from "utils/objects"; export type ValuesProps = { schedule: ScheduleProperties | null; @@ -65,8 +68,13 @@ type UpdateStateConnection = { function useConnectionService(): ConnectionService { const config = useConfig(); + const middlewares = useGetService( + "DefaultRequestMiddlewares" + ); - return useMemo(() => new ConnectionService(config.apiUrl), [config]); + return useMemo(() => new ConnectionService(config.apiUrl, middlewares), [ + config, + ]); } export const useConnectionLoad = ( @@ -95,10 +103,11 @@ const useConnection = (): { updateConnection: (conn: UpdateConnection) => Promise; updateStateConnection: (conn: UpdateStateConnection) => Promise; resetConnection: (connId: string) => Promise; + syncConnection: (conn: Connection) => Promise; deleteConnection: (payload: { connectionId: string }) => Promise; } => { const { push } = useRouter(); - const { finishOnboarding, workspace } = useWorkspace(); + const { workspace } = useWorkspace(); const analyticsService = useAnalytics(); const createConnectionResource = useFetcher(ConnectionResource.createShape()); @@ -108,6 +117,7 @@ const useConnection = (): { ); const deleteConnectionResource = useFetcher(ConnectionResource.deleteShape()); const resetConnectionResource = useFetcher(ConnectionResource.reset()); + const syncConnectionResource = useFetcher(ConnectionResource.syncShape()); const createConnection = async ({ values, @@ -155,9 +165,6 @@ const useConnection = (): { connector_destination_definition_id: destinationDefinition?.destinationDefinitionId, }); - if (workspace.displaySetupWizard) { - await finishOnboarding(); - } return result; } catch (e) { @@ -221,12 +228,37 @@ const useConnection = (): { [resetConnectionResource] ); + const syncConnection = async (connection: Connection) => { + analyticsService.track("Source - Action", { + action: "Full refresh sync", + connector_source: connection.source?.sourceName, + connector_source_id: connection.source?.sourceDefinitionId, + connector_destination: connection.destination?.name, + connector_destination_definition_id: + connection.destination?.destinationDefinitionId, + frequency: connection.schedule, + }); + await syncConnectionResource({ + connectionId: 
connection.connectionId, + }); + }; + return { createConnection, updateConnection, updateStateConnection, resetConnection, deleteConnection, + syncConnection, }; }; + +const useConnectionList = (): { connections: Connection[] } => { + const { workspace } = useWorkspace(); + return useResource(ConnectionResource.listShape(), { + workspaceId: workspace.workspaceId, + }); +}; + +export { useConnectionList }; export default useConnection; diff --git a/airbyte-webapp/src/hooks/services/useWorkspace.tsx b/airbyte-webapp/src/hooks/services/useWorkspace.tsx index f7700c123e4a6..1f1ca3ea33e98 100644 --- a/airbyte-webapp/src/hooks/services/useWorkspace.tsx +++ b/airbyte-webapp/src/hooks/services/useWorkspace.tsx @@ -6,6 +6,8 @@ import NotificationsResource, { } from "core/resources/Notifications"; import { useGetService } from "core/servicesProvider"; import { useAnalytics } from "../useAnalytics"; +import { Source } from "core/resources/Source"; +import { Destination } from "core/resources/Destination"; export const usePickFirstWorkspace = (): Workspace => { const { workspaces } = useResource(WorkspaceResource.listShape(), {}); @@ -44,6 +46,15 @@ securityUpdates: boolean; }) => Promise; finishOnboarding: (skipStep?: string) => Promise; + sendFeedback: ({ + feedback, + source, + destination, + }: { + feedback: string; + source: Source; + destination: Destination; + }) => Promise; } => { const updateWorkspace = useFetcher(WorkspaceResource.updateShape()); const tryWebhookUrl = useFetcher(NotificationsResource.tryShape()); @@ -71,6 +82,24 @@ ); }; + const sendFeedback = async ({ + feedback, + source, + destination, + }: { + feedback: string; + source: Source; + destination: Destination; + }) => { + analyticsService.track("Onboarding Feedback", { + feedback, + connector_source_definition: source?.sourceName, + connector_source_definition_id: source?.sourceDefinitionId, + connector_destination_definition: destination?.destinationName, + connector_destination_definition_id: destination?.destinationDefinitionId, + }); + }; + const setInitialSetupConfig = async (data: { email: string; anonymousDataCollection: boolean; @@ -147,6 +176,7 @@ updatePreferences, updateWebhook, testWebhook, + sendFeedback, }; }; diff --git a/airbyte-webapp/src/locales/en.json b/airbyte-webapp/src/locales/en.json index 69be4a569625a..89aef6078d6f5 100644 --- a/airbyte-webapp/src/locales/en.json +++ b/airbyte-webapp/src/locales/en.json @@ -11,6 +11,7 @@ "sidebar.connections": "Connections", "sidebar.settings": "Settings", "sidebar.update": "Update", + "sidebar.onboarding": "Onboarding", "form.continue": "Continue", "form.yourEmail": "Your email", @@ -167,6 +168,34 @@ "onboarding.fetchingSchema": "We are fetching the schema of your data source. \nThis should take less than a minute, but may take a few minutes on slow internet connections or data sources with a large amount of tables.", "onboarding.tutorial": "Check how you can sync PostgreSQL databases in minutes", "onboarding.skipOnboarding": "Skip Onboarding", + "onboarding.welcome": "Welcome to Airbyte!", + "onboarding.welcomeUser": "Welcome to Airbyte, {name}!", + "onboarding.welcomeUser.text": "Your path to syncing your data starts here. Connections are automated data pipelines that replicate data from a source to a destination. 
", + "onboarding.or": "or", + "onboarding.watchVideo": "Watch the 2-min demo video", + "onboarding.exploreDemo": "Explore our demo app with test data", + "onboarding.firstConnection": "Set up your first connection", + "onboarding.createFirstSource": "Create your first source", + "onboarding.createFirstSource.text": "Sources are tools where the data will be replicated from. ", + "onboarding.createFirstDestination": "Create your first destination", + "onboarding.createFirstDestination.text": "Destinations are tools where the data will be replicated to. ", + "onboarding.createConnection": "Set up the connection", + "onboarding.createConnection.text": "A connection replicates data from your source to your destination on the schedule you choose. ", + "onboarding.synchronisationProgress": "SourceDestination = Synchronisation in progress", + "onboarding.useCases": "Enable popular use cases", + "onboarding.replicateMySQL": "Replicate your MySQL database to Postgres with log-based CDC", + "onboarding.consolidateMarketing": "Consolidate your marketing data to compute the CAC for your paid customers", + "onboarding.consolidatePayment": "Consolidate your payment data to compute your LTV", + "onboarding.buildDashboard": "Build an activity dashboard for your engineering project", + "onboarding.zoomCalls": "Visualize the time spent by your team in Zoom calls", + "onboarding.skip": "Skip", + "onboarding.closeOnboarding": "Close onboarding", + "onboarding.syncCompleted": "Your first sync has been successfully completed!", + "onboarding.checkData": "Please check the data at the destination.\nDoes it fit with your expectations?", + "onboarding.skipNow": "Skip for now", + "onboarding.firstSync": "Start your first sync", + "onboarding.syncFailed": "Your sync failed. Please try again", + "onboarding.startAgain": "Your sync was cancelled. 
You can start it again", "sources.searchIncremental": "Search cursor value for incremental", "sources.incrementalDefault": "{value} (default)", diff --git a/airbyte-webapp/src/packages/cloud/routes.tsx b/airbyte-webapp/src/packages/cloud/routes.tsx index 6d3b08cd0c4f4..ba63df6b10f1e 100644 --- a/airbyte-webapp/src/packages/cloud/routes.tsx +++ b/airbyte-webapp/src/packages/cloud/routes.tsx @@ -36,9 +36,11 @@ import { PageConfig } from "pages/SettingsPage/SettingsPage"; import { WorkspaceSettingsView } from "./views/workspaces/WorkspaceSettingsView"; import { UsersSettingsView } from "packages/cloud/views/users/UsersSettingsView/UsersSettingsView"; import { AccountSettingsView } from "packages/cloud/views/users/AccountSettingsView/AccountSettingsView"; +import OnboardingPage from "pages/OnboardingPage"; import { ConfirmEmailPage } from "./views/auth/ConfirmEmailPage"; import useRouter from "hooks/useRouter"; import { WithPageAnalytics } from "pages/withPageAnalytics"; +import useWorkspace from "../../hooks/services/useWorkspace"; export enum Routes { Preferences = "/preferences", @@ -75,6 +77,7 @@ const MainRoutes: React.FC<{ currentWorkspaceId: string }> = ({ }) => { useGetWorkspace(currentWorkspaceId); const { countNewSourceVersion, countNewDestinationVersion } = useConnector(); + const { workspace } = useWorkspace(); const pageConfig = useMemo( () => ({ @@ -145,6 +148,11 @@ const MainRoutes: React.FC<{ currentWorkspaceId: string }> = ({ + {workspace.displaySetupWizard && ( + + + + )} diff --git a/airbyte-webapp/src/packages/cloud/services/useDefaultRequestMiddlewares.tsx b/airbyte-webapp/src/packages/cloud/services/useDefaultRequestMiddlewares.tsx index 9ab2a4d103d87..20cc9b3818312 100644 --- a/airbyte-webapp/src/packages/cloud/services/useDefaultRequestMiddlewares.tsx +++ b/airbyte-webapp/src/packages/cloud/services/useDefaultRequestMiddlewares.tsx @@ -5,5 +5,5 @@ import { useGetService } from "core/servicesProvider"; * This hook is responsible for registering RequestMiddlewares used in BaseRequest */ export const useDefaultRequestMiddlewares = (): RequestMiddleware[] => { - return useGetService("DefaultRequestMiddlewares"); + return useGetService("DefaultRequestMiddlewares"); }; diff --git a/airbyte-webapp/src/packages/cloud/views/layout/SideBar/SideBar.tsx b/airbyte-webapp/src/packages/cloud/views/layout/SideBar/SideBar.tsx index 5937522dd06e4..e2f6f02dee83f 100644 --- a/airbyte-webapp/src/packages/cloud/views/layout/SideBar/SideBar.tsx +++ b/airbyte-webapp/src/packages/cloud/views/layout/SideBar/SideBar.tsx @@ -16,7 +16,9 @@ import Indicator from "components/Indicator"; import Source from "views/layout/SideBar/components/SourceIcon"; import Connections from "views/layout/SideBar/components/ConnectionsIcon"; import Destination from "views/layout/SideBar/components/DestinationIcon"; +import Onboarding from "views/layout/SideBar/components/OnboardingIcon"; import { WorkspacePopout } from "packages/cloud/views/workspaces/WorkspacePopout"; +import useWorkspace from "hooks/services/useWorkspace"; const Bar = styled.nav` width: 100px; @@ -123,6 +125,7 @@ const WorkspaceButton = styled.div` const SideBar: React.FC = () => { const { hasNewVersions } = useConnector(); const config = useConfig(); + const { workspace } = useWorkspace(); return ( @@ -136,6 +139,16 @@ const SideBar: React.FC = () => { )} + {workspace.displaySetupWizard ? ( +
+ + + + + + +
+ ) : null}
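The SideBar and routes hunks above only mount the new onboarding entry while the workspace still reports `displaySetupWizard`, so the link and route disappear once onboarding is finished or skipped. The markup of those hunks was lost above; as a rough sketch of the gating pattern only, with a simplified stand-in link component (the real entry uses the SideBar's own MenuItem/Text styling, an Onboarding icon, and the app's Routes enum rather than the literal path assumed here):

import React from "react";
import Link from "components/Link";
import useWorkspace from "hooks/services/useWorkspace";

// Simplified stand-in for the new sidebar entry: render the onboarding link
// only while the workspace still has the setup wizard enabled.
const OnboardingLink: React.FC = () => {
  const { workspace } = useWorkspace();

  if (!workspace.displaySetupWizard) {
    return null;
  }

  // "/onboarding" is an assumed path; the app resolves the real one from its
  // Routes enum.
  return <Link to="/onboarding">Onboarding</Link>;
};

export default OnboardingLink;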
  • diff --git a/airbyte-webapp/src/pages/OnboardingPage/OnboardingPage.tsx b/airbyte-webapp/src/pages/OnboardingPage/OnboardingPage.tsx index 9bbdf8e5fc5ba..8b4bfb0ab6de4 100644 --- a/airbyte-webapp/src/pages/OnboardingPage/OnboardingPage.tsx +++ b/airbyte-webapp/src/pages/OnboardingPage/OnboardingPage.tsx @@ -1,22 +1,15 @@ -import React, { useEffect, useState } from "react"; +import React, { Suspense, useEffect, useState } from "react"; import styled from "styled-components"; -import { FormattedMessage } from "react-intl"; -import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; -import { faPlay } from "@fortawesome/free-solid-svg-icons"; import { useResource } from "rest-hooks"; -import { useConfig } from "config"; - -import { Link } from "components"; -import { H2 } from "components"; -import StepsMenu from "components/StepsMenu"; import HeadTitle from "components/HeadTitle"; -import Version from "components/Version"; - import useSource, { useSourceList } from "hooks/services/useSourceHook"; import useDestination, { useDestinationList, } from "hooks/services/useDestinationHook"; +import useConnection, { + useConnectionList, +} from "hooks/services/useConnectionHook"; import { JobInfo } from "core/resources/Scheduler"; import { ConnectionConfiguration } from "core/domain/connection"; import SourceDefinitionResource from "core/resources/SourceDefinition"; @@ -25,65 +18,37 @@ import useGetStepsConfig from "./useStepsConfig"; import SourceStep from "./components/SourceStep"; import DestinationStep from "./components/DestinationStep"; import ConnectionStep from "./components/ConnectionStep"; +import WelcomeStep from "./components/WelcomeStep"; +import FinalStep from "./components/FinalStep"; +import LetterLine from "./components/LetterLine"; import { StepType } from "./types"; import { useAnalytics } from "hooks/useAnalytics"; +import StepsCounter from "./components/StepsCounter"; +import LoadingPage from "components/LoadingPage"; +import useWorkspace from "hooks/services/useWorkspace"; +import useRouterHook from "hooks/useRouter"; +import { Routes } from "pages/routes"; -const Content = styled.div<{ big?: boolean }>` +const Content = styled.div<{ big?: boolean; medium?: boolean }>` width: 100%; - max-width: ${({ big }) => (big ? 1140 : 813)}px; + max-width: ${({ big, medium }) => (big ? 1140 : medium ? 
730 : 550)}px; margin: 0 auto; - padding: 33px 0 13px; + padding: 75px 0 30px; display: flex; flex-direction: column; - justify-content: space-between; align-items: center; min-height: 100%; - overflow: hidden; + position: relative; + z-index: 2; `; - -const Main = styled.div` +const ScreenContent = styled.div` width: 100%; -`; - -const Img = styled.img` - text-align: center; - width: 100%; -`; - -const MainTitle = styled(H2)` - margin-top: -39px; - font-family: ${({ theme }) => theme.highlightFont}; - color: ${({ theme }) => theme.darkPrimaryColor}; - letter-spacing: 0.008em; - font-weight: bold; -`; - -const Subtitle = styled.div` - font-size: 14px; - line-height: 21px; - color: ${({ theme }) => theme.greyColor40}; - text-align: center; - margin-top: 7px; -`; - -const StepsCover = styled.div` - margin: 33px 0 28px; -`; - -const TutorialLink = styled(Link)` - margin-top: 32px; - font-size: 14px; - text-align: center; - display: block; -`; - -const PlayIcon = styled(FontAwesomeIcon)` - margin-right: 6px; + position: relative; `; const OnboardingPage: React.FC = () => { const analyticsService = useAnalytics(); - const config = useConfig(); + const { push } = useRouterHook(); useEffect(() => { analyticsService.page("Onboarding Page"); @@ -91,7 +56,8 @@ const OnboardingPage: React.FC = () => { const { sources } = useSourceList(); const { destinations } = useDestinationList(); - + const { connections } = useConnectionList(); + const { syncConnection } = useConnection(); const { sourceDefinitions } = useResource( SourceDefinitionResource.listShape(), {} @@ -103,6 +69,7 @@ const OnboardingPage: React.FC = () => { const { createSource, recreateSource } = useSource(); const { createDestination, recreateDestination } = useDestination(); + const { finishOnboarding } = useWorkspace(); const [successRequest, setSuccessRequest] = useState(false); const [errorStatusRequest, setErrorStatusRequest] = useState<{ @@ -119,6 +86,7 @@ const OnboardingPage: React.FC = () => { const { currentStep, setCurrentStep, steps } = useGetStepsConfig( !!sources.length, !!destinations.length, + !!connections.length, afterUpdateStep ); @@ -129,6 +97,11 @@ const OnboardingPage: React.FC = () => { destinationDefinitions.find((item) => item.destinationDefinitionId === id); const renderStep = () => { + if (currentStep === StepType.INSTRUCTION) { + const onStart = () => setCurrentStep(StepType.CREATE_SOURCE); + //TODO: add username + return ; + } if (currentStep === StepType.CREATE_SOURCE) { const onSubmitSourceStep = async (values: { name: string; @@ -212,7 +185,6 @@ const OnboardingPage: React.FC = () => { availableServices={destinationDefinitions} hasSuccess={successRequest} error={errorStatusRequest} - currentSourceDefinitionId={sources[0].sourceDefinitionId} // destination={ // destinations.length && !successRequest ? destinations[0] : undefined // } @@ -220,42 +192,51 @@ const OnboardingPage: React.FC = () => { ); } + if (currentStep === StepType.SET_UP_CONNECTION) { + return ( + setCurrentStep(StepType.FINAl)} + /> + ); + } + + const onSync = () => syncConnection(connections[0]); + const onCloseOnboarding = () => { + finishOnboarding(); + push(Routes.Root); + }; + return ( - ); }; return ( - - -
    - - - - - - - - - - - {renderStep()} - - - - -
    - -
    + + {currentStep === StepType.CREATE_SOURCE ? ( + + ) : currentStep === StepType.CREATE_DESTINATION ? ( + + ) : null} + + + + + }>{renderStep()} + + ); }; diff --git a/airbyte-webapp/src/pages/OnboardingPage/components/ConnectionStep.tsx b/airbyte-webapp/src/pages/OnboardingPage/components/ConnectionStep.tsx index bc2549664b484..57b6e46414741 100644 --- a/airbyte-webapp/src/pages/OnboardingPage/components/ConnectionStep.tsx +++ b/airbyte-webapp/src/pages/OnboardingPage/components/ConnectionStep.tsx @@ -3,28 +3,45 @@ import React from "react"; import CreateConnectionContent from "components/CreateConnectionContent"; import { Source } from "core/resources/Source"; import { Destination } from "core/resources/Destination"; -import { Routes } from "../../routes"; -import useRouter from "hooks/useRouter"; -import SkipOnboardingButton from "./SkipOnboardingButton"; +import TitlesBlock from "./TitlesBlock"; +import { FormattedMessage } from "react-intl"; +import HighlightedText from "./HighlightedText"; type IProps = { errorStatus?: number; source: Source; destination: Destination; + afterSubmitConnection: () => void; }; -const ConnectionStep: React.FC = ({ source, destination }) => { - const { push } = useRouter(); - - const afterSubmitConnection = () => push(Routes.Root); - +const ConnectionStep: React.FC = ({ + source, + destination, + afterSubmitConnection, +}) => { return ( - } - source={source} - destination={destination} - afterSubmitConnection={afterSubmitConnection} - /> + <> + ( + {name} + ), + }} + /> + } + > + + + + ); }; diff --git a/airbyte-webapp/src/pages/OnboardingPage/components/DestinationStep.tsx b/airbyte-webapp/src/pages/OnboardingPage/components/DestinationStep.tsx index d2b217f860669..1b10858137e58 100644 --- a/airbyte-webapp/src/pages/OnboardingPage/components/DestinationStep.tsx +++ b/airbyte-webapp/src/pages/OnboardingPage/components/DestinationStep.tsx @@ -1,25 +1,22 @@ import React, { useState } from "react"; import { FormattedMessage } from "react-intl"; -import { useResource } from "rest-hooks"; import ContentCard from "components/ContentCard"; import ServiceForm from "views/Connector/ServiceForm"; -import ConnectionBlock from "components/ConnectionBlock"; import { JobsLogItem } from "components/JobItem"; -import SourceDefinitionResource from "core/resources/SourceDefinition"; import { useDestinationDefinitionSpecificationLoad } from "hooks/services/useDestinationHook"; import { createFormErrorMessage } from "utils/errorStatusMessage"; import { JobInfo } from "core/resources/Scheduler"; import { ConnectionConfiguration } from "core/domain/connection"; import { DestinationDefinition } from "core/resources/DestinationDefinition"; -import SkipOnboardingButton from "./SkipOnboardingButton"; +import TitlesBlock from "./TitlesBlock"; +import HighlightedText from "./HighlightedText"; import { useAnalytics } from "hooks/useAnalytics"; type IProps = { availableServices: DestinationDefinition[]; - currentSourceDefinitionId: string; onSubmit: (values: { name: string; serviceType: string; @@ -35,7 +32,6 @@ type IProps = { const DestinationStep: React.FC = ({ onSubmit, availableServices, - currentSourceDefinitionId, hasSuccess, error, jobInfo, @@ -46,9 +42,7 @@ const DestinationStep: React.FC = ({ destinationDefinitionSpecification, isLoading, } = useDestinationDefinitionSpecificationLoad(destinationDefinitionId); - const currentSource = useResource(SourceDefinitionResource.detailShape(), { - sourceDefinitionId: currentSourceDefinitionId, - }); + const analyticsService = 
useAnalytics(); const onDropDownSelect = (destinationDefinition: string) => { @@ -83,17 +77,23 @@ const DestinationStep: React.FC = ({ return ( <> - - } + ( + {name} + ), + }} + /> + } > + + + - } allowChangeConnector onServiceSelect={onDropDownSelect} onSubmit={onSubmitForm} diff --git a/airbyte-webapp/src/pages/OnboardingPage/components/FinalStep.tsx b/airbyte-webapp/src/pages/OnboardingPage/components/FinalStep.tsx new file mode 100644 index 0000000000000..0261bde007df4 --- /dev/null +++ b/airbyte-webapp/src/pages/OnboardingPage/components/FinalStep.tsx @@ -0,0 +1,136 @@ +import React, { useEffect, useState } from "react"; +import styled from "styled-components"; +import { FormattedMessage } from "react-intl"; +import { useResource, useSubscription } from "rest-hooks"; + +import VideoItem from "./VideoItem"; +import ProgressBlock from "./ProgressBlock"; +import HighlightedText from "./HighlightedText"; +import { H1, Button } from "components/base"; +import UseCaseBlock from "./UseCaseBlock"; +import ConnectionResource from "core/resources/Connection"; +import SyncCompletedModal from "views/Feedback/SyncCompletedModal"; +import { useOnboardingService } from "hooks/services/Onboarding/OnboardingService"; +import Status from "core/statuses"; +import useWorkspace from "hooks/services/useWorkspace"; + +type FinalStepProps = { + connectionId: string; + onSync: () => void; + onFinishOnboarding: () => void; +}; + +const Title = styled(H1)` + margin: 21px 0; +`; + +const Videos = styled.div` + width: 425px; + height: 205px; + display: flex; + justify-content: space-between; + align-items: center; + margin: 20px 0 50px; + background: url("/video-background.svg") no-repeat; + padding: 0 27px; +`; + +const CloseButton = styled(Button)` + margin-top: 30px; +`; + +const FinalStep: React.FC = ({ + connectionId, + onSync, + onFinishOnboarding, +}) => { + const { sendFeedback } = useWorkspace(); + const { + feedbackPassed, + passFeedback, + useCases, + skipCase, + } = useOnboardingService(); + const connection = useResource(ConnectionResource.detailShape(), { + connectionId, + }); + useSubscription(ConnectionResource.detailShape(), { + connectionId: connectionId, + }); + const [isOpen, setIsOpen] = useState(false); + + useEffect(() => { + if ( + connection.latestSyncJobStatus === Status.SUCCEEDED && + !feedbackPassed + ) { + setIsOpen(true); + } + }, [connection.latestSyncJobStatus, feedbackPassed]); + + const onSendFeedback = (feedback: string) => { + sendFeedback({ + feedback, + source: connection.source, + destination: connection.destination, + }); + passFeedback(); + setIsOpen(false); + }; + + return ( + <> + + } + videoId="sKDviQrOAbU" + img="/videoCover.png" + /> + } + videoId="sKDviQrOAbU" + img="/videoCover.png" + /> + + {!feedbackPassed && ( + + )} + + + <FormattedMessage + id="onboarding.useCases" + values={{ + name: (...name: React.ReactNode[]) => ( + <HighlightedText>{name}</HighlightedText> + ), + }} + /> + + + {useCases && + useCases.map((item, key) => ( + + ))} + + + + + + {isOpen ? 
( + setIsOpen(false)} + onPassFeedback={onSendFeedback} + /> + ) : null} + + ); +}; + +export default FinalStep; diff --git a/airbyte-webapp/src/pages/OnboardingPage/components/HighlightedText.tsx b/airbyte-webapp/src/pages/OnboardingPage/components/HighlightedText.tsx new file mode 100644 index 0000000000000..c998c6254cd6b --- /dev/null +++ b/airbyte-webapp/src/pages/OnboardingPage/components/HighlightedText.tsx @@ -0,0 +1,7 @@ +import styled from "styled-components"; + +const HighlightedText = styled.span` + color: ${({ theme }) => theme.redColor}; +`; + +export default HighlightedText; diff --git a/airbyte-webapp/src/pages/OnboardingPage/components/LetterLine.tsx b/airbyte-webapp/src/pages/OnboardingPage/components/LetterLine.tsx new file mode 100644 index 0000000000000..c0a7f05ce5322 --- /dev/null +++ b/airbyte-webapp/src/pages/OnboardingPage/components/LetterLine.tsx @@ -0,0 +1,86 @@ +import React from "react"; +import styled, { keyframes } from "styled-components"; + +export const RollAnimation = keyframes` + 0% { + width: 0; + } + 100% { + width: 100%; + } +`; + +export const ExitRollAnimation = keyframes` + 0% { + width: 100%; + float: right; + } + 100% { + width: 0; + float: right; + } +`; + +export const EnterAnimation = keyframes` + 0% { + left: -78px; + } + 100% { + left: calc(50% - 39px); + } +`; + +export const ExitAnimation = keyframes` + 0% { + left: calc(50% - 39px); + } + 100% { + left: calc(100% + 78px); + } +`; + +const Line = styled.div<{ onRight?: boolean }>` + position: absolute; + width: calc(50% - 275px); + z-index: 1; + top: 382px; + left: ${({ onRight }) => (onRight ? "calc(50% + 275px)" : 0)}; +`; +const Path = styled.div<{ exit?: boolean }>` + width: 100%; + height: 2px; + background: ${({ theme }) => theme.primaryColor}; + animation: ${({ exit }) => (exit ? ExitRollAnimation : RollAnimation)} 0.6s + linear ${({ exit }) => (exit ? 0.8 : 0)}s; + animation-fill-mode: forwards; +`; +const Img = styled.img<{ exit?: boolean }>` + position: absolute; + top: -58px; + left: -78px; + animation: ${({ exit }) => (exit ? ExitAnimation : EnterAnimation)} 0.8s + linear ${({ exit }) => (exit ? 
0 : 0.6)}s; + animation-fill-mode: both; +`; + +type LetterLineProps = { + onRight?: boolean; + exit?: boolean; +}; + +const LetterLine: React.FC = ({ onRight, exit }) => { + return ( + + + newsletter + + ); +}; + +export default LetterLine; diff --git a/airbyte-webapp/src/pages/OnboardingPage/components/ProgressBlock.tsx b/airbyte-webapp/src/pages/OnboardingPage/components/ProgressBlock.tsx new file mode 100644 index 0000000000000..302441becb869 --- /dev/null +++ b/airbyte-webapp/src/pages/OnboardingPage/components/ProgressBlock.tsx @@ -0,0 +1,127 @@ +import React from "react"; +import { FormattedMessage } from "react-intl"; +import styled, { keyframes } from "styled-components"; +import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; +import { faChevronRight } from "@fortawesome/free-solid-svg-icons"; + +import { Connection } from "core/domain/connection"; +import Link from "components/Link"; +import { Button, H1 } from "components/base"; +import { Routes } from "pages/routes"; +import Status from "core/statuses"; + +const run = keyframes` + from { + background-position: 0 0; + } + + to { + background-position: 98% 0; + } +`; + +const Bar = styled.div` + width: 100%; + height: 49px; + background: ${({ theme }) => theme.darkBeigeColor} url("/rectangle.svg"); + color: ${({ theme }) => theme.redColor}; + border-radius: 15px; + font-weight: 500; + font-size: 13px; + line-height: 16px; + display: flex; + justify-content: center; + align-items: center; + + animation: ${run} 15s linear infinite; +`; +const Lnk = styled(Link)` + font-weight: 600; + text-decoration: underline; + color: ${({ theme }) => theme.redColor}; + padding: 0 5px; +`; +const Img = styled.img` + margin-right: 9px; +`; +const ControlBlock = styled.div` + height: 49px; + text-align: center; + display: flex; + justify-content: center; + align-items: center; +`; +const PaddedButton = styled(Button)` + margin-left: 10px; +`; + +type ProgressBlockProps = { + connection: Connection; + onSync: () => void; +}; + +const ProgressBlock: React.FC = ({ + connection, + onSync, +}) => { + const showMessage = (status: string | null) => { + if (status === null || !status) { + return ; + } + if (status === Status.FAILED) { + return ; + } + if (status === Status.CANCELLED) { + return ; + } + + return ""; + }; + + if (connection.latestSyncJobStatus === Status.SUCCEEDED) { + return null; + } + + if ( + connection.latestSyncJobStatus !== Status.RUNNING && + connection.latestSyncJobStatus !== Status.INCOMPLETE + ) { + return ( + +

    {showMessage(connection.latestSyncJobStatus)}

    + + + +
    + ); + } + + return ( + + + ( + <> + {sr}{" "} + + + ), + ds: (...ds: React.ReactNode[]) => ( + + {ds} + + ), + sync: (...sync: React.ReactNode[]) => ( + + {sync} + + ), + }} + /> + + ); +}; + +export default ProgressBlock; diff --git a/airbyte-webapp/src/pages/OnboardingPage/components/SourceStep.tsx b/airbyte-webapp/src/pages/OnboardingPage/components/SourceStep.tsx index dadcfd29167fe..8b9bd582a81b7 100644 --- a/airbyte-webapp/src/pages/OnboardingPage/components/SourceStep.tsx +++ b/airbyte-webapp/src/pages/OnboardingPage/components/SourceStep.tsx @@ -11,9 +11,10 @@ import { JobsLogItem } from "components/JobItem"; import { useSourceDefinitionSpecificationLoad } from "hooks/services/useSourceHook"; -import SkipOnboardingButton from "./SkipOnboardingButton"; import { createFormErrorMessage } from "utils/errorStatusMessage"; import { useAnalytics } from "hooks/useAnalytics"; +import HighlightedText from "./HighlightedText"; +import TitlesBlock from "./TitlesBlock"; type IProps = { onSubmit: (values: { @@ -72,24 +73,39 @@ const SourceStep: React.FC = ({ const errorMessage = error ? createFormErrorMessage(error) : ""; return ( - }> - + <> + ( + {name} + ), + }} + /> } - allowChangeConnector - onServiceSelect={onServiceSelect} - onSubmit={onSubmitForm} - formType="source" - availableServices={availableServices} - hasSuccess={hasSuccess} - errorMessage={errorMessage} - specifications={sourceDefinitionSpecification?.connectionSpecification} - documentationUrl={sourceDefinitionSpecification?.documentationUrl} - isLoading={isLoading} - /> - - + > + + + + + + + ); }; diff --git a/airbyte-webapp/src/pages/OnboardingPage/components/StepsCounter/StepsCounter.tsx b/airbyte-webapp/src/pages/OnboardingPage/components/StepsCounter/StepsCounter.tsx new file mode 100644 index 0000000000000..9d954cc86db9d --- /dev/null +++ b/airbyte-webapp/src/pages/OnboardingPage/components/StepsCounter/StepsCounter.tsx @@ -0,0 +1,71 @@ +import React from "react"; +import styled from "styled-components"; + +import StepItem from "./components/StepItem"; +import StarsIcon from "./components/StarsIcon"; +import { StepType } from "../../types"; + +type StepsCounterProps = { + steps: { id: StepType; name?: React.ReactNode }[]; + currentStep: StepType; +}; + +const Steps = styled.div` + display: flex; + flex-direction: row; +`; + +const Content = styled.div` + position: relative; + display: flex; + flex-direction: row; +`; + +const Rocket = styled.img<{ stepNumber: number }>` + position: absolute; + width: 87px; + transform: matrix(0.99, 0.12, -0.12, 0.99, 0, 0) rotate(6.73deg); + top: 1px; + left: ${({ stepNumber }) => -23 + stepNumber * 95.5}px; + transition: 0.8s; +`; + +const Stars = styled.div<{ isLastStep?: boolean }>` + position: absolute; + top: -23px; + right: -35px; + color: ${({ theme }) => theme.dangerColor}; + opacity: ${({ isLastStep }) => (isLastStep ? 1 : 0)}; + transition: 0.8s 0.2s; +`; + +const StepsCounter: React.FC = ({ steps, currentStep }) => { + const stepItem = steps.find((item) => item.id === currentStep); + const stepIndex = stepItem ? steps.indexOf(stepItem) : 0; + const isLastStep = currentStep === steps[steps.length - 1].id; + + return ( + + + {steps.map((stepItem, key) => ( + = key} + current={stepItem.id === currentStep} + > + {key === steps.length - 1 ? 
: key} + + ))} + + + + + + + ); +}; + +export default StepsCounter; diff --git a/airbyte-webapp/src/pages/OnboardingPage/components/StepsCounter/components/StarsIcon.tsx b/airbyte-webapp/src/pages/OnboardingPage/components/StepsCounter/components/StarsIcon.tsx new file mode 100644 index 0000000000000..5e7ee80d0f3f2 --- /dev/null +++ b/airbyte-webapp/src/pages/OnboardingPage/components/StepsCounter/components/StarsIcon.tsx @@ -0,0 +1,22 @@ +const StarsIcon = ({ + color = "currentColor", +}: { + color?: string; +}): JSX.Element => ( + + + + + +); + +export default StarsIcon; diff --git a/airbyte-webapp/src/pages/OnboardingPage/components/StepsCounter/components/StepItem.tsx b/airbyte-webapp/src/pages/OnboardingPage/components/StepsCounter/components/StepItem.tsx new file mode 100644 index 0000000000000..bc71b20ed6c9a --- /dev/null +++ b/airbyte-webapp/src/pages/OnboardingPage/components/StepsCounter/components/StepItem.tsx @@ -0,0 +1,74 @@ +import React from "react"; +import styled from "styled-components"; + +type StepItemProps = { + active?: boolean; + current?: boolean; + children?: React.ReactNode; +}; + +const Content = styled.div<{ active?: boolean }>` + display: flex; + flex-direction: row; + align-items: center; + + &:last-child > .next-path { + display: none; + } + + &:first-child > .previous-path { + display: none; + } +`; + +const Item = styled.div<{ active?: boolean }>` + height: 46px; + width: 46px; + border-radius: 50%; + padding: 6px 5px; + border: 1px solid + ${({ theme, active }) => + active ? theme.primaryColor : theme.lightTextColor}; + background: ${({ theme, active }) => + active ? theme.primaryColor : theme.transparentColor}; + color: ${({ theme, active }) => + active ? theme.whiteColor : theme.lightTextColor}; + font-weight: normal; + font-size: 18px; + line-height: 22px; + display: flex; + justify-content: center; + align-items: center; + transition: 0.8s; +`; + +const Path = styled.div<{ active?: boolean }>` + width: 25px; + height: 1px; + background: ${({ theme }) => theme.lightTextColor}; + + &:before { + content: ""; + display: block; + width: ${({ active }) => (active ? 
25 : 0)}px; + height: 1px; + background: ${({ theme }) => theme.primaryColor}; + transition: 0.8s 0.5s; + } + + &:first-child:before { + transition: 0.8s; + } +`; + +const StepItem: React.FC = ({ active, children }) => { + return ( + + + {children} + + + ); +}; + +export default StepItem; diff --git a/airbyte-webapp/src/pages/OnboardingPage/components/StepsCounter/index.tsx b/airbyte-webapp/src/pages/OnboardingPage/components/StepsCounter/index.tsx new file mode 100644 index 0000000000000..de9748ebc946b --- /dev/null +++ b/airbyte-webapp/src/pages/OnboardingPage/components/StepsCounter/index.tsx @@ -0,0 +1,4 @@ +import StepsCounter from "./StepsCounter"; + +export default StepsCounter; +export { StepsCounter }; diff --git a/airbyte-webapp/src/pages/OnboardingPage/components/TitlesBlock.tsx b/airbyte-webapp/src/pages/OnboardingPage/components/TitlesBlock.tsx new file mode 100644 index 0000000000000..3b2480264f4f6 --- /dev/null +++ b/airbyte-webapp/src/pages/OnboardingPage/components/TitlesBlock.tsx @@ -0,0 +1,35 @@ +import React from "react"; +import { H1 } from "components/base"; +import styled from "styled-components"; + +type TitlesBlockProps = { + title: React.ReactNode; + children?: React.ReactNode; +}; + +const TitlesContent = styled.div` + padding: 42px 0 33px; + color: ${({ theme }) => theme.textColor}; + max-width: 493px; +`; + +const Text = styled.div` + padding-top: 10px; + font-weight: normal; + font-size: 13px; + line-height: 20px; + text-align: center; +`; + +const TitlesBlock: React.FC = ({ title, children }) => { + return ( + +

    + {title} +

    + {children} +
    + ); +}; + +export default TitlesBlock; diff --git a/airbyte-webapp/src/pages/OnboardingPage/components/UseCaseBlock.tsx b/airbyte-webapp/src/pages/OnboardingPage/components/UseCaseBlock.tsx new file mode 100644 index 0000000000000..8b4de00971430 --- /dev/null +++ b/airbyte-webapp/src/pages/OnboardingPage/components/UseCaseBlock.tsx @@ -0,0 +1,60 @@ +import React from "react"; +import styled from "styled-components"; + +import ContentCard from "components/ContentCard"; +import { FormattedMessage } from "react-intl"; + +type UseCaseBlockProps = { + count: number; + id: string; + onSkip: (id: string) => void; +}; + +const Block = styled(ContentCard)` + margin-bottom: 10px; + width: 100%; + padding: 16px; + display: flex; + justify-content: space-between; + flex-direction: row; + align-items: center; + font-size: 16px; + line-height: 28px; +`; + +const Num = styled.div` + width: 28px; + height: 28px; + border-radius: 50%; + background: ${({ theme }) => theme.primaryColor}; + color: ${({ theme }) => theme.whiteColor}; + margin-right: 13px; + font-weight: bold; + font-size: 12px; + line-height: 28px; + display: inline-block; + text-align: center; +`; + +const SkipButton = styled.div` + color: ${({ theme }) => theme.lightTextColor}; + font-size: 16px; + line-height: 28px; + cursor: pointer; +`; + +const UseCaseBlock: React.FC = ({ id, count, onSkip }) => { + return ( + +
    + {count} + +
    + onSkip(id)}> + + +
    + ); +}; + +export default UseCaseBlock; diff --git a/airbyte-webapp/src/pages/OnboardingPage/components/VideoItem/VideoItem.tsx b/airbyte-webapp/src/pages/OnboardingPage/components/VideoItem/VideoItem.tsx new file mode 100644 index 0000000000000..a112c1de191b8 --- /dev/null +++ b/airbyte-webapp/src/pages/OnboardingPage/components/VideoItem/VideoItem.tsx @@ -0,0 +1,108 @@ +import React, { useState } from "react"; +import styled from "styled-components"; + +import ShowVideo from "./components/ShowVideo"; +import PlayButton from "./components/PlayButton"; + +type VideoItemProps = { + small?: boolean; + videoId?: string; + img?: string; + description?: React.ReactNode; +}; + +const Content = styled.div<{ small?: boolean }>` + width: ${({ small }) => (small ? 158 : 317)}px; +`; + +const VideoBlock = styled.div<{ small?: boolean }>` + position: relative; + width: 100%; + height: ${({ small }) => (small ? 92 : 185)}px; + filter: drop-shadow(0px 14.4px 14.4px rgba(26, 25, 77, 0.2)); + + &:before, + &:after { + content: ""; + display: block; + position: absolute; + top: 0; + left: 0; + border-radius: ${({ small }) => (small ? 3.6 : 7.2)}px; + } + + &:before { + width: ${({ small }) => (small ? 158 : 317)}px; + height: ${({ small }) => (small ? 94 : 189)}px; + transform: rotate(2.98deg); + background: ${({ theme }) => theme.primaryColor}; + z-index: 1; + } + + &:after { + width: ${({ small }) => (small ? 160 : 320)}px; + height: ${({ small }) => (small ? 92 : 184)}px; + transform: rotate(-2.48deg); + background: ${({ theme }) => theme.successColor}; + z-index: 2; + } +`; + +const VideoFrame = styled.div<{ small?: boolean; img?: string }>` + cursor: pointer; + position: relative; + width: ${({ small }) => (small ? 158 : 317)}px; + height: ${({ small }) => (small ? 92 : 185)}px; + background: ${({ theme }) => theme.whiteColor} + ${({ img }) => (img ? `url(${img})` : "")}; + background-size: cover; + border: 2.4px solid ${({ theme }) => theme.whiteColor}; + box-shadow: 0 2.4px 4.8px rgba(26, 25, 77, 0.12), + 0 16.2px 7.2px -10.2px rgba(26, 25, 77, 0.2); + border-radius: ${({ small }) => (small ? 3.6 : 7.2)}px; + z-index: 3; + display: flex; + justify-content: center; + align-items: center; +`; + +const Description = styled.div<{ small?: boolean }>` + text-align: center; + color: ${({ theme, small }) => + small ? theme.textColor : theme.primaryColor}; + font-size: 13px; + line-height: ${({ small }) => (small ? 16 : 20)}px; + margin-top: 14px; + cursor: pointer; +`; + +const VideoItem: React.FC = ({ + description, + small, + videoId, + img, +}) => { + const [isVideoOpen, setIsVideoOpen] = useState(false); + + return ( + + + setIsVideoOpen(true)} + > + setIsVideoOpen(true)} /> + + + setIsVideoOpen(true)}> + {description} + + {isVideoOpen ? 
( + setIsVideoOpen(false)} /> + ) : null} + + ); +}; + +export default VideoItem; diff --git a/airbyte-webapp/src/pages/OnboardingPage/components/VideoItem/components/PlayButton.tsx b/airbyte-webapp/src/pages/OnboardingPage/components/VideoItem/components/PlayButton.tsx new file mode 100644 index 0000000000000..4770d5464cb23 --- /dev/null +++ b/airbyte-webapp/src/pages/OnboardingPage/components/VideoItem/components/PlayButton.tsx @@ -0,0 +1,103 @@ +import React from "react"; +import styled, { keyframes } from "styled-components"; + +type PlayButtonProps = { + small?: boolean; + onClick: () => void; +}; + +export const BigCircleAnimation = keyframes` + 0% { + height: 80%; + width: 80%; + } + 100% { + width: 100%; + height: 100%; + } +`; + +export const MiddleCircleAnimation = keyframes` + 0% { + height: 53%; + width: 53%; + } + 100% { + width: 73%; + height: 73%; + } +`; + +export const SmallCircleAnimation = keyframes` + 0% { + height: 20%; + width: 20%; + } + 100% { + width: 40%; + height: 40%; + } +`; + +const MainCircle = styled.div` + cursor: pointer; + height: ${({ small }) => (small ? 42 : 85)}px; + width: ${({ small }) => (small ? 42 : 85)}px; + border-radius: 50%; + background: ${({ theme }) => theme.primaryColor}; + padding: ${({ small }) => (small ? "10px 0 0 16px" : "20px 0 0 32px")}; + box-shadow: 0 2.4px 4.8px ${({ theme }) => theme.cardShadowColor}, + 0 16.2px 7.2px -10.2px ${({ theme }) => theme.cardShadowColor}; + + &:hover { + display: flex; + justify-content: center; + align-items: center; + padding: 0; + + & > img { + display: none; + } + & div { + display: flex; + justify-content: center; + align-items: center; + } + } +`; + +const BigCircle = styled.div<{ small?: boolean }>` + height: ${({ small }) => (small ? 32 : 65)}px; + width: ${({ small }) => (small ? 32 : 65)}px; + border-radius: 50%; + background: rgba(255, 255, 255, 0.5); + display: none; + animation: ${BigCircleAnimation} alternate 0.5s linear 0s infinite; +`; + +const MiddleCircle = styled(BigCircle)` + height: ${({ small }) => (small ? 22 : 45)}px; + width: ${({ small }) => (small ? 22 : 45)}px; + animation-name: ${MiddleCircleAnimation}; +`; + +const SmallCircle = styled(BigCircle)` + height: ${({ small }) => (small ? 8 : 17)}px; + width: ${({ small }) => (small ? 8 : 17)}px; + animation-name: ${SmallCircleAnimation}; +`; + +const PlayButton: React.FC = ({ small, onClick }) => { + return ( + + play + + + + + + + ); +}; + +export default PlayButton; diff --git a/airbyte-webapp/src/pages/OnboardingPage/components/VideoItem/components/ShowVideo.tsx b/airbyte-webapp/src/pages/OnboardingPage/components/VideoItem/components/ShowVideo.tsx new file mode 100644 index 0000000000000..151ea3fbef036 --- /dev/null +++ b/airbyte-webapp/src/pages/OnboardingPage/components/VideoItem/components/ShowVideo.tsx @@ -0,0 +1,45 @@ +import React from "react"; +import styled from "styled-components"; +import { faTimes } from "@fortawesome/free-solid-svg-icons"; +import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; + +import Modal from "components/Modal"; +import { Button } from "components/base"; + +type ShowVideoProps = { + videoId?: string; + onClose: () => void; +}; + +const CloseButton = styled(Button)` + position: absolute; + top: 30px; + right: 30px; + color: ${({ theme }) => theme.whiteColor}; + font-size: 20px; + + &:hover { + border: none; + } +`; + +const ShowVideo: React.FC = ({ videoId, onClose }) => { + return ( + + + + +