From bd0020c80ef5a008edc962712b3b67842c823a1a Mon Sep 17 00:00:00 2001
From: lvvvvvf <53883181+lvvvvvf@users.noreply.github.com>
Date: Mon, 17 Aug 2020 15:05:11 -0700
Subject: [PATCH] samples: Add export to BigQuery (#70)

---
 .../snippets/quickstart_exportassets.py      | 26 +++++++++++++++++
 .../snippets/quickstart_exportassets_test.py | 29 +++++++++++++++++--
 2 files changed, 53 insertions(+), 2 deletions(-)

diff --git a/asset/snippets/snippets/quickstart_exportassets.py b/asset/snippets/snippets/quickstart_exportassets.py
index f9784a90f74e..0d9c30664fd4 100644
--- a/asset/snippets/snippets/quickstart_exportassets.py
+++ b/asset/snippets/snippets/quickstart_exportassets.py
@@ -36,6 +36,32 @@ def export_assets(project_id, dump_file_path):
     # [END asset_quickstart_export_assets]
 
 
+def export_assets_bigquery(project_id, dataset, table):
+    # [START asset_quickstart_export_assets_bigquery]
+    from google.cloud import asset_v1
+
+    # TODO project_id = 'Your Google Cloud Project ID'
+    # TODO dataset = 'Your BigQuery dataset path'
+    # TODO table = 'Your BigQuery table name'
+
+    client = asset_v1.AssetServiceClient()
+    parent = "projects/{}".format(project_id)
+    content_type = asset_v1.ContentType.RESOURCE
+    output_config = asset_v1.OutputConfig()
+    output_config.bigquery_destination.dataset = dataset
+    output_config.bigquery_destination.table = table
+    output_config.bigquery_destination.force = True
+    response = client.export_assets(
+        request={
+            "parent": parent,
+            "content_type": content_type,
+            "output_config": output_config
+        }
+    )
+    print(response.result())
+    # [END asset_quickstart_export_assets_bigquery]
+
+
 if __name__ == "__main__":
     parser = argparse.ArgumentParser(
diff --git a/asset/snippets/snippets/quickstart_exportassets_test.py b/asset/snippets/snippets/quickstart_exportassets_test.py
index 9c03d5d58a5b..af7cc07399af 100644
--- a/asset/snippets/snippets/quickstart_exportassets_test.py
+++ b/asset/snippets/snippets/quickstart_exportassets_test.py
@@ -17,6 +17,7 @@
 import os
 import uuid
 
+from google.cloud import bigquery
 from google.cloud import storage
 import pytest
 
@@ -24,6 +25,7 @@
 
 PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"]
 BUCKET = "assets-{}".format(uuid.uuid4().hex)
+DATASET = "assets_{}".format(int(uuid.uuid4()))
 
 
 @pytest.fixture(scope="module")
@@ -31,6 +33,11 @@ def storage_client():
     yield storage.Client()
 
 
+@pytest.fixture(scope="module")
+def bigquery_client():
+    yield bigquery.Client()
+
+
 @pytest.fixture(scope="module")
 def asset_bucket(storage_client):
     bucket = storage_client.create_bucket(BUCKET)
@@ -44,9 +51,27 @@ def asset_bucket(storage_client):
         raise e
 
 
-def test_export_assets(asset_bucket, capsys):
+@pytest.fixture(scope='module')
+def dataset(bigquery_client):
+    dataset_id = "{}.{}".format(PROJECT, DATASET)
+    dataset = bigquery.Dataset(dataset_id)
+    dataset.location = "US"
+    dataset = bigquery_client.create_dataset(dataset)
+
+    yield DATASET
+
+    bigquery_client.delete_dataset(
+        dataset_id, delete_contents=True, not_found_ok=False)
+
+
+def test_export_assets(asset_bucket, dataset, capsys):
     dump_file_path = "gs://{}/assets-dump.txt".format(asset_bucket)
     quickstart_exportassets.export_assets(PROJECT, dump_file_path)
     out, _ = capsys.readouterr()
-
     assert dump_file_path in out
+
+    dataset_id = "projects/{}/datasets/{}".format(PROJECT, dataset)
+    quickstart_exportassets.export_assets_bigquery(
+        PROJECT, dataset_id, "assettable")
+    out, _ = capsys.readouterr()
+    assert dataset_id in out
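
Note: the patch adds export_assets_bigquery() but leaves the script's argparse block untouched, so the new sample is only exercised from the test. Below is a minimal sketch of how the function could be invoked from a small CLI; the import path and flag names are assumptions, not part of this change, and per the test the dataset argument must be the full resource path (projects/PROJECT/datasets/DATASET).

    import argparse

    import quickstart_exportassets  # module modified by this patch

    if __name__ == "__main__":
        parser = argparse.ArgumentParser(description=__doc__)
        parser.add_argument("project_id", help="Google Cloud project ID")
        parser.add_argument(
            "dataset",
            help="Full dataset path, e.g. projects/PROJECT/datasets/DATASET")
        parser.add_argument("table", help="BigQuery table to export assets into")
        args = parser.parse_args()

        # export_assets_bigquery() waits on response.result() and prints the
        # completed export response; the destination table is overwritten
        # because the sample sets bigquery_destination.force = True.
        quickstart_exportassets.export_assets_bigquery(
            args.project_id, args.dataset, args.table)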