diff --git a/bigquery/docs/snippets.py b/bigquery/docs/snippets.py index 05b81b00c152..58bd6689c80f 100644 --- a/bigquery/docs/snippets.py +++ b/bigquery/docs/snippets.py @@ -147,24 +147,6 @@ def test_create_client_json_credentials(): assert client is not None -def test_list_datasets(client): - """List datasets for a project.""" - # [START bigquery_list_datasets] - # from google.cloud import bigquery - # client = bigquery.Client() - - datasets = list(client.list_datasets()) - project = client.project - - if datasets: - print("Datasets in project {}:".format(project)) - for dataset in datasets: # API request(s) - print("\t{}".format(dataset.dataset_id)) - else: - print("{} project does not contain any datasets.".format(project)) - # [END bigquery_list_datasets] - - def test_list_datasets_by_label(client, to_delete): dataset_id = "list_datasets_by_label_{}".format(_millis()) dataset = bigquery.Dataset(client.dataset(dataset_id)) @@ -192,51 +174,6 @@ def test_list_datasets_by_label(client, to_delete): assert dataset_id in found -def test_get_dataset_information(client, to_delete): - """View information about a dataset.""" - dataset_id = "get_dataset_{}".format(_millis()) - dataset_labels = {"color": "green"} - dataset_ref = client.dataset(dataset_id) - dataset = bigquery.Dataset(dataset_ref) - dataset.description = ORIGINAL_DESCRIPTION - dataset.labels = dataset_labels - dataset = client.create_dataset(dataset) # API request - to_delete.append(dataset) - - # [START bigquery_get_dataset] - # from google.cloud import bigquery - # client = bigquery.Client() - # dataset_id = 'my_dataset' - - dataset_ref = client.dataset(dataset_id) - dataset = client.get_dataset(dataset_ref) # API request - - # View dataset properties - print("Dataset ID: {}".format(dataset_id)) - print("Description: {}".format(dataset.description)) - print("Labels:") - labels = dataset.labels - if labels: - for label, value in labels.items(): - print("\t{}: {}".format(label, value)) - else: - 
print("\tDataset has no labels defined.") - - # View tables in dataset - print("Tables:") - tables = list(client.list_tables(dataset_ref)) # API request(s) - if tables: - for table in tables: - print("\t{}".format(table.table_id)) - else: - print("\tThis dataset does not contain any tables.") - # [END bigquery_get_dataset] - - assert dataset.description == ORIGINAL_DESCRIPTION - assert dataset.labels == dataset_labels - assert tables == [] - - # [START bigquery_dataset_exists] def dataset_exists(client, dataset_reference): """Return if a dataset exists. @@ -274,66 +211,6 @@ def test_dataset_exists(client, to_delete): assert not dataset_exists(client, client.dataset("i_dont_exist")) -@pytest.mark.skip( - reason=( - "update_dataset() is flaky " - "https://github.com/GoogleCloudPlatform/google-cloud-python/issues/5588" - ) -) -def test_update_dataset_description(client, to_delete): - """Update a dataset's description.""" - dataset_id = "update_dataset_description_{}".format(_millis()) - dataset = bigquery.Dataset(client.dataset(dataset_id)) - dataset.description = "Original description." - client.create_dataset(dataset) - to_delete.append(dataset) - - # [START bigquery_update_dataset_description] - # from google.cloud import bigquery - # client = bigquery.Client() - # dataset_ref = client.dataset('my_dataset') - # dataset = client.get_dataset(dataset_ref) # API request - - assert dataset.description == "Original description." - dataset.description = "Updated description." - - dataset = client.update_dataset(dataset, ["description"]) # API request - - assert dataset.description == "Updated description." 
- # [END bigquery_update_dataset_description] - - -@pytest.mark.skip( - reason=( - "update_dataset() is flaky " - "https://github.com/GoogleCloudPlatform/google-cloud-python/issues/5588" - ) -) -def test_update_dataset_default_table_expiration(client, to_delete): - """Update a dataset's default table expiration.""" - dataset_id = "update_dataset_default_expiration_{}".format(_millis()) - dataset = bigquery.Dataset(client.dataset(dataset_id)) - dataset = client.create_dataset(dataset) - to_delete.append(dataset) - - # [START bigquery_update_dataset_expiration] - # from google.cloud import bigquery - # client = bigquery.Client() - # dataset_ref = client.dataset('my_dataset') - # dataset = client.get_dataset(dataset_ref) # API request - - assert dataset.default_table_expiration_ms is None - one_day_ms = 24 * 60 * 60 * 1000 # in milliseconds - dataset.default_table_expiration_ms = one_day_ms - - dataset = client.update_dataset( - dataset, ["default_table_expiration_ms"] - ) # API request - - assert dataset.default_table_expiration_ms == one_day_ms - # [END bigquery_update_dataset_expiration] - - @pytest.mark.skip( reason=( "update_dataset() is flaky " @@ -397,79 +274,6 @@ def test_manage_dataset_labels(client, to_delete): # [END bigquery_delete_label_dataset] -@pytest.mark.skip( - reason=( - "update_dataset() is flaky " - "https://github.com/GoogleCloudPlatform/google-cloud-python/issues/5588" - ) -) -def test_update_dataset_access(client, to_delete): - """Update a dataset's access controls.""" - dataset_id = "update_dataset_access_{}".format(_millis()) - dataset = bigquery.Dataset(client.dataset(dataset_id)) - dataset = client.create_dataset(dataset) - to_delete.append(dataset) - - # [START bigquery_update_dataset_access] - # from google.cloud import bigquery - # client = bigquery.Client() - # dataset = client.get_dataset(client.dataset('my_dataset')) - - entry = bigquery.AccessEntry( - role="READER", - entity_type="userByEmail", - 
entity_id="sample.bigquery.dev@gmail.com", - ) - assert entry not in dataset.access_entries - entries = list(dataset.access_entries) - entries.append(entry) - dataset.access_entries = entries - - dataset = client.update_dataset(dataset, ["access_entries"]) # API request - - assert entry in dataset.access_entries - # [END bigquery_update_dataset_access] - - -def test_delete_dataset(client): - """Delete a dataset.""" - from google.cloud.exceptions import NotFound - - dataset1_id = "delete_dataset_{}".format(_millis()) - dataset1 = bigquery.Dataset(client.dataset(dataset1_id)) - client.create_dataset(dataset1) - - dataset2_id = "delete_dataset_with_tables{}".format(_millis()) - dataset2 = bigquery.Dataset(client.dataset(dataset2_id)) - client.create_dataset(dataset2) - - table = bigquery.Table(dataset2.table("new_table")) - client.create_table(table) - - # [START bigquery_delete_dataset] - # from google.cloud import bigquery - # client = bigquery.Client() - - # Delete a dataset that does not contain any tables - # dataset1_id = 'my_empty_dataset' - dataset1_ref = client.dataset(dataset1_id) - client.delete_dataset(dataset1_ref) # API request - - print("Dataset {} deleted.".format(dataset1_id)) - - # Use the delete_contents parameter to delete a dataset and its contents - # dataset2_id = 'my_dataset_with_tables' - dataset2_ref = client.dataset(dataset2_id) - client.delete_dataset(dataset2_ref, delete_contents=True) # API request - - print("Dataset {} deleted.".format(dataset2_id)) - # [END bigquery_delete_dataset] - - for dataset in [dataset1, dataset2]: - with pytest.raises(NotFound): - client.get_dataset(dataset) # API request - - def test_list_tables(client, to_delete): """List tables within a dataset.""" dataset_id = "list_tables_dataset_{}".format(_millis()) diff --git a/bigquery/docs/usage/datasets.rst b/bigquery/docs/usage/datasets.rst index dbcd834d5cd3..d5646355c00d 100644 --- a/bigquery/docs/usage/datasets.rst +++ b/bigquery/docs/usage/datasets.rst @@ -19,7 
+19,7 @@ Listing Datasets List datasets for a project with the :func:`~google.cloud.bigquery.client.Client.list_datasets` method: -.. literalinclude:: ../snippets.py +.. literalinclude:: ../samples/list_datasets.py :language: python :dedent: 4 :start-after: [START bigquery_list_datasets] @@ -31,7 +31,7 @@ Getting a Dataset Get a dataset resource (to pick up changes made by another client) with the :func:`~google.cloud.bigquery.client.Client.get_dataset` method: -.. literalinclude:: ../snippets.py +.. literalinclude:: ../samples/get_dataset.py :language: python :dedent: 4 :start-after: [START bigquery_get_dataset] @@ -55,7 +55,7 @@ Updating a Dataset Update a property in a dataset's metadata with the :func:`~google.cloud.bigquery.client.Client.update_dataset` method: -.. literalinclude:: ../snippets.py +.. literalinclude:: ../samples/update_dataset_description.py :language: python :dedent: 4 :start-after: [START bigquery_update_dataset_description] @@ -64,7 +64,7 @@ Update a property in a dataset's metadata with the Modify user permissions on a dataset with the :func:`~google.cloud.bigquery.client.Client.update_dataset` method: -.. literalinclude:: ../snippets.py +.. literalinclude:: ../samples/update_dataset_access.py :language: python :dedent: 4 :start-after: [START bigquery_update_dataset_access] @@ -76,7 +76,7 @@ Deleting a Dataset Delete a dataset with the :func:`~google.cloud.bigquery.client.Client.delete_dataset` method: -.. literalinclude:: ../snippets.py +.. literalinclude:: ../samples/delete_dataset.py :language: python :dedent: 4 :start-after: [START bigquery_delete_dataset] diff --git a/bigquery/samples/delete_dataset.py b/bigquery/samples/delete_dataset.py new file mode 100644 index 000000000000..ad04c3fb3664 --- /dev/null +++ b/bigquery/samples/delete_dataset.py @@ -0,0 +1,32 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def delete_dataset(client, dataset_id): + + # [START bigquery_delete_dataset] + from google.cloud import bigquery + + # TODO(developer): Construct a BigQuery client object. + # client = bigquery.Client() + + # TODO(developer): Set dataset_id to the ID of the dataset to delete. + # dataset_id = 'your-project.your_dataset' + + # Use the delete_contents parameter to delete a dataset and its contents + # Use the not_found_ok parameter to not receive an error if the dataset has already been deleted. + client.delete_dataset(dataset_id, delete_contents=True, not_found_ok=True) + # [END bigquery_delete_dataset] + + print("Deleted dataset '{}'.".format(dataset_id)) diff --git a/bigquery/samples/get_dataset.py b/bigquery/samples/get_dataset.py new file mode 100644 index 000000000000..eeab2e088d2f --- /dev/null +++ b/bigquery/samples/get_dataset.py @@ -0,0 +1,56 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +def get_dataset(client, dataset_id): + + # [START bigquery_get_dataset] + from google.cloud import bigquery + + # TODO(developer): Construct a BigQuery client object. + # client = bigquery.Client() + + # TODO(developer): Set dataset_id to the ID of the dataset to fetch. + # dataset_id = 'your-project.your_dataset' + + dataset = client.get_dataset(dataset_id) + + full_dataset_id = "{}.{}".format(dataset.project, dataset.dataset_id) + friendly_name = dataset.friendly_name + print( + "Got dataset '{}' with friendly_name '{}'.".format( + full_dataset_id, friendly_name + ) + ) + + # View dataset properties + print("Description: {}".format(dataset.description)) + print("Labels:") + labels = dataset.labels + if labels: + for label, value in labels.items(): + print("\t{}: {}".format(label, value)) + else: + print("\tDataset has no labels defined.") + + # View tables in dataset + print("Tables:") + tables = list(client.list_tables(dataset)) # API request(s) + if tables: + for table in tables: + print("\t{}".format(table.table_id)) + else: + print("\tThis dataset does not contain any tables.") + + # [END bigquery_get_dataset] diff --git a/bigquery/samples/list_datasets.py b/bigquery/samples/list_datasets.py new file mode 100644 index 000000000000..c9ddf4f2523c --- /dev/null +++ b/bigquery/samples/list_datasets.py @@ -0,0 +1,33 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +def list_datasets(client): + + # [START bigquery_list_datasets] + from google.cloud import bigquery + + # TODO(developer): Construct a BigQuery client object. + # client = bigquery.Client() + + datasets = list(client.list_datasets()) + project = client.project + + if datasets: + print("Datasets in project {}:".format(project)) + for dataset in datasets: # API request(s) + print("\t{}".format(dataset.dataset_id)) + else: + print("{} project does not contain any datasets.".format(project)) + # [END bigquery_list_datasets] diff --git a/bigquery/samples/tests/test_dataset_samples.py b/bigquery/samples/tests/test_create_dataset.py similarity index 100% rename from bigquery/samples/tests/test_dataset_samples.py rename to bigquery/samples/tests/test_create_dataset.py diff --git a/bigquery/samples/tests/test_delete_dataset.py b/bigquery/samples/tests/test_delete_dataset.py new file mode 100644 index 000000000000..2b1b6ad06195 --- /dev/null +++ b/bigquery/samples/tests/test_delete_dataset.py @@ -0,0 +1,22 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from .. 
import delete_dataset + + +def test_delete_dataset(capsys, client, dataset_id): + + delete_dataset.delete_dataset(client, dataset_id) + out, err = capsys.readouterr() + assert "Deleted dataset '{}'.".format(dataset_id) in out diff --git a/bigquery/samples/tests/test_get_dataset.py b/bigquery/samples/tests/test_get_dataset.py new file mode 100644 index 000000000000..374f8835211a --- /dev/null +++ b/bigquery/samples/tests/test_get_dataset.py @@ -0,0 +1,22 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from .. import get_dataset + + +def test_get_dataset(capsys, client, dataset_id): + + get_dataset.get_dataset(client, dataset_id) + out, err = capsys.readouterr() + assert "{}".format(dataset_id) in out diff --git a/bigquery/samples/tests/test_list_datasets.py b/bigquery/samples/tests/test_list_datasets.py new file mode 100644 index 000000000000..4c66a24f9b1a --- /dev/null +++ b/bigquery/samples/tests/test_list_datasets.py @@ -0,0 +1,22 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +from .. import list_datasets + + +def test_list_datasets(capsys, client, dataset_id): + + list_datasets.list_datasets(client) + out, err = capsys.readouterr() + assert "Datasets in project {}:".format(client.project) in out diff --git a/bigquery/samples/tests/test_update_dataset_access.py b/bigquery/samples/tests/test_update_dataset_access.py new file mode 100644 index 000000000000..ae33dbfe4a4c --- /dev/null +++ b/bigquery/samples/tests/test_update_dataset_access.py @@ -0,0 +1,24 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from .. import update_dataset_access + + +def test_update_dataset_access(capsys, client, dataset_id): + + update_dataset_access.update_dataset_access(client, dataset_id) + out, err = capsys.readouterr() + assert ( + "Updated dataset '{}' with modified user permissions.".format(dataset_id) in out + ) diff --git a/bigquery/samples/tests/test_update_dataset_default_table_expiration.py b/bigquery/samples/tests/test_update_dataset_default_table_expiration.py new file mode 100644 index 000000000000..0366b767fbe8 --- /dev/null +++ b/bigquery/samples/tests/test_update_dataset_default_table_expiration.py @@ -0,0 +1,29 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from .. import update_dataset_default_table_expiration + + +def test_update_dataset_default_table_expiration(capsys, client, dataset_id): + + one_day_ms = 24 * 60 * 60 * 1000 # in milliseconds + + update_dataset_default_table_expiration.update_dataset_default_table_expiration( + client, dataset_id + ) + out, err = capsys.readouterr() + assert ( + "Updated dataset {} with new expiration {}".format(dataset_id, one_day_ms) + in out + ) diff --git a/bigquery/samples/tests/test_update_dataset_description.py b/bigquery/samples/tests/test_update_dataset_description.py new file mode 100644 index 000000000000..c6f8889f50da --- /dev/null +++ b/bigquery/samples/tests/test_update_dataset_description.py @@ -0,0 +1,22 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from .. import update_dataset_description + + +def test_update_dataset_description(capsys, client, dataset_id): + + update_dataset_description.update_dataset_description(client, dataset_id) + out, err = capsys.readouterr() + assert "Updated description." 
in out diff --git a/bigquery/samples/update_dataset_access.py b/bigquery/samples/update_dataset_access.py new file mode 100644 index 000000000000..aa316a38dff9 --- /dev/null +++ b/bigquery/samples/update_dataset_access.py @@ -0,0 +1,45 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def update_dataset_access(client, dataset_id): + + # [START bigquery_update_dataset_access] + from google.cloud import bigquery + + # TODO(developer): Construct a BigQuery client object. + # client = bigquery.Client() + + # TODO(developer): Set dataset_id to the ID of the dataset to fetch. 
+ # dataset_id = 'your-project.your_dataset' + + dataset = client.get_dataset(dataset_id) + + entry = bigquery.AccessEntry( + role="READER", + entity_type="userByEmail", + entity_id="sample.bigquery.dev@gmail.com", + ) + + entries = list(dataset.access_entries) + entries.append(entry) + dataset.access_entries = entries + + dataset = client.update_dataset(dataset, ["access_entries"]) # API request + + full_dataset_id = "{}.{}".format(dataset.project, dataset.dataset_id) + print( + "Updated dataset '{}' with modified user permissions.".format(full_dataset_id) + ) + # [END bigquery_update_dataset_access] diff --git a/bigquery/samples/update_dataset_default_table_expiration.py b/bigquery/samples/update_dataset_default_table_expiration.py new file mode 100644 index 000000000000..a5ac38c01a99 --- /dev/null +++ b/bigquery/samples/update_dataset_default_table_expiration.py @@ -0,0 +1,40 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def update_dataset_default_table_expiration(client, dataset_id): + + # [START bigquery_update_dataset_expiration] + from google.cloud import bigquery + + # TODO(developer): Construct a BigQuery client object. + # client = bigquery.Client() + + # TODO(developer): Set dataset_id to the ID of the dataset to fetch. 
+ # dataset_id = 'your-project.your_dataset' + + dataset = client.get_dataset(dataset_id) + dataset.default_table_expiration_ms = 24 * 60 * 60 * 1000 # in milliseconds + + dataset = client.update_dataset( + dataset, ["default_table_expiration_ms"] + ) # API request + + full_dataset_id = "{}.{}".format(dataset.project, dataset.dataset_id) + print( + "Updated dataset {} with new expiration {}".format( + full_dataset_id, dataset.default_table_expiration_ms + ) + ) + # [END bigquery_update_dataset_expiration] diff --git a/bigquery/samples/update_dataset_description.py b/bigquery/samples/update_dataset_description.py new file mode 100644 index 000000000000..70be80b7507e --- /dev/null +++ b/bigquery/samples/update_dataset_description.py @@ -0,0 +1,37 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def update_dataset_description(client, dataset_id): + + # [START bigquery_update_dataset_description] + from google.cloud import bigquery + + # TODO(developer): Construct a BigQuery client object. + # client = bigquery.Client() + + # TODO(developer): Set dataset_id to the ID of the dataset to fetch. + # dataset_id = 'your-project.your_dataset' + + dataset = client.get_dataset(dataset_id) + dataset.description = "Updated description." 
+ dataset = client.update_dataset(dataset, ["description"]) + + full_dataset_id = "{}.{}".format(dataset.project, dataset.dataset_id) + print( + "Updated dataset '{}' with description '{}'.".format( + full_dataset_id, dataset.description + ) + ) + # [END bigquery_update_dataset_description]