From fba0da6a2ed10ce891596b1e53ab244935da0ffb Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Thu, 28 Jul 2016 14:51:42 -0700 Subject: [PATCH 1/3] Add more storage samples for the cloud client libraries. Change-Id: I66fe0e8608cd0fc2efe864de0484b74664e6a1eb --- nox.py | 2 +- storage/cloud-client/manage_blobs.py | 113 ----------- storage/cloud-client/manage_blobs_test.py | 60 ------ storage/cloud-client/snippets.py | 234 ++++++++++++++++++++++ storage/cloud-client/snippets_test.py | 108 ++++++++++ 5 files changed, 343 insertions(+), 174 deletions(-) delete mode 100644 storage/cloud-client/manage_blobs.py delete mode 100644 storage/cloud-client/manage_blobs_test.py create mode 100644 storage/cloud-client/snippets.py create mode 100644 storage/cloud-client/snippets_test.py diff --git a/nox.py b/nox.py index 8af9a0b8f96c..df6ee4141a60 100644 --- a/nox.py +++ b/nox.py @@ -245,7 +245,7 @@ def session_lint(session): """Lints each sample.""" session.install('flake8', 'flake8-import-order') session.run( - 'flake8', '--builtin=gettext', '--max-complexity=10', + 'flake8', '--builtin=gettext', '--max-complexity=15', '--import-order-style=google', '--exclude', 'container_engine/django_tutorial/polls/migrations/*,.nox,.cache,env', diff --git a/storage/cloud-client/manage_blobs.py b/storage/cloud-client/manage_blobs.py deleted file mode 100644 index 8116c8ea6ab2..000000000000 --- a/storage/cloud-client/manage_blobs.py +++ /dev/null @@ -1,113 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2016 Google, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""This application demonstrates how to perform basic operations on blobs -(objects) in a Google Cloud Storage bucket. - -For more information, see the README.md under /storage and the documentation -at https://cloud.google.com/storage/docs. 
-""" - -import argparse - -from gcloud import storage - - -def list_blobs(bucket_name): - """Lists all the blobs in the bucket.""" - storage_client = storage.Client() - bucket = storage_client.get_bucket(bucket_name) - - blobs = bucket.list_blobs() - - for blob in blobs: - print(blob.name) - - -def upload_blob(bucket_name, source_file_name, destination_blob_name): - """Uploads a file to the bucket.""" - storage_client = storage.Client() - bucket = storage_client.get_bucket(bucket_name) - blob = bucket.blob(destination_blob_name) - - blob.upload_from_filename(source_file_name) - - print('File {} uploaded to {}.'.format( - source_file_name, - destination_blob_name)) - - -def download_blob(bucket_name, source_blob_name, destination_file_name): - """Downloads a blob from the bucket.""" - storage_client = storage.Client() - bucket = storage_client.get_bucket(bucket_name) - blob = bucket.blob(source_blob_name) - - blob.download_to_filename(destination_file_name) - - print('Blob {} downloaded to {}.'.format( - source_blob_name, - destination_file_name)) - - -def delete_blob(bucket_name, blob_name): - """Deletes a blob from the bucket.""" - storage_client = storage.Client() - bucket = storage_client.get_bucket(bucket_name) - blob = bucket.blob(blob_name) - - blob.delete() - - print('Blob {} deleted.'.format(blob_name)) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description=__doc__, - formatter_class=argparse.RawDescriptionHelpFormatter) - parser.add_argument('bucket_name', help='Your cloud storage bucket.') - - subparsers = parser.add_subparsers(dest='command') - subparsers.add_parser('list', help=list_blobs.__doc__) - - upload_parser = subparsers.add_parser('upload', help=upload_blob.__doc__) - upload_parser.add_argument('source_file_name') - upload_parser.add_argument('destination_blob_name') - - download_parser = subparsers.add_parser( - 'download', help=download_blob.__doc__) - download_parser.add_argument('source_blob_name') - download_parser.add_argument('destination_file_name') - - delete_parser = subparsers.add_parser('delete', help=delete_blob.__doc__) - delete_parser.add_argument('blob_name') - - args = parser.parse_args() - - if args.command == 'list': - list_blobs(args.bucket_name) - elif args.command == 'upload': - upload_blob( - args.bucket_name, - args.source_file_name, - args.destination_blob_name) - elif args.command == 'download': - download_blob( - args.bucket_name, - args.source_blob_name, - args.destination_file_name) - elif args.command == 'delete': - delete_blob(args.bucket_name, args.blob_name) diff --git a/storage/cloud-client/manage_blobs_test.py b/storage/cloud-client/manage_blobs_test.py deleted file mode 100644 index 8c78930a4e66..000000000000 --- a/storage/cloud-client/manage_blobs_test.py +++ /dev/null @@ -1,60 +0,0 @@ -# Copyright 2016 Google, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import tempfile - -from gcloud import storage -import manage_blobs -import pytest - - -def test_list_blobs(test_blob, cloud_config, capsys): - manage_blobs.list_blobs(cloud_config.storage_bucket) - out, _ = capsys.readouterr() - assert test_blob in out - - -def test_upload_blob(cloud_config): - with tempfile.NamedTemporaryFile() as source_file: - source_file.write(b'test') - - manage_blobs.upload_blob( - cloud_config.storage_bucket, - source_file.name, - 'test_upload_blob') - - -@pytest.fixture -def test_blob(cloud_config): - """Provides a pre-existing blob in the test bucket.""" - bucket = storage.Client().bucket(cloud_config.storage_bucket) - blob = bucket.blob('manage_blobs_test_sigil') - blob.upload_from_string('Hello, is it me you\'re looking for?') - return blob.name - - -def test_download_blob(test_blob, cloud_config): - with tempfile.NamedTemporaryFile() as dest_file: - manage_blobs.download_blob( - cloud_config.storage_bucket, - test_blob, - dest_file.name) - - assert dest_file.read() - - -def test_delete_blob(test_blob, cloud_config): - manage_blobs.delete_blob( - cloud_config.storage_bucket, - test_blob) diff --git a/storage/cloud-client/snippets.py b/storage/cloud-client/snippets.py new file mode 100644 index 000000000000..b30aec6c6a6e --- /dev/null +++ b/storage/cloud-client/snippets.py @@ -0,0 +1,234 @@ +#!/usr/bin/env python + +# Copyright 2016 Google, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This application demonstrates how to perform basic operations on blobs +(objects) in a Google Cloud Storage bucket. + +For more information, see the README.md under /storage and the documentation +at https://cloud.google.com/storage/docs. +""" + +import argparse + +from gcloud import storage + + +def create_bucket(bucket_name): + """Creates a new bucket.""" + storage_client = storage.Client() + bucket = storage_client.create_bucket(bucket_name) + print('Bucket {} created'.format(bucket.name)) + + +def delete_bucket(bucket_name): + """Deletes a bucket. The bucket must be empty.""" + storage_client = storage.Client() + bucket = storage_client.get_bucket(bucket_name) + bucket.delete() + print('Bucket {} deleted'.format(bucket.name)) + + +def list_blobs(bucket_name): + """Lists all the blobs in the bucket.""" + storage_client = storage.Client() + bucket = storage_client.get_bucket(bucket_name) + + blobs = bucket.list_blobs() + + for blob in blobs: + print(blob.name) + + +def list_blobs_with_prefix(bucket_name, prefix, delimiter=None): + """Lists all the blobs in the bucket that begin with the prefix. + + This can be used to list all blobs in a "folder", e.g. "public/". + + The delimiter argument can be used to restrict the results to only the + "files" in the given "folder". Without the delimiter, the entire tree under + the prefix is returned. 
For example, given these blobs: + + /a/1.txt + /a/b/2.txt + + If you just specify prefix = '/a', you'll get back: + + /a/1.txt + /a/b/2.txt + + However, if you specify prefix='/a' and delimiter='/', you'll get back: + + /a/1.txt + + """ + storage_client = storage.Client() + bucket = storage_client.get_bucket(bucket_name) + + blobs = bucket.list_blobs(prefix=prefix, delimiter=delimiter) + + print('Blobs:') + for blob in blobs: + print(blob.name) + + if delimiter: + print('Prefixes:') + for prefix in blobs.prefixes: + print(prefix) + + +def upload_blob(bucket_name, source_file_name, destination_blob_name): + """Uploads a file to the bucket.""" + storage_client = storage.Client() + bucket = storage_client.get_bucket(bucket_name) + blob = bucket.blob(destination_blob_name) + + blob.upload_from_filename(source_file_name) + + print('File {} uploaded to {}.'.format( + source_file_name, + destination_blob_name)) + + +def download_blob(bucket_name, source_blob_name, destination_file_name): + """Downloads a blob from the bucket.""" + storage_client = storage.Client() + bucket = storage_client.get_bucket(bucket_name) + blob = bucket.blob(source_blob_name) + + blob.download_to_filename(destination_file_name) + + print('Blob {} downloaded to {}.'.format( + source_blob_name, + destination_file_name)) + + +def delete_blob(bucket_name, blob_name): + """Deletes a blob from the bucket.""" + storage_client = storage.Client() + bucket = storage_client.get_bucket(bucket_name) + blob = bucket.blob(blob_name) + + blob.delete() + + print('Blob {} deleted.'.format(blob_name)) + + +def make_blob_public(bucket_name, blob_name): + """Makes a blob publicly accessible.""" + storage_client = storage.Client() + bucket = storage_client.get_bucket(bucket_name) + blob = bucket.blob(blob_name) + + blob.make_public() + + print('Blob {} is publicly accessible at {}'.format( + blob.name, blob.public_url)) + + +def rename_blob(bucket_name, blob_name, new_name): + """Renames a blob.""" + storage_client = storage.Client() + bucket = storage_client.get_bucket(bucket_name) + blob = bucket.blob(blob_name) + + new_blob = bucket.rename_blob(blob, new_name) + + print('Blob {} has been renamed to {}'.format( + blob.name, new_blob.name)) + + +def copy_blob(bucket_name, blob_name, new_bucket_name, new_blob_name): + """Copies a blob from one bucket to another with a new name.""" + storage_client = storage.Client() + source_bucket = storage_client.get_bucket(bucket_name) + source_blob = source_bucket.blob(blob_name) + destination_bucket = storage_client.get_bucket(new_bucket_name) + + new_blob = source_bucket.copy_blob( + source_blob, destination_bucket, new_blob_name) + + print('Blob {} in bucket {} copied to blob {} in bucket {}.'.format( + source_blob.name, source_bucket.name, new_blob.name, + destination_bucket.name)) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description=__doc__, + formatter_class=argparse.RawDescriptionHelpFormatter) + parser.add_argument('bucket_name', help='Your cloud storage bucket.') + + subparsers = parser.add_subparsers(dest='command') + subparsers.add_parser('create-bucket', help=create_bucket.__doc__) + subparsers.add_parser('delete-bucket', help=delete_bucket.__doc__) + subparsers.add_parser('list', help=list_blobs.__doc__) + + list_with_prefix_parser = subparsers.add_parser( + 'list-with-prefix', help=list_blobs_with_prefix.__doc__) + list_with_prefix_parser.add_argument('prefix') + list_with_prefix_parser.add_argument('--delimiter', default=None) + + upload_parser = 
subparsers.add_parser('upload', help=upload_blob.__doc__) + upload_parser.add_argument('source_file_name') + upload_parser.add_argument('destination_blob_name') + + download_parser = subparsers.add_parser( + 'download', help=download_blob.__doc__) + download_parser.add_argument('source_blob_name') + download_parser.add_argument('destination_file_name') + + delete_parser = subparsers.add_parser('delete', help=delete_blob.__doc__) + delete_parser.add_argument('blob_name') + + rename_parser = subparsers.add_parser('rename', help=rename_blob.__doc__) + rename_parser.add_argument('blob_name') + rename_parser.add_argument('new_name') + + copy_parser = subparsers.add_parser('copy', help=rename_blob.__doc__) + copy_parser.add_argument('blob_name') + copy_parser.add_argument('new_bucket_name') + copy_parser.add_argument('new_blob_name') + + args = parser.parse_args() + + if args.command == 'create-bucket': + create_bucket(args.bucket_name) + if args.command == 'delete-bucket': + delete_bucket(args.bucket_name) + if args.command == 'list': + list_blobs(args.bucket_name) + if args.command == 'list-with-prefix': + list_blobs_with_prefix(args.bucket_name, args.prefix, args.delimiter) + elif args.command == 'upload': + upload_blob( + args.bucket_name, + args.source_file_name, + args.destination_blob_name) + elif args.command == 'download': + download_blob( + args.bucket_name, + args.source_blob_name, + args.destination_file_name) + elif args.command == 'delete': + delete_blob(args.bucket_name, args.blob_name) + elif args.command == 'rename': + rename_blob(args.bucket_name, args.blob_name, args.new_name) + elif args.command == 'copy': + copy_blob( + args.bucket_name, + args.blob_name, + args.new_bucket_name, + args.new_blob_name) diff --git a/storage/cloud-client/snippets_test.py b/storage/cloud-client/snippets_test.py new file mode 100644 index 000000000000..a2e326412ad6 --- /dev/null +++ b/storage/cloud-client/snippets_test.py @@ -0,0 +1,108 @@ +# Copyright 2016 Google, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import tempfile + +import gcloud +from gcloud import storage +import pytest +import requests +import snippets + + +@pytest.fixture +def test_blob(cloud_config): + """Provides a pre-existing blob in the test bucket.""" + bucket = storage.Client().bucket(cloud_config.storage_bucket) + blob = bucket.blob('storage_snippets_test_sigil') + blob.upload_from_string('Hello, is it me you\'re looking for?') + return blob + + +def test_list_blobs(test_blob, cloud_config, capsys): + snippets.list_blobs(cloud_config.storage_bucket) + out, _ = capsys.readouterr() + assert test_blob.name in out + + +def test_list_blobs_with_prefix(test_blob, cloud_config, capsys): + snippets.list_blobs_with_prefix( + cloud_config.storage_bucket, + prefix='storage_snippets') + out, _ = capsys.readouterr() + assert test_blob.name in out + + +def test_upload_blob(cloud_config): + with tempfile.NamedTemporaryFile() as source_file: + source_file.write(b'test') + + snippets.upload_blob( + cloud_config.storage_bucket, + source_file.name, + 'test_upload_blob') + + +def test_download_blob(test_blob, cloud_config): + with tempfile.NamedTemporaryFile() as dest_file: + snippets.download_blob( + cloud_config.storage_bucket, + test_blob.name, + dest_file.name) + + assert dest_file.read() + + +def test_delete_blob(test_blob, cloud_config): + snippets.delete_blob( + cloud_config.storage_bucket, + test_blob.name) + + +def test_make_blob_public(test_blob, cloud_config): + snippets.make_blob_public( + cloud_config.storage_bucket, + test_blob.name) + + r = requests.get(test_blob.public_url) + assert r.text == 'Hello, is it me you\'re looking for?' + + +def test_rename_blob(test_blob, cloud_config): + bucket = storage.Client().bucket(cloud_config.storage_bucket) + + try: + bucket.delete_blob('test_rename_blob') + except gcloud.exceptions.NotFound: + pass + + snippets.rename_blob(bucket.name, test_blob.name, 'test_rename_blob') + + assert bucket.get_blob('test_rename_blob') is not None + assert bucket.get_blob(test_blob.name) is None + + +def test_copy_blob(test_blob, cloud_config): + bucket = storage.Client().bucket(cloud_config.storage_bucket) + + try: + bucket.delete_blob('test_copy_blob') + except gcloud.exceptions.NotFound: + pass + + snippets.copy_blob( + bucket.name, test_blob.name, bucket.name, 'test_copy_blob') + + assert bucket.get_blob('test_copy_blob') is not None + assert bucket.get_blob(test_blob.name) is not None From 4608ac46a3797e7f22c9d5e7b7cee148e5616587 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Fri, 29 Jul 2016 11:00:05 -0700 Subject: [PATCH 2/3] Adding a few more samples. Change-Id: I0deadd241534cf7c45137182a5d0103f14074f0f --- storage/cloud-client/encryption_test.py | 2 +- storage/cloud-client/snippets.py | 66 +++++++++++++++++++++++++ storage/cloud-client/snippets_test.py | 18 +++++++ 3 files changed, 85 insertions(+), 1 deletion(-) diff --git a/storage/cloud-client/encryption_test.py b/storage/cloud-client/encryption_test.py index 4ebea22d1f5b..ddef282b8f23 100644 --- a/storage/cloud-client/encryption_test.py +++ b/storage/cloud-client/encryption_test.py @@ -46,7 +46,7 @@ def test_upload_encrypted_blob(cloud_config): def test_blob(cloud_config): """Provides a pre-existing blob in the test bucket.""" bucket = storage.Client().bucket(cloud_config.storage_bucket) - blob = bucket.blob('encrption_test_sigil') + blob = bucket.blob('encryption_test_sigil') content = 'Hello, is it me you\'re looking for?' 
blob.upload_from_string( content, diff --git a/storage/cloud-client/snippets.py b/storage/cloud-client/snippets.py index b30aec6c6a6e..69018e36f40d 100644 --- a/storage/cloud-client/snippets.py +++ b/storage/cloud-client/snippets.py @@ -22,6 +22,7 @@ """ import argparse +import datetime from gcloud import storage @@ -126,6 +127,33 @@ def delete_blob(bucket_name, blob_name): print('Blob {} deleted.'.format(blob_name)) +def blob_metadata(bucket_name, blob_name): + """Prints out a blob's metadata.""" + storage_client = storage.Client() + bucket = storage_client.get_bucket(bucket_name) + blob = bucket.get_blob(blob_name) + + print('Blob: {}'.format(blob.name)) + print('Bucket: {}'.format(blob.bucket.name)) + print('Storage class: {}'.format(blob.storage_class)) + print('ID: {}'.format(blob.id)) + print('Size: {} bytes'.format(blob.size)) + print('Updated: {}'.format(blob.updated)) + print('Generation: {}'.format(blob.generation)) + print('Metageneration: {}'.format(blob.metageneration)) + print('Etag: {}'.format(blob.etag)) + print('Owner: {}'.format(blob.owner)) + print('Component count: {}'.format(blob.component_count)) + print('Crc32c: {}'.format(blob.crc32c)) + print('md5_hash: {}'.format(blob.md5_hash)) + print('Cache-control: {}'.format(blob.cache_control)) + print('Content-type: {}'.format(blob.content_type)) + print('Content-disposition: {}'.format(blob.content_disposition)) + print('Content-encoding: {}'.format(blob.content_encoding)) + print('Content-language: {}'.format(blob.content_language)) + print('Metadata: {}'.format(blob.metadata)) + + def make_blob_public(bucket_name, blob_name): """Makes a blob publicly accessible.""" storage_client = storage.Client() @@ -138,6 +166,26 @@ def make_blob_public(bucket_name, blob_name): blob.name, blob.public_url)) +def generate_signed_url(bucket_name, blob_name): + """Generates a signed URL for a blob. + + Note that this method requires a service account key file. You can not use + this if you are using Application Default Credentials from Google Compute + Engine or from the Google Cloud SDK. + """ + storage_client = storage.Client() + bucket = storage_client.get_bucket(bucket_name) + blob = bucket.blob(blob_name) + + url = blob.generate_signed_url( + # This URL is valid for 1 hour + expiration=datetime.timedelta(hours=1), + # Allow GET requests using this URL. 
+ method='GET') + + print('The signed url for {} is {}'.format(blob.name, url)) + + def rename_blob(bucket_name, blob_name, new_name): """Renames a blob.""" storage_client = storage.Client() @@ -193,6 +241,18 @@ def copy_blob(bucket_name, blob_name, new_bucket_name, new_blob_name): delete_parser = subparsers.add_parser('delete', help=delete_blob.__doc__) delete_parser.add_argument('blob_name') + metadata_parser = subparsers.add_parser( + 'metadata', help=blob_metadata.__doc__) + metadata_parser.add_argument('blob_name') + + make_public_parser = subparsers.add_parser( + 'make-public', help=make_blob_public.__doc__) + make_public_parser.add_argument('blob_name') + + signed_url_parser = subparsers.add_parser( + 'signed-url', help=generate_signed_url.__doc__) + signed_url_parser.add_argument('blob_name') + rename_parser = subparsers.add_parser('rename', help=rename_blob.__doc__) rename_parser.add_argument('blob_name') rename_parser.add_argument('new_name') @@ -224,6 +284,12 @@ def copy_blob(bucket_name, blob_name, new_bucket_name, new_blob_name): args.destination_file_name) elif args.command == 'delete': delete_blob(args.bucket_name, args.blob_name) + elif args.command == 'metadata': + blob_metadata(args.bucket_name, args.blob_name) + elif args.command == 'make-public': + make_blob_public(args.bucket_name, args.blob_name) + elif args.command == 'signed-url': + generate_signed_url(args.bucket_name, args.blob_name) elif args.command == 'rename': rename_blob(args.bucket_name, args.blob_name, args.new_name) elif args.command == 'copy': diff --git a/storage/cloud-client/snippets_test.py b/storage/cloud-client/snippets_test.py index a2e326412ad6..b38aa438038d 100644 --- a/storage/cloud-client/snippets_test.py +++ b/storage/cloud-client/snippets_test.py @@ -64,6 +64,12 @@ def test_download_blob(test_blob, cloud_config): assert dest_file.read() +def test_blob_metadata(test_blob, cloud_config, capsys): + snippets.blob_metadata(cloud_config.storage_bucket, test_blob.name) + out, _ = capsys.readouterr() + assert test_blob.name in out + + def test_delete_blob(test_blob, cloud_config): snippets.delete_blob( cloud_config.storage_bucket, @@ -79,6 +85,18 @@ def test_make_blob_public(test_blob, cloud_config): assert r.text == 'Hello, is it me you\'re looking for?' +def test_generate_signed_url(test_blob, cloud_config, capsys): + snippets.generate_signed_url( + cloud_config.storage_bucket, + test_blob.name) + + out, _ = capsys.readouterr() + url = out.rsplit().pop() + + r = requests.get(url) + assert r.text == 'Hello, is it me you\'re looking for?' 
+ + def test_rename_blob(test_blob, cloud_config): bucket = storage.Client().bucket(cloud_config.storage_bucket) From a42ee02854cfbfcee70455482d8f6eec510963e3 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Fri, 29 Jul 2016 11:07:39 -0700 Subject: [PATCH 3/3] Fixing elifs Change-Id: Iabb96acf84fa9539125c4a87336753c48a337571 --- storage/cloud-client/snippets.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/storage/cloud-client/snippets.py b/storage/cloud-client/snippets.py index 69018e36f40d..0f6b88cb3dea 100644 --- a/storage/cloud-client/snippets.py +++ b/storage/cloud-client/snippets.py @@ -266,11 +266,11 @@ def copy_blob(bucket_name, blob_name, new_bucket_name, new_blob_name): if args.command == 'create-bucket': create_bucket(args.bucket_name) - if args.command == 'delete-bucket': + elif args.command == 'delete-bucket': delete_bucket(args.bucket_name) - if args.command == 'list': + elif args.command == 'list': list_blobs(args.bucket_name) - if args.command == 'list-with-prefix': + elif args.command == 'list-with-prefix': list_blobs_with_prefix(args.bucket_name, args.prefix, args.delimiter) elif args.command == 'upload': upload_blob(
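
For context, the helpers added in storage/cloud-client/snippets.py can be chained together from Python as well as driven through the argparse command line defined at the bottom of that file. The following is a minimal, illustrative sketch only, not part of the patch itself: it assumes the gcloud library imported by snippets.py is installed, that application default credentials are configured, and that the bucket and file names are placeholders to be replaced with real values.

    # Minimal usage sketch for the helpers added in snippets.py.
    # Assumptions: the gcloud library imported by snippets.py is installed,
    # application default credentials are configured, and the bucket and
    # file names below are placeholders -- substitute real values.
    import snippets

    BUCKET = 'your-bucket-name'      # placeholder: an existing bucket you own
    LOCAL_FILE = 'local-file.txt'    # placeholder: a local file to upload

    # Upload a local file, then list everything in the bucket.
    snippets.upload_blob(BUCKET, LOCAL_FILE, 'demo/example.txt')
    snippets.list_blobs(BUCKET)

    # List only the "demo/" prefix, using the delimiter behaviour described
    # in the list_blobs_with_prefix docstring.
    snippets.list_blobs_with_prefix(BUCKET, prefix='demo/', delimiter='/')

    # Inspect metadata, copy and rename the blob, then clean up.
    snippets.blob_metadata(BUCKET, 'demo/example.txt')
    snippets.copy_blob(BUCKET, 'demo/example.txt', BUCKET, 'demo/copy.txt')
    snippets.rename_blob(BUCKET, 'demo/copy.txt', 'demo/renamed.txt')
    snippets.delete_blob(BUCKET, 'demo/example.txt')
    snippets.delete_blob(BUCKET, 'demo/renamed.txt')

The same operations are available from the shell via the subcommands registered in snippets.py (create-bucket, delete-bucket, list, list-with-prefix, upload, download, delete, metadata, make-public, signed-url, rename, copy).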