Skip to content

Commit

Permalink
Merge pull request #57 from epam/issue_11-pipe-cli-gcs-support
Browse files Browse the repository at this point in the history
Support GCP in pipe cli
  • Loading branch information
mzueva authored Apr 4, 2019
2 parents 7bf1a79 + 3ea1a8c commit 590e7c6
Show file tree
Hide file tree
Showing 16 changed files with 679 additions and 105 deletions.
1 change: 1 addition & 0 deletions pipe-cli/build_linux.sh
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,7 @@ python2 $PYINSTALLER_PATH/pyinstaller/pyinstaller.py \
--hidden-import=functools \
--hidden-import=re \
--hidden-import=subprocess \
--additional-hooks-dir="$PIPE_CLI_SOURCES_DIR/hooks" \
-y \
--clean \
--runtime-tmpdir $PIPE_CLI_RUNTIME_TMP_DIR \
Expand Down
1 change: 1 addition & 0 deletions pipe-cli/build_windows.sh
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,7 @@ pyinstaller --add-data "/src/res/effective_tld_names.dat.txt;tld/res/" \
--hidden-import=functools \
--hidden-import=re \
--hidden-import=subprocess \
--additional-hooks-dir="$PIPE_CLI_SOURCES_DIR/hooks" \
-y \
--clean \
--workpath /tmp \
Expand Down
2 changes: 2 additions & 0 deletions pipe-cli/hooks/hook-google.resumable_media.requests.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
# PyInstaller hook for the google.resumable_media.requests module:
# bundle the installed 'requests' distribution metadata into the frozen
# executable. Presumably google.resumable_media inspects the 'requests'
# package metadata (e.g. via pkg_resources) at import time and would fail
# without it — confirm against the library's import-time checks.
from PyInstaller.utils.hooks import copy_metadata
datas = copy_metadata('requests')
4 changes: 2 additions & 2 deletions pipe-cli/pipe.py
Original file line number Diff line number Diff line change
Expand Up @@ -665,9 +665,9 @@ def storage():
@click.option('-c', '--on_cloud',
prompt='Do you want to create this storage on a cloud?',
help='Create bucket on a cloud', default=False, is_flag=True)
@click.option('-p', '--path', required=False, default='', help='The name of the new bucket.',
@click.option('-p', '--path', default='', help='The name of the new bucket.',
prompt='The name of the new bucket.')
@click.option('-r', '--region_id', required=False, type=int, help='Cloud region id where storage shall be created.',
@click.option('-r', '--region_id', default='default', help='Cloud region id where storage shall be created. ',
prompt='Cloud region id where storage shall be created.')
def create(name, description, short_term_storage, long_term_storage, versioning, backup_duration, type,
parent_folder, on_cloud, path, region_id):
Expand Down
1 change: 1 addition & 0 deletions pipe-cli/requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -16,4 +16,5 @@ PyJWT==1.6.1
pypac==0.8.1
beautifulsoup4==4.6.1
azure-storage-blob==1.5.0
google-cloud-storage==1.14.0
setuptools
3 changes: 2 additions & 1 deletion pipe-cli/setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,8 @@
'PyJWT==1.6.1',
'pypac==0.8.1',
'beautifulsoup4==4.6.1',
'azure-storage-blob==1.5.0'
'azure-storage-blob==1.5.0',
'google-cloud-storage==1.14.0'
],
entry_points='''
[console_scripts]
Expand Down
2 changes: 0 additions & 2 deletions pipe-cli/src/api/data_storage.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,8 +17,6 @@
install_aliases()

from urllib.parse import urlparse, urlencode
from urllib.request import urlopen, Request
from urllib.error import HTTPError

import json
from src.model.data_storage_tmp_credentials_model import TemporaryCredentialsModel
Expand Down
1 change: 1 addition & 0 deletions pipe-cli/src/model/data_storage_item_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@ def __init__(self):
self.versions = []
self.latest = False
self.delete_marker = False
self.deleted = None

@classmethod
def load(cls, json):
Expand Down
2 changes: 1 addition & 1 deletion pipe-cli/src/model/data_storage_tmp_credentials_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,5 +28,5 @@ def load(cls, json):
instance.secret_key = json['accessKey']
instance.session_token = json['token']
instance.expiration = json['expiration']
instance.region = json['region']
instance.region = json['region'] if 'region' in json else None
return instance
78 changes: 56 additions & 22 deletions pipe-cli/src/model/data_storage_wrapper.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,7 @@
from ..utilities.storage.s3 import S3BucketOperations
from ..utilities.storage.local import LocalOperations
from ..utilities.storage.azure import AzureListingManager, AzureDeleteManager, AzureBucketOperations
from ..utilities.storage.gs import GsRestoreManager, GsListingManager, GsDeleteManager, GsBucketOperations
from ..utilities.storage.common import StorageOperations
from .data_storage_wrapper_type import WrapperType
import shutil
Expand All @@ -55,6 +56,12 @@ class DataStorageWrapper(object):
(WrapperType.FTP, WrapperType.AZURE): AzureBucketOperations.get_transfer_from_http_or_ftp_manager,
(WrapperType.HTTP, WrapperType.AZURE): AzureBucketOperations.get_transfer_from_http_or_ftp_manager,

(WrapperType.GS, WrapperType.GS): GsBucketOperations.get_transfer_between_buckets_manager,
(WrapperType.GS, WrapperType.LOCAL): GsBucketOperations.get_download_manager,
(WrapperType.LOCAL, WrapperType.GS): GsBucketOperations.get_upload_manager,
(WrapperType.FTP, WrapperType.GS): GsBucketOperations.get_transfer_from_http_or_ftp_manager,
(WrapperType.HTTP, WrapperType.GS): GsBucketOperations.get_transfer_from_http_or_ftp_manager,

(WrapperType.FTP, WrapperType.LOCAL): LocalOperations.get_transfer_from_http_or_ftp_manager,
(WrapperType.HTTP, WrapperType.LOCAL): LocalOperations.get_transfer_from_http_or_ftp_manager
}
Expand Down Expand Up @@ -90,7 +97,8 @@ def get_cloud_wrapper_for_bucket(cls, bucket_model, relative_path):
def __get_storage_wrapper(cls, bucket, relative_path, *args, **kwargs):
_suppliers = {
WrapperType.S3: S3BucketWrapper.build_wrapper,
WrapperType.AZURE: AzureBucketWrapper.build_wrapper
WrapperType.AZURE: AzureBucketWrapper.build_wrapper,
WrapperType.GS: GsBucketWrapper.build_wrapper,
}
if bucket.type in _suppliers:
supplier = _suppliers[bucket.type]
Expand Down Expand Up @@ -214,12 +222,31 @@ def is_file(self):
def exists(self):
    # Cached existence flag; presumably populated when the wrapper is
    # initialised (StorageOperations.init_wrapper) — confirm with caller.
    return self.exists_flag

def get_items(self):
    # Delegate listing to the provider-specific manager returned by
    # get_list_manager(); lists everything under this wrapper's path.
    return self.get_list_manager().get_items(self.path)

def is_empty(self, relative=None):
    """Return True when nothing is stored under this wrapper's path.

    When *relative* is supplied, the check targets ``self.path`` joined
    with *relative* (using the storage path separator) instead of
    ``self.path`` itself.
    """
    if not self.exists():
        return True
    if self.is_file():
        return False
    sep = StorageOperations.PATH_SEPARATOR
    target = self.path if not relative else self.path.rstrip(sep) + sep + relative
    # An existing "folder" prefix means the location is non-empty.
    return not self.get_list_manager().folder_exists(target)

@abstractmethod
def get_type(self):
    # Return the WrapperType constant identifying this storage provider.
    pass

@abstractmethod
def get_restore_manager(self):
    # Return a provider-specific manager for restoring versioned items.
    pass

@abstractmethod
def get_list_manager(self, show_versions=False):
    # Return a provider-specific listing manager; when show_versions is
    # True the listing should include object versions.
    pass

@abstractmethod
Expand Down Expand Up @@ -253,9 +280,6 @@ def is_empty(self, relative=None):
return not S3BucketOperations.path_exists(self, relative, session=self.session)
return self.is_empty_flag

def get_items(self):
return S3BucketOperations.get_items(self, session=self.session)

def get_file_download_uri(self, relative_path):
download_url_model = None
try:
Expand All @@ -278,7 +302,7 @@ def delete_item(self, relative_path):
def get_restore_manager(self):
    # S3 supports versioning, so restore is delegated to the S3 operations
    # helper.
    return S3BucketOperations.get_restore_manager(self)

def get_list_manager(self, show_versions=False):
    # Listing manager for S3; show_versions toggles inclusion of object
    # versions in the listing.
    return S3BucketOperations.get_list_manager(self, show_versions=show_versions)

def get_delete_manager(self, versioning):
Expand All @@ -297,27 +321,12 @@ def build_wrapper(cls, root_bucket, relative_path, versioning=False, init=True):
raise RuntimeError('Versioning is not supported by AZURE cloud provider')
wrapper = AzureBucketWrapper(root_bucket, relative_path)
if init:
AzureBucketOperations.init_wrapper(wrapper)
StorageOperations.init_wrapper(wrapper, versioning=versioning)
return wrapper

def get_type(self):
    # Identifies this wrapper as the Azure storage provider.
    return WrapperType.AZURE

def is_empty(self, relative=None):
if not self.exists():
return True
if self.is_file():
return False
if relative:
delimiter = StorageOperations.PATH_SEPARATOR
path = self.path.rstrip(delimiter) + delimiter + relative
else:
path = self.path
return not self.get_list_manager().folder_exists(path)

def get_items(self):
return self.get_list_manager().get_items(self.path)

def get_restore_manager(self):
    # Azure wrapper deliberately rejects restore: versioning is not
    # supported for this provider.
    raise RuntimeError('Versioning is not supported by AZURE cloud provider')

Expand All @@ -337,6 +346,31 @@ def _blob_service(self, read, write):
return self.service


class GsBucketWrapper(CloudDataStorageWrapper):
    """Cloud data storage wrapper for Google Cloud Storage (GS) buckets."""

    @classmethod
    def build_wrapper(cls, root_bucket, relative_path, init=True, *args, **kwargs):
        # Alternate constructor: extra positional/keyword arguments are
        # forwarded untouched to the shared initialisation helper.
        wrapper = GsBucketWrapper(root_bucket, relative_path)
        if init:
            StorageOperations.init_wrapper(wrapper, *args, **kwargs)
        return wrapper

    def get_type(self):
        # Identifies this wrapper as the Google Storage provider.
        return WrapperType.GS

    def get_restore_manager(self):
        # Restoring versions needs write access to the bucket.
        return GsRestoreManager(self._storage_client(write=True), self)

    def get_list_manager(self, show_versions=False):
        # Read-only client is sufficient for listing.
        return GsListingManager(self._storage_client(), self.bucket, show_versions)

    def get_delete_manager(self, versioning):
        # NOTE(review): `versioning` is accepted for interface parity with the
        # other wrappers but is not forwarded to GsDeleteManager — confirm
        # this is intentional.
        return GsDeleteManager(self._storage_client(write=True), self.bucket)

    def _storage_client(self, read=True, write=False):
        # Client credentials are scoped to the requested access level.
        return GsBucketOperations.get_client(self.bucket, read=read, write=write)

class LocalFileSystemWrapper(DataStorageWrapper):

def __init__(self, path):
Expand Down
8 changes: 5 additions & 3 deletions pipe-cli/src/model/data_storage_wrapper_type.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,15 +2,17 @@ class WrapperType(object):
LOCAL = 'LOCAL'
S3 = 'S3'
AZURE = 'AZ'
GS = 'GS'
FTP = 'FTP'
HTTP = 'HTTP'

__cloud_types = [S3, AZURE]
__cloud_types = [S3, AZURE, GS]
__dynamic_cloud_scheme = 'cp'
__s3_cloud_scheme = 's3'
__azure_cloud_scheme = 'az'
__cloud_schemes = [__dynamic_cloud_scheme, __s3_cloud_scheme, __azure_cloud_scheme]
__cloud_schemes_map = {S3: __s3_cloud_scheme, AZURE: __azure_cloud_scheme}
__gs_cloud_scheme = 'gs'
__cloud_schemes = [__dynamic_cloud_scheme, __s3_cloud_scheme, __azure_cloud_scheme, __gs_cloud_scheme]
__cloud_schemes_map = {S3: __s3_cloud_scheme, AZURE: __azure_cloud_scheme, GS: __gs_cloud_scheme}

@classmethod
def cloud_types(cls):
Expand Down
27 changes: 20 additions & 7 deletions pipe-cli/src/utilities/datastorage_operations.py
Original file line number Diff line number Diff line change
Expand Up @@ -117,7 +117,7 @@ def storage_remove_item(cls, path, yes, version, hard_delete, recursive, exclude
if version and hard_delete:
click.echo('"version" argument should\'t be combined with "hard-delete" option', err=True)
sys.exit(1)
source_wrapper = DataStorageWrapper.get_cloud_wrapper(path, versioning=version or hard_delete)
source_wrapper = DataStorageWrapper.get_cloud_wrapper(path, versioning=version is not None or hard_delete)
if source_wrapper is None or not source_wrapper.exists():
click.echo('Storage path "{}" was not found'.format(path), err=True)
sys.exit(1)
Expand Down Expand Up @@ -154,6 +154,14 @@ def save_data_storage(cls, name, description, sts_duration, lts_duration, versio
click.echo("Error: Directory with name '{}' not found! "
"Check if it exists and you have permission to read it".format(parent_folder), err=True)
sys.exit(1)
if region_id == 'default':
region_id = None
else:
try:
region_id = int(region_id)
except ValueError:
click.echo("Error: Given region id '{}' is not a number.".format(region_id))
sys.exit(1)
try:
DataStorage.save(name, path, description, sts_duration, lts_duration, versioning, backup_duration, type,
directory.id if directory else None, on_cloud, region_id)
Expand Down Expand Up @@ -384,23 +392,28 @@ def __print_data_storage_contents(cls, bucket_model, relative_path,
labels = ''
if item.type is not None and item.type in WrapperType.cloud_types():
name = item.path
if item.changed is not None:
item_updated = item.deleted or item.changed
if item_updated is not None:
if bucket_model is None:
# need to wrap into datetime since bucket listing returns str
item_datetime = datetime.datetime.strptime(item.changed, '%Y-%m-%d %H:%M:%S')
item_datetime = datetime.datetime.strptime(item_updated, '%Y-%m-%d %H:%M:%S')
else:
item_datetime = item.changed
item_datetime = item_updated
changed = item_datetime.strftime('%Y-%m-%d %H:%M:%S')
if item.size is not None:
if item.size is not None and not item.deleted:
size = item.size
if item.labels is not None and len(item.labels) > 0:
if item.labels is not None and len(item.labels) > 0 and not item.deleted:
labels = ', '.join(map(lambda i: i.value, item.labels))
item_type = "-File" if item.delete_marker else item.type
item_type = "-File" if item.delete_marker or item.deleted else item.type
row = [item_type, labels, changed, size, name]
if show_versions:
row.append('')
items_table.add_row(row)
if show_versions and item.type == 'File':
if item.deleted:
# Additional synthetic delete version
row = ['-File', '', item.deleted.strftime('%Y-%m-%d %H:%M:%S'), size, name, '- (latest)']
items_table.add_row(row)
for version in item.versions:
version_type = "-File" if version.delete_marker else "+File"
version_label = "{} (latest)".format(version.version) if version.latest else version.version
Expand Down
Loading

0 comments on commit 590e7c6

Please sign in to comment.