Skip to content

Commit

Permalink
fix: remove circular dependency by moving parse_s3 method to its own util file (#5430)
Browse files Browse the repository at this point in the history

* fix: remove circular dependency by moving parse_s3 method to its own util file

* add missing unit tests file
  • Loading branch information
mndeveci authored Jun 29, 2023
1 parent 743d389 commit 3952ff6
Show file tree
Hide file tree
Showing 12 changed files with 146 additions and 133 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
from samcli.lib.providers.sam_function_provider import SamFunctionProvider
from samcli.lib.providers.sam_stack_provider import SamLocalStackProvider
from samcli.lib.utils.packagetype import IMAGE
from samcli.lib.utils.s3 import parse_s3_url

# pylint: disable=E0401
if typing.TYPE_CHECKING: # pragma: no cover
Expand Down Expand Up @@ -112,7 +113,7 @@ def update_companion_stack(self) -> None:
self._s3_client, bucket_name=self._s3_bucket, prefix=self._s3_prefix, no_progressbar=True
)
# TemplateUrl property requires S3 URL to be in path-style format
parts = S3Uploader.parse_s3_url(
parts = parse_s3_url(
s3_uploader.upload_with_dedup(temporary_file.name, "template"), version_property="Version"
)

Expand Down
5 changes: 2 additions & 3 deletions samcli/lib/deploy/deployer.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,7 @@
from samcli.lib.package.local_files_utils import get_uploaded_s3_object_name, mktempfile
from samcli.lib.package.s3_uploader import S3Uploader
from samcli.lib.utils.colors import Colored, Colors
from samcli.lib.utils.s3 import parse_s3_url
from samcli.lib.utils.time import utc_to_timestamp

LOG = logging.getLogger(__name__)
Expand Down Expand Up @@ -203,9 +204,7 @@ def _process_kwargs(
temporary_file.flush()
remote_path = get_uploaded_s3_object_name(file_path=temporary_file.name, extension="template")
# TemplateUrl property requires S3 URL to be in path-style format
parts = S3Uploader.parse_s3_url(
s3_uploader.upload(temporary_file.name, remote_path), version_property="Version"
)
parts = parse_s3_url(s3_uploader.upload(temporary_file.name, remote_path), version_property="Version")
kwargs["TemplateURL"] = s3_uploader.to_path_style_s3_url(parts["Key"], parts.get("Version", None))

# don't set these arguments if not specified to use existing values
Expand Down
6 changes: 3 additions & 3 deletions samcli/lib/package/artifact_exporter.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,6 @@
ECRResource,
ResourceZip,
)
from samcli.lib.package.s3_uploader import S3Uploader
from samcli.lib.package.uploaders import Destination, Uploaders
from samcli.lib.package.utils import (
is_local_file,
Expand All @@ -47,6 +46,7 @@
AWS_SERVERLESS_FUNCTION,
RESOURCES_WITH_LOCAL_PATHS,
)
from samcli.lib.utils.s3 import parse_s3_url
from samcli.yamlhelper import yaml_dump, yaml_parse

# NOTE: sriram-mv, A cyclic dependency on `Template` needs to be broken.
Expand Down Expand Up @@ -99,7 +99,7 @@ def do_export(self, resource_id, resource_dict, parent_dir):
url = self.uploader.upload(temporary_file.name, remote_path)

# TemplateUrl property requires S3 URL to be in path-style format
parts = S3Uploader.parse_s3_url(url, version_property="Version")
parts = parse_s3_url(url, version_property="Version")
s3_path_url = self.uploader.to_path_style_s3_url(parts["Key"], parts.get("Version", None))
set_value_from_jmespath(resource_dict, self.PROPERTY_NAME, s3_path_url)

Expand Down Expand Up @@ -146,7 +146,7 @@ def do_export(self, resource_id, resource_dict, parent_dir):
url = self.uploader.upload(abs_template_path, remote_path)

# TemplateUrl property requires S3 URL to be in path-style format
parts = S3Uploader.parse_s3_url(url, version_property="Version")
parts = parse_s3_url(url, version_property="Version")
s3_path_url = self.uploader.to_path_style_s3_url(parts["Key"], parts.get("Version", None))
set_value_from_jmespath(resource_dict, self.PROPERTY_NAME, s3_path_url)

Expand Down
4 changes: 2 additions & 2 deletions samcli/lib/package/code_signer.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
import logging

from samcli.commands.exceptions import UserException
from samcli.lib.package.s3_uploader import S3Uploader
from samcli.lib.utils.s3 import parse_s3_url

LOG = logging.getLogger(__name__)

Expand Down Expand Up @@ -60,7 +60,7 @@ def sign_package(self, resource_id, s3_url, s3_version):
profile_owner = signing_profile_for_resource["profile_owner"]

# parse given s3 url, and extract bucket and object key
parsed_s3_url = S3Uploader.parse_s3_url(s3_url)
parsed_s3_url = parse_s3_url(s3_url)
s3_bucket = parsed_s3_url["Bucket"]
s3_key = parsed_s3_url["Key"]
s3_target_prefix = s3_key.rsplit("/", 1)[0] + "/signed_"
Expand Down
5 changes: 3 additions & 2 deletions samcli/lib/package/packageable_resources.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,7 @@
RESOURCES_WITH_IMAGE_COMPONENT,
RESOURCES_WITH_LOCAL_PATHS,
)
from samcli.lib.utils.s3 import parse_s3_url

LOG = logging.getLogger(__name__)

Expand Down Expand Up @@ -196,7 +197,7 @@ def get_property_value(self, resource_dict):
# artifact, as deletion of intrinsic ref function artifacts is not supported yet.
# TODO: Allow deletion of S3 artifacts with intrinsic ref functions.
if resource_path and isinstance(resource_path, str):
return self.uploader.parse_s3_url(resource_path)
return parse_s3_url(resource_path)
return {"Bucket": None, "Key": None}


Expand Down Expand Up @@ -340,7 +341,7 @@ def do_export(self, resource_id, resource_dict, parent_dir):
self.RESOURCE_TYPE, resource_id, resource_dict, self.PROPERTY_NAME, parent_dir, self.uploader
)

parsed_url = S3Uploader.parse_s3_url(
parsed_url = parse_s3_url(
artifact_s3_url,
bucket_name_property=self.BUCKET_NAME_PROPERTY,
object_key_property=self.OBJECT_KEY_PROPERTY,
Expand Down
77 changes: 3 additions & 74 deletions samcli/lib/package/s3_uploader.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,7 @@
import sys
import threading
from collections import abc
from typing import Any, Dict, Optional, cast
from urllib.parse import parse_qs, urlparse
from typing import Any, Optional, cast

import botocore
import botocore.exceptions
Expand All @@ -30,6 +29,7 @@

from samcli.commands.package.exceptions import BucketNotSpecifiedError, NoSuchBucketError
from samcli.lib.package.local_files_utils import get_uploaded_s3_object_name
from samcli.lib.utils.s3 import parse_s3_url

LOG = logging.getLogger(__name__)

Expand Down Expand Up @@ -234,85 +234,14 @@ def get_version_of_artifact(self, s3_url: str) -> str:
"""
Returns version information of the S3 object that is given as S3 URL
"""
parsed_s3_url = self.parse_s3_url(s3_url)
parsed_s3_url = parse_s3_url(s3_url)
s3_bucket = parsed_s3_url["Bucket"]
s3_key = parsed_s3_url["Key"]
s3_object_tagging = self.s3.get_object_tagging(Bucket=s3_bucket, Key=s3_key)
LOG.debug("S3 Object (%s) tagging information %s", s3_url, s3_object_tagging)
s3_object_version_id = s3_object_tagging["VersionId"]
return cast(str, s3_object_version_id)

@staticmethod
def parse_s3_url(
url: Any,
bucket_name_property: str = "Bucket",
object_key_property: str = "Key",
version_property: Optional[str] = None,
) -> Dict:
if isinstance(url, str) and url.startswith("s3://"):
return S3Uploader._parse_s3_format_url(
url=url,
bucket_name_property=bucket_name_property,
object_key_property=object_key_property,
version_property=version_property,
)

if isinstance(url, str) and url.startswith("https://s3"):
return S3Uploader._parse_path_style_s3_url(
url=url, bucket_name_property=bucket_name_property, object_key_property=object_key_property
)

raise ValueError("URL given to the parse method is not a valid S3 url {0}".format(url))

@staticmethod
def _parse_s3_format_url(
url: Any,
bucket_name_property: str = "Bucket",
object_key_property: str = "Key",
version_property: Optional[str] = None,
) -> Dict:
"""
Method for parsing s3 urls that begin with s3://
e.g. s3://bucket/key
"""
parsed = urlparse(url)
query = parse_qs(parsed.query)
if parsed.netloc and parsed.path:
result = dict()
result[bucket_name_property] = parsed.netloc
result[object_key_property] = parsed.path.lstrip("/")

# If there is a query string that has a single versionId field,
# set the object version and return
if version_property is not None and "versionId" in query and len(query["versionId"]) == 1:
result[version_property] = query["versionId"][0]

return result

raise ValueError("URL given to the parse method is not a valid S3 url {0}".format(url))

@staticmethod
def _parse_path_style_s3_url(
url: Any,
bucket_name_property: str = "Bucket",
object_key_property: str = "Key",
) -> Dict:
"""
Static method for parsing path style s3 urls.
e.g. https://s3.us-east-1.amazonaws.com/bucket/key
"""
parsed = urlparse(url)
result = dict()
# parsed.path would point to /bucket/key
if parsed.path:
s3_bucket_key = parsed.path.split("/", 2)[1:]

result[bucket_name_property] = s3_bucket_key[0]
result[object_key_property] = s3_bucket_key[1]

return result
raise ValueError("URL given to the parse method is not a valid S3 url {0}".format(url))


class ProgressPercentage:
# This class was copied directly from S3Transfer docs
Expand Down
3 changes: 2 additions & 1 deletion samcli/lib/package/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@
from samcli.lib.package.s3_uploader import S3Uploader
from samcli.lib.utils.hash import dir_checksum
from samcli.lib.utils.resources import LAMBDA_LOCAL_RESOURCES
from samcli.lib.utils.s3 import parse_s3_url

LOG = logging.getLogger(__name__)

Expand Down Expand Up @@ -68,7 +69,7 @@ def is_s3_protocol_url(url):
Check whether url is a valid path in the form of "s3://..."
"""
try:
S3Uploader.parse_s3_url(url)
parse_s3_url(url)
return True
except ValueError:
return False
Expand Down
74 changes: 74 additions & 0 deletions samcli/lib/utils/s3.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,74 @@
"""Contains utility functions related to AWS S3 service"""
from typing import Any, Dict, Optional
from urllib.parse import parse_qs, urlparse


def parse_s3_url(
    url: Any,
    bucket_name_property: str = "Bucket",
    object_key_property: str = "Key",
    version_property: Optional[str] = None,
) -> Dict:
    """
    Parse an S3 URL into its bucket/key (and optionally version) components.

    Supports two URL formats:
      * "s3://bucket/key[?versionId=...]"
      * path-style "https://s3.<region>.amazonaws.com/bucket/key"

    Parameters
    ----------
    url
        The URL to parse. Must be a string in one of the supported formats.
    bucket_name_property
        Dictionary key under which the bucket name is returned.
    object_key_property
        Dictionary key under which the object key is returned.
    version_property
        If given, and the URL carries a single "versionId" query parameter,
        the version is returned under this key (s3:// format only).

    Returns
    -------
    Dict
        Mapping with the bucket name and object key (and optional version).

    Raises
    ------
    ValueError
        If ``url`` is not a string or is not a valid S3 URL in either format.
    """
    if isinstance(url, str) and url.startswith("s3://"):
        return _parse_s3_format_url(
            url=url,
            bucket_name_property=bucket_name_property,
            object_key_property=object_key_property,
            version_property=version_property,
        )

    if isinstance(url, str) and url.startswith("https://s3"):
        return _parse_path_style_s3_url(
            url=url, bucket_name_property=bucket_name_property, object_key_property=object_key_property
        )

    raise ValueError("URL given to the parse method is not a valid S3 url {0}".format(url))


def _parse_s3_format_url(
    url: Any,
    bucket_name_property: str = "Bucket",
    object_key_property: str = "Key",
    version_property: Optional[str] = None,
) -> Dict:
    """
    Parse s3 urls that begin with s3://, e.g. s3://bucket/key.

    Raises ValueError if the URL has no netloc (bucket) or no path (key).
    """
    parsed = urlparse(url)
    query = parse_qs(parsed.query)
    if parsed.netloc and parsed.path:
        result = dict()
        result[bucket_name_property] = parsed.netloc
        result[object_key_property] = parsed.path.lstrip("/")

        # If there is a query string that has a single versionId field,
        # set the object version and return
        if version_property is not None and "versionId" in query and len(query["versionId"]) == 1:
            result[version_property] = query["versionId"][0]

        return result

    raise ValueError("URL given to the parse method is not a valid S3 url {0}".format(url))


def _parse_path_style_s3_url(
    url: Any,
    bucket_name_property: str = "Bucket",
    object_key_property: str = "Key",
) -> Dict:
    """
    Parse path style s3 urls, e.g. https://s3.us-east-1.amazonaws.com/bucket/key.

    Raises ValueError if the URL path does not contain both a bucket and a key
    segment. (Previously an empty path silently returned an empty dict and a
    missing key segment raised IndexError; both now raise ValueError so that
    callers catching ValueError handle every malformed URL uniformly.)
    """
    parsed = urlparse(url)
    # parsed.path looks like "/bucket/key"; split off at most the first two
    # segments so the object key may itself contain "/" characters.
    s3_bucket_key = parsed.path.split("/", 2)[1:] if parsed.path else []
    if len(s3_bucket_key) != 2 or not s3_bucket_key[0]:
        raise ValueError("URL given to the parse method is not a valid S3 url {0}".format(url))

    return {
        bucket_name_property: s3_bucket_key[0],
        object_key_property: s3_bucket_key[1],
    }
3 changes: 2 additions & 1 deletion tests/end_to_end/test_stages.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@

from samcli.cli.global_config import GlobalConfig
from filelock import FileLock
from samcli.lib.utils.s3 import parse_s3_url
from tests.end_to_end.end_to_end_context import EndToEndTestContext
from tests.testing_utils import CommandResult, run_command, run_command_with_input

Expand Down Expand Up @@ -102,7 +103,7 @@ def _download_packaged_file(self):
)

if zipped_fn_s3_loc:
s3_info = S3Uploader.parse_s3_url(zipped_fn_s3_loc)
s3_info = parse_s3_url(zipped_fn_s3_loc)
self.s3_client.download_file(s3_info["Bucket"], s3_info["Key"], str(zip_file_path))

with zipfile.ZipFile(zip_file_path, "r") as zip_refzip:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -48,8 +48,10 @@ def test_set_functions(self):

@patch("samcli.lib.bootstrap.companion_stack.companion_stack_manager.mktempfile")
@patch("samcli.lib.bootstrap.companion_stack.companion_stack_manager.S3Uploader")
@patch("samcli.lib.bootstrap.companion_stack.companion_stack_manager.parse_s3_url")
def test_create_companion_stack(
self,
parse_s3_url_mock,
s3_uploader_mock,
mktempfile_mock,
):
Expand All @@ -70,8 +72,10 @@ def test_create_companion_stack(

@patch("samcli.lib.bootstrap.companion_stack.companion_stack_manager.mktempfile")
@patch("samcli.lib.bootstrap.companion_stack.companion_stack_manager.S3Uploader")
@patch("samcli.lib.bootstrap.companion_stack.companion_stack_manager.parse_s3_url")
def test_update_companion_stack(
self,
parse_s3_url_mock,
s3_uploader_mock,
mktempfile_mock,
):
Expand Down
Loading

0 comments on commit 3952ff6

Please sign in to comment.