Default to virtual style addressing #1387

Closed
wants to merge 7 commits
5 changes: 5 additions & 0 deletions .changes/next-release/feature-s3-58965.json
@@ -0,0 +1,5 @@
{
"type": "feature",
"category": "``s3``",
"description": "Default to virtual hosted addressing regardless of signature version (boto/botocore`#1387 <https://github.com/boto/botocore/issues/1387>`__)"
}
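As a rough illustration of the user-visible effect (a minimal sketch, assuming boto3 is installed and credentials are configured; the bucket and key names are placeholders):

import boto3
from botocore.config import Config

# Before this change, SigV4 presigned URLs defaulted to path-style
# addressing; with it, DNS-compatible buckets get virtual-hosted-style
# URLs by default.
client = boto3.client(
    's3',
    region_name='us-east-1',
    config=Config(signature_version='s3v4'),
)
url = client.generate_presigned_url(
    'get_object',
    Params={'Bucket': 'mybucket', 'Key': 'mykey'},
    ExpiresIn=3600,
)
# Previously: https://s3.amazonaws.com/mybucket/mykey?X-Amz-...
# Now:        https://mybucket.s3.amazonaws.com/mykey?X-Amz-...
print(url)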
10 changes: 0 additions & 10 deletions botocore/client.py
@@ -189,16 +189,6 @@ def _get_s3_addressing_handler(self, endpoint_url, s3_config,
logger.debug("Defaulting to S3 virtual host style addressing with "
"path style addressing fallback.")

# For dual stack mode, we need to clear the default endpoint url in
# order to use the existing netloc if the bucket is dns compatible.
# Also, the default_endpoint_url of 's3.amazonaws.com' only works
# if we're in the 'aws' partition. Anywhere else we should
# just use the existing netloc.
if s3_config.get('use_dualstack_endpoint', False) or \
partition != 'aws':
return functools.partial(
fix_s3_host, default_endpoint_url=None)

# By default, try to use virtual style with path fallback.
return fix_s3_host

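The virtual-host handler still falls back to path-style addressing when a bucket name is not DNS-compatible. A rough approximation of that check (the real rules live in botocore.utils.check_dns_name; this sketch is illustrative only):

import re

LABEL_RE = re.compile(r'[a-z0-9][a-z0-9\-]*[a-z0-9]')

def looks_dns_compatible(bucket_name):
    # Approximation: buckets containing dots, uppercase characters, or
    # names outside 3-63 characters cannot be used as a DNS label in the
    # host, so path style is used for them instead.
    if '.' in bucket_name:
        return False
    if not 3 <= len(bucket_name) <= 63:
        return False
    match = LABEL_RE.match(bucket_name)
    return match is not None and match.end() == len(bucket_name)

# looks_dns_compatible('my-bucket')  -> True  (virtual-hosted style)
# looks_dns_compatible('My_Bucket!') -> False (path-style fallback)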
19 changes: 17 additions & 2 deletions botocore/signers.py
@@ -558,7 +558,8 @@ def generate_presigned_url(self, ClientMethod, Params=None, ExpiresIn=3600,
expires_in = ExpiresIn
http_method = HttpMethod
context = {
'is_presign_request': True
'is_presign_request': True,
'use_global_endpoint': _should_use_global_endpoint(self),
}

request_signer = self._request_signer
@@ -686,7 +687,12 @@ def generate_presigned_post(self, Bucket, Key, Fields=None, Conditions=None,

# Prepare the request dict by including the client's endpoint url.
prepare_request_dict(
request_dict, endpoint_url=self.meta.endpoint_url)
request_dict, endpoint_url=self.meta.endpoint_url,
context={
'is_presign_request': True,
'use_global_endpoint': _should_use_global_endpoint(self),
},
)

# Append the bucket name to the list of conditions.
conditions.append({'bucket': bucket})
@@ -704,3 +710,12 @@ def generate_presigned_post(self, Bucket, Key, Fields=None, Conditions=None,
return post_presigner.generate_presigned_post(
request_dict=request_dict, fields=fields, conditions=conditions,
expires_in=expires_in)


def _should_use_global_endpoint(client):
use_dualstack_endpoint = False
if client.meta.config.s3 is not None:
use_dualstack_endpoint = client.meta.config.s3.get(
'use_dualstack_endpoint', False)
return (client.meta.partition == 'aws' and
not use_dualstack_endpoint)
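A quick sketch of the helper's intended behaviour, using SimpleNamespace stand-ins for a real client (the fake_client helper below is hypothetical, not part of botocore):

from types import SimpleNamespace

def fake_client(partition, s3_config=None):
    # Minimal stand-in exposing only the attributes the helper reads:
    # meta.partition and meta.config.s3.
    config = SimpleNamespace(s3=s3_config)
    return SimpleNamespace(
        meta=SimpleNamespace(partition=partition, config=config))

assert _should_use_global_endpoint(fake_client('aws')) is True
assert _should_use_global_endpoint(
    fake_client('aws', {'use_dualstack_endpoint': True})) is False
assert _should_use_global_endpoint(fake_client('aws-cn')) is False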
46 changes: 20 additions & 26 deletions botocore/utils.py
@@ -41,10 +41,6 @@
# Based on rfc2986, section 2.3
SAFE_CHARS = '-._~'
LABEL_RE = re.compile(r'[a-z0-9][a-z0-9\-]*[a-z0-9]')
RESTRICTED_REGIONS = [
'us-gov-west-1',
'fips-us-gov-west-1',
]
RETRYABLE_HTTP_ERRORS = (requests.Timeout, requests.ConnectionError)
S3_ACCELERATE_WHITELIST = ['dualstack']

@@ -672,23 +668,18 @@ def check_dns_name(bucket_name):


def fix_s3_host(request, signature_version, region_name,
default_endpoint_url='s3.amazonaws.com', **kwargs):
default_endpoint_url=None, **kwargs):
"""
This handler looks at S3 requests just before they are signed.
If there is a bucket name on the path (true for everything except
ListAllBuckets) it checks to see if that bucket name conforms to
the DNS naming conventions. If it does, it alters the request to
use ``virtual hosting`` style addressing rather than ``path-style``
addressing. This allows us to avoid 301 redirects for all
bucket names that can be CNAME'd.
addressing.

"""
# By default we do not use virtual hosted style addressing when
# signed with signature version 4.
if signature_version is not botocore.UNSIGNED and \
's3v4' in signature_version:
return
elif not _allowed_region(region_name):
return
if request.context.get('use_global_endpoint', False):
default_endpoint_url = 's3.amazonaws.com'
try:
switch_to_virtual_host_style(
request, signature_version, default_endpoint_url)
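Conceptually, switch_to_virtual_host_style moves the bucket from the path into the host. A simplified sketch of the transformation (not the botocore implementation; the URLs in the comments are placeholders):

from urllib.parse import urlsplit, urlunsplit

def to_virtual_host_style(url, default_endpoint_url=None):
    # Pull the leading path component (the bucket) out of the path and
    # prepend it to the netloc. When use_global_endpoint is set, the
    # netloc is first replaced with s3.amazonaws.com; otherwise the
    # existing regional/dualstack netloc is kept.
    scheme, netloc, path, query, fragment = urlsplit(url)
    bucket, _, key_path = path.lstrip('/').partition('/')
    netloc = '%s.%s' % (bucket, default_endpoint_url or netloc)
    return urlunsplit((scheme, netloc, '/' + key_path, query, fragment))

# to_virtual_host_style('https://s3.us-west-2.amazonaws.com/mybucket/mykey')
#   -> 'https://mybucket.s3.us-west-2.amazonaws.com/mykey'
# to_virtual_host_style('https://s3.us-west-2.amazonaws.com/mybucket/mykey',
#                       default_endpoint_url='s3.amazonaws.com')
#   -> 'https://mybucket.s3.amazonaws.com/mykey'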
@@ -765,10 +756,6 @@ def _is_get_bucket_location_request(request):
return request.url.endswith('?location')


def _allowed_region(region_name):
return region_name not in RESTRICTED_REGIONS


def instance_cache(func):
"""Method decorator for caching method calls to a single instance.

@@ -904,14 +891,21 @@ def redirect_from_error(self, request_dict, response, operation, **kwargs):
error = response[1].get('Error', {})
error_code = error.get('Code')

if error_code == '301':
# A raw 301 error might be returned for several reasons, but we
# only want to try to redirect it if it's a HeadObject or
# HeadBucket because all other operations will return
# PermanentRedirect if region is incorrect.
if operation.name not in ['HeadObject', 'HeadBucket']:
return
elif error_code != 'PermanentRedirect':
# We have to account for 400 responses because
# if we sign a Head* request with the wrong region,
# we'll get a 400 Bad Request but we won't get a
# body saying it's an "AuthorizationHeaderMalformed".
is_special_head_object = (
error_code in ['301', '400'] and
operation.name in ['HeadObject', 'HeadBucket']
)
is_wrong_signing_region = (
error_code == 'AuthorizationHeaderMalformed' and
'Region' in error
)
is_permanent_redirect = error_code == 'PermanentRedirect'
if not any([is_special_head_object, is_wrong_signing_region,
is_permanent_redirect]):
return

bucket = request_dict['context']['signing']['bucket']
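The redirect classification above can be summarized as a small standalone sketch (the error payloads in the examples are hypothetical):

def should_attempt_region_redirect(error_code, error, operation_name):
    # Mirrors the classification in redirect_from_error: raw 301/400
    # errors are only trusted for HeadObject/HeadBucket (other operations
    # report PermanentRedirect directly), and AuthorizationHeaderMalformed
    # only counts when the error body names the expected region.
    is_special_head_object = (
        error_code in ('301', '400') and
        operation_name in ('HeadObject', 'HeadBucket')
    )
    is_wrong_signing_region = (
        error_code == 'AuthorizationHeaderMalformed' and 'Region' in error
    )
    is_permanent_redirect = error_code == 'PermanentRedirect'
    return any([is_special_head_object, is_wrong_signing_region,
                is_permanent_redirect])

# A SigV4 request signed for the wrong region:
error = {'Code': 'AuthorizationHeaderMalformed', 'Region': 'eu-west-1'}
assert should_attempt_region_redirect(error['Code'], error, 'GetObject')
# A bare 400 on GetObject carries no usable region hint:
assert not should_attempt_region_redirect('400', {'Code': '400'}, 'GetObject')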