diff --git a/botocore/handlers.py b/botocore/handlers.py
index 9a7f78acef..9cb1d052c0 100644
--- a/botocore/handlers.py
+++ b/botocore/handlers.py
@@ -132,7 +132,6 @@ def escape_xml_payload(params, **kwargs):
 
 
 def check_for_200_error(response, **kwargs):
-    """This function has been deprecated, but is kept for backwards compatibility."""
     # From: http://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectCOPY.html
     # There are two opportunities for a copy request to return an error. One
     # can occur when Amazon S3 receives the copy request and the other can
@@ -153,9 +152,7 @@ def check_for_200_error(response, **kwargs):
         # trying to retrieve the response. See Endpoint._get_response().
         return
     http_response, parsed = response
-    if _looks_like_special_case_error(
-        http_response.status_code, http_response.content
-    ):
+    if _looks_like_special_case_error(http_response):
         logger.debug(
             "Error found for response with 200 status code, "
             "errors: %s, changing status code to "
@@ -165,13 +162,13 @@ def check_for_200_error(response, **kwargs):
         http_response.status_code = 500
 
 
-def _looks_like_special_case_error(status_code, body):
-    if status_code == 200 and body:
+def _looks_like_special_case_error(http_response):
+    if http_response.status_code == 200:
         try:
             parser = ETree.XMLParser(
                 target=ETree.TreeBuilder(), encoding='utf-8'
             )
-            parser.feed(body)
+            parser.feed(http_response.content)
             root = parser.close()
         except XMLParseError:
             # In cases of network disruptions, we may end up with a partial
@@ -1242,35 +1239,6 @@ def document_expires_shape(section, event_name, **kwargs):
         )
 
 
-def _handle_200_error(operation_model, response_dict, **kwargs):
-    # S3 can return a 200 response with an error embedded in the body.
-    # Convert the 200 to a 500 for retry resolution in ``_update_status_code``.
-    if not response_dict or operation_model.has_streaming_output:
-        # Operations with streaming response blobs are excluded as they
-        # can't be reliably distinguished from an S3 error.
-        return
-    if _looks_like_special_case_error(
-        response_dict['status_code'], response_dict['body']
-    ):
-        response_dict['status_code'] = 500
-        logger.debug(
-            f"Error found for response with 200 status code: {response_dict['body']}."
-        )
-
-
-def _update_status_code(response, **kwargs):
-    # Update the http_response status code when the parsed response has been
-    # modified in a handler. This enables retries for cases like ``_handle_200_error``.
-    if response is None:
-        return
-    http_response, parsed = response
-    parsed_status_code = parsed.get('ResponseMetadata', {}).get(
-        'HTTPStatusCode', http_response.status_code
-    )
-    if http_response.status_code != parsed_status_code:
-        http_response.status_code = parsed_status_code
-
-
 # This is a list of (event_name, handler).
 # When a Session is created, everything in this list will be
 # automatically registered with that Session.
@@ -1301,7 +1269,6 @@ def _update_status_code(response, **kwargs):
     ('after-call.cloudformation.GetTemplate', json_decode_template_body),
     ('after-call.s3.GetBucketLocation', parse_get_bucket_location),
     ('before-parse.s3.*', handle_expires_header),
-    ('before-parse.s3.*', _handle_200_error, REGISTER_FIRST),
     ('before-parameter-build', generate_idempotent_uuid),
     ('before-parameter-build.s3', validate_bucket_name),
     ('before-parameter-build.s3', remove_bucket_from_url_paths_from_model),
@@ -1345,7 +1312,13 @@ def _update_status_code(response, **kwargs):
     ('before-call.ec2.CopySnapshot', inject_presigned_url_ec2),
     ('request-created', add_retry_headers),
     ('request-created.machinelearning.Predict', switch_host_machinelearning),
-    ('needs-retry.s3.*', _update_status_code, REGISTER_FIRST),
+    ('needs-retry.s3.UploadPartCopy', check_for_200_error, REGISTER_FIRST),
+    ('needs-retry.s3.CopyObject', check_for_200_error, REGISTER_FIRST),
+    (
+        'needs-retry.s3.CompleteMultipartUpload',
+        check_for_200_error,
+        REGISTER_FIRST,
+    ),
     ('choose-signer.cognito-identity.GetId', disable_signing),
     ('choose-signer.cognito-identity.GetOpenIdToken', disable_signing),
     ('choose-signer.cognito-identity.UnlinkIdentity', disable_signing),
diff --git a/tests/functional/test_s3.py b/tests/functional/test_s3.py
index 613ef1c016..04fc32aa7b 100644
--- a/tests/functional/test_s3.py
+++ b/tests/functional/test_s3.py
@@ -28,6 +28,7 @@
     UnsupportedS3AccesspointConfigurationError,
     UnsupportedS3ConfigurationError,
 )
+from botocore.parsers import ResponseParserError
 from tests import (
     BaseSessionTest,
     ClientHTTPStubber,
@@ -434,12 +435,12 @@ def create_stubbed_s3_client(self, **kwargs):
         http_stubber.start()
         return client, http_stubber
 
-    def test_s3_copy_object_with_incomplete_response(self):
+    def test_s3_copy_object_with_empty_response(self):
         self.client, self.http_stubber = self.create_stubbed_s3_client(
             region_name="us-east-1"
         )
-        incomplete_body = b'<?xml version="1.0" encoding="UTF-8"?>\n\n\n'
+        empty_body = b""
         complete_body = (
             b'<?xml version="1.0" encoding="UTF-8"?>\n\n'
             b'<CopyObjectResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">'
             b"<LastModified>2020-04-21T21:03:31.000Z</LastModified>"
             b"<ETag>&quot;s0mEcH3cK5uM&quot;</ETag></CopyObjectResult>"
         )
-        self.http_stubber.add_response(status=200, body=incomplete_body)
+
+        self.http_stubber.add_response(status=200, body=empty_body)
         self.http_stubber.add_response(status=200, body=complete_body)
         response = self.client.copy_object(
             Bucket="bucket",
@@ -460,86 +462,19 @@ def test_s3_copy_object_with_incomplete_response(self):
         self.assertEqual(response["ResponseMetadata"]["HTTPStatusCode"], 200)
         self.assertTrue("CopyObjectResult" in response)
 
-
-class TestS3200ErrorResponse(BaseS3OperationTest):
-    def create_s3_client(self, **kwargs):
-        client_kwargs = {"region_name": self.region}
-        client_kwargs.update(kwargs)
-        return self.session.create_client("s3", **client_kwargs)
-
-    def create_stubbed_s3_client(self, **kwargs):
-        client = self.create_s3_client(**kwargs)
-        http_stubber = ClientHTTPStubber(client)
-        http_stubber.start()
-        return client, http_stubber
-
-    def test_s3_200_with_error_response(self):
+    def test_s3_copy_object_with_incomplete_response(self):
         self.client, self.http_stubber = self.create_stubbed_s3_client(
             region_name="us-east-1"
         )
-        error_body = (
-            b"<Error>"
-            b"<Code>SlowDown</Code>"
-            b"<Message>Please reduce your request rate.</Message>"
-            b"</Error>"
-        )
-        # Populate 5 attempts for SlowDown to validate
-        # we reached four max retries and raised an exception.
-        for i in range(5):
-            self.http_stubber.add_response(status=200, body=error_body)
-        with self.assertRaises(botocore.exceptions.ClientError) as context:
+
+        incomplete_body = b'<?xml version="1.0" encoding="UTF-8"?>\n\n\n'
+        self.http_stubber.add_response(status=200, body=incomplete_body)
+        with self.assertRaises(ResponseParserError):
             self.client.copy_object(
                 Bucket="bucket",
                 CopySource="other-bucket/test.txt",
                 Key="test.txt",
             )
-        self.assertEqual(len(self.http_stubber.requests), 5)
-        self.assertEqual(
-            context.exception.response["ResponseMetadata"]["HTTPStatusCode"],
-            500,
-        )
-        self.assertEqual(
-            context.exception.response["Error"]["Code"], "SlowDown"
-        )
-
-    def test_s3_200_with_no_error_response(self):
-        self.client, self.http_stubber = self.create_stubbed_s3_client(
-            region_name="us-east-1"
-        )
-        self.http_stubber.add_response(status=200, body=b"")
-
-        response = self.client.copy_object(
-            Bucket="bucket",
-            CopySource="other-bucket/test.txt",
-            Key="test.txt",
-        )
-
-        # Validate that the status code remains 200.
-        self.assertEqual(len(self.http_stubber.requests), 1)
-        self.assertEqual(response["ResponseMetadata"]["HTTPStatusCode"], 200)
-
-    def test_s3_200_with_error_response_on_streaming_operation(self):
-        self.client, self.http_stubber = self.create_stubbed_s3_client(
-            region_name="us-east-1"
-        )
-        self.http_stubber.add_response(status=200, body=b"")
-        response = self.client.get_object(Bucket="bucket", Key="test.txt")
-
-        # Validate that the status code remains 200 because we don't
-        # process 200-with-error responses on streaming operations.
-        self.assertEqual(len(self.http_stubber.requests), 1)
-        self.assertEqual(response["ResponseMetadata"]["HTTPStatusCode"], 200)
-
-    def test_s3_200_response_with_no_body(self):
-        self.client, self.http_stubber = self.create_stubbed_s3_client(
-            region_name="us-east-1"
-        )
-        self.http_stubber.add_response(status=200)
-        response = self.client.head_object(Bucket="bucket", Key="test.txt")
-
-        # Validate that the status code remains 200 on operations without a body.
-        self.assertEqual(len(self.http_stubber.requests), 1)
-        self.assertEqual(response["ResponseMetadata"]["HTTPStatusCode"], 200)
 
 
 class TestAccesspointArn(BaseS3ClientConfigurationTest):
diff --git a/tests/unit/test_handlers.py b/tests/unit/test_handlers.py
index 924abed8af..7e28356442 100644
--- a/tests/unit/test_handlers.py
+++ b/tests/unit/test_handlers.py
@@ -1202,14 +1202,14 @@ def test_s3_special_case_is_before_other_retry(self):
             caught_exception=None,
         )
         # This is implementation specific, but we're trying to verify that
-        # the _update_status_code is before any of the retry logic in
+        # the check_for_200_error is before any of the retry logic in
         # botocore.retryhandlers.
         # Technically, as long as the relative order is preserved, we don't
         # care about the absolute order.
         names = self.get_handler_names(responses)
-        self.assertIn('_update_status_code', names)
+        self.assertIn('check_for_200_error', names)
         self.assertIn('RetryHandler', names)
-        s3_200_handler = names.index('_update_status_code')
+        s3_200_handler = names.index('check_for_200_error')
         general_retry_handler = names.index('RetryHandler')
         self.assertTrue(
             s3_200_handler < general_retry_handler,