Set prefix param even when empty
We have never set empty values on the request, on the
assumption that they are not useful in the List query.
This assumption is wrong when restricted policies apply
to a given user, because empty is actually a valid value
in IAM or Bucket policy conditions.

For example, the following condition would never match with our
ListObjects call, while the AWS CLI works fine:
```json
            "Condition": {
                "StringEquals": {
                    "s3:prefix": [
                        "",
                        "data/",
                        "data"
                    ],
                    "s3:delimiter": [
                        "/",
                        ""
                    ]
                }
            }
```

The fix is to always add `prefix` and `delimiter`, whether empty
or not, to the query parameters of the List operation, so that
the server can use their values to validate the policies for the
incoming request.
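With the policy above in place, a restricted user can now list at the
bucket root, because the SDK sends `prefix=` and `delimiter=/`
explicitly. A minimal usage sketch (endpoint, credentials and bucket
name are hypothetical, for illustration only):
```python
from minio import Minio

# Hypothetical endpoint, credentials and bucket (illustration only).
client = Minio('play.min.io',
               access_key='RESTRICTED-ACCESS-KEY',
               secret_key='RESTRICTED-SECRET-KEY',
               secure=True)

# The List request now carries "prefix=" and "delimiter=/" even though
# prefix is empty, so conditions like s3:prefix == "" can match.
for obj in client.list_objects('restricted-bucket', prefix='', recursive=False):
    print(obj.object_name)
```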
harshavardhana authored and minio-trusted committed Jan 24, 2019
1 parent fe37605 commit 700ae50
Showing 7 changed files with 68 additions and 58 deletions.
72 changes: 43 additions & 29 deletions minio/api.py
@@ -72,7 +72,7 @@
get_sha256_hexdigest, get_md5_base64digest, Hasher,
optimal_part_info,
is_valid_bucket_name, PartMetadata, read_full,
get_s3_region_from_endpoint, is_valid_sse_object, is_valid_sse_c_object,
get_s3_region_from_endpoint, is_valid_sse_object, is_valid_sse_c_object,
is_valid_source_sse_object,
is_valid_bucket_notification_config, is_valid_policy_type,
get_s3_region_from_endpoint,
@@ -496,6 +496,14 @@ def listen_bucket_notification(self, bucket_name, prefix='', suffix='',
"""
is_valid_bucket_name(bucket_name)

# If someone explicitly set prefix to None convert it to empty string.
if prefix is None:
prefix = ''

# If someone explicitly set suffix to None convert it to empty string.
if suffix is None:
suffix = ''

url_components = urlsplit(self._endpoint_url)
if url_components.hostname == 's3.amazonaws.com':
raise InvalidArgumentError(
@@ -697,7 +705,7 @@ def copy_object(self, bucket_name, object_name, object_source,
:param object_source: Source object to be copied.
:param conditions: :class:`CopyConditions` object. Collection of
supported CopyObject conditions.
:param metadata: Any user-defined metadata to be copied along with
:param metadata: Any user-defined metadata to be copied along with
destination object.
"""
is_valid_bucket_name(bucket_name)
@@ -710,16 +718,16 @@ def copy_object(self, bucket_name, object_name, object_source,
if metadata is not None:
headers = amzprefix_user_metadata(metadata)
headers["x-amz-metadata-directive"] = "REPLACE"

if conditions:
for k, v in conditions.items():
headers[k] = v
headers[k] = v

# Source argument to copy_object can only be of type copy_SSE_C
if source_sse:
is_valid_source_sse_object(source_sse)
headers.update(source_sse.marshal())

#Destination argument to copy_object cannot be of type copy_SSE_C
if sse:
is_valid_sse_object(sse)
@@ -732,10 +740,10 @@ def copy_object(self, bucket_name, object_name, object_source,
headers=headers)

return parse_copy_object(bucket_name, object_name, response.data)

def put_object(self, bucket_name, object_name, data, length,
content_type='application/octet-stream',
metadata=None,
metadata=None,
sse=None,
):
"""
@@ -796,7 +804,7 @@ def put_object(self, bucket_name, object_name, data, length,
metadata=metadata,
sse=sse)

def list_objects(self, bucket_name, prefix=None, recursive=False):
def list_objects(self, bucket_name, prefix='', recursive=False):
"""
List objects in the given bucket.
@@ -835,17 +843,18 @@ def list_objects(self, bucket_name, prefix=None, recursive=False):
"""
is_valid_bucket_name(bucket_name)

# If someone explicitly set prefix to None convert it to empty string.
if prefix is None:
prefix = ''

method = 'GET'

# Initialize query parameters.
query = {
'max-keys': '1000'
'max-keys': '1000',
'prefix': prefix
}

# Add if prefix present.
if prefix:
query['prefix'] = prefix

# Delimited by default.
if not recursive:
query['delimiter'] = '/'
@@ -865,7 +874,7 @@ def list_objects(self, bucket_name, prefix=None, recursive=False):
for obj in objects:
yield obj

def list_objects_v2(self, bucket_name, prefix=None, recursive=False):
def list_objects_v2(self, bucket_name, prefix='', recursive=False):
"""
List objects in the given bucket using the List objects V2 API.
@@ -904,13 +913,15 @@ def list_objects_v2(self, bucket_name, prefix=None, recursive=False):
"""
is_valid_bucket_name(bucket_name)

# If someone explicitly set prefix to None convert it to empty string.
if prefix is None:
prefix = ''

# Initialize query parameters.
query = {
'list-type': '2'
'list-type': '2',
'prefix': prefix
}
# Add if prefix present.
if prefix:
query['prefix'] = prefix

# Delimited by default.
if not recursive:
@@ -1058,7 +1069,7 @@ def remove_objects(self, bucket_name, objects_iter):
# clear batch for next set of items
obj_batch = []

def list_incomplete_uploads(self, bucket_name, prefix=None,
def list_incomplete_uploads(self, bucket_name, prefix='',
recursive=False):
"""
List all in-complete uploads for a given bucket.
@@ -1104,7 +1115,7 @@ def list_incomplete_uploads(self, bucket_name, prefix=None,

return self._list_incomplete_uploads(bucket_name, prefix, recursive)

def _list_incomplete_uploads(self, bucket_name, prefix=None,
def _list_incomplete_uploads(self, bucket_name, prefix='',
recursive=False, is_aggregate_size=True):
"""
List incomplete uploads list all previously uploaded incomplete multipart objects.
@@ -1116,18 +1127,21 @@ def _list_incomplete_uploads(self, bucket_name, prefix=None,
"""
is_valid_bucket_name(bucket_name)

# If someone explicitly set prefix to None convert it to empty string.
if prefix is None:
prefix = ''

# Initialize query parameters.
query = {
'uploads': '',
'max-uploads': '1000'
'max-uploads': '1000',
'prefix': prefix
}

if prefix:
query['prefix'] = prefix
if not recursive:
query['delimiter'] = '/'

key_marker, upload_id_marker = None, None
key_marker, upload_id_marker = '', ''
is_truncated = True
while is_truncated:
if key_marker:
@@ -1181,7 +1195,7 @@ def _list_object_parts(self, bucket_name, object_name, upload_id):
}

is_truncated = True
part_number_marker = None
part_number_marker = ''
while is_truncated:
if part_number_marker:
query['part-number-marker'] = str(part_number_marker)
@@ -1423,7 +1437,7 @@ def _get_partial_object(self, bucket_name, object_name,

if sse:
headers.update(sse.marshal())

return self._url_open('GET',
bucket_name=bucket_name,
object_name=object_name,
@@ -1458,8 +1472,8 @@ def _do_put_object(self, bucket_name, object_name, part_data,
'Content-Length': part_size,
}

md5_base64 = None
sha256_hex = None
md5_base64 = ''
sha256_hex = ''
if self._is_ssl:
md5_base64 = get_md5_base64digest(part_data)
sha256_hex = _UNSIGNED_PAYLOAD
20 changes: 7 additions & 13 deletions minio/helpers.py
@@ -255,20 +255,14 @@ def get_target_url(endpoint_url, bucket_name=None, object_name=None,
ordered_query = collections.OrderedDict(sorted(query.items()))
query_components = []
for component_key in ordered_query:
if ordered_query[component_key] is not None:
if isinstance(ordered_query[component_key], list):
for value in ordered_query[component_key]:
query_components.append(component_key+'='+
queryencode(value))
else:
query_components.append(
component_key+'='+
queryencode(
ordered_query[component_key]
)
)
if isinstance(ordered_query[component_key], list):
for value in ordered_query[component_key]:
query_components.append(component_key+'='+
queryencode(value))
else:
query_components.append(component_key)
query_components.append(
component_key+'='+
queryencode(ordered_query.get(component_key, '')))

query_string = '&'.join(query_components)
if query_string:
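For reference, a minimal standalone sketch of the query-string assembly
after this change, using `urllib.parse.quote` instead of the library's
own `queryencode` helper: empty values now serialize as `key=` instead
of being dropped.
```python
from collections import OrderedDict
from urllib.parse import quote


def build_query_string(query):
    """Join sorted query parameters, keeping empty values as 'key='."""
    ordered = OrderedDict(sorted(query.items()))
    components = []
    for key, value in ordered.items():
        if isinstance(value, list):
            # Repeat the key once per value in the list.
            components.extend(key + '=' + quote(v, safe='') for v in value)
        else:
            components.append(key + '=' + quote(value or '', safe=''))
    return '&'.join(components)


print(build_query_string({'max-keys': '1000', 'prefix': ''}))
# max-keys=1000&prefix=
```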
2 changes: 2 additions & 0 deletions minio/signer.py
@@ -125,6 +125,8 @@ def presign_v4(method, url, access_key, secret_key, region=None,
single_component.append(
queryencode(ordered_query[component_key])
)
else:
single_component.append('=')
query_components.append(''.join(single_component))

query_string = '&'.join(query_components)
10 changes: 5 additions & 5 deletions tests/functional/tests.py
100755 → 100644
@@ -392,7 +392,7 @@ def test_copy_object_with_metadata(client, log_output):
KB_1 = 1024 # 1KiB.
KB_1_reader = LimitedRandomReader(KB_1)
client.put_object(bucket_name, object_source, KB_1_reader, KB_1)

# Perform a server side copy of an object
client.copy_object(bucket_name, object_copy,
'/'+bucket_name+'/'+object_source,metadata=metadata)
@@ -411,7 +411,7 @@ def test_copy_object_with_metadata(client, log_output):
raise Exception(err)
# Test passes
print(log_output.json_report())

def test_copy_object_etag_match(client, log_output):
# default value for log_output.function attribute is;
# log_output.function = "copy_object(bucket_name, object_name, object_source, conditions)"
@@ -578,7 +578,7 @@ def normalize_metadata(meta_data):
norm_dict = {k.lower(): v for k, v in meta_data.items()}
return norm_dict


def test_put_object(client, log_output, sse=None):
# default value for log_output.function attribute is;
# log_output.function = "put_object(bucket_name, object_name, data, length, content_type, metadata)"
@@ -907,7 +907,7 @@ def test_list_objects(client, log_output):
client.put_object(bucket_name, object_name+"-2", MB_1_reader, MB_1)
# List all object paths in bucket.
log_output.args['recursive'] = is_recursive = True
objects = client.list_objects(bucket_name, None, is_recursive)
objects = client.list_objects(bucket_name, '', is_recursive)
for obj in objects:
_, _, _, _, _, _ = obj.bucket_name,\
obj.object_name,\
@@ -1081,7 +1081,7 @@ def test_list_objects_v2(client, log_output):
client.put_object(bucket_name, object_name+"-2", MB_1_reader, MB_1)
# List all object paths in bucket using V2 API.
log_output.args['recursive'] = is_recursive = True
objects = client.list_objects_v2(bucket_name, None, is_recursive)
objects = client.list_objects_v2(bucket_name, '', is_recursive)
for obj in objects:
_, _, _, _, _, _ = obj.bucket_name,\
obj.object_name,\
8 changes: 4 additions & 4 deletions tests/unit/list_incomplete_uploads_test.py
@@ -45,7 +45,7 @@ def test_empty_list_uploads_test(self, mock_connection):
mock_connection.return_value = mock_server
mock_server.mock_add_request(
MockResponse('GET',
'https://localhost:9000/bucket/?max-uploads=1000&uploads=',
'https://localhost:9000/bucket/?max-uploads=1000&prefix=&uploads=',
{'User-Agent': _DEFAULT_USER_AGENT}, 200, content=mock_data))
client = Minio('localhost:9000')
upload_iter = client._list_incomplete_uploads('bucket', '', True, False)
@@ -102,7 +102,7 @@ def test_list_uploads_works(self, mock_connection):
mock_connection.return_value = mock_server
mock_server.mock_add_request(
MockResponse('GET',
'https://localhost:9000/bucket/?delimiter=%2F&max-uploads=1000&uploads=',
'https://localhost:9000/bucket/?delimiter=%2F&max-uploads=1000&prefix=&uploads=',
{'User-Agent': _DEFAULT_USER_AGENT},
200, content=mock_data))

@@ -203,7 +203,7 @@ def test_list_multipart_uploads_works(self, mock_connection):
mock_connection.return_value = mock_server
mock_server.mock_add_request(
MockResponse('GET',
'https://localhost:9000/bucket/?max-uploads=1000&uploads=',
'https://localhost:9000/bucket/?max-uploads=1000&prefix=&uploads=',
{'User-Agent': _DEFAULT_USER_AGENT}, 200, content=mock_data1))

client = Minio('localhost:9000')
@@ -213,7 +213,7 @@ def test_list_multipart_uploads_works(self, mock_connection):
mock_server.mock_add_request(MockResponse('GET',
'https://localhost:9000/bucket/?'
'key-marker=keymarker&'
'max-uploads=1000&'
'max-uploads=1000&prefix=&'
'upload-id-marker=uploadidmarker&uploads=',
{'User-Agent': _DEFAULT_USER_AGENT}, 200, content=mock_data2))
uploads.append(upload)
10 changes: 5 additions & 5 deletions tests/unit/list_objects_test.py
@@ -40,7 +40,7 @@ def test_empty_list_objects_works(self, mock_connection):
mock_server = MockConnection()
mock_connection.return_value = mock_server
mock_server.mock_add_request(MockResponse('GET',
'https://localhost:9000/bucket/?max-keys=1000',
'https://localhost:9000/bucket/?max-keys=1000&prefix=',
{'User-Agent': _DEFAULT_USER_AGENT}, 200, content=mock_data))
client = Minio('localhost:9000')
bucket_iter = client.list_objects('bucket', recursive=True)
@@ -87,15 +87,15 @@ def test_list_objects_works(self, mock_connection):
mock_server = MockConnection()
mock_connection.return_value = mock_server
mock_server.mock_add_request(MockResponse('GET',
'https://localhost:9000/bucket/?delimiter=%2F&max-keys=1000',
'https://localhost:9000/bucket/?delimiter=%2F&max-keys=1000&prefix=',
{'User-Agent': _DEFAULT_USER_AGENT}, 200, content=mock_data))
client = Minio('localhost:9000')
bucket_iter = client.list_objects('bucket')
buckets = []
for bucket in bucket_iter:
# cause an xml exception and fail if we try retrieving again
mock_server.mock_add_request(MockResponse('GET',
'https://localhost:9000/bucket/?delimiter=%2F&max-keys=1000',
'https://localhost:9000/bucket/?delimiter=%2F&max-keys=1000&prefix=',
{'User-Agent': _DEFAULT_USER_AGENT}, 200, content=''))
buckets.append(bucket)

@@ -172,13 +172,13 @@ def test_list_objects_works_well(self, mock_connection):
mock_server = MockConnection()
mock_connection.return_value = mock_server
mock_server.mock_add_request(MockResponse('GET',
'https://localhost:9000/bucket/?max-keys=1000',
'https://localhost:9000/bucket/?max-keys=1000&prefix=',
{'User-Agent': _DEFAULT_USER_AGENT}, 200, content=mock_data1))
client = Minio('localhost:9000')
bucket_iter = client.list_objects('bucket', recursive=True)
buckets = []
for bucket in bucket_iter:
url = 'https://localhost:9000/bucket/?marker=marker&max-keys=1000'
url = 'https://localhost:9000/bucket/?marker=marker&max-keys=1000&prefix='
mock_server.mock_add_request(MockResponse('GET', url,
{'User-Agent': _DEFAULT_USER_AGENT}, 200,
content=mock_data2))
4 changes: 2 additions & 2 deletions tests/unit/list_objects_v2_test.py
@@ -39,7 +39,7 @@ def test_empty_list_objects_works(self, mock_connection):
mock_server = MockConnection()
mock_connection.return_value = mock_server
mock_server.mock_add_request(MockResponse('GET',
'https://localhost:9000/bucket/?list-type=2',
'https://localhost:9000/bucket/?list-type=2&prefix=',
{'User-Agent': _DEFAULT_USER_AGENT}, 200, content=mock_data))
client = Minio('localhost:9000')
object_iter = client.list_objects_v2('bucket', recursive=True)
@@ -79,7 +79,7 @@ def test_list_objects_works(self, mock_connection):
mock_server.mock_add_request(
MockResponse(
'GET',
'https://localhost:9000/bucket/?delimiter=%2F&list-type=2',
'https://localhost:9000/bucket/?delimiter=%2F&list-type=2&prefix=',
{'User-Agent': _DEFAULT_USER_AGENT}, 200,
content=mock_data
)
