Commit: Fix S3 tests
dlpzx committed Jun 10, 2024
1 parent 9f28c43 commit 0fed0b1
Showing 4 changed files with 29 additions and 35 deletions.
2 changes: 1 addition & 1 deletion Makefile
@@ -53,7 +53,7 @@ check-security: upgrade-pip install-backend install-cdkproxy

test:
export PYTHONPATH=./backend:/./tests && \
python -m pytest -v -ra tests/modules/
python -m pytest -v -ra tests/

integration-tests: upgrade-pip install-integration-tests
export PYTHONPATH=./backend:/./tests_new && \
@@ -192,6 +192,7 @@ def check_target_role_access_policy(self) -> None:
version_id, policy_document = IAM.get_managed_policy_default_version(
self.target_environment.AwsAccountId, self.target_environment.region, share_resource_policy_name
)
logger.info(f'Policy... {policy_document}')

s3_statement_index = SharePolicyService._get_statement_by_sid(
policy_document, f'{IAM_S3_ACCESS_POINTS_STATEMENT_SID}S3'
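The hunk above fetches the managed policy's default version, logs it, and then locates a policy statement by its Sid. For reference, that lookup presumably amounts to the sketch below; it is not part of this commit, the real method lives in SharePolicyService, and its exact signature and return value are assumptions here:

def _get_statement_by_sid(policy, sid):
    # Walk the policy statements and return the index of the first one whose Sid matches,
    # or None when no statement carries that Sid.
    for index, statement in enumerate(policy.get('Statement', [])):
        if statement.get('Sid') == sid:
            return index
    return None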
3 changes: 2 additions & 1 deletion tests/modules/s3_datasets/test_dataset.py
@@ -8,7 +8,7 @@
from dataall.core.organizations.db.organization_models import Organization
from dataall.modules.s3_datasets.db.dataset_repositories import DatasetRepository
from dataall.modules.s3_datasets.db.dataset_models import DatasetStorageLocation, DatasetTable, S3Dataset
from dataall.modules.datasets_base.db.dataset_models import DatasetLock
from dataall.modules.datasets_base.db.dataset_models import DatasetLock, DatasetBase
from tests.core.stacks.test_stack import update_stack_query

from dataall.modules.datasets_base.services.datasets_enums import ConfidentialityClassification
@@ -352,6 +352,7 @@ def test_delete_dataset(client, dataset, env_fixture, org_fixture, db, module_mo
with db.scoped_session() as session:
session.query(DatasetLock).delete()
session.query(S3Dataset).delete()
session.query(DatasetBase).delete()
session.commit()
deleted_dataset = dataset(org=org_fixture, env=env_fixture, name='dataset1', owner=user.username, group=group.name)
response = client.query(
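The added session.query(DatasetBase).delete() presumably clears the base-table rows that back each S3Dataset record, since data.all keeps dataset metadata split between the shared DatasetBase model and the type-specific S3Dataset model; leftover base rows could otherwise interfere with the dataset recreated later in this test. Assuming the imports shown in this file, the same teardown could be captured in a small helper (a sketch, not code from this commit):

def _purge_all_datasets(session):
    # Clear locks first, then the S3-specific rows, then the shared base rows.
    session.query(DatasetLock).delete()
    session.query(S3Dataset).delete()
    session.query(DatasetBase).delete()
    session.commit()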
@@ -230,8 +230,7 @@ def mock_iam_client(mocker, account_id, role_name):
return mock_client


@pytest.fixture(scope='module')
def target_dataset_access_control_policy(request):
def _create_target_dataset_access_control_policy(bucket_name, access_point_name):
iam_policy = {
'Version': '2012-10-17',
'Statement': [
@@ -240,17 +239,17 @@ def target_dataset_access_control_policy(request):
'Effect': 'Allow',
'Action': ['s3:*'],
'Resource': [
f'arn:aws:s3:::{request.param[0]}',
f'arn:aws:s3:::{request.param[0]}/*',
f'arn:aws:s3:eu-west-1:{request.param[1]}:accesspoint/{request.param[2]}',
f'arn:aws:s3:eu-west-1:{request.param[1]}:accesspoint/{request.param[2]}/*',
f'arn:aws:s3:::{bucket_name}',
f'arn:aws:s3:::{bucket_name}/*',
f'arn:aws:s3:eu-west-1:{SOURCE_ENV_ACCOUNT}:accesspoint/{access_point_name}',
f'arn:aws:s3:eu-west-1:{SOURCE_ENV_ACCOUNT}:accesspoint/{access_point_name}/*',
],
},
{
'Sid': f'{IAM_S3_ACCESS_POINTS_STATEMENT_SID}KMS',
'Effect': 'Allow',
'Action': ['kms:*'],
'Resource': [f'arn:aws:kms:eu-west-1:{request.param[1]}:key/some-key-2112'],
'Resource': [f'arn:aws:kms:eu-west-1:{SOURCE_ENV_ACCOUNT}:key/some-key-2112'],
},
],
}
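Pieced together, the replacement helper reads roughly as below: the module-scoped, indirectly parametrized fixture becomes a plain function that takes only the bucket and access point names and hard-codes SOURCE_ENV_ACCOUNT, so each test can build the expected policy from its own share_manager. IAM_S3_ACCESS_POINTS_STATEMENT_SID and SOURCE_ENV_ACCOUNT are module-level constants in this test file; the first statement's Sid and the trailing return are not visible in this hunk and are assumed here:

def _create_target_dataset_access_control_policy(bucket_name, access_point_name):
    iam_policy = {
        'Version': '2012-10-17',
        'Statement': [
            {
                'Sid': f'{IAM_S3_ACCESS_POINTS_STATEMENT_SID}S3',  # assumed, mirroring the KMS statement's Sid pattern
                'Effect': 'Allow',
                'Action': ['s3:*'],
                'Resource': [
                    f'arn:aws:s3:::{bucket_name}',
                    f'arn:aws:s3:::{bucket_name}/*',
                    f'arn:aws:s3:eu-west-1:{SOURCE_ENV_ACCOUNT}:accesspoint/{access_point_name}',
                    f'arn:aws:s3:eu-west-1:{SOURCE_ENV_ACCOUNT}:accesspoint/{access_point_name}/*',
                ],
            },
            {
                'Sid': f'{IAM_S3_ACCESS_POINTS_STATEMENT_SID}KMS',
                'Effect': 'Allow',
                'Action': ['kms:*'],
                'Resource': [f'arn:aws:kms:eu-west-1:{SOURCE_ENV_ACCOUNT}:key/some-key-2112'],
            },
        ],
    }
    return iam_policy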
@@ -391,14 +390,13 @@ def test_grant_target_role_access_policy_test_empty_policy(
)


@pytest.mark.parametrize(
'target_dataset_access_control_policy', ([('bucketname', 'aws_account_id', 'access_point_name')]), indirect=True
)
def test_grant_target_role_access_policy_existing_policy_bucket_not_included(
mocker, dataset1, location1, target_dataset_access_control_policy, share_manager
mocker, dataset1, location1, share_manager
):
# Given
iam_policy = target_dataset_access_control_policy
iam_policy = _create_target_dataset_access_control_policy(
share_manager.bucket_name, share_manager.access_point_name
)

mocker.patch('dataall.base.aws.iam.IAM.get_managed_policy_default_version', return_value=('v1', iam_policy))
mocker.patch(
@@ -445,14 +443,11 @@ def test_grant_target_role_access_policy_existing_policy_bucket_not_included(
s3_index = SharePolicyService._get_statement_by_sid(policy=policy_object, sid=f'{IAM_S3_BUCKETS_STATEMENT_SID}S3')


@pytest.mark.parametrize(
'target_dataset_access_control_policy', ([('dataset1', SOURCE_ENV_ACCOUNT, 'test')]), indirect=True
)
def test_grant_target_role_access_policy_existing_policy_bucket_included(
mocker, target_dataset_access_control_policy, share_manager
):
def test_grant_target_role_access_policy_existing_policy_bucket_included(mocker, share_manager):
# Given
iam_policy = target_dataset_access_control_policy
iam_policy = _create_target_dataset_access_control_policy(
share_manager.bucket_name, share_manager.access_point_name
)

mocker.patch('dataall.base.aws.iam.IAM.get_managed_policy_default_version', return_value=('v1', iam_policy))

@@ -1201,10 +1196,7 @@ def test_check_bucket_policy_missing_sid(mocker, base_bucket_policy, share_manag
assert len(share_manager.folder_errors) == 1


@pytest.mark.parametrize(
'target_dataset_access_control_policy', ([('dataset1', SOURCE_ENV_ACCOUNT, 'location1')]), indirect=True
)
def test_check_target_role_access_policy(mocker, target_dataset_access_control_policy, share_manager):
def test_check_target_role_access_policy(mocker, share_manager):
# Given
mocker.patch(
'dataall.modules.s3_datasets_shares.services.managed_share_policy_service.SharePolicyService.check_if_policy_exists',
@@ -1218,7 +1210,10 @@ def test_check_target_role_access_policy(mocker, target_dataset_access_control_p

iam_get_policy_mock = mocker.patch(
'dataall.base.aws.iam.IAM.get_managed_policy_default_version',
return_value=('v1', target_dataset_access_control_policy),
return_value=(
'v1',
_create_target_dataset_access_control_policy(share_manager.bucket_name, share_manager.access_point_name),
),
)

mocker.patch(
Expand All @@ -1234,16 +1229,10 @@ def test_check_target_role_access_policy(mocker, target_dataset_access_control_p
# Then
iam_get_policy_mock.assert_called()
kms_client().get_key_id.assert_called()
assert share_manager.folder_errors == 'some'
assert len(share_manager.folder_errors) == 0


@pytest.mark.parametrize(
'target_dataset_access_control_policy', ([('bucketname', 'aws_account_id', 'access_point_name')]), indirect=True
)
def test_check_target_role_access_policy_existing_policy_bucket_and_key_not_included(
mocker, target_dataset_access_control_policy, share_manager
):
def test_check_target_role_access_policy_existing_policy_bucket_and_key_not_included(mocker, share_manager):
# Given
mocker.patch(
'dataall.modules.s3_datasets_shares.services.managed_share_policy_service.SharePolicyService.check_if_policy_exists',
@@ -1258,7 +1247,10 @@ def test_check_target_role_access_policy_existing_policy_bucket_and_key_not_incl
# Gets policy with other S3 and KMS
iam_get_policy_mock = mocker.patch(
'dataall.base.aws.iam.IAM.get_managed_policy_default_version',
return_value=('v1', target_dataset_access_control_policy),
return_value=(
'v1',
_create_target_dataset_access_control_policy(share_manager.bucket_name, share_manager.access_point_name),
),
)

mocker.patch(
Expand All @@ -1274,7 +1266,7 @@ def test_check_target_role_access_policy_existing_policy_bucket_and_key_not_incl
# Then
iam_get_policy_mock.assert_called()
kms_client().get_key_id.assert_called()
assert len(share_manager.folder_errors) == 2
assert len(share_manager.folder_errors) == 1


def test_check_target_role_access_policy_test_no_policy(mocker, share_manager):