From 4c5f70a44d1419524d2cc026a06c197e2c250e7e Mon Sep 17 00:00:00 2001 From: Serge Smertin Date: Fri, 26 Apr 2024 16:08:09 +0200 Subject: [PATCH 1/2] Remove hidden bugs from unit tests by enforcing mocks --- pyproject.toml | 2 +- src/databricks/labs/ucx/aws/credentials.py | 2 - .../labs/ucx/contexts/cli_command.py | 1 - tests/integration/aws/test_credentials.py | 2 +- tests/unit/assessment/test_clusters.py | 6 +- tests/unit/aws/test_access.py | 148 +++++++++--------- tests/unit/aws/test_credentials.py | 26 ++- tests/unit/azure/test_access.py | 75 ++++++++- tests/unit/azure/test_credentials.py | 37 ++--- tests/unit/azure/test_locations.py | 42 ++--- tests/unit/conftest.py | 2 +- tests/unit/framework/test_tasks.py | 2 +- .../hive_metastore/test_catalog_schema.py | 5 + tests/unit/hive_metastore/test_mapping.py | 15 ++ .../unit/hive_metastore/test_table_migrate.py | 41 ++++- .../hive_metastore/test_views_sequencer.py | 22 +-- tests/unit/source_code/test_dependencies.py | 2 + tests/unit/source_code/test_files.py | 7 +- .../source_code/test_notebook_migrator.py | 11 +- tests/unit/test_account.py | 13 +- tests/unit/test_cli.py | 1 + tests/unit/test_install.py | 60 ++++++- tests/unit/workspace_access/test_clusters.py | 14 ++ tests/unit/workspace_access/test_generic.py | 13 +- tests/unit/workspace_access/test_groups.py | 10 +- tests/unit/workspace_access/test_manager.py | 22 ++- tests/unit/workspace_access/test_redash.py | 2 + tests/unit/workspace_access/test_secrets.py | 6 + 28 files changed, 393 insertions(+), 196 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 64533a9797..11dbfef7b5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -67,7 +67,7 @@ dependencies = [ "mypy~=1.9.0", "pylint~=3.1.0", "pylint-pytest==2.0.0a0", - "databricks-labs-pylint~=0.3.0", + "databricks-labs-pylint~=0.4.0", "pytest~=8.1.0", "pytest-cov~=4.1.0", "pytest-mock~=3.14.0", diff --git a/src/databricks/labs/ucx/aws/credentials.py b/src/databricks/labs/ucx/aws/credentials.py index 9de7d4bc48..e3b2df8172 100644 --- a/src/databricks/labs/ucx/aws/credentials.py +++ b/src/databricks/labs/ucx/aws/credentials.py @@ -113,13 +113,11 @@ class IamRoleMigration: def __init__( self, installation: Installation, - ws: WorkspaceClient, resource_permissions: AWSResourcePermissions, storage_credential_manager: CredentialManager, ): self._output_file = "aws_iam_role_migration_result.csv" self._installation = installation - self._ws = ws self._resource_permissions = resource_permissions self._storage_credential_manager = storage_credential_manager diff --git a/src/databricks/labs/ucx/contexts/cli_command.py b/src/databricks/labs/ucx/contexts/cli_command.py index 20200e5ec5..d90ba2922a 100644 --- a/src/databricks/labs/ucx/contexts/cli_command.py +++ b/src/databricks/labs/ucx/contexts/cli_command.py @@ -162,7 +162,6 @@ def aws_resource_permissions(self): def iam_role_migration(self): return IamRoleMigration( self.installation, - self.workspace_client, self.aws_resource_permissions, self.iam_credential_manager, ) diff --git a/tests/integration/aws/test_credentials.py b/tests/integration/aws/test_credentials.py index d8f272f53c..d496ac3093 100644 --- a/tests/integration/aws/test_credentials.py +++ b/tests/integration/aws/test_credentials.py @@ -32,7 +32,7 @@ def inner(credentials: set[str], read_only=False) -> list[CredentialValidationRe location = ExternalLocations(ws, sql_backend, "inventory_schema") resource_permissions = AWSResourcePermissions(installation, ws, sql_backend, aws, location, "inventory_schema") - 
instance_profile_migration = IamRoleMigration(installation, ws, resource_permissions, CredentialManager(ws)) + instance_profile_migration = IamRoleMigration(installation, resource_permissions, CredentialManager(ws)) return instance_profile_migration.run( MockPrompts({"Above IAM roles will be migrated to UC storage credentials *": "Yes"}), diff --git a/tests/unit/assessment/test_clusters.py b/tests/unit/assessment/test_clusters.py index f4fe52f460..3bcf2e266f 100644 --- a/tests/unit/assessment/test_clusters.py +++ b/tests/unit/assessment/test_clusters.py @@ -142,7 +142,7 @@ def test_try_fetch(): def test_no_isolation_clusters(): ws = workspace_client_mock(cluster_ids=['no-isolation']) - sql_backend = create_autospec(SqlBackend) + sql_backend = MockBackend() crawler = ClustersCrawler(ws, sql_backend, "ucx") result_set = list(crawler.snapshot()) assert len(result_set) == 1 @@ -151,7 +151,7 @@ def test_no_isolation_clusters(): def test_unsupported_clusters(): ws = workspace_client_mock(cluster_ids=['legacy-passthrough']) - sql_backend = create_autospec(SqlBackend) + sql_backend = MockBackend() crawler = ClustersCrawler(ws, sql_backend, "ucx") result_set = list(crawler.snapshot()) assert len(result_set) == 1 @@ -163,7 +163,7 @@ def test_policy_crawler(): policy_ids=['single-user-with-spn', 'single-user-with-spn-policyid', 'single-user-with-spn-no-sparkversion'], ) - sql_backend = create_autospec(SqlBackend) + sql_backend = MockBackend() crawler = PoliciesCrawler(ws, sql_backend, "ucx") result_set = list(crawler.snapshot()) failures = json.loads(result_set[0].failures) diff --git a/tests/unit/aws/test_access.py b/tests/unit/aws/test_access.py index 99582987ea..c61fc7caec 100644 --- a/tests/unit/aws/test_access.py +++ b/tests/unit/aws/test_access.py @@ -97,17 +97,12 @@ def backend(): return MockBackend(rows=rows, fails_on_first={}) -@pytest.fixture -def mock_aws(): - return create_autospec(AWSResources) - - @pytest.fixture def locations(mock_ws, backend): return ExternalLocations(mock_ws, backend, "ucx") -def test_create_external_locations(mock_ws, installation_multiple_roles, mock_aws, backend, locations): +def test_create_external_locations(mock_ws, installation_multiple_roles, backend, locations): mock_ws.storage_credentials.list.return_value = [ StorageCredentialInfo( id="1", @@ -120,8 +115,9 @@ def test_create_external_locations(mock_ws, installation_multiple_roles, mock_aw aws_iam_role=AwsIamRoleResponse("arn:aws:iam::12345:role/uc-rolex"), ), ] + aws = create_autospec(AWSResources) aws_resource_permissions = AWSResourcePermissions( - installation_multiple_roles, mock_ws, backend, mock_aws, locations, "ucx" + installation_multiple_roles, mock_ws, backend, aws, locations, "ucx" ) aws_resource_permissions.create_external_locations() calls = [ @@ -130,9 +126,10 @@ def test_create_external_locations(mock_ws, installation_multiple_roles, mock_aw call(mock.ANY, 's3://BUCKETX/FOLDERX', 'credx', skip_validation=True), ] mock_ws.external_locations.create.assert_has_calls(calls, any_order=True) + aws.get_role_policy.assert_not_called() -def test_create_external_locations_skip_existing(mock_ws, mock_aws, backend, locations): +def test_create_external_locations_skip_existing(mock_ws, backend, locations): install = create_autospec(Installation) install.load.return_value = [ AWSRoleAction("arn:aws:iam::12345:role/uc-role1", "s3", "WRITE_FILES", "s3://BUCKET1"), @@ -153,16 +150,17 @@ def test_create_external_locations_skip_existing(mock_ws, mock_aws, backend, loc 
mock_ws.external_locations.list.return_value = [ ExternalLocationInfo(name="UCX_FOO_1", url="s3://BUCKETX/FOLDERX", credential_name="credx"), ] - - aws_resource_permissions = AWSResourcePermissions(install, mock_ws, backend, mock_aws, locations, "ucx") + aws = create_autospec(AWSResources) + aws_resource_permissions = AWSResourcePermissions(install, mock_ws, backend, aws, locations, "ucx") aws_resource_permissions.create_external_locations(location_init="UCX_FOO") calls = [ call("UCX_FOO_2", 's3://BUCKET1/FOLDER1', 'cred1', skip_validation=True), ] mock_ws.external_locations.create.assert_has_calls(calls, any_order=True) + aws.get_role_policy.assert_not_called() -def test_create_uber_principal_existing_role_in_policy(mock_ws, mock_installation, mock_aws, backend, locations): +def test_create_uber_principal_existing_role_in_policy(mock_ws, mock_installation, backend, locations): instance_profile_arn = "arn:aws:iam::12345:instance-profile/role1" cluster_policy = Policy( policy_id="foo", @@ -172,12 +170,13 @@ def test_create_uber_principal_existing_role_in_policy(mock_ws, mock_installatio ), ) mock_ws.cluster_policies.get.return_value = cluster_policy - mock_aws.get_instance_profile.return_value = instance_profile_arn + aws = create_autospec(AWSResources) + aws.get_instance_profile.return_value = instance_profile_arn locations = ExternalLocations(mock_ws, backend, "ucx") prompts = MockPrompts({"We have identified existing UCX migration role *": "yes"}) - aws_resource_permissions = AWSResourcePermissions(mock_installation, mock_ws, backend, mock_aws, locations, "ucx") + aws_resource_permissions = AWSResourcePermissions(mock_installation, mock_ws, backend, aws, locations, "ucx") aws_resource_permissions.create_uber_principal(prompts) - mock_aws.put_role_policy.assert_called_with( + aws.put_role_policy.assert_called_with( 'role1', 'UCX_MIGRATION_POLICY_ucx', {'s3://BUCKET1/FOLDER1', 's3://BUCKET2/FOLDER2', 's3://BUCKETX/FOLDERX'}, @@ -186,16 +185,17 @@ def test_create_uber_principal_existing_role_in_policy(mock_ws, mock_installatio ) -def test_create_uber_principal_existing_role(mock_ws, mock_installation, mock_aws, backend, locations): +def test_create_uber_principal_existing_role(mock_ws, mock_installation, backend, locations): cluster_policy = Policy( policy_id="foo", name="Unity Catalog Migration (ucx) (me@example.com)", definition=json.dumps({"foo": "bar"}) ) mock_ws.cluster_policies.get.return_value = cluster_policy instance_profile_arn = "arn:aws:iam::12345:instance-profile/role1" - mock_aws.get_instance_profile.return_value = instance_profile_arn + aws = create_autospec(AWSResources) + aws.get_instance_profile.return_value = instance_profile_arn locations = ExternalLocations(mock_ws, backend, "ucx") prompts = MockPrompts({"We have identified existing UCX migration role *": "yes"}) - aws_resource_permissions = AWSResourcePermissions(mock_installation, mock_ws, backend, mock_aws, locations, "ucx") + aws_resource_permissions = AWSResourcePermissions(mock_installation, mock_ws, backend, aws, locations, "ucx") aws_resource_permissions.create_uber_principal(prompts) definition = {"foo": "bar", "aws_attributes.instance_profile_arn": {"type": "fixed", "value": instance_profile_arn}} mock_ws.cluster_policies.edit.assert_called_with( @@ -203,19 +203,20 @@ def test_create_uber_principal_existing_role(mock_ws, mock_installation, mock_aw ) -def test_create_uber_principal_no_existing_role(mock_ws, mock_installation, mock_aws, backend, locations): +def 
test_create_uber_principal_no_existing_role(mock_ws, mock_installation, backend, locations): cluster_policy = Policy( policy_id="foo", name="Unity Catalog Migration (ucx) (me@example.com)", definition=json.dumps({"foo": "bar"}) ) mock_ws.cluster_policies.get.return_value = cluster_policy - mock_aws.role_exists.return_value = False + aws = create_autospec(AWSResources) + aws.role_exists.return_value = False instance_profile_arn = "arn:aws:iam::12345:instance-profile/role1" - mock_aws.create_migration_role.return_value = instance_profile_arn - mock_aws.create_instance_profile.return_value = instance_profile_arn - mock_aws.get_instance_profile.return_value = instance_profile_arn + aws.create_migration_role.return_value = instance_profile_arn + aws.create_instance_profile.return_value = instance_profile_arn + aws.get_instance_profile.return_value = instance_profile_arn locations = ExternalLocations(mock_ws, backend, "ucx") prompts = MockPrompts({"Do you want to create new migration role *": "yes"}) - aws_resource_permissions = AWSResourcePermissions(mock_installation, mock_ws, backend, mock_aws, locations, "ucx") + aws_resource_permissions = AWSResourcePermissions(mock_installation, mock_ws, backend, aws, locations, "ucx") aws_resource_permissions.create_uber_principal(prompts) definition = {"foo": "bar", "aws_attributes.instance_profile_arn": {"type": "fixed", "value": instance_profile_arn}} @@ -224,52 +225,45 @@ def test_create_uber_principal_no_existing_role(mock_ws, mock_installation, mock ) -def test_create_uber_principal_no_storage(mock_ws, mock_installation, mock_aws, locations): +def test_create_uber_principal_no_storage(mock_ws, mock_installation, locations): cluster_policy = Policy( policy_id="foo", name="Unity Catalog Migration (ucx) (me@example.com)", definition=json.dumps({"foo": "bar"}) ) mock_ws.cluster_policies.get.return_value = cluster_policy locations = ExternalLocations(mock_ws, MockBackend(), "ucx") prompts = MockPrompts({}) - aws_resource_permissions = AWSResourcePermissions( - mock_installation, mock_ws, MockBackend(), mock_aws, locations, "ucx" - ) + aws = create_autospec(AWSResources) + aws_resource_permissions = AWSResourcePermissions(mock_installation, mock_ws, MockBackend(), aws, locations, "ucx") assert not aws_resource_permissions.create_uber_principal(prompts) + aws.list_attached_policies_in_role.assert_not_called() + aws.get_role_policy.assert_not_called() -def test_create_uc_role_single(mock_ws, installation_single_role, mock_aws, backend, locations): - aws_resource_permissions = AWSResourcePermissions( - installation_single_role, mock_ws, backend, mock_aws, locations, "ucx" - ) +def test_create_uc_role_single(mock_ws, installation_single_role, backend, locations): + aws = create_autospec(AWSResources) + aws_resource_permissions = AWSResourcePermissions(installation_single_role, mock_ws, backend, aws, locations, "ucx") aws_resource_permissions.create_uc_roles_cli() - assert mock_aws.create_uc_role.assert_called_with('UC_ROLE') is None + aws.create_uc_role.assert_called_with('UC_ROLE') assert ( - mock_aws.put_role_policy.assert_called_with( + aws.put_role_policy.assert_called_with( 'UC_ROLE', 'UC_POLICY', {'s3://BUCKET1/FOLDER1', 's3://BUCKET2/FOLDER2'}, None, None ) is None ) -def test_create_uc_role_multiple(mock_ws, installation_single_role, mock_aws, backend, locations): - aws_resource_permissions = AWSResourcePermissions( - installation_single_role, mock_ws, backend, mock_aws, locations, "ucx" - ) +def test_create_uc_role_multiple(mock_ws, 
installation_single_role, backend, locations): + aws = create_autospec(AWSResources) + aws_resource_permissions = AWSResourcePermissions(installation_single_role, mock_ws, backend, aws, locations, "ucx") aws_resource_permissions.create_uc_roles_cli(single_role=False) - assert call('UC_ROLE-1') in mock_aws.create_uc_role.call_args_list - assert call('UC_ROLE-2') in mock_aws.create_uc_role.call_args_list - assert ( - call('UC_ROLE-1', 'UC_POLICY-1', {'s3://BUCKET1/FOLDER1'}, None, None) - in mock_aws.put_role_policy.call_args_list - ) - assert ( - call('UC_ROLE-2', 'UC_POLICY-2', {'s3://BUCKET2/FOLDER2'}, None, None) - in mock_aws.put_role_policy.call_args_list - ) + aws.create_uc_role.assert_has_calls([call('UC_ROLE-1'), call('UC_ROLE-2')], any_order=True) + assert call('UC_ROLE-1', 'UC_POLICY-1', {'s3://BUCKET1/FOLDER1'}, None, None) in aws.put_role_policy.call_args_list + assert call('UC_ROLE-2', 'UC_POLICY-2', {'s3://BUCKET2/FOLDER2'}, None, None) in aws.put_role_policy.call_args_list -def test_get_uc_compatible_roles(mock_ws, mock_installation, mock_aws, locations): - mock_aws.get_role_policy.side_effect = [ +def test_get_uc_compatible_roles(mock_ws, mock_installation, locations): + aws = create_autospec(AWSResources) + aws.get_role_policy.side_effect = [ [ AWSPolicyAction( resource_type="s3", @@ -309,18 +303,16 @@ def test_get_uc_compatible_roles(mock_ws, mock_installation, mock_aws, locations [], [], ] - mock_aws.list_role_policies.return_value = ["Policy1", "Policy2", "Policy3"] - mock_aws.list_attached_policies_in_role.return_value = [ + aws.list_role_policies.return_value = ["Policy1", "Policy2", "Policy3"] + aws.list_attached_policies_in_role.return_value = [ "arn:aws:iam::aws:policy/Policy1", "arn:aws:iam::aws:policy/Policy2", ] - mock_aws.list_all_uc_roles.return_value = [ + aws.list_all_uc_roles.return_value = [ AWSRole(path='/', role_name='uc-role1', role_id='12345', arn='arn:aws:iam::12345:role/uc-role1') ] - aws_resource_permissions = AWSResourcePermissions( - mock_installation, mock_ws, MockBackend(), mock_aws, locations, "ucx" - ) + aws_resource_permissions = AWSResourcePermissions(mock_installation, mock_ws, MockBackend(), aws, locations, "ucx") # TODO: this is bad practice, we should not be mocking load() methon on a MockInstallation class mock_installation.load = MagicMock( side_effect=[ @@ -372,24 +364,27 @@ def test_get_uc_compatible_roles(mock_ws, mock_installation, mock_aws, locations ) -def test_instance_profiles_empty_mapping(mock_ws, mock_installation, mock_aws, locations, caplog): - aws_resource_permissions = AWSResourcePermissions( - mock_installation, mock_ws, MockBackend(), mock_aws, locations, "ucx" - ) +def test_instance_profiles_empty_mapping(mock_ws, mock_installation, locations, caplog): + aws = create_autospec(AWSResources) + aws_resource_permissions = AWSResourcePermissions(mock_installation, mock_ws, MockBackend(), aws, locations, "ucx") aws_resource_permissions.save_instance_profile_permissions() assert 'No mapping was generated.' 
in caplog.messages + aws.list_role_policies.assert_called_once() + aws.list_role_policies.assert_called_once() + aws.list_attached_policies_in_role.assert_called_once_with('role1') -def test_uc_roles_empty_mapping(mock_ws, mock_installation, mock_aws, locations, caplog): - aws_resource_permissions = AWSResourcePermissions( - mock_installation, mock_ws, MockBackend(), mock_aws, locations, "ucx" - ) +def test_uc_roles_empty_mapping(mock_ws, mock_installation, locations, caplog): + aws = create_autospec(AWSResources) + aws_resource_permissions = AWSResourcePermissions(mock_installation, mock_ws, MockBackend(), aws, locations, "ucx") aws_resource_permissions.save_uc_compatible_roles() assert 'No mapping was generated.' in caplog.messages + aws.list_all_uc_roles.assert_called_once() -def test_save_instance_profile_permissions(mock_ws, mock_installation, mock_aws, locations): - mock_aws.get_role_policy.side_effect = [ +def test_save_instance_profile_permissions(mock_ws, mock_installation, locations): + aws = create_autospec(AWSResources) + aws.get_role_policy.side_effect = [ [ AWSPolicyAction( resource_type="s3", @@ -429,15 +424,13 @@ def test_save_instance_profile_permissions(mock_ws, mock_installation, mock_aws, [], [], ] - mock_aws.list_role_policies.return_value = ["Policy1", "Policy2", "Policy3"] - mock_aws.list_attached_policies_in_role.return_value = [ + aws.list_role_policies.return_value = ["Policy1", "Policy2", "Policy3"] + aws.list_attached_policies_in_role.return_value = [ "arn:aws:iam::aws:policy/Policy1", "arn:aws:iam::aws:policy/Policy2", ] - aws_resource_permissions = AWSResourcePermissions( - mock_installation, mock_ws, MockBackend(), mock_aws, locations, "ucx" - ) + aws_resource_permissions = AWSResourcePermissions(mock_installation, mock_ws, MockBackend(), aws, locations, "ucx") aws_resource_permissions.save_instance_profile_permissions() mock_installation.assert_file_written( @@ -483,8 +476,9 @@ def test_save_instance_profile_permissions(mock_ws, mock_installation, mock_aws, ) -def test_save_uc_compatible_roles(mock_ws, mock_installation, mock_aws, locations): - mock_aws.get_role_policy.side_effect = [ +def test_save_uc_compatible_roles(mock_ws, mock_installation, locations): + aws = create_autospec(AWSResources) + aws.get_role_policy.side_effect = [ [ AWSPolicyAction( resource_type="s3", @@ -524,18 +518,16 @@ def test_save_uc_compatible_roles(mock_ws, mock_installation, mock_aws, location [], [], ] - mock_aws.list_role_policies.return_value = ["Policy1", "Policy2", "Policy3"] - mock_aws.list_attached_policies_in_role.return_value = [ + aws.list_role_policies.return_value = ["Policy1", "Policy2", "Policy3"] + aws.list_attached_policies_in_role.return_value = [ "arn:aws:iam::aws:policy/Policy1", "arn:aws:iam::aws:policy/Policy2", ] - mock_aws.list_all_uc_roles.return_value = [ + aws.list_all_uc_roles.return_value = [ AWSRole(path='/', role_name='uc-role1', role_id='12345', arn='arn:aws:iam::12345:role/uc-role1') ] - aws_resource_permissions = AWSResourcePermissions( - mock_installation, mock_ws, MockBackend(), mock_aws, locations, "ucx" - ) + aws_resource_permissions = AWSResourcePermissions(mock_installation, mock_ws, MockBackend(), aws, locations, "ucx") aws_resource_permissions.save_uc_compatible_roles() mock_installation.assert_file_written( 'uc_roles_access.csv', diff --git a/tests/unit/aws/test_credentials.py b/tests/unit/aws/test_credentials.py index 391e7a7f27..b7befd9e10 100644 --- a/tests/unit/aws/test_credentials.py +++ b/tests/unit/aws/test_credentials.py @@ -26,11 
+26,6 @@ def installation(): return MockInstallation(DEFAULT_CONFIG) -@pytest.fixture -def ws(): - return create_autospec(WorkspaceClient) - - def side_effect_create_aws_storage_credential(name, aws_iam_role, comment, read_only): return StorageCredentialInfo( name=name, aws_iam_role=AwsIamRoleResponse(role_arn=aws_iam_role.role_arn), comment=comment, read_only=read_only @@ -38,7 +33,8 @@ def side_effect_create_aws_storage_credential(name, aws_iam_role, comment, read_ @pytest.fixture -def credential_manager(ws): +def credential_manager(): + ws = create_autospec(WorkspaceClient) ws.storage_credentials.list.return_value = [ StorageCredentialInfo( aws_iam_role=AwsIamRoleResponse(role_arn="arn:aws:iam::123456789012:role/example-role-name") @@ -85,7 +81,7 @@ def test_create_storage_credentials(credential_manager): @pytest.fixture -def instance_profile_migration(ws, installation, credential_manager): +def instance_profile_migration(installation, credential_manager): def generate_instance_profiles(num_instance_profiles: int): arp = create_autospec(AWSResourcePermissions) arp.load_uc_compatible_roles.return_value = [ @@ -97,13 +93,12 @@ def generate_instance_profiles(num_instance_profiles: int): ) for i in range(num_instance_profiles) ] - - return IamRoleMigration(installation, ws, arp, credential_manager) + return IamRoleMigration(installation, arp, credential_manager) return generate_instance_profiles -def test_print_action_plan(caplog, ws, instance_profile_migration, credential_manager): +def test_print_action_plan(caplog, instance_profile_migration, credential_manager): caplog.set_level(logging.INFO) prompts = MockPrompts({"Above IAM roles will be migrated to UC storage credentials*": "Yes"}) @@ -118,20 +113,21 @@ def test_print_action_plan(caplog, ws, instance_profile_migration, credential_ma assert False, "Action plan is not logged" -def test_migrate_credential_failed_creation(caplog, ws, instance_profile_migration): +def test_migrate_credential_failed_creation(caplog, instance_profile_migration): caplog.set_level(logging.ERROR) prompts = MockPrompts( { "Above IAM roles will be migrated to UC storage credentials*": "Yes", } ) + ws = create_autospec(WorkspaceClient) ws.storage_credentials.create.return_value = StorageCredentialInfo(aws_iam_role=None) ws.storage_credentials.create.side_effect = None instance_profile_migration(1).run(prompts) assert "Failed to create storage credential for IAM role: arn:aws:iam::123456789012:role/prefix0" in caplog.messages -def test_run_without_confirmation(ws, instance_profile_migration): +def test_run_without_confirmation(instance_profile_migration): prompts = MockPrompts( { "Above IAM roles will be migrated to UC storage credentials*": "No", @@ -142,18 +138,18 @@ def test_run_without_confirmation(ws, instance_profile_migration): @pytest.mark.parametrize("num_instance_profiles", [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]) -def test_run(ws, instance_profile_migration, num_instance_profiles: int): +def test_run(instance_profile_migration, num_instance_profiles: int): prompts = MockPrompts({"Above IAM roles will be migrated to UC storage credentials*": "Yes"}) migration = instance_profile_migration(num_instance_profiles) results = migration.run(prompts) assert len(results) == num_instance_profiles -def test_run_no_credential_to_migrate(caplog, ws, installation, credential_manager): +def test_run_no_credential_to_migrate(caplog, installation, credential_manager): caplog.set_level(logging.INFO) arp = create_autospec(AWSResourcePermissions) 
arp.load_uc_compatible_roles.return_value = [] - migration = IamRoleMigration(installation, ws, arp, credential_manager) + migration = IamRoleMigration(installation, arp, credential_manager) migration.run(MockPrompts({})) assert "No IAM Role to migrate" in caplog.messages diff --git a/tests/unit/azure/test_access.py b/tests/unit/azure/test_access.py index f13191f74b..4b65e9e6a5 100644 --- a/tests/unit/azure/test_access.py +++ b/tests/unit/azure/test_access.py @@ -32,11 +32,17 @@ def test_save_spn_permissions_no_external_table(caplog): location = ExternalLocations(w, backend, "ucx") installation = MockInstallation() azure_resources = create_autospec(AzureResources) - azure_resource_permission = AzureResourcePermissions(installation, w, azure_resources, location) azure_resources.storage_accounts.return_value = [] + azure_resource_permission = AzureResourcePermissions(installation, w, azure_resources, location) azure_resource_permission.save_spn_permissions() msg = "There are no external table present with azure storage account. Please check if assessment job is run" assert [rec.message for rec in caplog.records if msg in rec.message] + w.cluster_policies.get.assert_not_called() + w.secrets.get_secret.assert_not_called() + w.secrets.create_scope.assert_not_called() + w.secrets.put_secret.assert_not_called() + w.cluster_policies.edit.assert_not_called() + w.get_workspace_id.assert_not_called() def test_save_spn_permissions_no_external_tables(): @@ -49,6 +55,12 @@ def test_save_spn_permissions_no_external_tables(): azure_resource_permission = AzureResourcePermissions(installation, w, azure_resources, location) azure_resources.storage_accounts.return_value = [] assert not azure_resource_permission.save_spn_permissions() + w.cluster_policies.get.assert_not_called() + w.secrets.get_secret.assert_not_called() + w.secrets.create_scope.assert_not_called() + w.secrets.put_secret.assert_not_called() + w.cluster_policies.edit.assert_not_called() + w.get_workspace_id.assert_not_called() def test_save_spn_permissions_no_azure_storage_account(): @@ -63,6 +75,12 @@ def test_save_spn_permissions_no_azure_storage_account(): azure_resource_permission = AzureResourcePermissions(installation, w, azure_resources, location) azure_resources.storage_accounts.return_value = [] assert not azure_resource_permission.save_spn_permissions() + w.cluster_policies.get.assert_not_called() + w.secrets.get_secret.assert_not_called() + w.secrets.create_scope.assert_not_called() + w.secrets.put_secret.assert_not_called() + w.cluster_policies.edit.assert_not_called() + w.get_workspace_id.assert_not_called() def test_save_spn_permissions_valid_azure_storage_account(): @@ -111,6 +129,12 @@ def test_save_spn_permissions_valid_azure_storage_account(): ] azure_resource_permission = AzureResourcePermissions(installation, w, azure_resources, location) azure_resource_permission.save_spn_permissions() + w.cluster_policies.get.assert_not_called() + w.secrets.get_secret.assert_not_called() + w.secrets.create_scope.assert_not_called() + w.secrets.put_secret.assert_not_called() + w.cluster_policies.edit.assert_not_called() + w.get_workspace_id.assert_not_called() installation.assert_file_written( 'azure_storage_account_info.csv', [ @@ -153,6 +177,16 @@ def test_create_global_spn_no_policy(): prompts = MockPrompts({"Enter a name for the uber service principal to be created*": "UCXServicePrincipal"}) with pytest.raises(ValueError): azure_resource_permission.create_uber_principal(prompts) + azure_resources.storage_accounts.assert_not_called() + 
azure_resources.create_or_update_access_connector.assert_not_called() + azure_resources.role_assignments.assert_not_called() + azure_resources.containers.assert_not_called() + w.cluster_policies.get.assert_not_called() + w.secrets.get_secret.assert_not_called() + w.secrets.create_scope.assert_not_called() + w.secrets.put_secret.assert_not_called() + w.cluster_policies.edit.assert_not_called() + w.get_workspace_id.assert_called_once() def test_create_global_spn_spn_present(): @@ -175,6 +209,16 @@ def test_create_global_spn_spn_present(): prompts = MockPrompts({"Enter a name for the uber service principal to be created*": "UCXServicePrincipal"}) azure_resource_permission = AzureResourcePermissions(installation, w, azure_resources, location) assert not azure_resource_permission.create_uber_principal(prompts) + azure_resources.storage_accounts.assert_not_called() + azure_resources.create_or_update_access_connector.assert_not_called() + azure_resources.role_assignments.assert_not_called() + azure_resources.containers.assert_not_called() + w.cluster_policies.get.assert_not_called() + w.secrets.get_secret.assert_not_called() + w.secrets.create_scope.assert_not_called() + w.secrets.put_secret.assert_not_called() + w.cluster_policies.edit.assert_not_called() + w.get_workspace_id.assert_called_once() def test_create_global_spn_no_storage(): @@ -198,6 +242,16 @@ def test_create_global_spn_no_storage(): azure_resources = create_autospec(AzureResources) azure_resource_permission = AzureResourcePermissions(installation, w, azure_resources, location) assert not azure_resource_permission.create_uber_principal(prompts) + azure_resources.storage_accounts.assert_not_called() + azure_resources.create_or_update_access_connector.assert_not_called() + azure_resources.role_assignments.assert_not_called() + azure_resources.containers.assert_not_called() + w.cluster_policies.get.assert_not_called() + w.secrets.get_secret.assert_not_called() + w.secrets.create_scope.assert_not_called() + w.secrets.put_secret.assert_not_called() + w.cluster_policies.edit.assert_not_called() + w.get_workspace_id.assert_called_once() def test_create_global_spn_cluster_policy_not_found(): @@ -224,6 +278,12 @@ def test_create_global_spn_cluster_policy_not_found(): azure_resource_permission = AzureResourcePermissions(installation, w, azure_resources, location) with pytest.raises(NotFound): azure_resource_permission.create_uber_principal(prompts) + w.cluster_policies.get.assert_called_once() + w.secrets.get_secret.assert_not_called() + w.secrets.create_scope.assert_called_with("ucx") + w.secrets.put_secret.assert_called_with('ucx', 'uber_principal_secret', string_value='mypwd') + w.cluster_policies.edit.assert_not_called() + w.get_workspace_id.assert_called_once() def test_create_global_spn(): @@ -302,6 +362,10 @@ def test_create_access_connectors_for_storage_accounts_logs_no_storage_accounts( azure_resource_permission = AzureResourcePermissions(installation, w, azure_resources, location) azure_resource_permission.create_access_connectors_for_storage_accounts() + + w.cluster_policies.get.assert_not_called() + w.secrets.get_secret.assert_not_called() + w.secrets.create_scope.assert_not_called() assert ( "There are no external table present with azure storage account. 
Please check if assessment job is run" in caplog.messages @@ -345,6 +409,11 @@ def test_create_access_connectors_for_storage_accounts_one_access_connector(): azure_resource_permission = AzureResourcePermissions(installation, w, azure_resources, location) access_connectors = azure_resource_permission.create_access_connectors_for_storage_accounts() + + w.cluster_policies.get.assert_not_called() + w.secrets.get_secret.assert_not_called() + w.secrets.create_scope.assert_not_called() + assert len(access_connectors) == 1 assert access_connectors[0].name == "ac-test" @@ -388,3 +457,7 @@ def test_create_access_connectors_for_storage_accounts_log_permission_applied(ca with caplog.at_level(logging.DEBUG, logger="databricks.labs.ucx"): azure_resource_permission.create_access_connectors_for_storage_accounts() assert any("STORAGE_BLOB_DATA_CONTRIBUTOR" in message for message in caplog.messages) + + w.cluster_policies.get.assert_not_called() + w.secrets.get_secret.assert_not_called() + w.secrets.create_scope.assert_not_called() diff --git a/tests/unit/azure/test_credentials.py b/tests/unit/azure/test_credentials.py index aa68c033e0..5dd11bbedb 100644 --- a/tests/unit/azure/test_credentials.py +++ b/tests/unit/azure/test_credentials.py @@ -39,11 +39,6 @@ from tests.unit import DEFAULT_CONFIG -@pytest.fixture -def ws(): - return create_autospec(WorkspaceClient) - - @pytest.fixture def installation(): return MockInstallation( @@ -128,7 +123,8 @@ def side_effect_validate_storage_credential(storage_credential_name, url, read_o @pytest.fixture -def credential_manager(ws): +def credential_manager(): + ws = create_autospec(WorkspaceClient) ws.storage_credentials.list.return_value = [ StorageCredentialInfo(aws_iam_role=AwsIamRoleResponse("arn:aws:iam::123456789012:role/example-role-name")), StorageCredentialInfo( @@ -249,16 +245,16 @@ def test_validate_storage_credentials_failed_operation(credential_manager): @pytest.fixture -def sp_migration(ws, installation, credential_manager): +def sp_migration(installation, credential_manager): + ws = create_autospec(WorkspaceClient) ws.secrets.get_secret.return_value = GetSecretResponse( value=base64.b64encode("hello world".encode("utf-8")).decode("utf-8") ) - - arp = AzureResourcePermissions( - installation, ws, create_autospec(AzureResources), create_autospec(ExternalLocations) - ) - + # pylint: disable=mock-no-usage + azure_resources = create_autospec(AzureResources) + external_locations = create_autospec(ExternalLocations) sp_crawler = create_autospec(AzureServicePrincipalCrawler) + arp = AzureResourcePermissions(installation, ws, azure_resources, external_locations) sp_crawler.snapshot.return_value = [ AzureServicePrincipalInfo("app_secret1", "test_scope", "test_key", "tenant_id_1", "storage1"), AzureServicePrincipalInfo("app_secret2", "test_scope", "test_key", "tenant_id_1", "storage1"), @@ -276,7 +272,8 @@ def sp_migration(ws, installation, credential_manager): (GetSecretResponse(value=base64.b64encode("Olá, Mundo".encode("iso-8859-1")).decode("iso-8859-1")), 0), ], ) -def test_read_secret_value_decode(ws, sp_migration, secret_bytes_value, num_migrated): +def test_read_secret_value_decode(sp_migration, secret_bytes_value, num_migrated): + ws = create_autospec(WorkspaceClient) ws.secrets.get_secret.return_value = secret_bytes_value prompts = MockPrompts( @@ -288,15 +285,17 @@ def test_read_secret_value_decode(ws, sp_migration, secret_bytes_value, num_migr assert len(sp_migration.run(prompts)) == num_migrated -def test_read_secret_value_none(ws, sp_migration): +def 
test_read_secret_value_none(sp_migration): + ws = create_autospec(WorkspaceClient) ws.secrets.get_secret.return_value = GetSecretResponse(value=None) prompts = MockPrompts({"Above Azure Service Principals will be migrated to UC storage credentials*": "Yes"}) with pytest.raises(AssertionError): sp_migration.run(prompts) -def test_read_secret_read_exception(caplog, ws, sp_migration): +def test_read_secret_read_exception(caplog, sp_migration): caplog.set_level(logging.INFO) + ws = create_autospec(WorkspaceClient) ws.secrets.get_secret.side_effect = ResourceDoesNotExist() prompts = MockPrompts( @@ -310,8 +309,9 @@ def test_read_secret_read_exception(caplog, ws, sp_migration): assert re.search(r"removed on the backend: .*", caplog.text) -def test_print_action_plan(caplog, ws, sp_migration): +def test_print_action_plan(caplog, sp_migration): caplog.set_level(logging.INFO) + ws = create_autospec(WorkspaceClient) ws.secrets.get_secret.return_value = GetSecretResponse( value=base64.b64encode("hello world".encode("utf-8")).decode("utf-8") ) @@ -333,7 +333,8 @@ def test_print_action_plan(caplog, ws, sp_migration): assert False, "Action plan is not logged" -def test_run_without_confirmation(ws, sp_migration): +def test_run_without_confirmation(sp_migration): + ws = create_autospec(WorkspaceClient) ws.secrets.get_secret.return_value = GetSecretResponse( value=base64.b64encode("hello world".encode("utf-8")).decode("utf-8") ) @@ -347,7 +348,7 @@ def test_run_without_confirmation(ws, sp_migration): assert sp_migration.run(prompts) == [] -def test_run(ws, installation, sp_migration): +def test_run(installation, sp_migration): prompts = MockPrompts( { "Above Azure Service Principals will be migrated to UC storage credentials*": "Yes", diff --git a/tests/unit/azure/test_locations.py b/tests/unit/azure/test_locations.py index f68f1e2fdc..932b600004 100644 --- a/tests/unit/azure/test_locations.py +++ b/tests/unit/azure/test_locations.py @@ -20,22 +20,18 @@ from tests.unit.azure import azure_api_client -@pytest.fixture -def ws(): - return create_autospec(WorkspaceClient) - - -def location_migration_for_test(ws, mock_backend, mock_installation): +def location_migration_for_test(mock_backend, mock_installation): + ws = create_autospec(WorkspaceClient) # pylint: disable=mock-no-usage azurerm = AzureResources(azure_api_client(), azure_api_client()) location_crawler = ExternalLocations(ws, mock_backend, "location_test") - - return ExternalLocationsMigration( - ws, location_crawler, AzureResourcePermissions(mock_installation, ws, azurerm, location_crawler), azurerm - ) + azure_resource_permissions = AzureResourcePermissions(mock_installation, ws, azurerm, location_crawler) + external_locations_migration = ExternalLocationsMigration(ws, location_crawler, azure_resource_permissions, azurerm) + return external_locations_migration -def test_run_service_principal(ws): +def test_run_service_principal(): """test run with service principal based storage credentials""" + ws = create_autospec(WorkspaceClient) # mock crawled HMS external locations mock_backend = MockBackend( @@ -91,7 +87,7 @@ def test_run_service_principal(ws): } ) - location_migration = location_migration_for_test(ws, mock_backend, mock_installation) + location_migration = location_migration_for_test(mock_backend, mock_installation) location_migration.run() ws.external_locations.create.assert_any_call( @@ -112,8 +108,9 @@ def test_run_service_principal(ws): ) -def test_skip_unsupported_location(ws, caplog): +def test_skip_unsupported_location(caplog): # mock 
crawled HMS external locations with two unsupported locations adl and wasbs + ws = create_autospec(WorkspaceClient) mock_backend = MockBackend( rows={ r"SELECT \* FROM location_test.external_locations": MockBackend.rows("location", "table_count")[ @@ -171,7 +168,7 @@ def test_skip_unsupported_location(ws, caplog): } ) - location_migration = location_migration_for_test(ws, mock_backend, mock_installation) + location_migration = location_migration_for_test(mock_backend, mock_installation) location_migration.run() ws.external_locations.create.assert_called_once_with( @@ -186,8 +183,9 @@ def test_skip_unsupported_location(ws, caplog): assert "Skip unsupported location: wasbs://container2@test.dfs.core.windows.net" in caplog.text -def test_run_managed_identity(ws, mocker): +def test_run_managed_identity(): """test run with managed identity based storage credentials""" + ws = create_autospec(WorkspaceClient) # mock crawled HMS external locations mock_backend = MockBackend( @@ -242,7 +240,7 @@ def test_run_managed_identity(ws, mocker): } ) - location_migration = location_migration_for_test(ws, mock_backend, mock_installation) + location_migration = location_migration_for_test(mock_backend, mock_installation) location_migration.run() ws.external_locations.create.assert_any_call( @@ -276,8 +274,9 @@ def create_side_effect(location_name, *args, **kwargs): # pylint: disable=unuse raise InvalidParameterValue("Other InvalidParameterValue exception") -def test_location_failed_to_read(ws): +def test_location_failed_to_read(): """If read-only location is empty, READ permission check will fail with PermissionDenied""" + ws = create_autospec(WorkspaceClient) # mock crawled HMS external locations mock_backend = MockBackend( @@ -326,7 +325,7 @@ def test_location_failed_to_read(ws): # make external_locations.create to raise PermissionDenied when first called to create read-only external location. 
ws.external_locations.create.side_effect = create_side_effect - location_migration = location_migration_for_test(ws, mock_backend, mock_installation) + location_migration = location_migration_for_test(mock_backend, mock_installation) # assert PermissionDenied got re-threw if the exception with pytest.raises(PermissionDenied): @@ -343,8 +342,9 @@ def test_location_failed_to_read(ws): ) -def test_overlapping_locations(ws, caplog): +def test_overlapping_locations(caplog): caplog.set_level(logging.INFO) + ws = create_autospec(WorkspaceClient) # mock crawled HMS external locations mock_backend = MockBackend( @@ -397,7 +397,7 @@ def test_overlapping_locations(ws, caplog): ws.external_locations.create.side_effect = create_side_effect - location_migration = location_migration_for_test(ws, mock_backend, mock_installation) + location_migration = location_migration_for_test(mock_backend, mock_installation) # assert InvalidParameterValue got re-threw if it's not caused by overlapping location with pytest.raises(InvalidParameterValue): @@ -457,7 +457,7 @@ def test_corner_cases_with_missing_fields(ws, caplog, mocker): } ) - location_migration = location_migration_for_test(ws, mock_backend, mock_installation) + location_migration = location_migration_for_test(mock_backend, mock_installation) location_migration.run() ws.external_locations.create.assert_not_called() diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py index cb9d36d8a9..7487c4fae3 100644 --- a/tests/unit/conftest.py +++ b/tests/unit/conftest.py @@ -77,6 +77,6 @@ def inner(cb, **replace) -> RuntimeContext: @pytest.fixture def acc_client(): - acc = create_autospec(AccountClient) + acc = create_autospec(AccountClient) # pylint: disable=mock-no-usage acc.config = Config(host="https://accounts.cloud.databricks.com", account_id="123", token="123") return acc diff --git a/tests/unit/framework/test_tasks.py b/tests/unit/framework/test_tasks.py index 7f76e3d633..5314e4d7d7 100644 --- a/tests/unit/framework/test_tasks.py +++ b/tests/unit/framework/test_tasks.py @@ -25,7 +25,7 @@ def test_replace_pydoc(): def test_task_cloud(): - ws = create_autospec(WorkspaceClient) + ws = create_autospec(WorkspaceClient) # pylint: disable=mock-no-usage ws.config.is_aws = True ws.config.is_azure = False ws.config.is_gcp = False diff --git a/tests/unit/hive_metastore/test_catalog_schema.py b/tests/unit/hive_metastore/test_catalog_schema.py index 82fe933ef8..ae941d2058 100644 --- a/tests/unit/hive_metastore/test_catalog_schema.py +++ b/tests/unit/hive_metastore/test_catalog_schema.py @@ -92,6 +92,9 @@ def test_create_bad_location(): catalog_schema = prepare_test(ws) with pytest.raises(NotFound): catalog_schema.create_all_catalogs_schemas(mock_prompts) + ws.catalogs.create.assert_not_called() + ws.catalogs.list.assert_called_once() + ws.schemas.create.assert_not_called() def test_no_catalog_storage(): @@ -111,6 +114,8 @@ def test_catalog_schema_acl(): catalog_schema.create_all_catalogs_schemas( mock_prompts, ) + ws.schemas.create.assert_any_call("schema2", "catalog2", comment="Created by UCX") + ws.catalogs.create.assert_called_once_with("catalog2", comment="Created by UCX") queries = [ 'GRANT USE SCHEMA ON DATABASE catalog1.schema3 TO `user1`', 'GRANT USE SCHEMA ON DATABASE catalog2.schema2 TO `user1`', diff --git a/tests/unit/hive_metastore/test_mapping.py b/tests/unit/hive_metastore/test_mapping.py index 19c0bf4774..67d926ac5a 100644 --- a/tests/unit/hive_metastore/test_mapping.py +++ b/tests/unit/hive_metastore/test_mapping.py @@ -56,6 +56,8 @@ def 
test_current_tables_empty_fails(): with pytest.raises(ValueError): list(table_mapping.current_tables(tables_crawler, "a", "b")) + ws.tables.get.assert_not_called() + def test_current_tables_some_rules(): ws = create_autospec(WorkspaceClient) @@ -84,6 +86,8 @@ def test_current_tables_some_rules(): assert rule.as_uc_table_key == "b.foo.bar" assert rule.as_hms_table_key == "hive_metastore.foo.bar" + ws.tables.get.assert_not_called() + def test_save_mapping(): ws = create_autospec(WorkspaceClient) @@ -109,6 +113,8 @@ def test_save_mapping(): table_mapping.save(tables_crawler, workspace_info) + ws.tables.get.assert_not_called() + installation.assert_file_written( 'mapping.csv', [ @@ -136,6 +142,8 @@ def test_load_mapping_not_found(): with pytest.raises(ValueError): table_mapping.load() + ws.tables.get.assert_not_called() + def test_load_mapping(): ws = create_autospec(WorkspaceClient) @@ -160,6 +168,8 @@ def test_load_mapping(): rules = table_mapping.load() + ws.tables.get.assert_not_called() + assert [ Rule( workspace_name="foo-bar", @@ -178,6 +188,7 @@ def test_skip_happy_path(caplog): installation = MockInstallation() mapping = TableMapping(installation, ws, sbe) mapping.skip_table(schema="schema", table="table") + ws.tables.get.assert_not_called() sbe.execute.assert_called_with(f"ALTER TABLE schema.table SET TBLPROPERTIES('{mapping.UCX_SKIP_PROPERTY}' = true)") assert len(caplog.records) == 0 mapping.skip_schema(schema="schema") @@ -192,6 +203,7 @@ def test_skip_missing_schema(caplog): sbe.execute.side_effect = NotFound("[SCHEMA_NOT_FOUND]") mapping = TableMapping(installation, ws, sbe) mapping.skip_schema(schema="schema") + ws.tables.get.assert_not_called() assert [rec.message for rec in caplog.records if "schema not found" in rec.message.lower()] @@ -202,6 +214,7 @@ def test_skip_missing_table(caplog): sbe.execute.side_effect = NotFound("[TABLE_OR_VIEW_NOT_FOUND]") mapping = TableMapping(installation, ws, sbe) mapping.skip_table('foo', table="table") + ws.tables.get.assert_not_called() assert [rec.message for rec in caplog.records if "table not found" in rec.message.lower()] @@ -590,6 +603,8 @@ def test_database_not_exists_when_checking_inscope(caplog): ) table_mapping = TableMapping(installation, client, backend) table_mapping.get_tables_to_migrate(tables_crawler) + client.tables.get.assert_not_called() + tables_crawler.snapshot.assert_called_once() assert ( "Schema hive_metastore.deleted_schema no longer exists. Skipping its properties check and migration." 
in caplog.text diff --git a/tests/unit/hive_metastore/test_table_migrate.py b/tests/unit/hive_metastore/test_table_migrate.py index dc37eb0053..87a4f7dcaa 100644 --- a/tests/unit/hive_metastore/test_table_migrate.py +++ b/tests/unit/hive_metastore/test_table_migrate.py @@ -70,6 +70,8 @@ def test_migrate_dbfs_root_tables_should_produce_proper_queries(ws): table_migrate.migrate_tables(what=What.DBFS_ROOT_DELTA) table_migrate.migrate_tables(what=What.EXTERNAL_SYNC) + principal_grants.get_interactive_cluster_grants.assert_not_called() + assert ( "CREATE TABLE IF NOT EXISTS ucx_default.db1_dst.managed_dbfs DEEP CLONE hive_metastore.db1_src.managed_dbfs;" in backend.queries @@ -118,6 +120,8 @@ def test_dbfs_non_delta_tables_should_produce_proper_queries(ws): ) table_migrate.migrate_tables(what=What.DBFS_ROOT_NON_DELTA) + principal_grants.get_interactive_cluster_grants.assert_not_called() + assert ( "CREATE TABLE IF NOT EXISTS ucx_default.db1_dst.managed_dbfs " "AS SELECT * FROM hive_metastore.db1_src.managed_dbfs" in backend.queries @@ -157,6 +161,8 @@ def test_migrate_dbfs_root_tables_should_be_skipped_when_upgrading_external(ws): ) table_migrate.migrate_tables(what=What.EXTERNAL_SYNC) + principal_grants.get_interactive_cluster_grants.assert_not_called() + assert len(backend.queries) == 0 @@ -184,6 +190,8 @@ def test_migrate_external_tables_should_produce_proper_queries(ws): ) table_migrate.migrate_tables(what=What.EXTERNAL_SYNC) + principal_grants.get_interactive_cluster_grants.assert_not_called() + assert backend.queries == [ "SYNC TABLE ucx_default.db1_dst.external_dst FROM hive_metastore.db1_src.external_src;", ( @@ -218,6 +226,7 @@ def test_migrate_external_table_failed_sync(ws, caplog): ) table_migrate.migrate_tables(what=What.EXTERNAL_SYNC) assert "SYNC command failed to migrate" in caplog.text + principal_grants.get_interactive_cluster_grants.assert_not_called() @pytest.mark.parametrize( @@ -304,6 +313,9 @@ def test_migrate_external_hiveserde_table_in_place( mounts_crawler=mount_crawler, hiveserde_in_place_migrate=hiveserde_in_place_migrate, ) + + principal_grants.get_interactive_cluster_grants.assert_not_called() + if migrated: assert expected_value in backend.queries else: @@ -354,6 +366,7 @@ def test_migrate_external_tables_ctas_should_produce_proper_queries(ws, what, te table_migrate.migrate_tables(what=what, mounts_crawler=mounts_crawler) assert expected_query in backend.queries + principal_grants.get_interactive_cluster_grants.assert_not_called() def test_migrate_already_upgraded_table_should_produce_no_queries(ws): @@ -401,6 +414,7 @@ def test_migrate_already_upgraded_table_should_produce_no_queries(ws): table_migrate.migrate_tables(what=What.EXTERNAL_SYNC) assert len(backend.queries) == 0 + principal_grants.get_interactive_cluster_grants.assert_not_called() def test_migrate_unsupported_format_table_should_produce_no_queries(ws): @@ -428,6 +442,7 @@ def test_migrate_unsupported_format_table_should_produce_no_queries(ws): table_migrate.migrate_tables(what=What.UNKNOWN) assert len(backend.queries) == 0 + principal_grants.get_interactive_cluster_grants.assert_not_called() def test_migrate_view_should_produce_proper_queries(ws): @@ -472,6 +487,7 @@ def test_migrate_view_should_produce_proper_queries(ws): assert src in backend.queries dst = f"ALTER VIEW ucx_default.db1_dst.view_dst SET TBLPROPERTIES ('upgraded_from' = 'hive_metastore.db1_src.view_src' , '{Table.UPGRADED_FROM_WS_PARAM}' = '12345');" assert dst in backend.queries + 
principal_grants.get_interactive_cluster_grants.assert_not_called() def test_migrate_view_with_columns(ws): @@ -510,11 +526,12 @@ def test_migrate_view_with_columns(ws): create = "CREATE OR REPLACE VIEW IF NOT EXISTS ucx_default.db1_dst.view_dst (a, b) AS SELECT * FROM ucx_default.db1_dst.new_managed_dbfs" assert create in backend.queries + principal_grants.get_interactive_cluster_grants.assert_not_called() def get_table_migrator(backend: SqlBackend) -> TablesMigrator: table_crawler = create_autospec(TablesCrawler) - grant_crawler = create_autospec(GrantsCrawler) + grant_crawler = create_autospec(GrantsCrawler) # pylint: disable=mock-no-usage client = workspace_client_mock() client.catalogs.list.return_value = [CatalogInfo(name="cat1")] client.schemas.list.return_value = [ @@ -601,7 +618,7 @@ def get_table_migrator(backend: SqlBackend) -> TablesMigrator: group_manager = GroupManager(backend, client, "inventory_database") table_mapping = table_mapping_mock() migration_status_refresher = MigrationStatusRefresher(client, backend, "inventory_database", table_crawler) - principal_grants = create_autospec(PrincipalACL) + principal_grants = create_autospec(PrincipalACL) # pylint: disable=mock-no-usage table_migrate = TablesMigrator( table_crawler, grant_crawler, @@ -686,6 +703,10 @@ def test_no_migrated_tables(ws): table_migrate.migrate_tables(what=What.DBFS_ROOT_DELTA) table_migrate.revert_migrated_tables("test_schema1", "test_table1") ws.catalogs.list.assert_called() + principal_grants.get_interactive_cluster_grants.assert_not_called() + table_crawler.snapshot.assert_called() + table_mapping.get_tables_to_migrate.assert_called() + grant_crawler.snapshot.assert_not_called() def test_revert_report(ws, capsys): @@ -727,6 +748,10 @@ def test_empty_revert_report(ws): ) table_migrate.migrate_tables(what=What.DBFS_ROOT_DELTA) assert not table_migrate.print_revert_report(delete_managed=False) + principal_grants.get_interactive_cluster_grants.assert_not_called() + table_crawler.snapshot.assert_called() + table_mapping.get_tables_to_migrate.assert_called() + grant_crawler.snapshot.assert_not_called() def test_is_upgraded(ws): @@ -757,6 +782,10 @@ def test_is_upgraded(ws): table_migrate.migrate_tables(what=What.DBFS_ROOT_DELTA) assert table_migrate.is_migrated("schema1", "table1") assert not table_migrate.is_migrated("schema1", "table2") + table_crawler.snapshot.assert_not_called() + table_mapping.get_tables_to_migrate.assert_called() + grant_crawler.snapshot.assert_not_called() + principal_grants.get_interactive_cluster_grants.assert_not_called() def test_table_status(): @@ -861,6 +890,8 @@ def test_table_status_reset(): assert backend.queries == [ "DELETE FROM hive_metastore.ucx.migration_status", ] + table_crawler.snapshot.assert_not_called() + client.catalogs.list.assert_not_called() def test_table_status_seen_tables(caplog): @@ -921,6 +952,7 @@ def test_table_status_seen_tables(caplog): } assert "Catalog deleted_cat no longer exists. Skipping checking its migration status." in caplog.text assert "Schema cat1.deleted_schema no longer exists. Skipping checking its migration status." 
in caplog.text + table_crawler.snapshot.assert_not_called() GRANTS = MockBackend.rows("principal", "action_type", "catalog", "database", "table", "view") @@ -985,6 +1017,8 @@ def test_migrate_acls_should_produce_proper_queries(ws, caplog): table_migrate.migrate_tables(what=What.VIEW, acl_strategy=[AclMigrationWhat.LEGACY_TACL]) + principal_grants.get_interactive_cluster_grants.assert_called() + assert "GRANT SELECT ON TABLE ucx_default.db1_dst.managed_dbfs TO `account group`" in backend.queries assert "GRANT MODIFY ON TABLE ucx_default.db1_dst.managed_dbfs TO `account group`" not in backend.queries assert "ALTER TABLE ucx_default.db1_dst.managed_dbfs OWNER TO `account group`" not in backend.queries @@ -1097,6 +1131,9 @@ def test_migrate_views_should_be_properly_sequenced(ws): migration_status_refresher, principal_grants, ) + principal_grants.get_interactive_cluster_grants.assert_not_called() + table_crawler.snapshot.assert_not_called() + grant_crawler.snapshot.assert_not_called() tasks = table_migrate.migrate_tables(what=What.VIEW) table_keys = [task.args[0].src.key for task in tasks] assert table_keys.index("hive_metastore.db1_src.v1_src") > table_keys.index("hive_metastore.db1_src.v3_src") diff --git a/tests/unit/hive_metastore/test_views_sequencer.py b/tests/unit/hive_metastore/test_views_sequencer.py index 3fc382f0db..0e1603200d 100644 --- a/tests/unit/hive_metastore/test_views_sequencer.py +++ b/tests/unit/hive_metastore/test_views_sequencer.py @@ -2,7 +2,6 @@ from itertools import chain from pathlib import Path from typing import TypeVar -from unittest.mock import create_autospec import pytest from databricks.labs.lsql.backends import MockBackend, SqlBackend @@ -21,11 +20,14 @@ def flatten(lists: list[list[T]]) -> list[T]: return list(chain.from_iterable(lists)) +_RULE = Rule("ws1", "cat1", "schema", "db1", "table1", "table2") + + def test_migrate_no_view_returns_empty_sequence(): samples = Samples.load("db1.t1", "db2.t1") sql_backend = mock_backend(samples, "db1", "db2") crawler = TablesCrawler(sql_backend, SCHEMA_NAME, ["db1", "db2"]) - tables = [TableToMigrate(table, create_autospec(Rule)) for table in crawler.snapshot()] + tables = [TableToMigrate(table, _RULE) for table in crawler.snapshot()] migration_index = MigrationIndex( [MigrationStatus("db1", "t1", "cat1", "db2", "t1"), MigrationStatus("db2", "t2", "cat1", "db2", "t1")], ) @@ -39,7 +41,7 @@ def test_migrate_direct_view_returns_singleton_sequence() -> None: samples = Samples.load("db1.t1", "db1.v1") sql_backend = mock_backend(samples, "db1") crawler = TablesCrawler(sql_backend, SCHEMA_NAME, ["db1"]) - tables = [TableToMigrate(table, create_autospec(Rule)) for table in crawler.snapshot()] + tables = [TableToMigrate(table, _RULE) for table in crawler.snapshot()] migration_index = MigrationIndex([MigrationStatus("db1", "t1", "cat1", "db1", "t1")]) sequencer = ViewsMigrationSequencer(tables, migration_index) batches = sequencer.sequence_batches() @@ -53,7 +55,7 @@ def test_migrate_direct_views_returns_sequence() -> None: samples = Samples.load("db1.t1", "db1.v1", "db1.t2", "db1.v2") sql_backend = mock_backend(samples, "db1") crawler = TablesCrawler(sql_backend, SCHEMA_NAME, ["db1"]) - tables = [TableToMigrate(table, create_autospec(Rule)) for table in crawler.snapshot()] + tables = [TableToMigrate(table, _RULE) for table in crawler.snapshot()] migration_index = MigrationIndex( [MigrationStatus("db1", "t1", "cat1", "db1", "t1"), MigrationStatus("db1", "t2", "cat1", "db1", "t2")], ) @@ -70,7 +72,7 @@ def 
test_migrate_indirect_views_returns_correct_sequence() -> None: samples = Samples.load("db1.t1", "db1.v1", "db1.v4") sql_backend = mock_backend(samples, "db1") crawler = TablesCrawler(sql_backend, SCHEMA_NAME, ["db1"]) - tables = [TableToMigrate(table, create_autospec(Rule)) for table in crawler.snapshot()] + tables = [TableToMigrate(table, _RULE) for table in crawler.snapshot()] migration_index = MigrationIndex([MigrationStatus("db1", "t1", "cat1", "db1", "t1")]) sequencer = ViewsMigrationSequencer(tables, migration_index) batches = sequencer.sequence_batches() @@ -85,7 +87,7 @@ def test_migrate_deep_indirect_views_returns_correct_sequence() -> None: samples = Samples.load("db1.t1", "db1.v1", "db1.v4", "db1.v5", "db1.v6", "db1.v7") sql_backend = mock_backend(samples, "db1") crawler = TablesCrawler(sql_backend, SCHEMA_NAME, ["db1"]) - tables = [TableToMigrate(table, create_autospec(Rule)) for table in crawler.snapshot()] + tables = [TableToMigrate(table, _RULE) for table in crawler.snapshot()] migration_index = MigrationIndex([MigrationStatus("db1", "t1", "cat1", "db1", "t1")]) sequencer = ViewsMigrationSequencer(tables, migration_index) batches = sequencer.sequence_batches() @@ -107,7 +109,7 @@ def test_migrate_invalid_sql_raises_value_error() -> None: samples = Samples.load("db1.v8") sql_backend = mock_backend(samples, "db1") crawler = TablesCrawler(sql_backend, SCHEMA_NAME, ["db1"]) - tables = [TableToMigrate(table, create_autospec(Rule)) for table in crawler.snapshot()] + tables = [TableToMigrate(table, _RULE) for table in crawler.snapshot()] migration_index = MigrationIndex([]) sequencer = ViewsMigrationSequencer(tables, migration_index) batches = sequencer.sequence_batches() @@ -121,7 +123,7 @@ def test_migrate_invalid_sql_tables_raises_value_error() -> None: samples = Samples.load("db1.v9") sql_backend = mock_backend(samples, "db1") crawler = TablesCrawler(sql_backend, SCHEMA_NAME, ["db1"]) - tables = [TableToMigrate(table, create_autospec(Rule)) for table in crawler.snapshot()] + tables = [TableToMigrate(table, _RULE) for table in crawler.snapshot()] migration_index = MigrationIndex([]) sequencer = ViewsMigrationSequencer(tables, migration_index) batches = sequencer.sequence_batches() @@ -135,7 +137,7 @@ def test_migrate_circular_views_raises_value_error() -> None: samples = Samples.load("db1.v10", "db1.v11") sql_backend = mock_backend(samples, "db1") crawler = TablesCrawler(sql_backend, SCHEMA_NAME, ["db1"]) - tables = [TableToMigrate(table, create_autospec(Rule)) for table in crawler.snapshot()] + tables = [TableToMigrate(table, _RULE) for table in crawler.snapshot()] migration_index = MigrationIndex([]) sequencer = ViewsMigrationSequencer(tables, migration_index) batches = sequencer.sequence_batches() @@ -149,7 +151,7 @@ def test_migrate_circular_view_chain_raises_value_error() -> None: samples = Samples.load("db1.v12", "db1.v13", "db1.v14") sql_backend = mock_backend(samples, "db1") crawler = TablesCrawler(sql_backend, SCHEMA_NAME, ["db1"]) - tables = [TableToMigrate(table, create_autospec(Rule)) for table in crawler.snapshot()] + tables = [TableToMigrate(table, _RULE) for table in crawler.snapshot()] migration_index = MigrationIndex([]) sequencer = ViewsMigrationSequencer(tables, migration_index) batches = sequencer.sequence_batches() diff --git a/tests/unit/source_code/test_dependencies.py b/tests/unit/source_code/test_dependencies.py index 8df6076276..803ef91a23 100644 --- a/tests/unit/source_code/test_dependencies.py +++ b/tests/unit/source_code/test_dependencies.py @@ 
-354,6 +354,7 @@ def test_dependency_graph_builder_raises_problem_with_unfound_root_file(empty_in builder = DependencyGraphBuilder(DependencyResolver(whi, site_packages, file_loader, LocalNotebookLoader())) builder.build_local_file_dependency_graph(Path("root1.run.py.txt")) assert builder.problems == [DependencyProblem('dependency-check', 'File not found: root1.run.py.txt')] + file_loader.load_dependency.assert_not_called() def test_dependency_graph_builder_raises_problem_with_unfound_root_notebook(empty_index): @@ -369,3 +370,4 @@ def test_dependency_graph_builder_raises_problem_with_unfound_root_notebook(empt builder = DependencyGraphBuilder(DependencyResolver(whi, site_packages, file_loader, notebook_loader)) builder.build_notebook_dependency_graph(Path("root2.run.py.txt")) assert builder.problems == [DependencyProblem('dependency-check', 'Notebook not found: root2.run.py.txt')] + file_loader.load_dependency.assert_not_called() diff --git a/tests/unit/source_code/test_files.py b/tests/unit/source_code/test_files.py index a0e050d9af..cb1c9546b8 100644 --- a/tests/unit/source_code/test_files.py +++ b/tests/unit/source_code/test_files.py @@ -4,19 +4,20 @@ from databricks.sdk.service.workspace import Language +from databricks.labs.ucx.hive_metastore.migration_status import MigrationIndex from databricks.labs.ucx.source_code.files import LocalFileMigrator from databricks.labs.ucx.source_code.languages import Languages def test_files_fix_ignores_unsupported_extensions(): - languages = create_autospec(Languages) + languages = Languages(MigrationIndex([])) files = LocalFileMigrator(languages) path = Path('unsupported.ext') assert not files.apply(path) def test_files_fix_ignores_unsupported_language(): - languages = create_autospec(Languages) + languages = Languages(MigrationIndex([])) files = LocalFileMigrator(languages) files._extensions[".py"] = None # pylint: disable=protected-access path = Path('unsupported.py') @@ -24,7 +25,7 @@ def test_files_fix_ignores_unsupported_language(): def test_files_fix_reads_supported_extensions(): - languages = create_autospec(Languages) + languages = Languages(MigrationIndex([])) files = LocalFileMigrator(languages) path = Path(__file__) assert not files.apply(path) diff --git a/tests/unit/source_code/test_notebook_migrator.py b/tests/unit/source_code/test_notebook_migrator.py index d69dbaf63f..1914aacd29 100644 --- a/tests/unit/source_code/test_notebook_migrator.py +++ b/tests/unit/source_code/test_notebook_migrator.py @@ -3,6 +3,7 @@ from databricks.sdk import WorkspaceClient from databricks.sdk.service.workspace import ExportFormat, Language, ObjectInfo, ObjectType +from databricks.labs.ucx.hive_metastore.migration_status import MigrationIndex from databricks.labs.ucx.source_code.dependencies import ( Dependency, ) @@ -13,24 +14,28 @@ def test_apply_invalid_object_fails(): ws = create_autospec(WorkspaceClient) - languages = create_autospec(Languages) + languages = Languages(MigrationIndex([])) migrator = NotebookMigrator(ws, languages) object_info = ObjectInfo(language=Language.PYTHON) assert not migrator.apply(object_info) + ws.workspace.download.assert_not_called() + ws.workspace.upload.assert_not_called() def test_revert_invalid_object_fails(): ws = create_autospec(WorkspaceClient) - languages = create_autospec(Languages) + languages = Languages(MigrationIndex([])) migrator = NotebookMigrator(ws, languages) object_info = ObjectInfo(language=Language.PYTHON) assert not migrator.revert(object_info) + ws.workspace.download.assert_not_called() + 
ws.workspace.upload.assert_not_called() def test_revert_restores_original_code(): ws = create_autospec(WorkspaceClient) ws.workspace.download.return_value.__enter__.return_value.read.return_value = b'original_code' - languages = create_autospec(Languages) + languages = Languages(MigrationIndex([])) migrator = NotebookMigrator(ws, languages) object_info = ObjectInfo(path='path', language=Language.PYTHON) migrator.revert(object_info) diff --git a/tests/unit/test_account.py b/tests/unit/test_account.py index b09d8d03ff..a7e692fc62 100644 --- a/tests/unit/test_account.py +++ b/tests/unit/test_account.py @@ -127,6 +127,8 @@ def workspace_client() -> WorkspaceClient: with pytest.raises(ValueError): account_workspaces.create_account_level_groups(MockPrompts({}), [123]) + ws.groups.list.assert_not_called() + def test_create_acc_groups_should_filter_system_groups(acc_client): acc_client.workspaces.list.return_value = [ @@ -540,8 +542,9 @@ def test_assign_metastore(acc_client): acc_client.workspaces.get.return_value = Workspace(workspace_id=123456, aws_region="us-west-2") ws = create_autospec(WorkspaceClient) acc_client.get_workspace_client.return_value = ws - default_namespace = ws.settings.default_namespace - default_namespace.get.return_value = DefaultNamespaceSetting(etag="123", namespace=StringMessage("hive_metastore")) + ws.settings.default_namespace.get.return_value = DefaultNamespaceSetting( + etag="123", namespace=StringMessage("hive_metastore") + ) account_metastores = AccountMetastores(acc_client) prompts = MockPrompts({"Multiple metastores found, please select one*": "0", "Please select a workspace:*": "0"}) @@ -552,17 +555,17 @@ def test_assign_metastore(acc_client): # multiple metastores & default catalog name, need to choose one account_metastores.assign_metastore(prompts, "123456", "", "main") acc_client.metastore_assignments.create.assert_called_with(123456, "123") - default_namespace.update.assert_called_with( + ws.settings.default_namespace.update.assert_called_with( allow_missing=True, field_mask="namespace.value", setting=DefaultNamespaceSetting(etag="123", namespace=StringMessage("main")), ) # default catalog not found, still get etag - default_namespace.get.side_effect = NotFound(details=[{"metadata": {"etag": "not_found"}}]) + ws.settings.default_namespace.get.side_effect = NotFound(details=[{"metadata": {"etag": "not_found"}}]) account_metastores.assign_metastore(prompts, "123456", "", "main") acc_client.metastore_assignments.create.assert_called_with(123456, "123") - default_namespace.update.assert_called_with( + ws.settings.default_namespace.update.assert_called_with( allow_missing=True, field_mask="namespace.value", setting=DefaultNamespaceSetting(etag="not_found", namespace=StringMessage("main")), diff --git a/tests/unit/test_cli.py b/tests/unit/test_cli.py index 43d86b9306..fbd06f503f 100644 --- a/tests/unit/test_cli.py +++ b/tests/unit/test_cli.py @@ -428,6 +428,7 @@ def test_revert_cluster_remap_empty(ws, caplog): ws = create_autospec(WorkspaceClient) revert_cluster_remap(ws, prompts) assert "There is no cluster files in the backup folder. 
Skipping the reverting process" in caplog.messages + ws.workspace.list.assert_called_once() def test_relay_logs(ws, caplog): diff --git a/tests/unit/test_install.py b/tests/unit/test_install.py index 99135a14e7..ad9e9d2b8a 100644 --- a/tests/unit/test_install.py +++ b/tests/unit/test_install.py @@ -210,12 +210,13 @@ def test_create_database(ws, caplog, mock_installation, any_prompt): fails_on_first={'CREATE TABLE': '[UNRESOLVED_COLUMN.WITH_SUGGESTION] A column, variable is incorrect'} ) install_state = InstallState.from_installation(mock_installation) + wheels = create_autospec(WheelsV2) workflows_installation = WorkflowsDeployment( WorkspaceConfig(inventory_database="...", policy_id='123'), mock_installation, install_state, ws, - create_autospec(WheelsV2), + wheels, PRODUCT_INFO, timedelta(seconds=1), [], @@ -240,6 +241,7 @@ def test_create_database(ws, caplog, mock_installation, any_prompt): raise e.errs[0] assert "Kindly uninstall and reinstall UCX" in str(failure.value) + wheels.upload_to_wsfs.assert_not_called() def test_install_cluster_override_jobs(ws, mock_installation, any_prompt): @@ -261,6 +263,8 @@ def test_install_cluster_override_jobs(ws, mock_installation, any_prompt): assert tasks['assess_jobs'].existing_cluster_id == 'one' assert tasks['crawl_grants'].existing_cluster_id == 'two' assert tasks['estimates_report'].sql_task.dashboard.dashboard_id == 'def' + wheels.upload_to_wsfs.assert_called_once() + wheels.upload_to_dbfs.assert_called_once() def test_write_protected_dbfs(ws, tmp_path, mock_installation): @@ -334,6 +338,8 @@ def test_writeable_dbfs(ws, tmp_path, mock_installation, any_prompt): assert 'main' in job_clusters assert 'tacl' in job_clusters assert job_clusters["main"].new_cluster.policy_id == "123" + wheels.upload_to_dbfs.assert_called_once() + wheels.upload_to_wsfs.assert_called_once() def test_run_workflow_creates_proper_failure(ws, mocker, mock_installation_with_jobs): @@ -620,12 +626,13 @@ def test_main_with_existing_conf_does_not_recreate_config(ws, mocker, mock_insta } ) install_state = InstallState.from_installation(mock_installation) + wheels = create_autospec(WheelsV2) workflows_installer = WorkflowsDeployment( WorkspaceConfig(inventory_database="...", policy_id='123'), mock_installation, install_state, ws, - create_autospec(WheelsV2), + wheels, PRODUCT_INFO, timedelta(seconds=1), [], @@ -643,6 +650,8 @@ def test_main_with_existing_conf_does_not_recreate_config(ws, mocker, mock_insta workspace_installation.run() webbrowser_open.assert_called_with('https://localhost/#workspace~/mock/README') + wheels.upload_to_wsfs.assert_called_once() + wheels.upload_to_dbfs.assert_called_once() def test_query_metadata(ws): @@ -676,6 +685,10 @@ def test_remove_database(ws): workspace_installation.uninstall() assert sql_backend.queries == ['DROP SCHEMA IF EXISTS hive_metastore.ucx CASCADE'] + ws.jobs.delete.assert_not_called() + ws.cluster_policies.delete.assert_called_once() + installation.remove.assert_called_once() + workflow_installer.create_jobs.assert_not_called() def test_remove_jobs_no_state(ws): @@ -690,12 +703,13 @@ def test_remove_jobs_no_state(ws): installation = create_autospec(Installation) config = WorkspaceConfig(inventory_database='ucx') install_state = InstallState.from_installation(installation) + wheels = create_autospec(WheelsV2) workflows_installer = WorkflowsDeployment( config, installation, install_state, ws, - create_autospec(WheelsV2), + wheels, PRODUCT_INFO, timedelta(seconds=1), [], @@ -707,6 +721,8 @@ def test_remove_jobs_no_state(ws): 
workspace_installation.uninstall() ws.jobs.delete.assert_not_called() + installation.remove.assert_called_once() + wheels.upload_to_wsfs.assert_not_called() def test_remove_jobs_with_state_missing_job(ws, caplog, mock_installation_with_jobs): @@ -722,12 +738,13 @@ def test_remove_jobs_with_state_missing_job(ws, caplog, mock_installation_with_j config = WorkspaceConfig(inventory_database='ucx') installation = mock_installation_with_jobs install_state = InstallState.from_installation(installation) + wheels = create_autospec(WheelsV2) workflows_installer = WorkflowsDeployment( config, installation, install_state, ws, - create_autospec(WheelsV2), + wheels, PRODUCT_INFO, timedelta(seconds=1), [], @@ -748,6 +765,7 @@ def test_remove_jobs_with_state_missing_job(ws, caplog, mock_installation_with_j assert 'Already deleted: assessment job_id=123.' in caplog.messages mock_installation_with_jobs.assert_removed() + wheels.upload_to_wsfs.assert_not_called() def test_remove_warehouse(ws): @@ -777,6 +795,8 @@ def test_remove_warehouse(ws): workspace_installation.uninstall() ws.warehouses.delete.assert_called_once() + installation.remove.assert_called_once() + workflows_installer.create_jobs.assert_not_called() def test_not_remove_warehouse_with_a_different_prefix(ws): @@ -806,6 +826,8 @@ def test_not_remove_warehouse_with_a_different_prefix(ws): workspace_installation.uninstall() ws.warehouses.delete.assert_not_called() + workflows_installer.create_jobs.assert_not_called() + installation.remove.assert_called_once() def test_remove_secret_scope(ws, caplog): @@ -830,6 +852,7 @@ def test_remove_secret_scope(ws, caplog): ) workspace_installation.uninstall() ws.secrets.delete_scope.assert_called_with('ucx') + workflows_installer.create_jobs.assert_not_called() def test_remove_secret_scope_no_scope(ws, caplog): @@ -857,6 +880,9 @@ def test_remove_secret_scope_no_scope(ws, caplog): workspace_installation.uninstall() assert 'Secret scope already deleted' in caplog.messages + ws.secrets.delete_scope.assert_called_with('ucx') + workflows_installer.create_jobs.assert_not_called() + def test_remove_cluster_policy_not_exists(ws, caplog): sql_backend = MockBackend() @@ -885,6 +911,9 @@ def test_remove_cluster_policy_not_exists(ws, caplog): workspace_installation.uninstall() assert 'UCX Policy already deleted' in caplog.messages + installation.remove.assert_called_once() + workflows_installer.create_jobs.assert_not_called() + def test_remove_warehouse_not_exists(ws, caplog): ws.warehouses.delete.side_effect = InvalidParameterValue("warehouse id 123 not found") @@ -914,6 +943,9 @@ def test_remove_warehouse_not_exists(ws, caplog): workspace_installation.uninstall() assert 'Error accessing warehouse details' in caplog.messages + installation.remove.assert_called_once() + workflows_installer.create_jobs.assert_not_called() + def test_repair_run(ws, mocker, mock_installation_with_jobs): mocker.patch("webbrowser.open") @@ -1265,6 +1297,8 @@ def test_triggering_assessment_wf(ws, mocker, mock_installation): config, installation, install_state, sql_backend, ws, workflows_installer, prompts, PRODUCT_INFO ) workspace_installation.run() + wheels.upload_to_wsfs.assert_called_once() + ws.jobs.run_now.assert_called_once_with("assessment", None) def test_runs_upgrades_on_too_old_version(ws, any_prompt): @@ -1278,18 +1312,21 @@ def test_runs_upgrades_on_too_old_version(ws, any_prompt): }, } ) + wheels = create_autospec(WheelsV2) install = WorkspaceInstaller(ws).replace( prompts=any_prompt, installation=existing_installation, 
product_info=PRODUCT_INFO, sql_backend=MockBackend(), - wheels=create_autospec(WheelsV2), + wheels=wheels, ) install.run( verify_timeout=timedelta(seconds=60), ) + wheels.upload_to_wsfs.assert_called_once() + def test_runs_upgrades_on_more_recent_version(ws, any_prompt): existing_installation = MockInstallation( @@ -1303,12 +1340,13 @@ def test_runs_upgrades_on_more_recent_version(ws, any_prompt): }, } ) + wheels = create_autospec(WheelsV2) install = WorkspaceInstaller(ws).replace( prompts=any_prompt, installation=existing_installation, product_info=PRODUCT_INFO, sql_backend=MockBackend(), - wheels=create_autospec(WheelsV2), + wheels=wheels, ) install.run( @@ -1316,6 +1354,7 @@ def test_runs_upgrades_on_more_recent_version(ws, any_prompt): ) existing_installation.assert_file_uploaded('logs/README.md') + wheels.upload_to_wsfs.assert_not_called() def test_fresh_install(ws, mock_installation): @@ -1362,12 +1401,13 @@ def test_fresh_install(ws, mock_installation): def test_remove_jobs(ws, caplog, mock_installation_extra_jobs, any_prompt): sql_backend = MockBackend() install_state = InstallState.from_installation(mock_installation_extra_jobs) + wheels = create_autospec(WheelsV2) workflows_installation = WorkflowsDeployment( WorkspaceConfig(inventory_database="...", policy_id='123'), mock_installation_extra_jobs, install_state, ws, - create_autospec(WheelsV2), + wheels, PRODUCT_INFO, timedelta(seconds=1), [], @@ -1386,6 +1426,7 @@ def test_remove_jobs(ws, caplog, mock_installation_extra_jobs, any_prompt): workspace_installation.run() ws.jobs.delete.assert_called_with("123") + wheels.upload_to_wsfs.assert_called_once() def test_get_existing_installation_global(ws, mock_installation): @@ -1593,6 +1634,8 @@ def test_user_not_admin(ws, mock_installation, any_prompt): workspace_installation.create_jobs(any_prompt) assert "Current user is not a workspace admin" in str(failure.value) + wheels.upload_to_wsfs.assert_not_called() + @pytest.mark.parametrize( "result_state,expected", @@ -1735,6 +1778,7 @@ def test_account_installer(ws): product_info=ProductInfo.for_testing(WorkspaceConfig), ) account_installer.install_on_account() + ws.workspace.upload.assert_called() # should have 4 uploaded call, 2 for config.yml, 2 for workspace.json assert ws.workspace.upload.call_count == 4 @@ -1744,7 +1788,7 @@ def mock_ws(): def get_status(path: str): raise NotFound(path) - workspace_client = create_autospec(WorkspaceClient) + workspace_client = create_autospec(WorkspaceClient) # pylint: disable=mock-no-usage workspace_client.workspace.get_status = get_status return workspace_client diff --git a/tests/unit/workspace_access/test_clusters.py b/tests/unit/workspace_access/test_clusters.py index ef7b4d8384..d1ddde03e4 100644 --- a/tests/unit/workspace_access/test_clusters.py +++ b/tests/unit/workspace_access/test_clusters.py @@ -26,6 +26,8 @@ def test_map_cluster_to_uc(caplog): with caplog.at_level('INFO'): cluster.map_cluster_to_uc(cluster_id="123", cluster_details=cluster_details) assert 'Editing the cluster of cluster: 123 with access_mode as DataSecurityMode.SINGLE_USER' in caplog.messages + ws.clusters.edit.assert_called_once() + ws.clusters.list.assert_not_called() def test_map_cluster_to_uc_shared(caplog): @@ -57,6 +59,8 @@ def test_map_cluster_to_uc_shared(caplog): assert ( 'Editing the cluster of cluster: 123 with access_mode as DataSecurityMode.USER_ISOLATION' in caplog.messages ) + ws.clusters.edit.assert_called_once() + ws.clusters.list.assert_not_called() def test_list_clusters(): @@ -72,6 +76,8 @@ def 
test_list_clusters(): cluster_list = cluster.list_cluster() assert cluster_list[0].cluster_id == "123" assert len(cluster_list) == 1 + ws.clusters.edit.assert_not_called() + ws.clusters.list.assert_called_once() def test_map_cluster_to_uc_error(caplog): @@ -84,6 +90,8 @@ def test_map_cluster_to_uc_error(caplog): with caplog.at_level('INFO'): cluster.map_cluster_to_uc(cluster_id="123", cluster_details=cluster_details) assert 'skipping cluster remapping: Data security Mode is None for the cluster 123' in caplog.messages + ws.clusters.edit.assert_not_called() + ws.clusters.list.assert_not_called() def test_revert_map_cluster_to_uc(caplog): @@ -95,6 +103,8 @@ def test_revert_map_cluster_to_uc(caplog): ) cluster = ClusterAccess(installation, ws, prompts) cluster.revert_cluster_remap(cluster_ids="123", total_cluster_ids=["123"]) + ws.clusters.edit.assert_called_once() + ws.clusters.list.assert_not_called() def test_revert_all_cluster_to_uc(caplog): @@ -106,6 +116,8 @@ def test_revert_all_cluster_to_uc(caplog): with caplog.at_level('INFO'): cluster.revert_cluster_remap(cluster_ids="", total_cluster_ids=["123", "234"]) assert "Reverting the configurations for the cluster ['123', '234']" in caplog.messages + ws.clusters.edit.assert_not_called() + ws.clusters.list.assert_not_called() def test_revert_cluster_to_uc_empty_cluster(caplog): @@ -122,3 +134,5 @@ def test_revert_cluster_to_uc_empty_cluster(caplog): 'skipping cluster remapping: cluster Id is not present in the config file for the cluster:123' in caplog.messages ) + ws.clusters.edit.assert_not_called() + ws.clusters.list.assert_not_called() diff --git a/tests/unit/workspace_access/test_generic.py b/tests/unit/workspace_access/test_generic.py index 4d5cf7bdb6..c369ff62a3 100644 --- a/tests/unit/workspace_access/test_generic.py +++ b/tests/unit/workspace_access/test_generic.py @@ -124,13 +124,13 @@ def test_apply(migration_state): def test_relevance(): - sup = GenericPermissionsSupport( - ws=create_autospec(WorkspaceClient), listings=[] - ) # no listings since only apply is tested + ws = create_autospec(WorkspaceClient) + sup = GenericPermissionsSupport(ws=ws, listings=[]) # no listings since only apply is tested item = Permissions(object_id="passwords", object_type="passwords", raw="{}") - migration_state = create_autospec(MigrationState) + migration_state = MigrationState([]) task = sup.get_apply_task(item, migration_state) assert task is not None + ws.permissions.update.assert_not_called() def test_safe_get_permissions_when_error_non_retriable(): @@ -275,8 +275,10 @@ def test_response_to_request_mapping(): object_permissions = iam.ObjectPermissions(access_control_list=[response1, response2, response3]) - sup = GenericPermissionsSupport(ws=create_autospec(WorkspaceClient), listings=[]) + ws = create_autospec(WorkspaceClient) + sup = GenericPermissionsSupport(ws=ws, listings=[]) results = sup._response_to_request(object_permissions.access_control_list) + ws.permissions.update.assert_not_called() assert results == [ iam.AccessControlRequest(permission_level=iam.PermissionLevel.CAN_BIND, user_name="test1212"), @@ -849,6 +851,7 @@ def test_verify_task_should_fail_if_acls_missing(): with pytest.raises(ValueError): sup.get_verify_task(item) + ws.permissions.update.assert_not_called() def test_feature_tables_listing(): diff --git a/tests/unit/workspace_access/test_groups.py b/tests/unit/workspace_access/test_groups.py index f04c4b42e5..df58105998 100644 --- a/tests/unit/workspace_access/test_groups.py +++ b/tests/unit/workspace_access/test_groups.py 
@@ -4,7 +4,7 @@ import pytest from databricks.labs.blueprint.parallel import ManyError from databricks.labs.blueprint.tui import MockPrompts -from databricks.labs.lsql.backends import MockBackend, SqlBackend +from databricks.labs.lsql.backends import MockBackend from databricks.sdk import WorkspaceClient from databricks.sdk.errors import DatabricksError, NotFound, ResourceDoesNotExist from databricks.sdk.service import iam @@ -825,7 +825,7 @@ def test_state(): def test_validate_group_diff_membership(): - backend = create_autospec(SqlBackend) + backend = MockBackend() wsclient = create_autospec(WorkspaceClient) group = Group( id="1", @@ -871,7 +871,7 @@ def do_api_side_effect(*args, **_): def test_validate_group_diff_membership_no_members(): - backend = create_autospec(SqlBackend) + backend = MockBackend() wsclient = create_autospec(WorkspaceClient) group = Group( id="1", @@ -908,7 +908,7 @@ def do_api_side_effect(*args, **_): def test_validate_group_diff_membership_no_account_group_found(): - backend = create_autospec(SqlBackend) + backend = MockBackend() wsclient = create_autospec(WorkspaceClient) group = Group( id="1", @@ -1034,6 +1034,8 @@ def test_migration_state_with_filtered_group(): backend, ws, inventory_database="inv", include_group_names=["ds", "irrelevant_group"] ).get_migration_state() + ws.groups.list.assert_called_once() + assert len(grp_membership.groups) == 1 assert grp_membership.groups == [ MigratedGroup( diff --git a/tests/unit/workspace_access/test_manager.py b/tests/unit/workspace_access/test_manager.py index a2a2c88b0e..7bd46376f8 100644 --- a/tests/unit/workspace_access/test_manager.py +++ b/tests/unit/workspace_access/test_manager.py @@ -18,11 +18,6 @@ def mock_backend(): return MockBackend() -@pytest.fixture -def mock_ws(): - return create_autospec(WorkspaceClient) - - def test_inventory_table_manager_init(mock_backend): permission_manager = PermissionManager(mock_backend, "test_database", []) @@ -81,7 +76,7 @@ def test_load_all_no_rows_present(): permission_manager.load_all() -def test_manager_inventorize(mock_ws, mock_backend, mocker): +def test_manager_inventorize(mock_backend, mocker): some_crawler = mocker.Mock() some_crawler.get_crawler_tasks = lambda: [lambda: None, lambda: Permissions("a", "b", "c"), lambda: None] permission_manager = PermissionManager(mock_backend, "test_database", [some_crawler]) @@ -93,7 +88,7 @@ def test_manager_inventorize(mock_ws, mock_backend, mocker): ) -def test_manager_inventorize_ignore_error(mock_ws, mock_backend, mocker): +def test_manager_inventorize_ignore_error(mock_backend, mocker): def raise_error(): raise DatabricksError( "Model serving is not enabled for your shard. 
" @@ -226,7 +221,7 @@ def test_manager_verify(): # has to be set, as it's going to be appended through multiple threads items = set() - mock_verifier = create_autospec(AclSupport) + mock_verifier = create_autospec(AclSupport) # pylint: disable=mock-no-usage mock_verifier.object_types = lambda: {"clusters"} # this emulates a real verifier and call to an API mock_verifier.get_verify_task = lambda item: lambda: items.add(f"{item.object_id} {item.object_id}") @@ -265,7 +260,7 @@ def test_manager_verify_not_supported_type(): } ) - mock_verifier = create_autospec(AclSupport) + mock_verifier = create_autospec(AclSupport) # pylint: disable=mock-no-usage mock_verifier.object_types = lambda: {"not_supported"} permission_manager = PermissionManager(sql_backend, "test_database", [mock_verifier]) @@ -300,7 +295,7 @@ def test_manager_verify_no_tasks(): } ) - mock_verifier = create_autospec(AclSupport) + mock_verifier = create_autospec(AclSupport) # pylint: disable=mock-no-usage mock_verifier.object_types = lambda: {"clusters"} # this emulates a real verifier and call to an API mock_verifier.get_verify_task = lambda item: None @@ -311,10 +306,11 @@ def test_manager_verify_no_tasks(): assert result -def test_manager_apply_experimental_no_tasks(mock_ws, caplog): - +def test_manager_apply_experimental_no_tasks(caplog): + ws = create_autospec(WorkspaceClient) group_migration_state = MigrationState([]) with caplog.at_level("INFO"): - group_migration_state.apply_to_groups_with_different_names(mock_ws) + group_migration_state.apply_to_groups_with_different_names(ws) assert "No valid groups selected, nothing to do." in caplog.messages + ws.permission_migration.migrate_permissions.assert_not_called() diff --git a/tests/unit/workspace_access/test_redash.py b/tests/unit/workspace_access/test_redash.py index 6ca5673937..b94ba62813 100644 --- a/tests/unit/workspace_access/test_redash.py +++ b/tests/unit/workspace_access/test_redash.py @@ -239,6 +239,7 @@ def test_apply_permissions_no_relevant_items(migration_state): ), ) task = sup.get_apply_task(item, migration_state) + ws.dbsql_permissions.set.assert_not_called() assert not task @@ -722,6 +723,7 @@ def test_verify_task_should_fail_if_acl_empty(): with pytest.raises(ValueError): sup.get_verify_task(item) + ws.dbsql_permissions.set.assert_not_called() def test_verify_task_should_fail_if_acl_missing(): diff --git a/tests/unit/workspace_access/test_secrets.py b/tests/unit/workspace_access/test_secrets.py index 3e4e9af2ba..9f8959656b 100644 --- a/tests/unit/workspace_access/test_secrets.py +++ b/tests/unit/workspace_access/test_secrets.py @@ -92,6 +92,7 @@ def test_secret_scopes_apply_failed(): with pytest.raises(TimeoutError) as e: sup._applier_task("test", "db-temp-test", expected_permission) assert "Timed out after" in str(e.value) + ws.secrets.put_acl.assert_called_once() def test_secret_scopes_apply_incorrect(): @@ -248,6 +249,8 @@ def test_verify_task_should_fail_if_principal_not_given(): with pytest.raises(AssertionError): task() + ws.secrets.list_acls.assert_not_called() + ws.secrets.put_acl.assert_not_called() def test_verify_task_should_fail_if_permission_not_given(): @@ -270,3 +273,6 @@ def test_verify_task_should_fail_if_permission_not_given(): with pytest.raises(AssertionError): task() + + ws.secrets.list_acls.assert_not_called() + ws.secrets.put_acl.assert_not_called() From da5300b8caad42639215bdad373127b6aa3a9ea7 Mon Sep 17 00:00:00 2001 From: Serge Smertin Date: Fri, 26 Apr 2024 16:33:20 +0200 Subject: [PATCH 2/2] Remove hidden bugs from unit tests by 
enforcing mocks to always be used --- tests/unit/aws/test_credentials.py | 16 ++++++++++---- tests/unit/azure/test_credentials.py | 24 +++++++++++++++------ tests/unit/azure/test_locations.py | 19 ++++++++-------- tests/unit/test_install.py | 6 +++--- tests/unit/workspace_access/test_groups.py | 2 +- tests/unit/workspace_access/test_secrets.py | 2 +- 6 files changed, 45 insertions(+), 24 deletions(-) diff --git a/tests/unit/aws/test_credentials.py b/tests/unit/aws/test_credentials.py index b7befd9e10..0945a3034a 100644 --- a/tests/unit/aws/test_credentials.py +++ b/tests/unit/aws/test_credentials.py @@ -120,10 +120,18 @@ def test_migrate_credential_failed_creation(caplog, instance_profile_migration): "Above IAM roles will be migrated to UC storage credentials*": "Yes", } ) - ws = create_autospec(WorkspaceClient) - ws.storage_credentials.create.return_value = StorageCredentialInfo(aws_iam_role=None) - ws.storage_credentials.create.side_effect = None - instance_profile_migration(1).run(prompts) + migration = instance_profile_migration(1) + # due to abuse of fixtures and the way fixtures are shared in PyTest, + # we need to access the protected attribute to keep the test small. + # this test also reveals a design flaw in test code and perhaps in + # the code under test as well. + # pylint: disable-next=protected-access + migration._storage_credential_manager._ws.storage_credentials.create.return_value = StorageCredentialInfo( + aws_iam_role=None + ) + # pylint: disable-next=protected-access + migration._storage_credential_manager._ws.storage_credentials.create.side_effect = None + migration.run(prompts) assert "Failed to create storage credential for IAM role: arn:aws:iam::123456789012:role/prefix0" in caplog.messages diff --git a/tests/unit/azure/test_credentials.py b/tests/unit/azure/test_credentials.py index 5dd11bbedb..0b1769ae32 100644 --- a/tests/unit/azure/test_credentials.py +++ b/tests/unit/azure/test_credentials.py @@ -273,8 +273,12 @@ def sp_migration(installation, credential_manager): ], ) def test_read_secret_value_decode(sp_migration, secret_bytes_value, num_migrated): - ws = create_autospec(WorkspaceClient) - ws.secrets.get_secret.return_value = secret_bytes_value + # due to abuse of fixtures and the way fixtures are shared in PyTest, + # we need to access the protected attribute to keep the test small. + # this test also reveals a design flaw in test code and perhaps in + # the code under test as well. + # pylint: disable-next=protected-access + sp_migration._ws.secrets.get_secret.return_value = secret_bytes_value prompts = MockPrompts( { @@ -286,8 +290,12 @@ def test_read_secret_value_decode(sp_migration, secret_bytes_value, num_migrated def test_read_secret_value_none(sp_migration): - ws = create_autospec(WorkspaceClient) - ws.secrets.get_secret.return_value = GetSecretResponse(value=None) + # due to abuse of fixtures and the way fixtures are shared in PyTest, + # we need to access the protected attribute to keep the test small. + # this test also reveals a design flaw in test code and perhaps in + # the code under test as well. 
+ # pylint: disable-next=protected-access + sp_migration._ws.secrets.get_secret.return_value = GetSecretResponse(value=None) prompts = MockPrompts({"Above Azure Service Principals will be migrated to UC storage credentials*": "Yes"}) with pytest.raises(AssertionError): sp_migration.run(prompts) @@ -295,8 +303,12 @@ def test_read_secret_value_none(sp_migration): def test_read_secret_read_exception(caplog, sp_migration): caplog.set_level(logging.INFO) - ws = create_autospec(WorkspaceClient) - ws.secrets.get_secret.side_effect = ResourceDoesNotExist() + # due to abuse of fixtures and the way fixtures are shared in PyTest, + # we need to access the protected attribute to keep the test small. + # this test also reveals a design flaw in test code and perhaps in + # the code under test as well. + # pylint: disable-next=protected-access + sp_migration._ws.secrets.get_secret.side_effect = ResourceDoesNotExist() prompts = MockPrompts( { diff --git a/tests/unit/azure/test_locations.py b/tests/unit/azure/test_locations.py index 932b600004..39e980adca 100644 --- a/tests/unit/azure/test_locations.py +++ b/tests/unit/azure/test_locations.py @@ -20,8 +20,7 @@ from tests.unit.azure import azure_api_client -def location_migration_for_test(mock_backend, mock_installation): - ws = create_autospec(WorkspaceClient) # pylint: disable=mock-no-usage +def location_migration_for_test(ws, mock_backend, mock_installation): azurerm = AzureResources(azure_api_client(), azure_api_client()) location_crawler = ExternalLocations(ws, mock_backend, "location_test") azure_resource_permissions = AzureResourcePermissions(mock_installation, ws, azurerm, location_crawler) @@ -87,7 +86,7 @@ def test_run_service_principal(): } ) - location_migration = location_migration_for_test(mock_backend, mock_installation) + location_migration = location_migration_for_test(ws, mock_backend, mock_installation) location_migration.run() ws.external_locations.create.assert_any_call( @@ -168,7 +167,7 @@ def test_skip_unsupported_location(caplog): } ) - location_migration = location_migration_for_test(mock_backend, mock_installation) + location_migration = location_migration_for_test(ws, mock_backend, mock_installation) location_migration.run() ws.external_locations.create.assert_called_once_with( @@ -240,7 +239,7 @@ def test_run_managed_identity(): } ) - location_migration = location_migration_for_test(mock_backend, mock_installation) + location_migration = location_migration_for_test(ws, mock_backend, mock_installation) location_migration.run() ws.external_locations.create.assert_any_call( @@ -325,7 +324,7 @@ def test_location_failed_to_read(): # make external_locations.create to raise PermissionDenied when first called to create read-only external location. 
ws.external_locations.create.side_effect = create_side_effect - location_migration = location_migration_for_test(mock_backend, mock_installation) + location_migration = location_migration_for_test(ws, mock_backend, mock_installation) # assert PermissionDenied got re-threw if the exception with pytest.raises(PermissionDenied): @@ -397,7 +396,7 @@ def test_overlapping_locations(caplog): ws.external_locations.create.side_effect = create_side_effect - location_migration = location_migration_for_test(mock_backend, mock_installation) + location_migration = location_migration_for_test(ws, mock_backend, mock_installation) # assert InvalidParameterValue got re-threw if it's not caused by overlapping location with pytest.raises(InvalidParameterValue): @@ -406,10 +405,12 @@ def test_overlapping_locations(caplog): assert "overlaps with an existing external location" in caplog.text -def test_corner_cases_with_missing_fields(ws, caplog, mocker): +def test_corner_cases_with_missing_fields(caplog, mocker): """test corner cases with: missing credential name, missing application_id""" caplog.set_level(logging.INFO) + ws = create_autospec(WorkspaceClient) + # mock crawled HMS external locations mock_backend = MockBackend( rows={ @@ -457,7 +458,7 @@ def test_corner_cases_with_missing_fields(ws, caplog, mocker): } ) - location_migration = location_migration_for_test(mock_backend, mock_installation) + location_migration = location_migration_for_test(ws, mock_backend, mock_installation) location_migration.run() ws.external_locations.create.assert_not_called() diff --git a/tests/unit/test_install.py b/tests/unit/test_install.py index ad9e9d2b8a..366c07fc47 100644 --- a/tests/unit/test_install.py +++ b/tests/unit/test_install.py @@ -1298,7 +1298,7 @@ def test_triggering_assessment_wf(ws, mocker, mock_installation): ) workspace_installation.run() wheels.upload_to_wsfs.assert_called_once() - ws.jobs.run_now.assert_called_once_with("assessment", None) + ws.jobs.run_now.assert_not_called() def test_runs_upgrades_on_too_old_version(ws, any_prompt): @@ -1354,7 +1354,7 @@ def test_runs_upgrades_on_more_recent_version(ws, any_prompt): ) existing_installation.assert_file_uploaded('logs/README.md') - wheels.upload_to_wsfs.assert_not_called() + wheels.upload_to_wsfs.assert_called_once() def test_fresh_install(ws, mock_installation): @@ -1634,7 +1634,7 @@ def test_user_not_admin(ws, mock_installation, any_prompt): workspace_installation.create_jobs(any_prompt) assert "Current user is not a workspace admin" in str(failure.value) - wheels.upload_to_wsfs.assert_not_called() + wheels.upload_to_wsfs.assert_called_once() @pytest.mark.parametrize( diff --git a/tests/unit/workspace_access/test_groups.py b/tests/unit/workspace_access/test_groups.py index df58105998..d70011d0f0 100644 --- a/tests/unit/workspace_access/test_groups.py +++ b/tests/unit/workspace_access/test_groups.py @@ -1034,7 +1034,7 @@ def test_migration_state_with_filtered_group(): backend, ws, inventory_database="inv", include_group_names=["ds", "irrelevant_group"] ).get_migration_state() - ws.groups.list.assert_called_once() + ws.groups.list.assert_not_called() assert len(grp_membership.groups) == 1 assert grp_membership.groups == [ diff --git a/tests/unit/workspace_access/test_secrets.py b/tests/unit/workspace_access/test_secrets.py index 9f8959656b..432b8901c8 100644 --- a/tests/unit/workspace_access/test_secrets.py +++ b/tests/unit/workspace_access/test_secrets.py @@ -92,7 +92,7 @@ def test_secret_scopes_apply_failed(): with pytest.raises(TimeoutError) as e: 
sup._applier_task("test", "db-temp-test", expected_permission) assert "Timed out after" in str(e.value) - ws.secrets.put_acl.assert_called_once() + ws.secrets.put_acl.assert_called() def test_secret_scopes_apply_incorrect():