[AIRFLOW-5241] Make test class names consistent by starting with Test #5847

Merged · 1 commit · Aug 22, 2019
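Starting every test class name with `Test` (rather than ending it with `Test`, `Tests`, or `TestCase`) matches the prefix convention that pytest's default `python_classes = Test*` option encodes for plain classes; the classes touched here are `unittest.TestCase` subclasses, which unittest-style discovery finds under either name, so the immediate gain is a consistent, predictable naming scheme. A minimal before/after sketch of the pattern, using a hypothetical `FooHook` rather than any class from this diff:

import unittest
from unittest import mock


class FooHook:
    """Hypothetical unit under test, standing in for an Airflow hook."""

    def run(self):
        return 'ran'


# Before (suffix style, as removed throughout this PR):
#     class FooHookTest(unittest.TestCase): ...

# After (prefix style, as introduced throughout this PR):
class TestFooHook(unittest.TestCase):

    @mock.patch.object(FooHook, 'run', return_value='mocked')
    def test_run(self, mock_run):
        # The patched method returns the stubbed value and records the call.
        self.assertEqual(FooHook().run(), 'mocked')
        mock_run.assert_called_once_with()


if __name__ == '__main__':
    unittest.main()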
tests/api/common/experimental/test_trigger_dag.py (1 addition & 1 deletion)

@@ -26,7 +26,7 @@
 from airflow.api.common.experimental.trigger_dag import _trigger_dag


-class TriggerDagTests(unittest.TestCase):
+class TestTriggerDag(unittest.TestCase):

     @mock.patch('airflow.models.DagRun')
     @mock.patch('airflow.models.DagBag')
tests/contrib/hooks/test_cassandra_hook.py (1 addition & 1 deletion)

@@ -29,7 +29,7 @@
 from tests.compat import mock, patch


-class CassandraHookTest(unittest.TestCase):
+class TestCassandraHook(unittest.TestCase):
     def setUp(self):
         db.merge_conn(
             Connection(
tests/contrib/hooks/test_databricks_hook.py (3 additions & 3 deletions)

@@ -153,7 +153,7 @@ def setup_mock_requests(mock_requests,
         [side_effect] * error_count + [create_valid_response_mock(response_content)]


-class DatabricksHookTest(unittest.TestCase):
+class TestDatabricksHook(unittest.TestCase):
     """
     Tests for DatabricksHook.
     """
@@ -390,7 +390,7 @@ def test_terminate_cluster(self, mock_requests):
             timeout=self.hook.timeout_seconds)


-class DatabricksHookTokenTest(unittest.TestCase):
+class TestDatabricksHookToken(unittest.TestCase):
     """
     Tests for DatabricksHook when auth is done with token.
     """
@@ -424,7 +424,7 @@ def test_submit_run(self, mock_requests):
         self.assertEqual(kwargs['auth'].token, TOKEN)


-class RunStateTest(unittest.TestCase):
+class TestRunState(unittest.TestCase):
     def test_is_terminal_true(self):
         terminal_states = ['TERMINATED', 'SKIPPED', 'INTERNAL_ERROR']
         for state in terminal_states:
tests/contrib/hooks/test_gcp_api_base_hook.py (1 addition & 1 deletion)

@@ -135,7 +135,7 @@ def test_restrict_positional_arguments(self):
 ENV_VALUE = "/tmp/a"


-class ProvideGcpCredentialFileTestCase(unittest.TestCase):
+class TestProvideGcpCredentialFile(unittest.TestCase):
     def setUp(self):
         with mock.patch(
             'airflow.contrib.hooks.gcp_api_base_hook.GoogleCloudBaseHook.__init__',
tests/contrib/hooks/test_gcp_dataproc_hook.py (2 additions & 2 deletions)

@@ -38,7 +38,7 @@ def mock_init(self, gcp_conn_id, delegate_to=None):  # pylint: disable=unused-argument
     pass


-class DataProcHookTest(unittest.TestCase):
+class TestDataProcHook(unittest.TestCase):
     def setUp(self):
         with mock.patch(BASE_STRING.format('GoogleCloudBaseHook.__init__'),
                         new=mock_init):
@@ -53,7 +53,7 @@ def test_submit(self, job_mock):
             job_error_states=mock.ANY, num_retries=mock.ANY)


-class DataProcJobTest(unittest.TestCase):
+class TestDataProcJob(unittest.TestCase):
     UUID = '12345678'
     JOB_TO_SUBMIT = {
         'job':
tests/contrib/hooks/test_qubole_check_hook.py (1 addition & 1 deletion)

@@ -21,7 +21,7 @@
 from airflow.contrib.hooks.qubole_check_hook import parse_first_row


-class QuboleCheckHookTest(unittest.TestCase):
+class TestQuboleCheckHook(unittest.TestCase):
     def test_single_row_bool(self):
         query_result = ['true\ttrue']
         record_list = parse_first_row(query_result)
tests/contrib/hooks/test_sftp_hook.py (1 addition & 1 deletion)

@@ -34,7 +34,7 @@
 SFTP_CONNECTION_USER = "root"


-class SFTPHookTest(unittest.TestCase):
+class TestSFTPHook(unittest.TestCase):

     @provide_session
     def update_connection(self, login, session=None):
tests/contrib/hooks/test_ssh_hook.py (1 addition & 1 deletion)

@@ -38,7 +38,7 @@
 """


-class SSHHookTest(unittest.TestCase):
+class TestSSHHook(unittest.TestCase):
     @mock.patch('airflow.contrib.hooks.ssh_hook.paramiko.SSHClient')
     def test_ssh_connection_with_password(self, ssh_mock):
         hook = SSHHook(remote_host='remote_host',
tests/contrib/operators/test_adls_list_operator.py (1 addition & 1 deletion)

@@ -28,7 +28,7 @@
               "test/path/PARQUET.parquet", "test/path/PIC.png"]


-class AzureDataLakeStorageListOperatorTest(unittest.TestCase):
+class TestAzureDataLakeStorageListOperator(unittest.TestCase):

     @mock.patch('airflow.contrib.operators.adls_list_operator.AzureDataLakeHook')
     def test_execute(self, mock_hook):
tests/contrib/operators/test_adls_to_gcs_operator.py (1 addition & 1 deletion)

@@ -32,7 +32,7 @@
 GCS_CONN_ID = 'google_cloud_default'


-class AdlsToGoogleCloudStorageOperatorTest(unittest.TestCase):
+class TestAdlsToGoogleCloudStorageOperator(unittest.TestCase):
     def test_init(self):
         """Test AdlsToGoogleCloudStorageOperator instance is properly initialized."""
tests/contrib/operators/test_bigquery_operator.py (13 additions & 13 deletions)

@@ -51,7 +51,7 @@
 TEST_DAG_ID = 'test-bigquery-operators'


-class BigQueryCreateEmptyTableOperatorTest(unittest.TestCase):
+class TestBigQueryCreateEmptyTableOperator(unittest.TestCase):

     @mock.patch('airflow.contrib.operators.bigquery_operator.BigQueryHook')
     def test_execute(self, mock_hook):
@@ -76,7 +76,7 @@ def test_execute(self, mock_hook):
         )


-class BigQueryCreateExternalTableOperatorTest(unittest.TestCase):
+class TestBigQueryCreateExternalTableOperator(unittest.TestCase):

     @mock.patch('airflow.contrib.operators.bigquery_operator.BigQueryHook')
     def test_execute(self, mock_hook):
@@ -117,7 +117,7 @@ def test_execute(self, mock_hook):
         )


-class BigQueryDeleteDatasetOperatorTest(unittest.TestCase):
+class TestBigQueryDeleteDatasetOperator(unittest.TestCase):
     @mock.patch('airflow.contrib.operators.bigquery_operator.BigQueryHook')
     def test_execute(self, mock_hook):
         operator = BigQueryDeleteDatasetOperator(
@@ -139,7 +139,7 @@ def test_execute(self, mock_hook):
         )


-class BigQueryCreateEmptyDatasetOperatorTest(unittest.TestCase):
+class TestBigQueryCreateEmptyDatasetOperator(unittest.TestCase):
     @mock.patch('airflow.contrib.operators.bigquery_operator.BigQueryHook')
     def test_execute(self, mock_hook):
         operator = BigQueryCreateEmptyDatasetOperator(
@@ -160,7 +160,7 @@ def test_execute(self, mock_hook):
         )


-class BigQueryGetDatasetOperatorTest(unittest.TestCase):
+class TestBigQueryGetDatasetOperator(unittest.TestCase):
     @mock.patch('airflow.contrib.operators.bigquery_operator.BigQueryHook')
     def test_execute(self, mock_hook):
         operator = BigQueryGetDatasetOperator(
@@ -180,7 +180,7 @@ def test_execute(self, mock_hook):
         )


-class BigQueryPatchDatasetOperatorTest(unittest.TestCase):
+class TestBigQueryPatchDatasetOperator(unittest.TestCase):
     @mock.patch('airflow.contrib.operators.bigquery_operator.BigQueryHook')
     def test_execute(self, mock_hook):
         dataset_resource = {"friendlyName": 'Test DS'}
@@ -203,7 +203,7 @@ def test_execute(self, mock_hook):
         )


-class BigQueryUpdateDatasetOperatorTest(unittest.TestCase):
+class TestBigQueryUpdateDatasetOperator(unittest.TestCase):
     @mock.patch('airflow.contrib.operators.bigquery_operator.BigQueryHook')
     def test_execute(self, mock_hook):
         dataset_resource = {"friendlyName": 'Test DS'}
@@ -226,7 +226,7 @@ def test_execute(self, mock_hook):
         )


-class BigQueryOperatorTest(unittest.TestCase):
+class TestBigQueryOperator(unittest.TestCase):
     def setUp(self):
         self.dagbag = models.DagBag(
             dag_folder='/dev/null', include_examples=True)
@@ -462,7 +462,7 @@ def test_bigquery_operator_extra_link(self, mock_hook):
         )


-class BigQueryGetDataOperatorTest(unittest.TestCase):
+class TestBigQueryGetDataOperator(unittest.TestCase):

     @mock.patch('airflow.contrib.operators.bigquery_get_data.BigQueryHook')
     def test_execute(self, mock_hook):
@@ -488,7 +488,7 @@ def test_execute(self, mock_hook):
         )


-class BigQueryTableDeleteOperatorTest(unittest.TestCase):
+class TestBigQueryTableDeleteOperator(unittest.TestCase):
     @mock.patch('airflow.contrib.operators.bigquery_table_delete_operator.BigQueryHook')
     def test_execute(self, mock_hook):
         ignore_if_missing = True
@@ -511,7 +511,7 @@ def test_execute(self, mock_hook):
         )


-class BigQueryToBigQueryOperatorTest(unittest.TestCase):
+class TestBigQueryToBigQueryOperator(unittest.TestCase):
     @mock.patch('airflow.contrib.operators.bigquery_to_bigquery.BigQueryHook')
     def test_execute(self, mock_hook):
         source_project_dataset_tables = '{}.{}'.format(
@@ -548,7 +548,7 @@ def test_execute(self, mock_hook):
         )


-class BigQueryToCloudStorageOperatorTest(unittest.TestCase):
+class TestBigQueryToCloudStorageOperator(unittest.TestCase):
     @mock.patch('airflow.contrib.operators.bigquery_to_gcs.BigQueryHook')
     def test_execute(self, mock_hook):
         source_project_dataset_table = '{}.{}'.format(
@@ -587,7 +587,7 @@ def test_execute(self, mock_hook):
         )


-class BigQueryToMySqlOperatorTest(unittest.TestCase):
+class TestBigQueryToMySqlOperator(unittest.TestCase):
     @mock.patch('airflow.contrib.operators.bigquery_to_mysql_operator.BigQueryHook')
     def test_execute_good_request_to_bq(self, mock_hook):
         destination_table = 'table'
tests/contrib/operators/test_cassandra_to_gcs_operator.py (1 addition & 1 deletion)

@@ -28,7 +28,7 @@
 TMP_FILE_NAME = "temp-file"


-class CassandraToGCSTest(unittest.TestCase):
+class TestCassandraToGCS(unittest.TestCase):
     @mock.patch("airflow.contrib.operators.cassandra_to_gcs.NamedTemporaryFile")
     @mock.patch(
         "airflow.contrib.operators.cassandra_to_gcs.GoogleCloudStorageHook.upload"
tests/contrib/operators/test_databricks_operator.py (3 additions & 3 deletions)

@@ -68,7 +68,7 @@
 SPARK_SUBMIT_PARAMS = ["--class", "org.apache.spark.examples.SparkPi"]


-class DatabricksOperatorSharedFunctions(unittest.TestCase):
+class TestDatabricksOperatorSharedFunctions(unittest.TestCase):
     def test_deep_string_coerce(self):
         test_json = {
             'test_int': 1,
@@ -88,7 +88,7 @@ def test_deep_string_coerce(self):
         self.assertDictEqual(databricks_operator._deep_string_coerce(test_json), expected)


-class DatabricksSubmitRunOperatorTest(unittest.TestCase):
+class TestDatabricksSubmitRunOperator(unittest.TestCase):
     def test_init_with_named_parameters(self):
         """
         Test the initializer with the named parameters.
@@ -260,7 +260,7 @@ def test_on_kill(self, db_mock_class):
         db_mock.cancel_run.assert_called_once_with(RUN_ID)


-class DatabricksRunNowOperatorTest(unittest.TestCase):
+class TestDatabricksRunNowOperator(unittest.TestCase):

     def test_init_with_named_parameters(self):
         """
tests/contrib/operators/test_dataproc_operator.py (11 additions & 11 deletions)

@@ -116,7 +116,7 @@ def _assert_dataproc_job_id(mock_hook, dataproc_task):
     assert dataproc_task.dataproc_job_id == DATAPROC_JOB_ID


-class DataprocClusterCreateOperatorTest(unittest.TestCase):
+class TestDataprocClusterCreateOperator(unittest.TestCase):
     # Unit test for the DataprocClusterCreateOperator
     def setUp(self):
         # instantiate two different test cases with different labels.
@@ -531,7 +531,7 @@ def create_cluster_with_invalid_internal_ip_only_setup():
             "Set internal_ip_only to true only when you pass a subnetwork_uri.")


-class DataprocClusterScaleOperatorTest(unittest.TestCase):
+class TestDataprocClusterScaleOperator(unittest.TestCase):
     # Unit test for the DataprocClusterScaleOperator
     def setUp(self):
         # Setup service.projects().regions().clusters().patch()
@@ -594,7 +594,7 @@ def test_update_cluster(self):
         hook.wait.assert_called_once_with(self.operation)


-class DataprocClusterDeleteOperatorTest(unittest.TestCase):
+class TestDataprocClusterDeleteOperator(unittest.TestCase):
     # Unit test for the DataprocClusterDeleteOperator
     def setUp(self):
         # Setup service.projects().regions().clusters().delete()
@@ -643,7 +643,7 @@ def test_delete_cluster(self):
         hook.wait.assert_called_once_with(self.operation)


-class DataProcJobBaseOperatorTest(unittest.TestCase):
+class TestDataProcJobBaseOperator(unittest.TestCase):

     def setUp(self):
         self.dag = DAG(
@@ -682,7 +682,7 @@ def submit_side_effect(_1, _2, _3, _4):
         mock_hook.cancel.assert_called_once_with(mock.ANY, job_id, GCP_REGION)


-class DataProcHadoopOperatorTest(unittest.TestCase):
+class TestDataProcHadoopOperator(unittest.TestCase):
     # Unit test for the DataProcHadoopOperator
     @mock.patch(
         'airflow.contrib.hooks.gcp_dataproc_hook.DataProcHook.project_id',
@@ -731,7 +731,7 @@ def test_dataproc_job_id_is_set():
         _assert_dataproc_job_id(mock_hook, dataproc_task)


-class DataProcHiveOperatorTest(unittest.TestCase):
+class TestDataProcHiveOperator(unittest.TestCase):
     # Unit test for the DataProcHiveOperator
     @mock.patch(
         'airflow.contrib.hooks.gcp_dataproc_hook.DataProcHook.project_id',
@@ -780,7 +780,7 @@ def test_dataproc_job_id_is_set():
         _assert_dataproc_job_id(mock_hook, dataproc_task)


-class DataProcPigOperatorTest(unittest.TestCase):
+class TestDataProcPigOperator(unittest.TestCase):
     @mock.patch(
         'airflow.contrib.hooks.gcp_dataproc_hook.DataProcHook.project_id',
         new_callable=PropertyMock,
@@ -832,7 +832,7 @@ def test_dataproc_job_id_is_set():
         _assert_dataproc_job_id(mock_hook, dataproc_task)


-class DataProcPySparkOperatorTest(unittest.TestCase):
+class TestDataProcPySparkOperator(unittest.TestCase):
     # Unit test for the DataProcPySparkOperator
     @mock.patch(
         'airflow.contrib.hooks.gcp_dataproc_hook.DataProcHook.project_id',
@@ -884,7 +884,7 @@ def test_dataproc_job_id_is_set():
         _assert_dataproc_job_id(mock_hook, dataproc_task)


-class DataProcSparkOperatorTest(unittest.TestCase):
+class TestDataProcSparkOperator(unittest.TestCase):
     # Unit test for the DataProcSparkOperator
     @mock.patch(
         'airflow.contrib.hooks.gcp_dataproc_hook.DataProcHook.project_id',
@@ -934,7 +934,7 @@ def test_dataproc_job_id_is_set():
         _assert_dataproc_job_id(mock_hook, dataproc_task)


-class DataprocWorkflowTemplateInstantiateOperatorTest(unittest.TestCase):
+class TestDataprocWorkflowTemplateInstantiateOperator(unittest.TestCase):
     def setUp(self):
         # Setup service.projects().regions().workflowTemplates().instantiate().execute()
         self.operation = {'name': 'operation', 'done': True}
@@ -981,7 +981,7 @@ def test_workflow(self):
         hook.wait.assert_called_once_with(self.operation)


-class DataprocWorkflowTemplateInstantiateInlineOperatorTest(unittest.TestCase):
+class TestDataprocWorkflowTemplateInstantiateInlineOperator(unittest.TestCase):
     def setUp(self):
         # Setup service.projects().regions().workflowTemplates().instantiateInline()
         # .execute()
tests/contrib/operators/test_dataproc_operator_system.py (3 additions & 3 deletions)

@@ -18,12 +18,12 @@
 # under the License.
 import unittest

-from tests.contrib.utils.base_gcp_system_test_case import SKIP_TEST_WARNING, DagGcpSystemTestCase
+from tests.contrib.utils.base_gcp_system_test_case import SKIP_TEST_WARNING, TestDagGcpSystem
 from tests.contrib.utils.gcp_authenticator import GCP_DATAPROC_KEY


-@unittest.skipIf(DagGcpSystemTestCase.skip_check(GCP_DATAPROC_KEY), SKIP_TEST_WARNING)
-class DataprocPigOperatorExampleDagsTest(DagGcpSystemTestCase):
+@unittest.skipIf(TestDagGcpSystem.skip_check(GCP_DATAPROC_KEY), SKIP_TEST_WARNING)
+class DataprocPigOperatorExampleDagsTest(TestDagGcpSystem):
     def __init__(self, method_name="runTest"):
         super().__init__(
             method_name, dag_id="example_gcp_dataproc_pig_operator", gcp_key=GCP_DATAPROC_KEY
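This hunk differs from the rest: the renamed class, `TestDagGcpSystem`, is a shared system-test base class defined in `tests/contrib/utils/base_gcp_system_test_case.py`, so the rename appears here at its import and use sites, while the concrete `DataprocPigOperatorExampleDagsTest` keeps its suffix-style name in this PR. A sketch of that base-class pattern with hypothetical names (not Airflow's actual implementation):

import unittest

GCP_KEY = "gcp_dataproc_key.json"  # hypothetical credential file name
SKIP_WARNING = "skipping system test: no GCP credentials configured"


class TestSystemBase(unittest.TestCase):
    # Hypothetical shared base class, renamed to the Test* prefix style.
    def __init__(self, method_name="runTest", dag_id=None, gcp_key=None):
        super().__init__(method_name)
        self.dag_id = dag_id
        self.gcp_key = gcp_key

    @staticmethod
    def skip_check(key):
        # Stand-in for the real skip_check: skip when no key is configured.
        return key is None


@unittest.skipIf(TestSystemBase.skip_check(GCP_KEY), SKIP_WARNING)
class ExampleDagsTest(TestSystemBase):
    def __init__(self, method_name="runTest"):
        super().__init__(method_name, dag_id="example_dag", gcp_key=GCP_KEY)

    def test_dag_id_is_set(self):
        self.assertEqual(self.dag_id, "example_dag")


if __name__ == '__main__':
    unittest.main()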
tests/contrib/operators/test_gcs_acl_operator.py (1 addition & 1 deletion)

@@ -25,7 +25,7 @@
 from tests.compat import mock


-class GoogleCloudStorageAclTest(unittest.TestCase):
+class TestGoogleCloudStorageAcl(unittest.TestCase):
     @mock.patch('airflow.contrib.operators.gcs_acl_operator.GoogleCloudStorageHook')
     def test_bucket_create_acl(self, mock_hook):
         operator = GoogleCloudStorageBucketCreateAclEntryOperator(