diff --git a/noxfile.py b/noxfile.py
index 2e7154fd4e..936cf8b29b 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -255,7 +255,7 @@ def system(session):
     if system_test_folder_exists:
         session.run(
             "py.test",
-            "--quiet",
+            "-v",
             f"--junitxml=system_{session.python}_sponge_log.xml",
             system_test_folder_path,
             *session.posargs,
diff --git a/tests/system/aiplatform/test_e2e_forecasting.py b/tests/system/aiplatform/test_e2e_forecasting.py
index 938d0e27b5..45b16a015f 100644
--- a/tests/system/aiplatform/test_e2e_forecasting.py
+++ b/tests/system/aiplatform/test_e2e_forecasting.py
@@ -18,7 +18,7 @@
 from google.cloud import aiplatform
 from google.cloud.aiplatform import training_jobs
-# from google.cloud.aiplatform.compat.types import job_state
+from google.cloud.aiplatform.compat.types import job_state
 from google.cloud.aiplatform.compat.types import pipeline_state
 import pytest
 
 from tests.system.aiplatform import e2e_base
@@ -103,24 +103,23 @@ def test_end_to_end_forecasting(self, shared_state, training_job):
             )
             resources.append(model)
 
-            # TODO(b/275569167) Uncomment this when the bug is fixed
-            # batch_prediction_job = model.batch_predict(
-            #     job_display_name=self._make_display_name("forecasting-liquor-model"),
-            #     instances_format="bigquery",
-            #     predictions_format="csv",
-            #     machine_type="n1-standard-4",
-            #     bigquery_source=_PREDICTION_DATASET_BQ_PATH,
-            #     gcs_destination_prefix=(
-            #         f'gs://{shared_state["staging_bucket_name"]}/bp_results/'
-            #     ),
-            #     sync=False,
-            # )
-            # resources.append(batch_prediction_job)
+            batch_prediction_job = model.batch_predict(
+                job_display_name=self._make_display_name("forecasting-liquor-model"),
+                instances_format="bigquery",
+                predictions_format="csv",
+                machine_type="n1-standard-4",
+                bigquery_source=_PREDICTION_DATASET_BQ_PATH,
+                gcs_destination_prefix=(
+                    f'gs://{shared_state["staging_bucket_name"]}/bp_results/'
+                ),
+                sync=False,
+            )
+            resources.append(batch_prediction_job)
 
-            # batch_prediction_job.wait()
+            batch_prediction_job.wait()
             model.wait()
             assert job.state == pipeline_state.PipelineState.PIPELINE_STATE_SUCCEEDED
-            # assert batch_prediction_job.state == job_state.JobState.JOB_STATE_SUCCEEDED
+            assert batch_prediction_job.state == job_state.JobState.JOB_STATE_SUCCEEDED
         finally:
             for resource in resources:
                 resource.delete()
diff --git a/tests/system/aiplatform/test_e2e_tabular.py b/tests/system/aiplatform/test_e2e_tabular.py
index daa0364dc5..659a0b6f15 100644
--- a/tests/system/aiplatform/test_e2e_tabular.py
+++ b/tests/system/aiplatform/test_e2e_tabular.py
@@ -189,7 +189,7 @@ def test_end_to_end_tabular(self, shared_state):
 
         # Ensure batch prediction errors output file is empty
         batch_predict_gcs_output_path = (
-            custom_batch_prediction_job.output_info.gcs_output_path
+            custom_batch_prediction_job.output_info.gcs_output_directory
         )
 
         client = storage.Client()