chore: system test cleanup
PiperOrigin-RevId: 526988063
sararob authored and copybara-github committed Apr 25, 2023
1 parent 77b89c0 commit 0b3dac9
Showing 3 changed files with 17 additions and 18 deletions.
2 changes: 1 addition & 1 deletion noxfile.py
@@ -255,7 +255,7 @@ def system(session):
     if system_test_folder_exists:
         session.run(
             "py.test",
-            "--quiet",
+            "-v",
             f"--junitxml=system_{session.python}_sponge_log.xml",
             system_test_folder_path,
             *session.posargs,
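
For context, a minimal nox session illustrating the effect of this change: the system tests now run with pytest's verbose flag (-v) instead of --quiet, so each test's name and outcome is printed. This is a condensed sketch, not the repository's actual noxfile; the install step, Python versions, and test folder path shown here are assumptions.

import nox


@nox.session(python=["3.8", "3.9", "3.10"])
def system(session):
    # Install the package with its test extras (extra name assumed).
    session.install("-e", ".[testing]")
    # Run the system tests verbosely and emit a JUnit XML report,
    # mirroring the arguments in the diff above.
    session.run(
        "py.test",
        "-v",
        f"--junitxml=system_{session.python}_sponge_log.xml",
        "tests/system",
        *session.posargs,
    )
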
31 changes: 15 additions & 16 deletions tests/system/aiplatform/test_e2e_forecasting.py
@@ -18,7 +18,7 @@
 from google.cloud import aiplatform
 from google.cloud.aiplatform import training_jobs
 
-# from google.cloud.aiplatform.compat.types import job_state
+from google.cloud.aiplatform.compat.types import job_state
 from google.cloud.aiplatform.compat.types import pipeline_state
 import pytest
 from tests.system.aiplatform import e2e_base
@@ -103,24 +103,23 @@ def test_end_to_end_forecasting(self, shared_state, training_job):
             )
             resources.append(model)
 
-            # TODO(b/275569167) Uncomment this when the bug is fixed
-            # batch_prediction_job = model.batch_predict(
-            #     job_display_name=self._make_display_name("forecasting-liquor-model"),
-            #     instances_format="bigquery",
-            #     predictions_format="csv",
-            #     machine_type="n1-standard-4",
-            #     bigquery_source=_PREDICTION_DATASET_BQ_PATH,
-            #     gcs_destination_prefix=(
-            #         f'gs://{shared_state["staging_bucket_name"]}/bp_results/'
-            #     ),
-            #     sync=False,
-            # )
-            # resources.append(batch_prediction_job)
+            batch_prediction_job = model.batch_predict(
+                job_display_name=self._make_display_name("forecasting-liquor-model"),
+                instances_format="bigquery",
+                predictions_format="csv",
+                machine_type="n1-standard-4",
+                bigquery_source=_PREDICTION_DATASET_BQ_PATH,
+                gcs_destination_prefix=(
+                    f'gs://{shared_state["staging_bucket_name"]}/bp_results/'
+                ),
+                sync=False,
+            )
+            resources.append(batch_prediction_job)
 
-            # batch_prediction_job.wait()
+            batch_prediction_job.wait()
             model.wait()
             assert job.state == pipeline_state.PipelineState.PIPELINE_STATE_SUCCEEDED
-            # assert batch_prediction_job.state == job_state.JobState.JOB_STATE_SUCCEEDED
+            assert batch_prediction_job.state == job_state.JobState.JOB_STATE_SUCCEEDED
         finally:
             for resource in resources:
                 resource.delete()
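
The re-enabled block follows the usual Vertex AI SDK batch prediction pattern: start the job asynchronously, wait for it, then check its terminal state. A standalone sketch of that pattern is below; the project, location, model resource name, BigQuery table, and bucket are placeholders, where the test instead uses its own fixtures and shared state.

from google.cloud import aiplatform
from google.cloud.aiplatform.compat.types import job_state

# Placeholder project/location/model/bucket values for illustration only.
aiplatform.init(project="my-project", location="us-central1")
model = aiplatform.Model(
    "projects/my-project/locations/us-central1/models/1234567890"
)

# Start the batch prediction job without blocking (sync=False).
batch_prediction_job = model.batch_predict(
    job_display_name="forecasting-batch-predict",
    instances_format="bigquery",
    predictions_format="csv",
    machine_type="n1-standard-4",
    bigquery_source="bq://my-project.my_dataset.my_table",
    gcs_destination_prefix="gs://my-bucket/bp_results/",
    sync=False,
)

# Block until the job reaches a terminal state, then verify success.
batch_prediction_job.wait()
assert batch_prediction_job.state == job_state.JobState.JOB_STATE_SUCCEEDED
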
2 changes: 1 addition & 1 deletion tests/system/aiplatform/test_e2e_tabular.py
@@ -189,7 +189,7 @@ def test_end_to_end_tabular(self, shared_state):
 
         # Ensure batch prediction errors output file is empty
         batch_predict_gcs_output_path = (
-            custom_batch_prediction_job.output_info.gcs_output_path
+            custom_batch_prediction_job.output_info.gcs_output_directory
         )
         client = storage.Client()
 
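
The corrected attribute, output_info.gcs_output_directory, is the GCS folder where the batch prediction job wrote its results. A rough sketch of the kind of check this test performs with it, assuming a "gs://bucket/prefix" style path and a job object like the test's custom_batch_prediction_job; the helper name and the error-file check are illustrative, not the test's exact code.

from google.cloud import storage


def assert_no_batch_prediction_errors(batch_prediction_job):
    """Assert that the error files written by a batch prediction job are empty."""
    output_dir = batch_prediction_job.output_info.gcs_output_directory
    # Strip the "gs://" scheme and split into bucket name and object prefix.
    bucket_name, _, prefix = output_dir[len("gs://"):].partition("/")

    client = storage.Client()
    for blob in client.list_blobs(bucket_name, prefix=prefix):
        if "errors" in blob.name:
            # Error shards may exist, but they should contain no failed rows.
            assert blob.size == 0
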
