Merge branch 'main' into patch-1
jaycee-li authored Nov 30, 2022
2 parents e9ac880 + cdd557e commit db76c5b
Showing 7 changed files with 98 additions and 26 deletions.
22 changes: 22 additions & 0 deletions CHANGELOG.md
@@ -1,6 +1,28 @@
# Changelog


## [1.19.0](https://github.com/googleapis/python-aiplatform/compare/v1.18.3...v1.19.0) (2022-11-17)


### Features

* Add Feature Store: Streaming Ingestion (write_feature_values()) and introduce Preview namespace to Vertex SDK; see the sketch after this list ([bae0315](https://github.com/googleapis/python-aiplatform/commit/bae03158c06865d1b61c06a1c8af64e876ce76dd))
* Add bq_dataset_id parameter to batch_serve_to_df ([bb72562](https://github.com/googleapis/python-aiplatform/commit/bb72562f4515b6ace73a735477584ca0b5a30f58))
* Add annotation_labels to ImportDataConfig in aiplatform v1 dataset.proto ([43e2805](https://github.com/googleapis/python-aiplatform/commit/43e28052d798c380de6e102edbe257a0100738cd))
* Add support for order_by in Metadata SDK list methods for Artifact, Execution and Context (see the sketch below). ([2377606](https://github.com/googleapis/python-aiplatform/commit/23776066909b5b7f77f704722d2719e1a1733ad4))
* Support global network parameter. ([c7f57ad](https://github.com/googleapis/python-aiplatform/commit/c7f57ad505b7251b9c663538e2312998445db691))
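
A minimal sketch of how the streaming-ingestion and ordering features above might be used, assuming the Preview namespace hangs off an `EntityType` instance and that the Metadata `list()` methods accept an `order_by` string; the project, featurestore, entity type, and feature names are placeholders, not values from this release:

```python
# Illustrative only; check the SDK reference for the exact signatures.
import pandas as pd
from google.cloud import aiplatform

aiplatform.init(project="my-project", location="us-central1")

# Streaming ingestion via the new Preview namespace (hypothetical IDs).
entity_type = aiplatform.featurestore.EntityType(
    entity_type_name="users", featurestore_id="my_featurestore"
)
entity_type.preview.write_feature_values(
    instances=pd.DataFrame({"age": [31], "language": ["en"]}, index=["user_1"])
)

# Ordered listing in the Metadata SDK via the new order_by parameter.
artifacts = aiplatform.Artifact.list(order_by="create_time desc")
```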


### Bug Fixes

* Correct data file gcs path for import_data_text_sentiment_analysis_sample test ([86df4b5](https://github.com/googleapis/python-aiplatform/commit/86df4b5d79118caf8f45c3845c92afe6585c24e9))
* Print error for schema classes ([13e2165](https://github.com/googleapis/python-aiplatform/commit/13e216518f20a32c7e18e6ea5b497a5fcb1d77a0))


### Documentation

* Update README with new link for AI Platform API ([35b83d9](https://github.com/googleapis/python-aiplatform/commit/35b83d90649ec396b736469278def4aaaf80621e))

## [1.18.3](https://github.com/googleapis/python-aiplatform/compare/v1.18.2...v1.18.3) (2022-11-01)


@@ -391,13 +391,21 @@ def list(

        for pipeline_execution in filtered_pipeline_executions:
            if "pipeline_job_resource_name" in pipeline_execution.metadata:
-                service_pipeline_job = cls(
-                    pipeline_execution.metadata["pipeline_job_resource_name"],
-                    project=project,
-                    location=location,
-                    credentials=credentials,
-                )
-                service_pipeline_jobs.append(service_pipeline_job)
+                # This is wrapped in a try/except for cases when both
+                # `_component_identifier` and `_template_name_identifier` are
+                # set. In that case, even though all pipelines returned by the
+                # Execution.list() call will match the `_component_identifier`,
+                # some may not match the `_template_name_identifier`.
+                try:
+                    service_pipeline_job = cls(
+                        pipeline_execution.metadata["pipeline_job_resource_name"],
+                        project=project,
+                        location=location,
+                        credentials=credentials,
+                    )
+                    service_pipeline_jobs.append(service_pipeline_job)
+                except ValueError:
+                    continue

        return service_pipeline_jobs

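The new comment and try/except above implement a filter-by-construction pattern: constructing the subclass is expected to raise `ValueError` for executions whose pipeline does not match `_template_name_identifier`, and those executions are silently skipped rather than treated as failures. A stripped-down sketch of the same pattern, with a hypothetical validating constructor standing in for the real class:

```python
# Hypothetical stand-in: the real subclass validates the pipeline against
# `_template_name_identifier` inside its constructor.
class MatchingJob:
    def __init__(self, resource_name: str):
        if "evaluation" not in resource_name:
            raise ValueError(f"{resource_name} does not match the template identifier")
        self.resource_name = resource_name


candidates = ["pipelines/evaluation-123", "pipelines/training-456"]

jobs = []
for name in candidates:
    try:
        jobs.append(MatchingJob(name))
    except ValueError:
        continue  # not an error: the execution belongs to a different template

print([job.resource_name for job in jobs])  # ['pipelines/evaluation-123']
```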
11 changes: 10 additions & 1 deletion google/cloud/aiplatform/constants/prediction.py
@@ -1,4 +1,4 @@
-# Copyright 2021 Google LLC
+# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -79,6 +79,12 @@
]

TF_CONTAINER_URIS = [
"us-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-10:latest",
"europe-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-10:latest",
"asia-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-10:latest",
"us-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-10:latest",
"europe-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-10:latest",
"asia-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-10:latest",
"us-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-9:latest",
"europe-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-9:latest",
"asia-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-9:latest",
@@ -130,6 +136,9 @@
"us-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-1:latest",
"europe-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-1:latest",
"asia-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-1:latest",
"us-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-1:latest",
"europe-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-1:latest",
"asia-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-1:latest",
"us-docker.pkg.dev/vertex-ai/prediction/tf-cpu.1-15:latest",
"europe-docker.pkg.dev/vertex-ai/prediction/tf-cpu.1-15:latest",
"asia-docker.pkg.dev/vertex-ai/prediction/tf-cpu.1-15:latest",
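The additions above register the TensorFlow 2.10 CPU/GPU prebuilt serving images (and the previously missing 2.1 GPU images) among the known prediction containers. A short sketch of where such a URI is typically consumed, assuming the standard `Model.upload` flow; the display name and artifact path are placeholders:

```python
from google.cloud import aiplatform

aiplatform.init(project="my-project", location="us-central1")

# Hypothetical upload that serves a SavedModel with one of the newly listed
# TF 2.10 prebuilt prediction containers.
model = aiplatform.Model.upload(
    display_name="my-tf-2-10-model",
    artifact_uri="gs://my-bucket/saved_model/",
    serving_container_image_uri=(
        "us-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-10:latest"
    ),
)
```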
@@ -17,11 +17,15 @@

"""A plugin to handle remote tensoflow profiler sessions for Vertex AI."""

-from google.cloud.aiplatform.training_utils.cloud_profiler import cloud_profiler_utils
+from google.cloud.aiplatform.training_utils.cloud_profiler import (
+    cloud_profiler_utils,
+)

try:
    import tensorflow as tf
-    from tensorboard_plugin_profile.profile_plugin import ProfilePlugin
+    from tensorboard_plugin_profile.profile_plugin import (
+        ProfilePlugin,
+    )
except ImportError as err:
    raise ImportError(cloud_profiler_utils.import_error_msg) from err

@@ -36,10 +40,14 @@
import tensorboard.plugins.base_plugin as tensorboard_base_plugin
from werkzeug import Response

-from google.cloud.aiplatform.tensorboard.plugins.tf_profiler import profile_uploader
+from google.cloud.aiplatform.tensorboard.plugins.tf_profiler import (
+    profile_uploader,
+)
from google.cloud.aiplatform.training_utils import environment_variables
from google.cloud.aiplatform.training_utils.cloud_profiler import wsgi_types
-from google.cloud.aiplatform.training_utils.cloud_profiler.plugins import base_plugin
+from google.cloud.aiplatform.training_utils.cloud_profiler.plugins import (
+    base_plugin,
+)
from google.cloud.aiplatform.training_utils.cloud_profiler.plugins.tensorflow import (
tensorboard_api,
)
@@ -68,8 +76,7 @@ def _get_tf_versioning() -> Optional[Version]:
    versioning = version.split(".")
    if len(versioning) != 3:
        return
-
-    return Version(int(versioning[0]), int(versioning[1]), int(versioning[2]))
+    return Version(int(versioning[0]), int(versioning[1]), versioning[2])


def _is_compatible_version(version: Version) -> bool:
@@ -228,7 +235,7 @@ def warn_tensorboard_env_var(var_name: str):
            Required. The name of the missing environment variable.
    """
    logging.warning(
-        f"Environment variable `{var_name}` must be set. " + _BASE_TB_ENV_WARNING
+        "Environment variable `%s` must be set. %s", var_name, _BASE_TB_ENV_WARNING
    )


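The `_get_tf_versioning` change above stops casting the patch component to an int, so pre-release strings such as `"2.4.0-rc2"` no longer raise `ValueError` during parsing (the new `testCanInitializeRcTFVersion` test further down exercises exactly this case). The same file also switches the environment-variable warning to lazy `%s`-style logging arguments. A standalone sketch of the version-parsing idea, with an assumed `NamedTuple` standing in for the module's own `Version` type:

```python
# Sketch only: the field names of the real Version type are assumptions.
from typing import NamedTuple, Optional


class Version(NamedTuple):
    major: int
    minor: int
    patch: str  # kept as a string so suffixes like "0-rc2" survive


def parse_tf_version(version: str) -> Optional[Version]:
    parts = version.split(".")
    if len(parts) != 3:
        return None
    # int("0-rc2") would raise ValueError, so only major and minor are cast.
    return Version(int(parts[0]), int(parts[1]), parts[2])


print(parse_tf_version("2.4.0-rc2"))  # Version(major=2, minor=4, patch='0-rc2')
```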
2 changes: 1 addition & 1 deletion google/cloud/aiplatform/version.py
@@ -15,4 +15,4 @@
# limitations under the License.
#

__version__ = "1.18.3"
__version__ = "1.19.0"
18 changes: 14 additions & 4 deletions tests/unit/aiplatform/test_cloud_profiler.py
@@ -31,8 +31,12 @@
from google.api_core import exceptions
from google.cloud import aiplatform
from google.cloud.aiplatform import training_utils
-from google.cloud.aiplatform.tensorboard.plugins.tf_profiler import profile_uploader
-from google.cloud.aiplatform.training_utils.cloud_profiler.plugins import base_plugin
+from google.cloud.aiplatform.tensorboard.plugins.tf_profiler import (
+    profile_uploader,
+)
+from google.cloud.aiplatform.training_utils.cloud_profiler.plugins import (
+    base_plugin,
+)
from google.cloud.aiplatform.training_utils.cloud_profiler.plugins.tensorflow import (
tf_profiler,
)
@@ -175,15 +179,21 @@ def tf_import_mock(name, *args, **kwargs):
    def testCanInitializeTFVersion(self):
        import tensorflow

-        with mock.patch.object(tensorflow, "__version__", return_value="1.2.3.4"):
+        with mock.patch.object(tensorflow, "__version__", "1.2.3.4"):
            assert not TFProfiler.can_initialize()

    def testCanInitializeOldTFVersion(self):
        import tensorflow

-        with mock.patch.object(tensorflow, "__version__", return_value="2.3.0"):
+        with mock.patch.object(tensorflow, "__version__", "2.3.0"):
            assert not TFProfiler.can_initialize()

+    def testCanInitializeRcTFVersion(self):
+        import tensorflow as tf
+
+        with mock.patch.object(tf, "__version__", "2.4.0-rc2"):
+            assert TFProfiler.can_initialize()
+
    def testCanInitializeNoProfilePlugin(self):
        orig_find_spec = importlib.util.find_spec

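The test fix above drops `return_value=...` because `__version__` is a plain string attribute, not a callable: passing the replacement value positionally makes `mock.patch.object` substitute the attribute itself, whereas `return_value` installs a `MagicMock` whose return value the profiler's version check would never see. A minimal illustration of the difference, using a throwaway namespace object instead of TensorFlow:

```python
# Illustrative only; `fake_module` stands in for an imported module.
from types import SimpleNamespace
from unittest import mock

fake_module = SimpleNamespace(__version__="2.10.0")

# Positional new value: the attribute is replaced by the string itself.
with mock.patch.object(fake_module, "__version__", "2.4.0-rc2"):
    assert fake_module.__version__ == "2.4.0-rc2"

# return_value=...: the attribute becomes a MagicMock, so code that parses
# fake_module.__version__ as a string would not see "2.4.0-rc2" at all.
with mock.patch.object(fake_module, "__version__", return_value="2.4.0-rc2"):
    assert not isinstance(fake_module.__version__, str)
```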
28 changes: 22 additions & 6 deletions tests/unit/aiplatform/test_pipeline_based_service.py
@@ -596,15 +596,9 @@ def test_create_and_submit_pipeline_job(
            == test_backing_pipeline_job.resource_name
        )

-    @pytest.mark.parametrize(
-        "job_spec_json",
-        [_TEST_PIPELINE_JOB],
-    )
    def test_list_pipeline_based_service(
        self,
        mock_pipeline_based_service_get,
-        mock_load_yaml_and_json,
-        job_spec_json,
        get_execution_mock,
        list_executions_mock,
    ):
@@ -635,3 +629,25 @@ def test_list_pipeline_based_service(
        # only 1 of the 2 executions in list_executions_mock matches the
        # properties of FakePipelineBasedService
        assert len(test_list_request) == 1
+
+    def test_list_pipeline_based_service_with_template_name_identifier(
+        self,
+        mock_pipeline_based_service_get,
+        get_execution_mock,
+        list_executions_mock,
+    ):
+        aiplatform.init(
+            project=_TEST_PROJECT,
+            location=_TEST_LOCATION,
+            credentials=_TEST_CREDENTIALS,
+        )
+
+        self.FakePipelineBasedService._template_name_identifier = (
+            _TEST_INVALID_PIPELINE_NAME_IDENTIFIER
+        )
+
+        test_list_request = self.FakePipelineBasedService.list()
+
+        # None of the mock pipelines match the `_template_name_identifier`
+        # set above, so the returned list should be empty
+        assert len(test_list_request) == 0
