diff --git a/sdk/RELEASE.md b/sdk/RELEASE.md
index 4f301354753..7aed78e8b1f 100644
--- a/sdk/RELEASE.md
+++ b/sdk/RELEASE.md
@@ -2,10 +2,52 @@

 ## Major Features and Improvements

+* Support passing parameters in v2 using google.protobuf.Value [\#6804](https://github.com/kubeflow/pipelines/pull/6804).
+* Implement experimental v2 `@component` decorator [\#6825](https://github.com/kubeflow/pipelines/pull/6825)
+* Add `load_component_from_*` for v2 [\#6822](https://github.com/kubeflow/pipelines/pull/6822)
+* Merge v2 experimental change back to v2 namespace [\#6890](https://github.com/kubeflow/pipelines/pull/6890)
+* Add `ImporterSpec` for v2 [\#6917](https://github.com/kubeflow/pipelines/pull/6917)
+* Add `set_env_variable` for Pipeline task [\#6919](https://github.com/kubeflow/pipelines/pull/6919)
+* Add metadata field for importer [\#7112](https://github.com/kubeflow/pipelines/pull/7112)
+
+## Breaking Changes
+
+* Remove the `sdk/python/kfp/v2/google` directory for v2, including the Google client and custom job [\#6886](https://github.com/kubeflow/pipelines/pull/6886)
+* APIs imported from the v1 namespace are no longer supported by the v2 compiler. [\#6890](https://github.com/kubeflow/pipelines/pull/6890)
+* Deprecate v2 compatible mode in v1 compiler. [\#6958](https://github.com/kubeflow/pipelines/pull/6958)
+
+### For Pipeline Authors
+
+### For Component Authors
+
+## Deprecations
+
+## Bug Fixes and Other Changes
+
+* Fix importer ignoring the reimport setting, and switch to google.protobuf.Value for the import uri [\#6827](https://github.com/kubeflow/pipelines/pull/6827)
+* Fix display name support for groups [\#6832](https://github.com/kubeflow/pipelines/pull/6832)
+* Fix regression on optional inputs [\#6905](https://github.com/kubeflow/pipelines/pull/6905) [\#6937](https://github.com/kubeflow/pipelines/pull/6937)
+* Depends on `google-auth>=1.6.1,<3` [\#6939](https://github.com/kubeflow/pipelines/pull/6939)
+* Change `otherwise` to `else` in YAML [\#6952](https://github.com/kubeflow/pipelines/pull/6952)
+* Avoid pydantic bug on Union type [\#6957](https://github.com/kubeflow/pipelines/pull/6957)
+* Fix bug for `if` and `concat` placeholders [\#6978](https://github.com/kubeflow/pipelines/pull/6978)
+* Fix bug for `resourceSpec` [\#6979](https://github.com/kubeflow/pipelines/pull/6979)
+* Fix regression on nested loops [\#6990](https://github.com/kubeflow/pipelines/pull/6990)
+* Fix bug for input/output spec and positional arguments [\#6980](https://github.com/kubeflow/pipelines/pull/6980)
+
+## Documentation Updates
+
+# 1.8.11
+
+## Major Features and Improvements
+
+* `kfp.Client` uses the namespace given at initialization, if set, for the instance context [\#7056](https://github.com/kubeflow/pipelines/pull/7056)
 * Add importer_spec metadata to v1 [\#7180](https://github.com/kubeflow/pipelines/pull/7180)

 ## Breaking Changes

+* Adapt to the breaking change in Argo 3.0 by defining workflow TTL with `ttlStrategy`; this makes the SDK incompatible with KFP versions before 1.7 [\#7141](https://github.com/kubeflow/pipelines/pull/7141)
+
 ### For Pipeline Authors

 ### For Component Authors
@@ -14,6 +56,10 @@

 ## Bug Fixes and Other Changes

+* Remove redundant check in `set_gpu_limit` [\#6866](https://github.com/kubeflow/pipelines/pull/6866)
+* Fix `create_runtime_artifact` not covering all types [\#7168](https://github.com/kubeflow/pipelines/pull/7168)
+* Depends on `absl-py>=0.9,<2` [\#7172](https://github.com/kubeflow/pipelines/pull/7172)
+
 ## Documentation Updates

 # 1.8.10
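The v2 entries above (`@component`, `load_component_from_*`, protobuf-valued parameters) all land in the `kfp.v2` namespace. A minimal sketch of authoring and compiling against that namespace; the pipeline name and output path are illustrative, not taken from this patch:

```python
from kfp.v2 import compiler, dsl


@dsl.component
def add(a: float, b: float) -> float:
    """A lightweight component built with the v2 @component decorator."""
    return a + b


@dsl.pipeline(name='add-pipeline')
def add_pipeline(x: float = 1.0, y: float = 2.0):
    # Parameter values travel as google.protobuf.Value in the compiled IR.
    add(a=x, b=y)


# The v2 compiler emits IR JSON rather than an Argo workflow YAML.
compiler.Compiler().compile(
    pipeline_func=add_pipeline, package_path='add_pipeline.json')
```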
diff --git a/sdk/python/kfp/_client.py b/sdk/python/kfp/_client.py
index 3c3a7a48910..eb037b9101a 100644
--- a/sdk/python/kfp/_client.py
+++ b/sdk/python/kfp/_client.py
@@ -165,6 +165,12 @@ def __init__(self,
         # Save the loaded API client configuration, as a reference if update is
         # needed.
         self._load_context_setting_or_default()
+
+        # If a custom namespace is provided, it overrides the loaded or
+        # default namespace in the context settings for this client instance.
+        if namespace != 'kubeflow':
+            self._context_setting['namespace'] = namespace
+
         self._existing_config = config
         if cookies is None:
             cookies = self._context_setting.get('client_authentication_cookie')
diff --git a/sdk/python/kfp/compiler/compiler.py b/sdk/python/kfp/compiler/compiler.py
index e8abfc268ad..00dca83bc9f 100644
--- a/sdk/python/kfp/compiler/compiler.py
+++ b/sdk/python/kfp/compiler/compiler.py
@@ -831,8 +831,10 @@ def _create_pipeline_workflow(self,

         # set ttl after workflow finishes
         if pipeline_conf.ttl_seconds_after_finished >= 0:
-            workflow['spec'][
-                'ttlSecondsAfterFinished'] = pipeline_conf.ttl_seconds_after_finished
+            workflow['spec']['ttlStrategy'] = {
+                'secondsAfterCompletion':
+                    pipeline_conf.ttl_seconds_after_finished
+            }

         if pipeline_conf._pod_disruption_budget_min_available:
             pod_disruption_budget = {
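The `ttlStrategy` change above only affects what the v1 compiler emits; the authoring API is unchanged. A minimal sketch mirroring the updated compiler test further down in this patch, where the echo op and pipeline name are illustrative:

```python
import kfp
from kfp import dsl


def echo_op():
    return dsl.ContainerOp(
        name='echo', image='alpine:3.14', command=['echo', 'done'])


@dsl.pipeline(name='ttl-example')
def ttl_pipeline():
    echo_op()
    # Same authoring call as before; only the emitted Argo field changes.
    dsl.get_pipeline_conf().set_ttl_seconds_after_finished(86400)


workflow = kfp.compiler.Compiler()._compile(ttl_pipeline)
# Argo 3.x reads spec.ttlStrategy.secondsAfterCompletion; the Argo 2.x
# spec.ttlSecondsAfterFinished field is gone, hence the pre-1.7 KFP
# incompatibility called out in the release notes.
assert workflow['spec']['ttlStrategy']['secondsAfterCompletion'] == 86400
```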
diff --git a/sdk/python/kfp/dsl/_container_op.py b/sdk/python/kfp/dsl/_container_op.py
index 9284c85c96e..52913881e21 100644
--- a/sdk/python/kfp/dsl/_container_op.py
+++ b/sdk/python/kfp/dsl/_container_op.py
@@ -411,8 +411,7 @@ def set_gpu_limit(
             ignored in v2.
         """

-        if not isinstance(gpu, _pipeline_param.PipelineParam) or not isinstance(
-                gpu, _pipeline_param.PipelineParam):
+        if not isinstance(gpu, _pipeline_param.PipelineParam):
             self._validate_positive_number(gpu, 'gpu')

         if self._container_spec:
diff --git a/sdk/python/kfp/v2/components/types/artifact_types.py b/sdk/python/kfp/v2/components/types/artifact_types.py
index d3f027f275a..2976cbd6d58 100644
--- a/sdk/python/kfp/v2/components/types/artifact_types.py
+++ b/sdk/python/kfp/v2/components/types/artifact_types.py
@@ -429,8 +429,16 @@ def __init__(self,


 _SCHEMA_TITLE_TO_TYPE: Dict[str, Artifact] = {
-    x.TYPE_NAME: x
-    for x in [Artifact, Model, Dataset, Metrics, ClassificationMetrics]
+    x.TYPE_NAME: x for x in [
+        Artifact,
+        Model,
+        Dataset,
+        Metrics,
+        ClassificationMetrics,
+        SlicedClassificationMetrics,
+        HTML,
+        Markdown,
+    ]
 }


diff --git a/sdk/python/kfp/v2/components/types/artifact_types_test.py b/sdk/python/kfp/v2/components/types/artifact_types_test.py
index 571e3144387..64ec6fd723f 100644
--- a/sdk/python/kfp/v2/components/types/artifact_types_test.py
+++ b/sdk/python/kfp/v2/components/types/artifact_types_test.py
@@ -17,10 +17,11 @@
 import json
 import os

+from absl.testing import parameterized
 from kfp.v2.components.types import artifact_types


-class ArtifactsTest(unittest.TestCase):
+class ArtifactsTest(parameterized.TestCase):

     def test_complex_metrics(self):
         metrics = artifact_types.ClassificationMetrics()
@@ -55,6 +56,105 @@ def test_complex_metrics_bulk_loading(self):
             expected_json = json.load(json_file)
         self.assertEqual(expected_json, metrics.metadata)

+    @parameterized.parameters(
+        {
+            'runtime_artifact': {
+                "metadata": {},
+                "name": "input_artifact_one",
+                "type": {
+                    "schemaTitle": "system.Artifact"
+                },
+                "uri": "gs://some-bucket/input_artifact_one"
+            },
+            'expected_type': artifact_types.Artifact,
+        },
+        {
+            'runtime_artifact': {
+                "metadata": {},
+                "name": "input_artifact_one",
+                "type": {
+                    "schemaTitle": "system.Model"
+                },
+                "uri": "gs://some-bucket/input_artifact_one"
+            },
+            'expected_type': artifact_types.Model,
+        },
+        {
+            'runtime_artifact': {
+                "metadata": {},
+                "name": "input_artifact_one",
+                "type": {
+                    "schemaTitle": "system.Dataset"
+                },
+                "uri": "gs://some-bucket/input_artifact_one"
+            },
+            'expected_type': artifact_types.Dataset,
+        },
+        {
+            'runtime_artifact': {
+                "metadata": {},
+                "name": "input_artifact_one",
+                "type": {
+                    "schemaTitle": "system.Metrics"
+                },
+                "uri": "gs://some-bucket/input_artifact_one"
+            },
+            'expected_type': artifact_types.Metrics,
+        },
+        {
+            'runtime_artifact': {
+                "metadata": {},
+                "name": "input_artifact_one",
+                "type": {
+                    "schemaTitle": "system.ClassificationMetrics"
+                },
+                "uri": "gs://some-bucket/input_artifact_one"
+            },
+            'expected_type': artifact_types.ClassificationMetrics,
+        },
+        {
+            'runtime_artifact': {
+                "metadata": {},
+                "name": "input_artifact_one",
+                "type": {
+                    "schemaTitle": "system.SlicedClassificationMetrics"
+                },
+                "uri": "gs://some-bucket/input_artifact_one"
+            },
+            'expected_type': artifact_types.SlicedClassificationMetrics,
+        },
+        {
+            'runtime_artifact': {
+                "metadata": {},
+                "name": "input_artifact_one",
+                "type": {
+                    "schemaTitle": "system.HTML"
+                },
+                "uri": "gs://some-bucket/input_artifact_one"
+            },
+            'expected_type': artifact_types.HTML,
+        },
+        {
+            'runtime_artifact': {
+                "metadata": {},
+                "name": "input_artifact_one",
+                "type": {
+                    "schemaTitle": "system.Markdown"
+                },
+                "uri": "gs://some-bucket/input_artifact_one"
+            },
+            'expected_type': artifact_types.Markdown,
+        },
+    )
+    def test_create_runtime_artifact(
+        self,
+        runtime_artifact,
+        expected_type,
+    ):
+        self.assertIsInstance(
+            artifact_types.create_runtime_artifact(runtime_artifact),
+            expected_type)
+

 if __name__ == '__main__':
     unittest.main()
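The newly registered schema titles resolve through `_SCHEMA_TITLE_TO_TYPE`, so an executor-provided artifact dict now deserializes to its specific subclass. A small sketch using one of the added types, with an illustrative name and bucket path:

```python
from kfp.v2.components.types import artifact_types

runtime_artifact = {
    'name': 'report',
    'uri': 'gs://some-bucket/report',
    'metadata': {},
    'type': {
        'schemaTitle': 'system.Markdown'
    },
}

artifact = artifact_types.create_runtime_artifact(runtime_artifact)
# Before this change, system.Markdown was missing from the map, so the
# call did not produce the specific subclass (see #7168).
assert isinstance(artifact, artifact_types.Markdown)
assert artifact.uri == 'gs://some-bucket/report'
```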
diff --git a/sdk/python/setup.py b/sdk/python/setup.py
index 75ba23b331b..706360b8ed3 100644
--- a/sdk/python/setup.py
+++ b/sdk/python/setup.py
@@ -23,7 +23,7 @@
 # NOTICE, after any updates to the following, ./requirements.in should be updated
 # accordingly.
 REQUIRES = [
-    'absl-py>=0.9,<=0.11',
+    'absl-py>=0.9,<2',
     'PyYAML>=5.3,<6',
     # `Blob.from_string` was introduced in google-cloud-storage 1.20.0
     # https://github.com/googleapis/python-storage/blob/master/CHANGELOG.md#1200
diff --git a/sdk/python/tests/compiler/compiler_tests.py b/sdk/python/tests/compiler/compiler_tests.py
index 18a0ade6309..b6d95f04e5a 100644
--- a/sdk/python/tests/compiler/compiler_tests.py
+++ b/sdk/python/tests/compiler/compiler_tests.py
@@ -981,7 +981,8 @@ def some_pipeline():
             dsl.get_pipeline_conf().set_ttl_seconds_after_finished(86400)

         workflow_dict = kfp.compiler.Compiler()._compile(some_pipeline)
-        self.assertEqual(workflow_dict['spec']['ttlSecondsAfterFinished'],
-                         86400)
+        self.assertEqual(
+            workflow_dict['spec']['ttlStrategy']['secondsAfterCompletion'],
+            86400)

     def test_pod_disruption_budget(self):
diff --git a/test/presubmit-tests-tfx.sh b/test/presubmit-tests-tfx.sh
index f273c98f811..0136ded812e 100755
--- a/test/presubmit-tests-tfx.sh
+++ b/test/presubmit-tests-tfx.sh
@@ -17,6 +17,8 @@ source_root=$(pwd)

 # TODO(#5051) Unpin pip version once we figure out how to make the new dependency resolver in pip 20.3+ work in our case.
 python3 -m pip install --upgrade pip==20.2.3
+# TODO(#7142): remove future
+python3 -m pip install --upgrade future==0.18.2
 # TODO: unpin google-cloud-bigquery once TFX revert https://github.com/tensorflow/tfx/commit/f8c1dea2095197ceda60e1c4d67c4c90fc17ed44
 python3 -m pip install --upgrade google-cloud-bigquery==1.28.0
 python3 -m pip install -r "$source_root/sdk/python/requirements.txt"
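For completeness, the `_client.py` change earlier in this patch means a namespace passed to the constructor takes precedence over the one loaded from the local context settings for that instance. A minimal sketch of the intended multi-user usage; the host URL and profile name are illustrative:

```python
import kfp

# The explicit namespace overrides the namespace loaded from the local
# context settings for this client instance.
client = kfp.Client(
    host='http://localhost:8080/pipeline',
    namespace='alice-profile',
)

# Calls that fall back to the instance context now target 'alice-profile'.
print(client.list_experiments(page_size=5))
```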