diff --git a/examples/airflow_orchestration/steps/importers.py b/examples/airflow_orchestration/steps/importers.py
index 52b56ca1e49..0c4bcfb045e 100644
--- a/examples/airflow_orchestration/steps/importers.py
+++ b/examples/airflow_orchestration/steps/importers.py
@@ -20,11 +20,9 @@
 
 @step
-def importer_mnist() -> (
-    Output(
-        train_dataloader=DataLoader,
-        test_dataloader=DataLoader,
-    )
+def importer_mnist() -> Output(
+    train_dataloader=DataLoader,
+    test_dataloader=DataLoader,
 ):
     """Download the Fashion MNIST dataset."""
     # Download training data from open datasets.
diff --git a/examples/bentoml_deployment/steps/importers.py b/examples/bentoml_deployment/steps/importers.py
index 52b56ca1e49..0c4bcfb045e 100644
--- a/examples/bentoml_deployment/steps/importers.py
+++ b/examples/bentoml_deployment/steps/importers.py
@@ -20,11 +20,9 @@
 
 @step
-def importer_mnist() -> (
-    Output(
-        train_dataloader=DataLoader,
-        test_dataloader=DataLoader,
-    )
+def importer_mnist() -> Output(
+    train_dataloader=DataLoader,
+    test_dataloader=DataLoader,
 ):
     """Download the Fashion MNIST dataset."""
     # Download training data from open datasets.
diff --git a/examples/custom_code_deployment/kserve_tensorflow/steps/tf_data_loader.py b/examples/custom_code_deployment/kserve_tensorflow/steps/tf_data_loader.py
index 23adb49b6b5..a41c5b5e8cf 100644
--- a/examples/custom_code_deployment/kserve_tensorflow/steps/tf_data_loader.py
+++ b/examples/custom_code_deployment/kserve_tensorflow/steps/tf_data_loader.py
@@ -19,13 +19,11 @@
 
 @step
-def tf_data_loader() -> (
-    Output(
-        x_train=np.ndarray,
-        y_train=np.ndarray,
-        x_test=np.ndarray,
-        y_test=np.ndarray,
-    )
+def tf_data_loader() -> Output(
+    x_train=np.ndarray,
+    y_train=np.ndarray,
+    x_test=np.ndarray,
+    y_test=np.ndarray,
 ):
     """Download the MNIST data store it as an artifact."""
     (x_train, y_train), (
diff --git a/examples/custom_code_deployment/seldon_tensorflow/steps/tf_data_loader.py b/examples/custom_code_deployment/seldon_tensorflow/steps/tf_data_loader.py
index 23adb49b6b5..a41c5b5e8cf 100644
--- a/examples/custom_code_deployment/seldon_tensorflow/steps/tf_data_loader.py
+++ b/examples/custom_code_deployment/seldon_tensorflow/steps/tf_data_loader.py
@@ -19,13 +19,11 @@
 
 @step
-def tf_data_loader() -> (
-    Output(
-        x_train=np.ndarray,
-        y_train=np.ndarray,
-        x_test=np.ndarray,
-        y_test=np.ndarray,
-    )
+def tf_data_loader() -> Output(
+    x_train=np.ndarray,
+    y_train=np.ndarray,
+    x_test=np.ndarray,
+    y_test=np.ndarray,
 ):
     """Download the MNIST data store it as an artifact."""
     (x_train, y_train), (
diff --git a/examples/deepchecks_data_validation/steps/data_loader.py b/examples/deepchecks_data_validation/steps/data_loader.py
index fa17b9929b3..e1192bf6eb2 100644
--- a/examples/deepchecks_data_validation/steps/data_loader.py
+++ b/examples/deepchecks_data_validation/steps/data_loader.py
@@ -19,8 +19,8 @@
 
 @step
-def data_loader() -> (
-    Output(reference_dataset=pd.DataFrame, comparison_dataset=pd.DataFrame)
+def data_loader() -> Output(
+    reference_dataset=pd.DataFrame, comparison_dataset=pd.DataFrame
 ):
     """Load the iris dataset."""
     iris_df = iris.load_data(data_format="Dataframe", as_train_test=False)
diff --git a/examples/kserve_deployment/steps/pytorch_steps/mnist_handler.py b/examples/kserve_deployment/steps/pytorch_steps/mnist_handler.py
index 1f2f0e34ece..12ff8870285 100644
--- a/examples/kserve_deployment/steps/pytorch_steps/mnist_handler.py
+++ b/examples/kserve_deployment/steps/pytorch_steps/mnist_handler.py
@@ -19,8 +19,7 @@ class MNISTDigitClassifier(ImageClassifier):
     """
     MNISTDigitClassifier handler class. This handler extends class ImageClassifier from image_classifier.py, a default handler. This handler takes an image and returns the number in that image.
 
-    Here the post-processing method has been overridden while others are reused from parent class.
-    """
+    Here the post-processing method has been overridden while others are reused from parent class."""
 
     image_processing = transforms.Compose(
         [transforms.ToTensor(), transforms.Normalize((0.1307,), (0.3081,))]
diff --git a/examples/kserve_deployment/steps/tensorflow_steps/mnist_importer.py b/examples/kserve_deployment/steps/tensorflow_steps/mnist_importer.py
index e83f7ea7d29..77e04f93a96 100644
--- a/examples/kserve_deployment/steps/tensorflow_steps/mnist_importer.py
+++ b/examples/kserve_deployment/steps/tensorflow_steps/mnist_importer.py
@@ -18,13 +18,11 @@
 
 @step
-def importer_mnist() -> (
-    Output(
-        x_train=np.ndarray,
-        y_train=np.ndarray,
-        x_test=np.ndarray,
-        y_test=np.ndarray,
-    )
+def importer_mnist() -> Output(
+    x_train=np.ndarray,
+    y_train=np.ndarray,
+    x_test=np.ndarray,
+    y_test=np.ndarray,
 ):
     """Download the MNIST data and store it as an artifact."""
     (x_train, y_train), (
diff --git a/examples/kubeflow_pipelines_orchestration/steps/importers.py b/examples/kubeflow_pipelines_orchestration/steps/importers.py
index a39e6fba128..70291fd5f53 100644
--- a/examples/kubeflow_pipelines_orchestration/steps/importers.py
+++ b/examples/kubeflow_pipelines_orchestration/steps/importers.py
@@ -20,13 +20,11 @@
 
 @step
-def importer() -> (
-    Output(
-        X_train=np.ndarray,
-        X_test=np.ndarray,
-        y_train=np.ndarray,
-        y_test=np.ndarray,
-    )
+def importer() -> Output(
+    X_train=np.ndarray,
+    X_test=np.ndarray,
+    y_train=np.ndarray,
+    y_test=np.ndarray,
 ):
     """Download the MNIST data store it as an artifact."""
     (X_train, y_train), (
diff --git a/examples/kubernetes_orchestration/steps/importer.py b/examples/kubernetes_orchestration/steps/importer.py
index 04fa1246876..6abaf108e36 100644
--- a/examples/kubernetes_orchestration/steps/importer.py
+++ b/examples/kubernetes_orchestration/steps/importer.py
@@ -20,13 +20,11 @@
 
 @step
-def digits_data_loader() -> (
-    Output(
-        X_train=np.ndarray,
-        X_test=np.ndarray,
-        y_train=np.ndarray,
-        y_test=np.ndarray,
-    )
+def digits_data_loader() -> Output(
+    X_train=np.ndarray,
+    X_test=np.ndarray,
+    y_train=np.ndarray,
+    y_test=np.ndarray,
 ):
     """Loads the digits dataset as a tuple of flattened numpy arrays."""
     digits = load_digits()
diff --git a/examples/mlflow_tracking/steps/loader/loader_step.py b/examples/mlflow_tracking/steps/loader/loader_step.py
index 0b34d870324..4c6e469e81f 100644
--- a/examples/mlflow_tracking/steps/loader/loader_step.py
+++ b/examples/mlflow_tracking/steps/loader/loader_step.py
@@ -18,13 +18,11 @@
 
 @step
-def loader_mnist() -> (
-    Output(
-        x_train=np.ndarray,
-        y_train=np.ndarray,
-        x_test=np.ndarray,
-        y_test=np.ndarray,
-    )
+def loader_mnist() -> Output(
+    x_train=np.ndarray,
+    y_train=np.ndarray,
+    x_test=np.ndarray,
+    y_test=np.ndarray,
 ):
     """Download the MNIST data store it as an artifact."""
     (x_train, y_train), (
diff --git a/examples/neptune_tracking/steps/loader/loader_step.py b/examples/neptune_tracking/steps/loader/loader_step.py
index 184629e1115..5d6ff5940c7 100644
--- a/examples/neptune_tracking/steps/loader/loader_step.py
+++ b/examples/neptune_tracking/steps/loader/loader_step.py
@@ -19,13 +19,11 @@
 
 @step
-def loader_mnist() -> (
-    Output(
-        x_train=np.ndarray,
-        y_train=np.ndarray,
-        x_test=np.ndarray,
-        y_test=np.ndarray,
-    )
+def loader_mnist() -> Output(
+    x_train=np.ndarray,
+    y_train=np.ndarray,
+    x_test=np.ndarray,
+    y_test=np.ndarray,
 ):
     """Download the MNIST data store it as an artifact."""
     (x_train, y_train), (
diff --git a/examples/pytorch/steps/importers.py b/examples/pytorch/steps/importers.py
index 52b56ca1e49..0c4bcfb045e 100644
--- a/examples/pytorch/steps/importers.py
+++ b/examples/pytorch/steps/importers.py
@@ -20,11 +20,9 @@
 
 @step
-def importer_mnist() -> (
-    Output(
-        train_dataloader=DataLoader,
-        test_dataloader=DataLoader,
-    )
+def importer_mnist() -> Output(
+    train_dataloader=DataLoader,
+    test_dataloader=DataLoader,
 ):
     """Download the Fashion MNIST dataset."""
     # Download training data from open datasets.
diff --git a/examples/quickstart/run.py b/examples/quickstart/run.py
index 25d2a0c0743..64a72579b95 100644
--- a/examples/quickstart/run.py
+++ b/examples/quickstart/run.py
@@ -39,6 +39,7 @@
 
 
 def main():
+
     # initialize and run the training pipeline
     training_pipeline_instance = training_pipeline(
         training_data_loader=training_data_loader(),
diff --git a/examples/quickstart/steps/data_loaders.py b/examples/quickstart/steps/data_loaders.py
index 9cb407be907..a329ce8af68 100644
--- a/examples/quickstart/steps/data_loaders.py
+++ b/examples/quickstart/steps/data_loaders.py
@@ -21,13 +21,11 @@
 
 @step
-def training_data_loader() -> (
-    Output(
-        X_train=pd.DataFrame,
-        X_test=pd.DataFrame,
-        y_train=pd.Series,
-        y_test=pd.Series,
-    )
+def training_data_loader() -> Output(
+    X_train=pd.DataFrame,
+    X_test=pd.DataFrame,
+    y_train=pd.Series,
+    y_test=pd.Series,
 ):
     """Load the iris dataset as tuple of Pandas DataFrame / Series."""
     iris = load_iris(as_frame=True)
diff --git a/examples/quickstart_py37/run.py b/examples/quickstart_py37/run.py
index b1b16c98b15..18984a9dd57 100644
--- a/examples/quickstart_py37/run.py
+++ b/examples/quickstart_py37/run.py
@@ -32,6 +32,7 @@
 
 
 def main():
+
     # initialize and run the training pipeline
     training_pipeline_instance = training_pipeline(
         training_data_loader=training_data_loader(),
diff --git a/examples/quickstart_py37/steps/data_loaders.py b/examples/quickstart_py37/steps/data_loaders.py
index 9cb407be907..a329ce8af68 100644
--- a/examples/quickstart_py37/steps/data_loaders.py
+++ b/examples/quickstart_py37/steps/data_loaders.py
@@ -21,13 +21,11 @@
 
 @step
-def training_data_loader() -> (
-    Output(
-        X_train=pd.DataFrame,
-        X_test=pd.DataFrame,
-        y_train=pd.Series,
-        y_test=pd.Series,
-    )
+def training_data_loader() -> Output(
+    X_train=pd.DataFrame,
+    X_test=pd.DataFrame,
+    y_train=pd.Series,
+    y_test=pd.Series,
 ):
     """Load the iris dataset as tuple of Pandas DataFrame / Series."""
     iris = load_iris(as_frame=True)
diff --git a/examples/sagemaker_orchestration/steps/importers.py b/examples/sagemaker_orchestration/steps/importers.py
index 52b56ca1e49..0c4bcfb045e 100644
--- a/examples/sagemaker_orchestration/steps/importers.py
+++ b/examples/sagemaker_orchestration/steps/importers.py
@@ -20,11 +20,9 @@
 
 @step
-def importer_mnist() -> (
-    Output(
-        train_dataloader=DataLoader,
-        test_dataloader=DataLoader,
-    )
+def importer_mnist() -> Output(
+    train_dataloader=DataLoader,
+    test_dataloader=DataLoader,
 ):
     """Download the Fashion MNIST dataset."""
     # Download training data from open datasets.
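Reviewer note: every hunk in the group above reformats the same legacy ZenML pattern, a `@step` whose return annotation is an `Output(...)` call naming multiple artifacts. A minimal sketch of how that pattern behaves end to end, assuming the pre-0.40 `zenml.steps`/`zenml.pipelines` API used throughout this changeset (the step and pipeline names are illustrative, not part of the diff):

```python
import numpy as np

from zenml.pipelines import pipeline
from zenml.steps import Output, step


@step
def splitter() -> Output(train=np.ndarray, test=np.ndarray):
    """The returned tuple is matched positionally to the Output names."""
    data = np.arange(10)
    return data[:8], data[8:]


@step
def consumer(train: np.ndarray, test: np.ndarray) -> None:
    print(train.shape, test.shape)


@pipeline
def demo_pipeline(splitter, consumer):
    train, test = splitter()
    consumer(train, test)


demo_pipeline(splitter=splitter(), consumer=consumer()).run()
```

Since `Output(...)` is a call expression rather than a plain type, the extra parentheses the removed lines wrap it in are purely cosmetic, which is why this whole group of hunks is behavior-neutral.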
diff --git a/examples/scipy/steps/loader/loader_step.py b/examples/scipy/steps/loader/loader_step.py
index 39c3279fcfb..a19dedb18ac 100644
--- a/examples/scipy/steps/loader/loader_step.py
+++ b/examples/scipy/steps/loader/loader_step.py
@@ -28,13 +28,11 @@ def clean_text(text: str):
 
 @step
-def importer() -> (
-    Output(
-        X_train=np.ndarray,
-        X_test=np.ndarray,
-        y_train=np.ndarray,
-        y_test=np.ndarray,
-    )
+def importer() -> Output(
+    X_train=np.ndarray,
+    X_test=np.ndarray,
+    y_train=np.ndarray,
+    y_test=np.ndarray,
 ):
     train = pd.read_csv(TRAIN_PATH)
     test = pd.read_csv(TEST_PATH)
diff --git a/examples/seldon_deployment/pipeline.py b/examples/seldon_deployment/pipeline.py
index a186bfa9143..7719bc468df 100644
--- a/examples/seldon_deployment/pipeline.py
+++ b/examples/seldon_deployment/pipeline.py
@@ -35,13 +35,11 @@
 
 @step
-def importer_mnist() -> (
-    Output(
-        x_train=np.ndarray,
-        y_train=np.ndarray,
-        x_test=np.ndarray,
-        y_test=np.ndarray,
-    )
+def importer_mnist() -> Output(
+    x_train=np.ndarray,
+    y_train=np.ndarray,
+    x_test=np.ndarray,
+    y_test=np.ndarray,
 ):
     """Download the MNIST data store it as an artifact."""
     (x_train, y_train), (
diff --git a/examples/slack_alert/steps/data_loader.py b/examples/slack_alert/steps/data_loader.py
index 04fa1246876..6abaf108e36 100644
--- a/examples/slack_alert/steps/data_loader.py
+++ b/examples/slack_alert/steps/data_loader.py
@@ -20,13 +20,11 @@
 
 @step
-def digits_data_loader() -> (
-    Output(
-        X_train=np.ndarray,
-        X_test=np.ndarray,
-        y_train=np.ndarray,
-        y_test=np.ndarray,
-    )
+def digits_data_loader() -> Output(
+    X_train=np.ndarray,
+    X_test=np.ndarray,
+    y_train=np.ndarray,
+    y_test=np.ndarray,
 ):
     """Loads the digits dataset as a tuple of flattened numpy arrays."""
     digits = load_digits()
diff --git a/examples/step_operator_remote_training/steps/importer/importer_step.py b/examples/step_operator_remote_training/steps/importer/importer_step.py
index 7186edced6f..248cfdc4f76 100644
--- a/examples/step_operator_remote_training/steps/importer/importer_step.py
+++ b/examples/step_operator_remote_training/steps/importer/importer_step.py
@@ -19,13 +19,11 @@
 
 @step
-def digits_data_loader() -> (
-    Output(
-        X_train=np.ndarray,
-        X_test=np.ndarray,
-        y_train=np.ndarray,
-        y_test=np.ndarray,
-    )
+def digits_data_loader() -> Output(
+    X_train=np.ndarray,
+    X_test=np.ndarray,
+    y_train=np.ndarray,
+    y_test=np.ndarray,
 ):
     """Loads the digits dataset as a tuple of flattened numpy arrays."""
     digits = load_digits()
diff --git a/examples/tekton_pipelines_orchestration/steps/importers.py b/examples/tekton_pipelines_orchestration/steps/importers.py
index a39e6fba128..70291fd5f53 100644
--- a/examples/tekton_pipelines_orchestration/steps/importers.py
+++ b/examples/tekton_pipelines_orchestration/steps/importers.py
@@ -20,13 +20,11 @@
 
 @step
-def importer() -> (
-    Output(
-        X_train=np.ndarray,
-        X_test=np.ndarray,
-        y_train=np.ndarray,
-        y_test=np.ndarray,
-    )
+def importer() -> Output(
+    X_train=np.ndarray,
+    X_test=np.ndarray,
+    y_train=np.ndarray,
+    y_test=np.ndarray,
 ):
     """Download the MNIST data store it as an artifact."""
     (X_train, y_train), (
diff --git a/examples/wandb_tracking/run.py b/examples/wandb_tracking/run.py
index d9c08f377da..8a0d6b47975 100644
--- a/examples/wandb_tracking/run.py
+++ b/examples/wandb_tracking/run.py
@@ -21,6 +21,7 @@
 from steps.trainer.trainer_step import TrainerParameters, tf_trainer
 
 if __name__ == "__main__":
+
     # Initialize a pipeline run
     run_1 = wandb_example_pipeline(
         importer=importer_mnist(),
diff --git a/examples/wandb_tracking/steps/importer/importer_step.py b/examples/wandb_tracking/steps/importer/importer_step.py
index 3afde81fbc5..de3fdd4e511 100644
--- a/examples/wandb_tracking/steps/importer/importer_step.py
+++ b/examples/wandb_tracking/steps/importer/importer_step.py
@@ -18,13 +18,11 @@
 
 @step
-def importer_mnist() -> (
-    Output(
-        x_train=np.ndarray,
-        y_train=np.ndarray,
-        x_test=np.ndarray,
-        y_test=np.ndarray,
-    )
+def importer_mnist() -> Output(
+    x_train=np.ndarray,
+    y_train=np.ndarray,
+    x_test=np.ndarray,
+    y_test=np.ndarray,
 ):
     """Download the MNIST data store it as an artifact."""
     (x_train, y_train), (
diff --git a/examples/whylogs_data_profiling/run.py b/examples/whylogs_data_profiling/run.py
index 1dfd6cd02b2..78d0728f574 100644
--- a/examples/whylogs_data_profiling/run.py
+++ b/examples/whylogs_data_profiling/run.py
@@ -54,6 +54,7 @@ def visualize_statistics(
 
 
 if __name__ == "__main__":
+
     pipeline_instance = data_profiling_pipeline(
         data_loader=data_loader(),
         data_splitter=data_splitter(),
diff --git a/examples/whylogs_data_profiling/steps/loader/loader_step.py b/examples/whylogs_data_profiling/steps/loader/loader_step.py
index c17106766ce..45575c7710c 100644
--- a/examples/whylogs_data_profiling/steps/loader/loader_step.py
+++ b/examples/whylogs_data_profiling/steps/loader/loader_step.py
@@ -29,11 +29,9 @@
         )
     }
 )
-def data_loader() -> (
-    Output(
-        data=pd.DataFrame,
-        profile=DatasetProfileView,
-    )
+def data_loader() -> Output(
+    data=pd.DataFrame,
+    profile=DatasetProfileView,
 ):
     """Load the diabetes dataset."""
     X, y = datasets.load_diabetes(return_X_y=True, as_frame=True)
diff --git a/examples/xgboost/run.py b/examples/xgboost/run.py
index dc834c535a4..38b2adbd8ac 100644
--- a/examples/xgboost/run.py
+++ b/examples/xgboost/run.py
@@ -14,6 +14,7 @@
 from steps import data_loader, predictor, trainer
 
 if __name__ == "__main__":
+
     pipeline = xgboost_pipeline(
         data_loader=data_loader(), trainer=trainer(), predictor=predictor()
     )
diff --git a/scripts/verify_flavor_url_valid.py b/scripts/verify_flavor_url_valid.py
index 3cfe3c79596..1f9b167f599 100644
--- a/scripts/verify_flavor_url_valid.py
+++ b/scripts/verify_flavor_url_valid.py
@@ -103,6 +103,7 @@ def docs(root_domain: str, ignore_passing: bool) -> None:
 
             Console().print(text, style=style)
         elif not ignore_passing:
+
             style = "green"
 
             text = (
@@ -125,6 +126,7 @@ def docs(root_domain: str, ignore_passing: bool) -> None:
 
             Console().print(text, style=style)
         elif not ignore_passing:
+
             style = "green"
 
             text = (
@@ -174,6 +176,7 @@ def logos(ignore_passing: bool) -> None:
             _verify_image_not_corrupt(r, flavor)
 
         if not ignore_passing:
+
             style = "green"
 
             text = (
diff --git a/src/zenml/cli/base.py b/src/zenml/cli/base.py
index c06b0d95ec8..e30018dd68b 100644
--- a/src/zenml/cli/base.py
+++ b/src/zenml/cli/base.py
@@ -141,6 +141,7 @@ def init(
         console.print(zenml_cli_privacy_message, width=80)
 
     if not starter:
+
         from rich.markdown import Markdown
 
         prompt_message = Markdown(
diff --git a/src/zenml/cli/cli.py b/src/zenml/cli/cli.py
index 404b311b06f..59b89bc2103 100644
--- a/src/zenml/cli/cli.py
+++ b/src/zenml/cli/cli.py
@@ -132,7 +132,7 @@ def format_commands(
             )
         )
         rows: List[Tuple[str, str, str]] = []
-        for tag, subcommand, cmd in commands:
+        for (tag, subcommand, cmd) in commands:
             help_ = cmd.get_short_help_str(limit=formatter.width)
             rows.append((tag.value, subcommand, help_))
         if rows:
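Reviewer note: the `cli.py` hunk above (and the `formatter.py` one that follows) only adds parentheses around the loop targets. A quick, self-contained way to confirm the two spellings are equivalent is to compare their bytecode:

```python
import dis


def unparenthesized(rows):
    for tag, subcommand, cmd in rows:
        pass


def parenthesized(rows):
    for (tag, subcommand, cmd) in rows:
        pass


# Apart from the function names, the two disassemblies match line for line.
dis.dis(unparenthesized)
dis.dis(parenthesized)
```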
diff --git a/src/zenml/cli/formatter.py b/src/zenml/cli/formatter.py
index 2c536237437..a272ca73ec3 100644
--- a/src/zenml/cli/formatter.py
+++ b/src/zenml/cli/formatter.py
@@ -132,11 +132,12 @@ def write_dl(
                 self.write("\n")
 
         elif len(widths) == 3:
+
             first_col = min(widths[0], col_max) + col_spacing
             second_col = min(widths[1], col_max) + col_spacing * 2
             current_tag = None
 
-            for first, second, third in iter_rows(rows, len(widths)):
+            for (first, second, third) in iter_rows(rows, len(widths)):
                 if current_tag != first:
                     current_tag = first
                     self.write("\n")
diff --git a/src/zenml/cli/hub.py b/src/zenml/cli/hub.py
index 58703c7c3dd..7caaa9b9d72 100644
--- a/src/zenml/cli/hub.py
+++ b/src/zenml/cli/hub.py
@@ -768,6 +768,7 @@ def _validate_repository(
     from zenml.utils.git_utils import clone_git_repository
 
     while True:
+
         # Make sure the repository URL is provided.
         if not url:
             if not interactive:
diff --git a/src/zenml/cli/pipeline.py b/src/zenml/cli/pipeline.py
index 1702df67593..9bffdc368a6 100644
--- a/src/zenml/cli/pipeline.py
+++ b/src/zenml/cli/pipeline.py
@@ -294,6 +294,7 @@ def list_pipelines(**kwargs: Any) -> None:
     cli_utils.print_active_config()
     client = Client()
     with console.status("Listing pipelines...\n"):
+
         pipelines = client.list_pipelines(**kwargs)
 
         if not pipelines.items:
diff --git a/src/zenml/cli/role.py b/src/zenml/cli/role.py
index e9101e597c0..f7bb43412a7 100644
--- a/src/zenml/cli/role.py
+++ b/src/zenml/cli/role.py
@@ -43,6 +43,7 @@ def list_roles(**kwargs: Any) -> None:
     cli_utils.print_active_config()
     client = Client()
     with console.status("Listing roles...\n"):
+
         roles = client.list_roles(**kwargs)
         if not roles.items:
             cli_utils.declare("No roles found for the given filters.")
@@ -298,6 +299,7 @@ def list_role_assignments(**kwargs: Any) -> None:
     cli_utils.print_active_config()
     client = Client()
     with console.status("Listing roles...\n"):
+
         role_assignments = client.list_user_role_assignment(**kwargs)
         if not role_assignments.items:
             cli_utils.declare(
diff --git a/src/zenml/cli/secret.py b/src/zenml/cli/secret.py
index 8d037944958..f0b6e465aa8 100644
--- a/src/zenml/cli/secret.py
+++ b/src/zenml/cli/secret.py
@@ -210,6 +210,7 @@ def register_secret(
     secret_contents = {"name": name}
 
     if interactive:
+
         if parsed_args:
             error(
                 "Cannot pass secret fields as arguments when using "
@@ -428,6 +429,7 @@ def update_secret(
             updated_contents[key] = value
 
     else:
+
         if not parsed_args:
             error(
                 "Secret fields must be passed as arguments when not using "
@@ -647,6 +649,7 @@ def migrate_secrets(
     migrated_secrets_count = 0
 
     for secret_name in secret_names:
+
         migrated_secret_name = secret_name
 
         try:
@@ -697,6 +700,7 @@ def migrate_secrets(
                 pass
 
         if not skip_migration:
+
             if prompt_migrate:
                 choice = click.prompt(
                     "Would you like to migrate this secret ?",
@@ -764,6 +768,7 @@ def migrate_secrets(
         declare(f"Secret `{secret_name}` migrated successfully.")
 
         if delete:
+
             if prompt_delete:
                 choice = click.prompt(
                     "Would you like to delete the secret ?",
diff --git a/src/zenml/cli/server.py b/src/zenml/cli/server.py
index 93353aca264..75e7957d7e8 100644
--- a/src/zenml/cli/server.py
+++ b/src/zenml/cli/server.py
@@ -153,6 +153,7 @@ def up(
     with event_handler(
         AnalyticsEvent.ZENML_SERVER_STARTED
     ) as analytics_handler:
+
         from zenml.zen_server.deploy.deployer import ServerDeployer
         from zenml.zen_stores.sql_zen_store import SQLDatabaseDriver
 
diff --git a/src/zenml/cli/stack.py b/src/zenml/cli/stack.py
index 90d61b46445..39863c9caa4 100644
--- a/src/zenml/cli/stack.py
+++ b/src/zenml/cli/stack.py
@@ -421,6 +421,7 @@ def update_stack(
     client = Client()
 
     with console.status("Updating stack...\n"):
+
         updates: Dict[StackComponentType, List[Union[str, UUID]]] = dict()
         if artifact_store:
             updates[StackComponentType.ARTIFACT_STORE] = [artifact_store]
@@ -814,6 +815,7 @@ def delete_stack(
     client = Client()
 
     if recursive and recursive_confirmation:
+
         client.delete_stack(stack_name_or_id, recursive=True)
         return
 
diff --git a/src/zenml/cli/user_management.py b/src/zenml/cli/user_management.py
index a5f29f73106..66a0154b945 100644
--- a/src/zenml/cli/user_management.py
+++ b/src/zenml/cli/user_management.py
@@ -143,6 +143,7 @@ def create_user(
     client = Client()
     if not password:
         if client.zen_store.type != StoreType.REST:
+
             password = click.prompt(
                 f"Password for user {user_name}",
                 hide_input=True,
@@ -263,6 +264,7 @@ def list_teams(**kwargs: Any) -> None:
 
     client = Client()
     with console.status("Listing teams...\n"):
+
         teams = client.list_teams(**kwargs)
 
         if not teams:
diff --git a/src/zenml/cli/utils.py b/src/zenml/cli/utils.py
index 1d846bb76e1..5bcae92eb79 100644
--- a/src/zenml/cli/utils.py
+++ b/src/zenml/cli/utils.py
@@ -1028,6 +1028,7 @@ def describe_pydantic_object(schema_json: Dict[str, Any]) -> None:
     if properties:
         warning("Properties", bold=True)
         for prop, prop_schema in properties.items():
+
             if "$ref" not in prop_schema.keys():
                 warning(
                     f"{prop}, {prop_schema['type']}"
@@ -1258,6 +1259,7 @@ def print_pipeline_runs_table(
     """
     runs_dicts = []
     for pipeline_run in pipeline_runs:
+
         if pipeline_run.user:
             user_name = pipeline_run.user.name
         else:
@@ -1430,6 +1432,7 @@ def list_options(filter_model: Type[BaseFilterModel]) -> Callable[[F], F]:
     """
 
     def inner_decorator(func: F) -> F:
+
         options = []
         data_type_descriptors = set()
         for k, v in filter_model.__fields__.items():
diff --git a/src/zenml/cli/workspace.py b/src/zenml/cli/workspace.py
index 20c925b449a..e78080837bd 100644
--- a/src/zenml/cli/workspace.py
+++ b/src/zenml/cli/workspace.py
@@ -47,6 +47,7 @@ def list_workspaces(**kwargs: Any) -> None:
     cli_utils.print_active_config()
     client = Client()
    with console.status("Listing workspaces...\n"):
+
         workspaces = client.list_workspaces(**kwargs)
         if workspaces:
             active_workspace = Client().active_workspace
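Reviewer note: most of the CLI hunks in this group insert a blank line directly under a `with console.status(...)` block. For context, the pattern itself in isolation (rich is a direct ZenML dependency; the sleep is a stand-in for the real work):

```python
import time

from rich.console import Console

console = Console()

with console.status("Listing pipelines...\n"):
    # Anything run here executes while the spinner is displayed.
    time.sleep(1)
```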
diff --git a/src/zenml/client.py b/src/zenml/client.py
index 121cf16e39e..b2f0ef5a564 100644
--- a/src/zenml/client.py
+++ b/src/zenml/client.py
@@ -1598,6 +1598,7 @@ def create_stack(
         stack_components = dict()
 
         for c_type, c_identifier in components.items():
+
             # Skip non-existent components.
             if not c_identifier:
                 continue
@@ -3421,6 +3422,7 @@ def create_run_metadata(
         created_metadata: Dict[str, RunMetadataResponseModel] = {}
 
         for key, value in metadata.items():
+
             # Skip metadata that is too large to be stored in the database.
             if len(json.dumps(value)) > TEXT_FIELD_MAX_LENGTH:
                 logger.warning(
@@ -3637,6 +3639,7 @@ def get_secret(
         )
 
         for search_scope in search_scopes:
+
             partial_matches: List[SecretResponseModel] = []
             for secret in secrets.items:
                 if secret.scope != search_scope:
diff --git a/src/zenml/config/global_config.py b/src/zenml/config/global_config.py
index db37b51d24f..934ccaa32f4 100644
--- a/src/zenml/config/global_config.py
+++ b/src/zenml/config/global_config.py
@@ -592,6 +592,7 @@ def get_default_store(self) -> StoreConfiguration:
         )
 
         if len(env_secrets_store_config):
+
             if "type" not in env_secrets_store_config:
                 env_secrets_store_config["type"] = config.type.value
 
diff --git a/src/zenml/entrypoints/base_entrypoint_configuration.py b/src/zenml/entrypoints/base_entrypoint_configuration.py
index 270a07fe7cc..a11a7ebb359 100644
--- a/src/zenml/entrypoints/base_entrypoint_configuration.py
+++ b/src/zenml/entrypoints/base_entrypoint_configuration.py
@@ -157,7 +157,6 @@ def _parse_arguments(cls, arguments: List[str]) -> Dict[str, Any]:
         Raises:
             ValueError: If the arguments are not valid.
         """
-
         # Argument parser subclass that suppresses some argparse logs and
         # raises an exception instead of the `sys.exit()` call
         class _CustomParser(argparse.ArgumentParser):
diff --git a/src/zenml/hooks/hook_validators.py b/src/zenml/hooks/hook_validators.py
index 6aa76ce7204..4080da4fcec 100644
--- a/src/zenml/hooks/hook_validators.py
+++ b/src/zenml/hooks/hook_validators.py
@@ -21,6 +21,7 @@
 from zenml.utils import source_utils
 
 if TYPE_CHECKING:
+
     HookSpecification = Union[str, Source, FunctionType]
 
diff --git a/src/zenml/integrations/aws/step_operators/sagemaker_step_operator.py b/src/zenml/integrations/aws/step_operators/sagemaker_step_operator.py
index 3e3355184b2..dd273f998f4 100644
--- a/src/zenml/integrations/aws/step_operators/sagemaker_step_operator.py
+++ b/src/zenml/integrations/aws/step_operators/sagemaker_step_operator.py
@@ -188,9 +188,7 @@ def launch(
         unique_training_job_name = f"{training_job_name}-{suffix}"
 
         # Sagemaker doesn't allow any underscores in job/experiment/trial names
-        sanitized_training_job_name = unique_training_job_name.replace(
-            "_", "-"
-        )
+        sanitized_training_job_name = unique_training_job_name.replace("_", "-")
 
         # Construct training input object, if necessary
         inputs = None
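Reviewer note: the SageMaker hunk above only re-wraps the `replace("_", "-")` call onto one line. For context, a hypothetical stricter sanitizer (not part of this changeset) would also enforce SageMaker's documented training-job-name constraint of alphanumerics plus hyphens, at most 63 characters:

```python
import re


def sanitize_job_name(name: str) -> str:
    """Replace disallowed characters and truncate to SageMaker's limit."""
    return re.sub(r"[^a-zA-Z0-9-]", "-", name)[:63]
```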
diff --git a/src/zenml/integrations/azure/secrets_managers/azure_secrets_manager.py b/src/zenml/integrations/azure/secrets_managers/azure_secrets_manager.py
index 1f48e531347..f77edd01cb3 100644
--- a/src/zenml/integrations/azure/secrets_managers/azure_secrets_manager.py
+++ b/src/zenml/integrations/azure/secrets_managers/azure_secrets_manager.py
@@ -299,6 +299,7 @@ def delete_all_secrets(self) -> None:
 
         # List all secrets.
         for secret_property in self.CLIENT.list_properties_of_secrets():
+
             tags = secret_property.tags
             if not tags:
                 continue
diff --git a/src/zenml/integrations/bentoml/services/bentoml_deployment.py b/src/zenml/integrations/bentoml/services/bentoml_deployment.py
index 37944200d5b..3a67cdcb6a6 100644
--- a/src/zenml/integrations/bentoml/services/bentoml_deployment.py
+++ b/src/zenml/integrations/bentoml/services/bentoml_deployment.py
@@ -38,6 +38,7 @@
 from zenml.services.service import BaseDeploymentService
 
 if TYPE_CHECKING:
+
     from zenml.integrations.bentoml.model_deployers.bentoml_model_deployer import (  # noqa
         BentoMLModelDeployer,
     )
@@ -170,6 +171,7 @@ def __init__(
             isinstance(config, BentoMLDeploymentConfig)
             and "endpoint" not in attrs
         ):
+
             endpoint = BentoMLDeploymentEndpoint(
                 config=BentoMLDeploymentEndpointConfig(
                     protocol=ServiceEndpointProtocol.HTTP,
diff --git a/src/zenml/integrations/evidently/metrics.py b/src/zenml/integrations/evidently/metrics.py
index 82734b13fdc..6b43688ea9c 100644
--- a/src/zenml/integrations/evidently/metrics.py
+++ b/src/zenml/integrations/evidently/metrics.py
@@ -336,6 +336,7 @@ def to_evidently_metric(
         metric_class = self.get_metric_class(self.class_path)
 
         if self.is_generator:
+
             try:
                 return generate_column_metrics(
                     metric_class=metric_class,
diff --git a/src/zenml/integrations/evidently/tests.py b/src/zenml/integrations/evidently/tests.py
index f396cc8869e..b0aadb1e34f 100644
--- a/src/zenml/integrations/evidently/tests.py
+++ b/src/zenml/integrations/evidently/tests.py
@@ -321,6 +321,7 @@ def to_evidently_test(self) -> Union[Test, TestPreset, BaseGenerator]:
         test_class = self.get_test_class(self.class_path)
 
         if self.is_generator:
+
             try:
                 return generate_column_tests(
                     test_class=test_class,
diff --git a/src/zenml/integrations/great_expectations/data_validators/ge_data_validator.py b/src/zenml/integrations/great_expectations/data_validators/ge_data_validator.py
index e21a6f3917d..da6a3811252 100644
--- a/src/zenml/integrations/great_expectations/data_validators/ge_data_validator.py
+++ b/src/zenml/integrations/great_expectations/data_validators/ge_data_validator.py
@@ -291,6 +291,7 @@ def data_context(self) -> BaseDataContext:
                 ] = site_config
 
             if self.config.configure_local_docs:
+
                 client = Client()
                 artifact_store = client.active_stack.artifact_store
                 if artifact_store.flavor != "local":
diff --git a/src/zenml/integrations/kserve/model_deployers/kserve_model_deployer.py b/src/zenml/integrations/kserve/model_deployers/kserve_model_deployer.py
index d14fd6965ad..2ec75aae6ad 100644
--- a/src/zenml/integrations/kserve/model_deployers/kserve_model_deployer.py
+++ b/src/zenml/integrations/kserve/model_deployers/kserve_model_deployer.py
@@ -517,6 +517,7 @@ def _create_or_update_kserve_credentials(
         secret_name = config.secret_name or self.config.secret
 
         if secret_name:
+
             if config.secret_name:
                 secret_source = "model deployment"
             else:
@@ -546,12 +547,14 @@ def _create_or_update_kserve_credentials(
             credentials = zenml_secret.secret_values
 
         else:
+
             # if no secret is configured, try to fetch credentials from the
             # active artifact store and convert them into the appropriate format
             # expected by KServe
             converted_secret = self._convert_artifact_store_secret()
 
             if not converted_secret:
+
                 # If a secret and service account were previously configured, we
                 # need to delete them before we can proceed
                 if config.k8s_service_account:
@@ -631,6 +634,7 @@ def _convert_artifact_store_secret(self) -> Optional[BaseSecretSchema]:
             ) = artifact_store.get_credentials()
 
             if aws_access_key_id and aws_secret_access_key:
+
                 # Convert the credentials into the format expected by KServe
                 zenml_secret = KServeS3SecretSchema(
                     name="",
@@ -672,6 +676,7 @@ def _convert_artifact_store_secret(self) -> Optional[BaseSecretSchema]:
             gcp_credentials = artifact_store.get_credentials()
 
             if gcp_credentials:
+
                 # Convert the credentials into the format expected by KServe
                 return KServeGSSecretSchema(
                     name="",
@@ -710,6 +715,7 @@ def _convert_artifact_store_secret(self) -> Optional[BaseSecretSchema]:
                     azure_tenant_id=azure_credentials.tenant_id,
                 )
             else:
+
                 logger.warning(
                     "The KServe model deployer could not use the "
                     "credentials currently configured in the active Azure "
diff --git a/src/zenml/integrations/kserve/services/kserve_deployment.py b/src/zenml/integrations/kserve/services/kserve_deployment.py
index 507e6390168..61430d756cb 100644
--- a/src/zenml/integrations/kserve/services/kserve_deployment.py
+++ b/src/zenml/integrations/kserve/services/kserve_deployment.py
@@ -42,6 +42,7 @@
 from zenml.services.service import BaseDeploymentService
 
 if TYPE_CHECKING:
+
     from zenml.integrations.kserve.model_deployers.kserve_model_deployer import (  # noqa
         KServeModelDeployer,
     )
diff --git a/src/zenml/integrations/kubeflow/orchestrators/kubeflow_orchestrator.py b/src/zenml/integrations/kubeflow/orchestrators/kubeflow_orchestrator.py
index 1226f9b10d4..94284a746c9 100644
--- a/src/zenml/integrations/kubeflow/orchestrators/kubeflow_orchestrator.py
+++ b/src/zenml/integrations/kubeflow/orchestrators/kubeflow_orchestrator.py
@@ -140,6 +140,7 @@ def validator(self) -> Optional[StackValidator]:
         """
 
         def _validate_local_requirements(stack: "Stack") -> Tuple[bool, str]:
+
             container_registry = stack.container_registry
 
             # should not happen, because the stack validation takes care of
@@ -190,6 +191,7 @@ def _validate_local_requirements(stack: "Stack") -> Tuple[bool, str]:
                 not self.config.skip_local_validations
                 and not self.config.is_local
             ):
+
                 # if the orchestrator is not running in a local k3d cluster,
                 # we cannot have any other local components in our stack,
                 # because we cannot mount the local path into the container.
diff --git a/src/zenml/integrations/kubernetes/orchestrators/kubernetes_orchestrator.py b/src/zenml/integrations/kubernetes/orchestrators/kubernetes_orchestrator.py
index c97365a35ae..7a89ab56359 100644
--- a/src/zenml/integrations/kubernetes/orchestrators/kubernetes_orchestrator.py
+++ b/src/zenml/integrations/kubernetes/orchestrators/kubernetes_orchestrator.py
@@ -197,6 +197,7 @@ def _validate_local_requirements(stack: "Stack") -> Tuple[bool, str]:
                 not self.config.skip_local_validations
                 and not self.config.is_local
             ):
+
                 # if the orchestrator is not running in a local k3d cluster,
                 # we cannot have any other local components in our stack,
                 # because we cannot mount the local path into the container.
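Reviewer note: the Kubeflow, Kubernetes, and Tekton orchestrator hunks all touch the same local-components validation. A stripped-down sketch of that validator pattern, assuming the `StackValidator` API of this ZenML version; the check body here is deliberately simplified from the real ones:

```python
from typing import Tuple

from zenml.stack import Stack, StackValidator


def _validate_local_requirements(stack: "Stack") -> Tuple[bool, str]:
    # A remote orchestrator cannot mount local paths into its containers,
    # so reject any component that only lives on the local filesystem.
    for component in stack.components.values():
        if component.local_path is not None:
            return False, f"Component '{component.name}' is local-only."
    return True, ""


validator = StackValidator(
    custom_validation_function=_validate_local_requirements
)
```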
diff --git a/src/zenml/integrations/label_studio/annotators/label_studio_annotator.py b/src/zenml/integrations/label_studio/annotators/label_studio_annotator.py
index 95e0480909d..c707a530f76 100644
--- a/src/zenml/integrations/label_studio/annotators/label_studio_annotator.py
+++ b/src/zenml/integrations/label_studio/annotators/label_studio_annotator.py
@@ -538,6 +538,7 @@ def populate_artifact_store_parameters(
         gcp_credentials = artifact_store.get_credentials()
 
         if gcp_credentials:
+
             # Save the credentials to a file in secure location, because
             # Label Studio will need to read it from a file
             secret_folder = Path(
@@ -600,6 +601,7 @@ def populate_artifact_store_parameters(
             azure_credentials.account_name is not None
             and azure_credentials.account_key is not None
         ):
+
             params.azure_account_name = azure_credentials.account_name
             params.azure_account_key = azure_credentials.account_key
 
@@ -622,6 +624,7 @@ def populate_artifact_store_parameters(
         )
 
     elif artifact_store.flavor == "local":
+
         from zenml.artifact_stores.local_artifact_store import (
             LocalArtifactStore,
         )
diff --git a/src/zenml/integrations/seldon/model_deployers/seldon_model_deployer.py b/src/zenml/integrations/seldon/model_deployers/seldon_model_deployer.py
index bd843b58e21..17a36d12a94 100644
--- a/src/zenml/integrations/seldon/model_deployers/seldon_model_deployer.py
+++ b/src/zenml/integrations/seldon/model_deployers/seldon_model_deployer.py
@@ -191,6 +191,7 @@ def _create_or_update_kubernetes_secret(self) -> Optional[str]:
         # if a Kubernetes secret was explicitly configured in the model
         # deployer, use that instead of creating a new one
         if self.config.kubernetes_secret_name:
+
             logger.warning(
                 "Your Seldon Core model deployer is configured to use a "
                 "pre-existing Kubernetes secret that holds credentials needed "
@@ -209,6 +210,7 @@ def _create_or_update_kubernetes_secret(self) -> Optional[str]:
         # if a ZenML secret reference was configured in the model deployer,
         # create a Kubernetes secret from that
         if self.config.secret:
+
             logger.warning(
                 "Your Seldon Core model deployer is configured to use a "
                 "ZenML secret that holds credentials needed to access the "
@@ -235,6 +237,7 @@ def _create_or_update_kubernetes_secret(self) -> Optional[str]:
             )
 
         else:
+
             # if no ZenML secret was configured, try to convert the credentials
             # configured for the artifact store, if any are included, into
             # the format expected by Seldon Core
@@ -312,6 +315,7 @@ def _convert_artifact_store_secret(self) -> BaseSecretSchema:
             gcp_credentials = artifact_store.get_credentials()
 
             if gcp_credentials:
+
                 # Convert the credentials into the format expected by Seldon
                 # Core
                 if gcp_credentials.get("type") == "service_account":
@@ -722,6 +726,7 @@ def delete_model_server(
         service.stop(timeout=timeout, force=force)
 
         if service.config.secret_name:
+
             # delete the Kubernetes secret used to store the authentication
             # information for the Seldon Core model server storage initializer
             # if no other Seldon Core model servers are using it
diff --git a/src/zenml/integrations/seldon/seldon_client.py b/src/zenml/integrations/seldon/seldon_client.py
index bd963d179af..4fff12483c3 100644
--- a/src/zenml/integrations/seldon/seldon_client.py
+++ b/src/zenml/integrations/seldon/seldon_client.py
@@ -880,6 +880,7 @@ def find_deployments(
             ",".join(f"{k}={v}" for k, v in labels.items()) if labels else None
         )
         try:
+
             logger.debug(
                 f"Searching SeldonDeployment resources with label selector "
                 f"'{labels or ''}' and field selector '{fields or ''}'"
diff --git a/src/zenml/integrations/slack/alerters/slack_alerter.py b/src/zenml/integrations/slack/alerters/slack_alerter.py
index a46285385cf..ad31af1ee5c 100644
--- a/src/zenml/integrations/slack/alerters/slack_alerter.py
+++ b/src/zenml/integrations/slack/alerters/slack_alerter.py
@@ -259,6 +259,7 @@ def handle(**payload: Any) -> None:
             """
             event = payload["data"]
             if event["channel"] == slack_channel_id:
+
                 # approve request (return True)
                 if event["text"] in self._get_approve_msg_options(params):
                     print(f"User {event['user']} approved on slack.")
diff --git a/src/zenml/integrations/tekton/orchestrators/tekton_orchestrator.py b/src/zenml/integrations/tekton/orchestrators/tekton_orchestrator.py
index a311eaeb75a..a18317292cd 100644
--- a/src/zenml/integrations/tekton/orchestrators/tekton_orchestrator.py
+++ b/src/zenml/integrations/tekton/orchestrators/tekton_orchestrator.py
@@ -143,6 +143,7 @@ def _validate(stack: "Stack") -> Tuple[bool, str]:
                 not self.config.skip_local_validations
                 and not self.config.is_local
             ):
+
                 # if the orchestrator is not running in a local k3d cluster,
                 # we cannot have any other local components in our stack,
                 # because we cannot mount the local path into the container.
diff --git a/src/zenml/integrations/tensorboard/visualizers/tensorboard_visualizer.py b/src/zenml/integrations/tensorboard/visualizers/tensorboard_visualizer.py
index df4697383ea..8663bbf6d3c 100644
--- a/src/zenml/integrations/tensorboard/visualizers/tensorboard_visualizer.py
+++ b/src/zenml/integrations/tensorboard/visualizers/tensorboard_visualizer.py
@@ -132,6 +132,7 @@ def visualize_tensorboard(
         height: Height of the generated visualization.
     """
     if Environment.in_notebook():
+
         notebook.display(port, height=height)
         return
 
diff --git a/src/zenml/materializers/pandas_materializer.py b/src/zenml/materializers/pandas_materializer.py
index 59c1c765fac..67e57d9f5ff 100644
--- a/src/zenml/materializers/pandas_materializer.py
+++ b/src/zenml/materializers/pandas_materializer.py
@@ -123,6 +123,7 @@ def save(self, df: Union[pd.DataFrame, pd.Series]) -> None:
         super().save(df)
 
         if isinstance(df, pd.Series):
+
             df = df.to_frame(name="series")
 
         if self.pyarrow_exists:
diff --git a/src/zenml/models/base_models.py b/src/zenml/models/base_models.py
index 02211d8ef44..2fa9d27be3b 100644
--- a/src/zenml/models/base_models.py
+++ b/src/zenml/models/base_models.py
@@ -24,7 +24,6 @@
 from zenml.models.user_models import UserResponseModel
 from zenml.models.workspace_models import WorkspaceResponseModel
 
-
 # ------------#
 # BASE MODELS #
 # ------------#
diff --git a/src/zenml/models/filter_models.py b/src/zenml/models/filter_models.py
index 5538bfcbb63..e8e6d454811 100644
--- a/src/zenml/models/filter_models.py
+++ b/src/zenml/models/filter_models.py
@@ -404,6 +404,7 @@ def _generate_filter_list(cls, values: Dict[str, Any]) -> List[Filter]:
         list_of_filters: List[Filter] = []
 
         for key, value in values.items():
+
             # Ignore excluded filters
             if key in cls.FILTER_EXCLUDE_FIELDS:
                 continue
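Reviewer note: the `pandas_materializer.py` hunk sits in the save path where a `Series` is boxed into a one-column `DataFrame` before writing. The round trip that convention relies on, in isolation:

```python
import pandas as pd

series = pd.Series([1, 2, 3])

# Save path: box the Series into a DataFrame under a fixed column name.
frame = series.to_frame(name="series")

# Load path: the column can be pulled back out with its values intact.
restored = frame["series"]
assert restored.equals(series)
```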
diff --git a/src/zenml/utils/daemon.py b/src/zenml/utils/daemon.py
index d7eff1452e4..3221383df2e 100644
--- a/src/zenml/utils/daemon.py
+++ b/src/zenml/utils/daemon.py
@@ -115,6 +115,7 @@ def daemon(*args: Any, **kwargs: Any) -> None:
             "Daemon functionality is currently not supported on Windows."
         )
     else:
+
         CHILD_PROCESS_WAIT_TIMEOUT = 5
 
         def terminate_children() -> None:
diff --git a/src/zenml/utils/networking_utils.py b/src/zenml/utils/networking_utils.py
index b1121dbd9ba..65ffdaa6f74 100644
--- a/src/zenml/utils/networking_utils.py
+++ b/src/zenml/utils/networking_utils.py
@@ -136,6 +136,7 @@ def replace_localhost_with_internal_hostname(url: str) -> str:
     parsed_url = urlparse(url)
 
     if parsed_url.hostname in ("localhost", "127.0.0.1"):
+
         for internal_hostname in (
             "host.docker.internal",
             "host.k3d.internal",
@@ -183,6 +184,7 @@ def replace_internal_hostname_with_localhost(hostname: str) -> str:
         return hostname
 
     if Environment.in_container():
+
         # Try to resolve one of the special hostnames to see if it is available
         # inside the container and use that if it is.
         for internal_hostname in (
diff --git a/src/zenml/zen_server/deploy/base_provider.py b/src/zenml/zen_server/deploy/base_provider.py
index b5a51327fd5..98e2144123b 100644
--- a/src/zenml/zen_server/deploy/base_provider.py
+++ b/src/zenml/zen_server/deploy/base_provider.py
@@ -273,6 +273,7 @@ def _get_deployment_status(
         gc = GlobalConfiguration()
         url: Optional[str] = None
         if service.is_running:
+
            # all services must have an endpoint
            assert service.endpoint is not None
 
diff --git a/src/zenml/zen_stores/migrations/versions/26b776ad583e_redesign_artifacts.py b/src/zenml/zen_stores/migrations/versions/26b776ad583e_redesign_artifacts.py
index 0f0ad9bcd9d..ff14da2911c 100644
--- a/src/zenml/zen_stores/migrations/versions/26b776ad583e_redesign_artifacts.py
+++ b/src/zenml/zen_stores/migrations/versions/26b776ad583e_redesign_artifacts.py
@@ -150,6 +150,7 @@ def _find_produced_artifact(cached_artifact: "Row") -> "Row":
     # Adjust columns
     # --------------
     with op.batch_alter_table("artifacts", schema=None) as batch_op:
+
         # Add artifact store link column
         batch_op.add_column(
             sa.Column(
diff --git a/src/zenml/zen_stores/migrations/versions/3944116bbd56_rename_project_to_workspace.py b/src/zenml/zen_stores/migrations/versions/3944116bbd56_rename_project_to_workspace.py
index 7d66f129837..3c00d01cbb5 100644
--- a/src/zenml/zen_stores/migrations/versions/3944116bbd56_rename_project_to_workspace.py
+++ b/src/zenml/zen_stores/migrations/versions/3944116bbd56_rename_project_to_workspace.py
@@ -31,6 +31,7 @@ def _fk_constraint_name(table: str, column: str) -> str:
 
 
 def _get_changed_tables() -> Set[str]:
+
     return {
         "artifact",
         "flavor",
diff --git a/src/zenml/zen_stores/migrations/versions/5330ba58bf20_rename_tables_and_foreign_keys.py b/src/zenml/zen_stores/migrations/versions/5330ba58bf20_rename_tables_and_foreign_keys.py
index 84ce0ad8d54..57a0d4a08e4 100644
--- a/src/zenml/zen_stores/migrations/versions/5330ba58bf20_rename_tables_and_foreign_keys.py
+++ b/src/zenml/zen_stores/migrations/versions/5330ba58bf20_rename_tables_and_foreign_keys.py
@@ -86,15 +86,13 @@ def _create_fk_constraint(
     )
 
 
-def _get_changes() -> (
-    Tuple[
-        List[str],
-        List[str],
-        List[str],
-        List[Tuple[str, str, str, str, str]],
-        List[Tuple[str, str, str, str, str]],
-    ]
-):
+def _get_changes() -> Tuple[
+    List[str],
+    List[str],
+    List[str],
+    List[Tuple[str, str, str, str, str]],
+    List[Tuple[str, str, str, str, str]],
+]:
     """Define the data that should be changed in the schema.
 
     Returns:
diff --git a/src/zenml/zen_stores/migrations/versions/bea8a6ce3015_port_flavors_into_database.py b/src/zenml/zen_stores/migrations/versions/bea8a6ce3015_port_flavors_into_database.py
index 97c5a1319a5..6dfa9dbf200 100644
--- a/src/zenml/zen_stores/migrations/versions/bea8a6ce3015_port_flavors_into_database.py
+++ b/src/zenml/zen_stores/migrations/versions/bea8a6ce3015_port_flavors_into_database.py
@@ -20,6 +20,7 @@
 def upgrade() -> None:
     """Upgrade database schema and/or data, creating a new revision."""
     with op.batch_alter_table("flavor", schema=None) as batch_op:
+
         batch_op.add_column(
             sa.Column(
                 "logo_url", sqlmodel.sql.sqltypes.AutoString(), nullable=True
@@ -46,6 +47,7 @@ def upgrade() -> None:
 def downgrade() -> None:
     """Downgrade database schema and/or data back to the previous revision."""
     with op.batch_alter_table("flavor", schema=None) as batch_op:
+
         batch_op.drop_column("logo_url")
         batch_op.drop_column("docs_url")
         batch_op.drop_column("is_custom")
diff --git a/src/zenml/zen_stores/secrets_stores/aws_secrets_store.py b/src/zenml/zen_stores/secrets_stores/aws_secrets_store.py
index 897023a0c0f..05202c920fb 100644
--- a/src/zenml/zen_stores/secrets_stores/aws_secrets_store.py
+++ b/src/zenml/zen_stores/secrets_stores/aws_secrets_store.py
@@ -167,6 +167,7 @@ def client(self) -> Any:
             The AWS Secrets Manager client.
         """
         if self._client is None:
+
             # Initialize the AWS Secrets Manager client with the
             # credentials from the configuration, if provided.
             self._client = boto3.client(
diff --git a/src/zenml/zen_stores/secrets_stores/azure_secrets_store.py b/src/zenml/zen_stores/secrets_stores/azure_secrets_store.py
index 46bab91693f..9dc761d179c 100644
--- a/src/zenml/zen_stores/secrets_stores/azure_secrets_store.py
+++ b/src/zenml/zen_stores/secrets_stores/azure_secrets_store.py
@@ -154,6 +154,7 @@ def client(self) -> SecretClient:
             The Azure Key Vault client.
         """
         if self._client is None:
+
             azure_logger = logging.getLogger("azure")
 
             # Suppress the INFO logging level of the Azure SDK if the
diff --git a/src/zenml/zen_stores/secrets_stores/base_secrets_store.py b/src/zenml/zen_stores/secrets_stores/base_secrets_store.py
index e2bb3e9fd71..562be29ee5d 100644
--- a/src/zenml/zen_stores/secrets_stores/base_secrets_store.py
+++ b/src/zenml/zen_stores/secrets_stores/base_secrets_store.py
@@ -423,6 +423,7 @@ def _check_secret_scope(
         ]
 
         if existing_secrets:
+
             existing_secret_model = existing_secrets[0]
 
             msg = (
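Reviewer note: the AWS, Azure, and HashiCorp secrets-store hunks in this group all add their blank line inside the same lazily-initialized `client` property. The shared shape, as a sketch; `_build_client` stands in for the respective `boto3`/`SecretClient`/`hvac` constructors:

```python
from typing import Any, Optional


class ExampleSecretsStore:
    _client: Optional[Any] = None

    @property
    def client(self) -> Any:
        """Build the backend client on first access, then cache it."""
        if self._client is None:
            self._client = self._build_client()
        return self._client

    def _build_client(self) -> Any:
        raise NotImplementedError  # e.g. boto3.client(...), hvac.Client(...)
```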
diff --git a/src/zenml/zen_stores/secrets_stores/hashicorp_secrets_store.py b/src/zenml/zen_stores/secrets_stores/hashicorp_secrets_store.py
index 8d08548769a..1c8c6d2f288 100644
--- a/src/zenml/zen_stores/secrets_stores/hashicorp_secrets_store.py
+++ b/src/zenml/zen_stores/secrets_stores/hashicorp_secrets_store.py
@@ -151,6 +151,7 @@ def client(self) -> hvac.Client:
             The HashiCorp Vault client.
         """
         if self._client is None:
+
             # Initialize the HashiCorp Vault client with the
             # credentials from the configuration.
             self._client = hvac.Client(
diff --git a/src/zenml/zen_stores/secrets_stores/sql_secrets_store.py b/src/zenml/zen_stores/secrets_stores/sql_secrets_store.py
index 211b22e8d90..48376af3889 100644
--- a/src/zenml/zen_stores/secrets_stores/sql_secrets_store.py
+++ b/src/zenml/zen_stores/secrets_stores/sql_secrets_store.py
@@ -229,6 +229,7 @@ def _check_sql_secret_scope(
         existing_secret = session.exec(scope_filter).first()
 
         if existing_secret is not None:
+
             existing_secret_model = existing_secret.to_model(
                 encryption_engine=self._encryption_engine
             )
diff --git a/src/zenml/zen_stores/sql_zen_store.py b/src/zenml/zen_stores/sql_zen_store.py
index 89bd7dcf4ce..9191d87e6b3 100644
--- a/src/zenml/zen_stores/sql_zen_store.py
+++ b/src/zenml/zen_stores/sql_zen_store.py
@@ -3149,6 +3149,7 @@ def create_run(
             EntityExistsError: If an identical pipeline run already exists.
         """
         with Session(self.engine) as session:
+
             # Check if pipeline run with same name already exists.
             existing_domain_run = session.exec(
                 select(PipelineRunSchema).where(
@@ -3351,6 +3352,7 @@ def create_run_step(
             KeyError: if the pipeline run doesn't exist.
         """
         with Session(self.engine) as session:
+
             # Check if the pipeline run exists
             run = session.exec(
                 select(PipelineRunSchema).where(
@@ -3689,6 +3691,7 @@ def update_run_step(
             KeyError: if the step run doesn't exist.
         """
         with Session(self.engine) as session:
+
             # Check if the step exists
             existing_step_run = session.exec(
                 select(StepRunSchema).where(StepRunSchema.id == step_run_id)
diff --git a/tests/harness/cli/deployment.py b/tests/harness/cli/deployment.py
index 52fa367d679..47881821744 100755
--- a/tests/harness/cli/deployment.py
+++ b/tests/harness/cli/deployment.py
@@ -144,6 +144,7 @@ def exec_in_deployment(
         sys.exit(1)
 
     with deployment.connect():
+
         print(
             f"Running ZenML CLI command in test deployment '{name}': 'zenml {' '.join(args[1:])}'"
         )
diff --git a/tests/harness/deployment/local_docker.py b/tests/harness/deployment/local_docker.py
index bee00533427..800e1113bfb 100644
--- a/tests/harness/deployment/local_docker.py
+++ b/tests/harness/deployment/local_docker.py
@@ -92,6 +92,7 @@ def up(self) -> None:
         from zenml.utils.networking_utils import scan_for_available_port
 
         if self.is_running:
+
             logging.info(
                 f"Deployment '{self.config.name}' is already running. "
                 f"Skipping provisioning."
diff --git a/tests/harness/deployment/server_docker.py b/tests/harness/deployment/server_docker.py
index 4555e9f893e..4ee65043838 100644
--- a/tests/harness/deployment/server_docker.py
+++ b/tests/harness/deployment/server_docker.py
@@ -61,6 +61,7 @@ def server(self) -> Optional["ServerDeployment"]:
         # Managing the local server deployment is done through a default
         # local deployment with the same config.
         with self.default_deployment.connect():
+
             deployer = ServerDeployer()
             servers = deployer.list_servers(
                 provider_type=ServerProviderType.DOCKER
@@ -95,6 +96,7 @@ def up(self) -> None:
         from zenml.zen_server.deploy.deployment import ServerDeploymentConfig
 
         if self.is_running:
+
             logging.info(
                 f"Deployment '{self.config.name}' is already running. "
                 f"Skipping provisioning."
@@ -108,6 +110,7 @@ def up(self) -> None:
 
         # Managing the local server deployment is done through the default
         # deployment with the same config.
         with self.default_deployment.connect():
+
             port = scan_for_available_port(LOCAL_ZENML_SERVER_DEFAULT_PORT)
 
             if port is None:
@@ -134,9 +137,11 @@ def down(self) -> None:
         server = self.server
 
         if server is not None:
+
             # Managing the local server deployment is done through the default
             # deployment with the same config.
             with self.default_deployment.connect():
+
                 deployer = ServerDeployer()
                 deployer.remove_server(server.config.name)
 
diff --git a/tests/harness/deployment/server_docker_compose.py b/tests/harness/deployment/server_docker_compose.py
index 8aec5064b44..f609b0b71d7 100644
--- a/tests/harness/deployment/server_docker_compose.py
+++ b/tests/harness/deployment/server_docker_compose.py
@@ -163,6 +163,7 @@ def up(self) -> None:
         )
 
         if self.is_running:
+
             logging.info(
                 f"Deployment '{self.config.name}' is already running. "
                 f"Skipping provisioning."
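Reviewer note: the server deployment hunks above and below share the port-probing idiom from `zenml.utils.networking_utils`, visible in the `up()` bodies. In isolation (the single-argument call matches the diff; `8237` is assumed here to be the local ZenML server default):

```python
from zenml.utils.networking_utils import scan_for_available_port

port = scan_for_available_port(8237)
if port is None:
    raise RuntimeError("No port available for the ZenML test server.")
print(f"Starting test server on port {port}")
```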
diff --git a/tests/harness/deployment/server_local.py b/tests/harness/deployment/server_local.py
index 5b3ad3e983c..b4c8ba59994 100644
--- a/tests/harness/deployment/server_local.py
+++ b/tests/harness/deployment/server_local.py
@@ -61,6 +61,7 @@ def server(self) -> Optional["ServerDeployment"]:
         # Managing the local server deployment is done through a default
         # local deployment with the same config.
         with self.default_deployment.connect():
+
             deployer = ServerDeployer()
             servers = deployer.list_servers(
                 provider_type=ServerProviderType.LOCAL
@@ -103,6 +104,7 @@ def up(self) -> None:
             pass
 
         if self.is_running:
+
             logging.info(
                 f"Deployment '{self.config.name}' is already running. "
                 f"Skipping provisioning."
@@ -114,6 +116,7 @@ def up(self) -> None:
 
         # Managing the local server deployment is done through the default
         # deployment with the same config.
         with self.default_deployment.connect():
+
             port = scan_for_available_port(LOCAL_ZENML_SERVER_DEFAULT_PORT)
 
             if port is None:
@@ -147,6 +150,7 @@ def down(self) -> None:
 
         # Managing the local server deployment is done through the default
         # deployment with the same config.
         with self.default_deployment.connect():
+
             deployer = ServerDeployer()
             deployer.remove_server(server.config.name)
 
diff --git a/tests/harness/environment.py b/tests/harness/environment.py
index 5cbbf60489f..37ab0deb835 100644
--- a/tests/harness/environment.py
+++ b/tests/harness/environment.py
@@ -72,9 +72,11 @@ def is_provisioned(self) -> bool:
             return False
 
         with self.deployment.connect() as client:
+
             component_requirements: List[StackRequirement] = []
 
             for requirement in self.config.compiled_requirements:
+
                 if not requirement.mandatory:
                     # optional components the software requirements of which are
                     # not met locally do not need to exist for the environment
@@ -108,6 +110,7 @@ def _collect_components(
     ) -> None:
         """Collect the components managed or tracked by this environment."""
         with self.deployment.connect() as client:
+
             self._optional_components = {}
             self._mandatory_components = {}
 
@@ -228,10 +231,12 @@ def provision(self) -> None:
         build_base_image: bool = False
 
         with self.deployment.connect() as client:
+
             component_requirements: List[StackRequirement] = []
             components: List[ComponentResponseModel] = []
 
             for requirement in self.config.compiled_requirements:
+
                 result, err = requirement.check_software_requirements()
                 if not result:
                     if requirement.mandatory:
@@ -291,6 +296,7 @@ def provision(self) -> None:
             BaseTestDeployment.build_base_image()
 
         for component_model in components:
+
             component = StackComponent.from_model(
                 component_model=component_model
             )
@@ -334,11 +340,13 @@ def deprovision(self) -> None:
             return
 
         with self.deployment.connect() as client:
+
             component_requirements: List[StackRequirement] = []
             components: List[ComponentResponseModel] = []
             external_components: List[UUID] = []
 
             for requirement in self.config.compiled_requirements:
+
                 # components the software requirements of which are
                 # not met locally are ignored
                 result, _ = requirement.check_software_requirements()
@@ -348,6 +356,7 @@ def deprovision(self) -> None:
                 component_requirements.extend(requirement.stacks)
 
             for component_requirement in component_requirements:
+
                 # Environment requirements are managed by the test framework
                 # by default
                 external = component_requirement.external or False
diff --git a/tests/harness/model/requirements.py b/tests/harness/model/requirements.py
index c2efb454476..29c09a65aaa 100644
--- a/tests/harness/model/requirements.py
+++ b/tests/harness/model/requirements.py
@@ -134,6 +134,7 @@ def filter_components(component: "ComponentResponseModel") -> bool:
             # components (default artifact store and orchestrator) that are
             # always available.
             if environment is not None:
+
                 # If one or more mandatory components of the same type are
                 # enforced, the component must be one of them.
                 if len(mandatory_components) > 0:
@@ -181,6 +182,7 @@ def filter_components(component: "ComponentResponseModel") -> bool:
             return None
 
         if len(components) > 1:
+
             logging.warning(
                 f"found multiple {self.type.value} components that meet the "
                 f"test requirements: {', '.join([c.name for c in components])}. "
diff --git a/tests/integration/examples/test_evidently_data_validation.py b/tests/integration/examples/test_evidently_data_validation.py
index ca02ba89f51..04be650dd50 100644
--- a/tests/integration/examples/test_evidently_data_validation.py
+++ b/tests/integration/examples/test_evidently_data_validation.py
@@ -26,6 +26,7 @@ def test_example(request: pytest.FixtureRequest) -> None:
         name="evidently_data_validation",
         pipelines={"text_data_report_test_pipeline": (1, 5)},
     ) as (example, runs):
+
         pipeline = get_pipeline("text_data_report_test_pipeline")
         assert pipeline
 
diff --git a/tests/integration/examples/test_seldon_deployment.py b/tests/integration/examples/test_seldon_deployment.py
index b3b1dd319e8..b8951dc56c5 100644
--- a/tests/integration/examples/test_seldon_deployment.py
+++ b/tests/integration/examples/test_seldon_deployment.py
@@ -35,6 +35,7 @@ def test_example(request: pytest.FixtureRequest) -> None:
             "inference_pipeline": (1, 4),
         },
     ) as run_result:
+
         from zenml.integrations.seldon.model_deployers.seldon_model_deployer import (
             SeldonModelDeployer,
         )
diff --git a/tests/integration/functional/cli/test_secret.py b/tests/integration/functional/cli/test_secret.py
index 7150c5cf5c9..f52cd91a2a2 100644
--- a/tests/integration/functional/cli/test_secret.py
+++ b/tests/integration/functional/cli/test_secret.py
@@ -65,6 +65,7 @@ def test_create_fails_with_bad_scope():
     """Tests that creating a secret with a bad scope fails."""
     runner = CliRunner()
     with cleanup_secrets() as secret_name:
+
         result = runner.invoke(
             secret_create_command,
             [secret_name, "--test_value=aria", "--scope=axl_scope"],
@@ -79,6 +80,7 @@ def test_list_secret_works():
     """Test that the secret list command works."""
     runner = CliRunner()
     with cleanup_secrets() as secret_name:
+
         result1 = runner.invoke(
             secret_list_command,
         )
@@ -102,6 +104,7 @@ def test_get_secret_works():
     """Test that the secret get command works."""
     runner = CliRunner()
     with cleanup_secrets() as secret_name:
+
         result1 = runner.invoke(
             secret_get_command,
             [secret_name],
@@ -157,6 +160,7 @@ def test_get_secret_with_scope_works():
     """Test that the secret get command works with a scope."""
     runner = CliRunner()
     with cleanup_secrets() as secret_name:
+
         result1 = runner.invoke(
             secret_get_command,
             [secret_name, f"--scope={SecretScope.USER}"],
@@ -227,7 +231,9 @@ def test_rename_secret_works():
     runner = CliRunner()
 
     with cleanup_secrets() as secret_name:
+
         with cleanup_secrets() as new_secret_name:
+
             result1 = runner.invoke(
                 secret_rename_command,
                 [secret_name, "-n", new_secret_name],
@@ -269,6 +275,7 @@ def test_update_secret_works():
     client = Client()
 
     with cleanup_secrets() as secret_name:
+
         result1 = runner.invoke(
             secret_update_command,
             [secret_name, "--test_value=aria", "--test_value2=axl"],
 
 
-def test_parse_name_and_extra_arguments_returns_a_dict_of_known_options() -> (
-    None
-):
+def test_parse_name_and_extra_arguments_returns_a_dict_of_known_options() -> None:
     """Check that parse_name_and_extra_arguments returns a dict of known options."""
     name, parsed_sample_args = parse_name_and_extra_arguments(
         SAMPLE_CUSTOM_ARGUMENTS
diff --git a/tests/integration/functional/zen_stores/test_secrets_store.py b/tests/integration/functional/zen_stores/test_secrets_store.py
index f52956e2bf9..cc391e29b08 100644
--- a/tests/integration/functional/zen_stores/test_secrets_store.py
+++ b/tests/integration/functional/zen_stores/test_secrets_store.py
@@ -361,6 +361,7 @@ def test_update_secret_name_fails_if_exists_in_workspace():
 
     with SecretContext() as secret:
         with SecretContext() as other_secret:
+
             saved_secret = store.get_secret(secret_id=secret.id)
             assert saved_secret.name == secret.name
 
@@ -399,6 +400,7 @@ def test_update_user_secret_name_succeeds_if_exists_in_workspace():
 
     with SecretContext(scope=SecretScope.USER) as secret:
         with SecretContext() as other_secret:
+
             saved_secret = store.get_secret(secret_id=secret.id)
             assert saved_secret.name == secret.name
 
@@ -461,6 +463,7 @@ def test_update_workspace_secret_name_succeeds_if_exists_for_a_user():
 
     with SecretContext() as secret:
         with SecretContext(scope=SecretScope.USER) as other_secret:
+
             saved_secret = store.get_secret(secret_id=secret.id)
             assert saved_secret.name == secret.name
 
@@ -525,6 +528,7 @@ def test_reusing_user_secret_name_succeeds():
     store = client.zen_store
 
     with SecretContext(scope=SecretScope.USER) as secret:
+
         all_secrets = store.list_secrets(
             SecretFilterModel(name=secret.name)
         ).items
@@ -551,6 +555,7 @@ def test_reusing_user_secret_name_succeeds():
         with SecretContext(
             secret_name=secret.name, scope=SecretScope.USER
         ) as other_secret:
+
             all_secrets = other_store.list_secrets(
                 SecretFilterModel(name=secret.name),
             ).items
@@ -596,6 +601,7 @@ def test_update_scope_succeeds():
     store = client.zen_store
 
     with SecretContext() as secret:
+
         saved_secret = store.get_secret(secret_id=secret.id)
         assert saved_secret.name == secret.name
         assert saved_secret.scope == SecretScope.WORKSPACE
@@ -737,6 +743,7 @@ def test_update_scope_fails_if_name_already_in_scope():
         with SecretContext(
             secret_name=secret.name, scope=SecretScope.USER
         ) as other_secret:
+
             all_secrets = store.list_secrets(
                 SecretFilterModel(name=secret.name)
             ).items
@@ -884,6 +891,7 @@ def test_user_secret_is_not_visible_to_other_users():
         with SecretContext(
             scope=SecretScope.USER, secret_name=secret.name
         ) as user_secret:
+
             all_secrets = store.list_secrets(
                 SecretFilterModel(name=secret.name)
             ).items
@@ -979,6 +987,7 @@ def test_workspace_secret_is_not_visible_to_other_workspaces():
     assert len(user_secrets) == 0
 
     with WorkspaceContext(activate=True):
+
         all_secrets = store.list_secrets(
             SecretFilterModel(name=secret.name)
         ).items
@@ -1066,6 +1075,7 @@ def test_user_secret_is_not_visible_to_other_workspaces():
         with SecretContext(
             scope=SecretScope.USER, secret_name=secret.name
         ) as user_secret:
+
             all_secrets = store.list_secrets(
                 SecretFilterModel(name=secret.name)
             ).items
@@ -1094,6 +1104,7 @@ def test_user_secret_is_not_visible_to_other_workspaces():
     assert user_secret.id == user_secrets[0].id
 
     with WorkspaceContext(activate=True) as workspace:
+
         all_secrets = store.list_secrets(
             SecretFilterModel(name=secret.name)
         ).items
@@ -1168,6 +1179,7 @@ def test_list_secrets_filter():
     ) as secret_three, SecretContext(
         secret_name=axl_secret_name, scope=SecretScope.USER
     ) as secret_four:
+
         all_secrets = store.list_secrets(SecretFilterModel()).items
         assert len(all_secrets) >= 4
         assert set(
@@ -1319,6 +1331,7 @@ def test_list_secrets_pagination_and_sorting():
         secret_name=f"axls-spots-{suffix}",
         scope=SecretScope.USER,
     ) as secret_four:
+
         secrets = store.list_secrets(
             SecretFilterModel(
                 name=f"endswith:{suffix}",
@@ -1581,6 +1594,7 @@ def test_secret_values_cannot_be_accessed_by_readonly_user():
     # Switch to a different user with read-write access
     password = random_str(32)
     with UserContext(password=password, login=True) as user:
+
         client = Client()
         store = client.zen_store
 
@@ -1588,6 +1602,7 @@
         with SecretContext() as secret, SecretContext(
             scope=SecretScope.USER
         ) as user_secret:
+
             all_secrets = store.list_secrets(SecretFilterModel()).items
             assert len(all_secrets) >= 2
             assert secret.id in [s.id for s in all_secrets]
@@ -1635,6 +1650,7 @@
     with UserContext(
         user_name=user.name, password=password, existing_user=True
     ):
+
         all_secrets = store.list_secrets(SecretFilterModel()).items
         assert len(all_secrets) >= 2
         assert secret.id in [s.id for s in all_secrets]
@@ -1679,6 +1695,7 @@ def test_secrets_cannot_be_created_or_updated_by_readonly_user():
     # Switch to a different user with read-write access
     password = random_str(32)
     with UserContext(password=password, login=True) as user:
+
         client = Client()
         store = client.zen_store
 
@@ -1686,6 +1703,7 @@
         with SecretContext() as secret, SecretContext(
             scope=SecretScope.USER
         ) as user_secret:
+
             with does_not_raise():
                 store.update_secret(
                     secret.id, SecretUpdateModel(name=f"{secret.name}-new")
                 )
@@ -1747,6 +1765,7 @@
     with UserContext(
         user_name=user.name, password=password, existing_user=True
     ):
+
         new_client = Client()
         new_store = new_client.zen_store
 
@@ -1806,6 +1825,7 @@ def test_secret_is_deleted_with_workspace():
     assert secret.id == workspace_secrets[0].id
 
     with WorkspaceContext(activate=True) as workspace:
+
         all_secrets = store.list_secrets(
             SecretFilterModel(name=secret.name)
         ).items
@@ -1821,6 +1841,7 @@
         assert len(workspace_secrets) == 0
 
         with SecretContext(secret_name=secret.name) as other_secret:
+
             with does_not_raise():
                 store.get_secret(other_secret.id)
 
@@ -1900,6 +1921,7 @@ def test_secret_is_deleted_with_user():
     with SecretContext(
         secret_name=secret.name, scope=SecretScope.USER, delete=False
     ) as other_secret:
+
         with does_not_raise():
             other_store.get_secret(other_secret.id)
 
diff --git a/tests/integration/functional/zen_stores/test_zen_store.py b/tests/integration/functional/zen_stores/test_zen_store.py
index dd80bcd1bda..8f6c76c325c 100644
--- a/tests/integration/functional/zen_stores/test_zen_store.py
+++ b/tests/integration/functional/zen_stores/test_zen_store.py
@@ -306,6 +306,7 @@ def test_removing_user_from_team_succeeds():
 
     with UserContext() as created_user:
         with TeamContext() as created_team:
+
             team_update = TeamUpdateModel(users=[created_user.id])
             team_update = zen_store.update_team(
                 team_id=created_team.id, team_update=team_update
@@ -446,6 +447,7 @@ def test_deleting_assigned_role_fails():
     zen_store = Client().zen_store
     with RoleContext() as created_role:
         with UserContext() as created_user:
+
             role_assignment = UserRoleAssignmentRequestModel(
                 role=created_role.id,
                 user=created_user.id,
@@ -509,6 +511,7 @@ def test_assigning_role_if_assignment_already_exists_fails():
 
     with RoleContext() as created_role:
         with UserContext() as created_user:
+
             role_assignment = UserRoleAssignmentRequestModel(
                 role=created_role.id,
                 user=created_user.id,
@@ -784,6 +787,7 @@ def test_register_stack_fails_when_stack_exists():
         StackComponentType.ARTIFACT_STORE: [artifact_store.id],
     }
     with StackContext(components=components) as stack:
+
         new_stack = StackRequestModel(
             name=stack.name,
             components=components,
@@ -994,6 +998,7 @@ def test_list_runs_is_ordered():
     num_runs = 5
 
     with PipelineRunContext(num_runs):
+
         pipelines = store.list_runs(PipelineRunFilterModel()).items
         assert len(pipelines) == num_pipelines_before + num_runs
         assert all(
@@ -1077,6 +1082,7 @@ def test_get_run_step_inputs_succeeds():
     store = client.zen_store
 
     with PipelineRunContext(1):
+
         steps = store.list_run_steps(StepRunFilterModel(name="step_2"))
         for step in steps.items:
             run_step_inputs = store.get_run_step(step.id).input_artifacts
@@ -1099,6 +1105,7 @@ def test_list_unused_artifacts():
     ).total
     num_runs = 1
     with PipelineRunContext(num_runs):
+
         artifacts = store.list_artifacts(ArtifactFilterModel())
         assert artifacts.total == num_artifacts_before + num_runs * 2
 
@@ -1114,6 +1121,7 @@ def test_artifacts_are_not_deleted_with_run():
     num_artifacts_before = store.list_artifacts(ArtifactFilterModel()).total
     num_runs = 1
     with PipelineRunContext(num_runs):
+
         artifacts = store.list_artifacts(ArtifactFilterModel())
         assert artifacts.total == num_artifacts_before + num_runs * 2
 
diff --git a/tests/unit/integrations/neptune/experiment_tracker/test_neptune_experiment_tracker.py b/tests/unit/integrations/neptune/experiment_tracker/test_neptune_experiment_tracker.py
index b6f0c97381d..e72f3fa3333 100644
--- a/tests/unit/integrations/neptune/experiment_tracker/test_neptune_experiment_tracker.py
+++ b/tests/unit/integrations/neptune/experiment_tracker/test_neptune_experiment_tracker.py
@@ -104,9 +104,7 @@ def test_neptune_experiment_tracker_stack_validation(
     ).validate()
 
 
-def test_neptune_experiment_tracker_does_not_need_explicit_api_token_or_project() -> (
-    None
-):
+def test_neptune_experiment_tracker_does_not_need_explicit_api_token_or_project() -> None:
     """Test that passing an empty config upon constructing the Neptune
     experiment tracker still works (arguments are optional).
     """
diff --git a/tests/unit/steps/test_base_step.py b/tests/unit/steps/test_base_step.py
index d3317ac7731..f45b44d66db 100644
--- a/tests/unit/steps/test_base_step.py
+++ b/tests/unit/steps/test_base_step.py
@@ -560,8 +560,8 @@ def list_of_str_output_step() -> List[str]:
 
 
 @step
-def dict_of_str_output_step() -> (
-    Output(str_output=str, dict_output=Dict[str, int])
+def dict_of_str_output_step() -> Output(
+    str_output=str, dict_output=Dict[str, int]
 ):
     return "", {}
 
@@ -605,8 +605,8 @@ def p(s1, s2):
 
 
 @step
-def subscripted_generic_output_step() -> (
-    Output(dict_output=Dict[str, int], list_output=List[str])
+def subscripted_generic_output_step() -> Output(
+    dict_output=Dict[str, int], list_output=List[str]
 ):
     return {}, []
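
All of the hunks in this patch are behavior-preserving formatting changes: long return annotations lose the redundant outer parentheses (the kind an auto-formatter may emit around annotations that exceed the line length), and a blank line is reintroduced directly after `with`-block openers in the test suite. The sketch below illustrates why the annotation change is a no-op. It assumes the legacy `zenml.steps` API (`step` and the `Output` named-tuple factory) used throughout this patch; the step names are hypothetical and chosen only for illustration.

import numpy as np
from zenml.steps import Output, step


# Old spelling (hypothetical step name): the long return annotation is
# wrapped in an extra pair of parentheses before the Output(...) call
# is broken over several lines.
@step
def importer_wrapped() -> (
    Output(
        x_train=np.ndarray,
        y_train=np.ndarray,
    )
):
    return np.zeros((1, 1)), np.zeros((1,))


# New spelling: the wrapping parentheses are dropped and the
# Output(...) call itself carries the line breaks. Parentheses around
# an expression do not change its value, so both annotations evaluate
# to the same object and only the layout differs.
@step
def importer_unwrapped() -> Output(
    x_train=np.ndarray,
    y_train=np.ndarray,
):
    return np.zeros((1, 1)), np.zeros((1,))

The whitespace hunks are equally inert: a blank line directly after a `with ...:` opener has no effect on the block's semantics, so the tests behave identically before and after this patch.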