From be1506e35da072eb258bf6932473b13c3c746da9 Mon Sep 17 00:00:00 2001
From: Daniel Tom
Date: Mon, 23 Sep 2024 12:56:34 +0200
Subject: [PATCH] Remove deprecated basic auth params from deploy lambda

---
 cluster_sdk.py                            | 124 ++++++++++++++++++++++
 typescript/src/resources/deploy-lambda.ts |   8 +-
 2 files changed, 127 insertions(+), 5 deletions(-)
 create mode 100644 cluster_sdk.py

diff --git a/cluster_sdk.py b/cluster_sdk.py
new file mode 100644
index 00000000..083a0089
--- /dev/null
+++ b/cluster_sdk.py
@@ -0,0 +1,124 @@
+import os
+import logging
+from typing import Optional, Dict, List
+
+from databricks.sdk import WorkspaceClient
+from databricks.sdk.service.compute import (
+    AwsAttributes,
+    AutoScale,
+    ClusterLogConf,
+    ClusterSource,
+    DataSecurityMode,
+    DockerImage,
+    InitScriptInfo,
+    RuntimeEngine,
+    WorkloadType,
+)
+from pydantic import BaseModel
+
+from databricks_cdk.utils import (
+    get_account_id,
+    get_deploy_user,
+    get_param,
+    PASS_PARAM,
+    CnfResponse,
+)
+
+
+logger = logging.getLogger(__name__)
+
+HOST_PARAM = os.environ.get("HOST_PARAM", "/databricks/workspace_url")
+def get_host():
+    return get_param(HOST_PARAM, required=True)
+
+
+account_id = get_account_id()
+host = get_host()
+user = get_deploy_user()
+password = get_param(PASS_PARAM, required=True)
+
+class Cluster(BaseModel):
+    spark_version: str
+    apply_policy_default_values: Optional[bool] = None
+    autoscale: Optional[AutoScale] = None
+    autotermination_minutes: Optional[int] = None
+    aws_attributes: Optional[AwsAttributes] = None
+    cluster_log_conf: Optional[ClusterLogConf] = None
+    cluster_name: Optional[str] = None
+    cluster_source: Optional[ClusterSource] = None
+    custom_tags: Optional[Dict[str, str]] = None
+    data_security_mode: Optional[DataSecurityMode] = None
+    docker_image: Optional[DockerImage] = None
+    driver_instance_pool_id: Optional[str] = None
+    driver_node_type_id: Optional[str] = None
+    enable_elastic_disk: Optional[bool] = None
+    enable_local_disk_encryption: Optional[bool] = None
+    init_scripts: Optional[List[InitScriptInfo]] = None
+    instance_pool_id: Optional[str] = None
+    node_type_id: Optional[str] = None
+    num_workers: Optional[int] = None
+    policy_id: Optional[str] = None
+    runtime_engine: Optional[RuntimeEngine] = None
+    single_user_name: Optional[str] = None
+    spark_conf: Optional[Dict[str, str]] = None
+    spark_env_vars: Optional[Dict[str, str]] = None
+    ssh_public_keys: Optional[List[str]] = None
+    workload_type: Optional[WorkloadType] = None
+
+
+class ClusterProperties(BaseModel):
+    action: str = "cluster"
+    workspace_url: str
+    cluster: Cluster
+
+def create_or_update_cluster(
+    properties: ClusterProperties, physical_resource_id: Optional[str] = None
+) -> CnfResponse:
+    """Create or update cluster at databricks"""
+    w = WorkspaceClient(
+        account_id=account_id, username=user, password=password, host=properties.workspace_url
+    )
+    cluster = properties.cluster
+    current = None
+    if physical_resource_id:
+        current = w.clusters.get(cluster_id=physical_resource_id)
+    if not current:
+        clstr = w.clusters.create(**cluster.dict())
+        return CnfResponse(physical_resource_id=clstr.cluster_id)
+
+    clstr = w.clusters.edit(cluster_id=physical_resource_id, **cluster.dict())
+    return CnfResponse(physical_resource_id=clstr.cluster_id)
+
+
+def delete_cluster(
+    properties: ClusterProperties, physical_resource_id: Optional[str] = None
+) -> CnfResponse:
+    """Delete cluster at databricks"""
+
+    w = WorkspaceClient(
+        account_id=account_id, username=user, password=password, host=properties.workspace_url
+    )
+    current = w.clusters.get(cluster_id=physical_resource_id)
+    if current:
+        w.clusters.permanent_delete(cluster_id=physical_resource_id)
+    else:
+        logger.warning("Already removed")
+    return CnfResponse(physical_resource_id=physical_resource_id)
+
+
+cluster = Cluster(
+    cluster_name="test-cluster",
+    spark_version="11.3.x-scala2.12",
+    num_workers=1,
+    node_type_id="i3.xlarge",
+)
+
+cluster_properties = ClusterProperties(
+    workspace_url=host,
+    cluster=cluster,
+)
+
+cluster_id = "0825-131818-dne7ycvz"
+create_or_update_cluster(cluster_properties, physical_resource_id=cluster_id)
+delete_cluster(cluster_properties, physical_resource_id=cluster_id)
+pass  # for debugging
diff --git a/typescript/src/resources/deploy-lambda.ts b/typescript/src/resources/deploy-lambda.ts
index a93860c8..a9a1183e 100644
--- a/typescript/src/resources/deploy-lambda.ts
+++ b/typescript/src/resources/deploy-lambda.ts
@@ -43,9 +43,8 @@ export interface CustomDeployLambdaProps {
     readonly accountId: string
     readonly region: string
     readonly lambdaVersion?: string
-    readonly databricksUserParam?: string
-    readonly databricksPassParam?: string
     readonly databricksAccountParam?: string
+    readonly clientSecretParam?: string
     readonly lambdaCode?: aws_lambda.DockerImageCode
     readonly lambdaName?: string
     readonly lambdaId?: string
@@ -337,9 +336,8 @@
             memorySize: 512,
             environment: {
                 LAMBDA_METHOD: "cfn-deploy",
-                USER_PARAM: props.databricksUserParam || "/databricks/deploy/user",
-                PASS_PARAM: props.databricksPassParam || "/databricks/deploy/password",
-                ACCOUNT_PARAM: props.databricksAccountParam || "/databricks/account-id"
+                ACCOUNT_PARAM: props.databricksAccountParam || "/databricks/account-id",
+                CLIENT_SECRET_PARAM: props.clientSecretParam || "/databricks/deploy/client-secret"
             },
             logRetention: aws_logs.RetentionDays.THREE_MONTHS,
         });
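
With USER_PARAM and PASS_PARAM removed, the Python side of the lambda will eventually need to build its WorkspaceClient from the new client-secret parameter rather than basic auth (cluster_sdk.py above still uses username/password for local debugging). The sketch below is illustrative only: CLIENT_ID_PARAM is a hypothetical companion parameter not introduced by this patch, while CLIENT_SECRET_PARAM matches the environment variable added in deploy-lambda.ts. The databricks-sdk WorkspaceClient accepts client_id/client_secret for OAuth machine-to-machine auth.

    import os

    from databricks.sdk import WorkspaceClient

    from databricks_cdk.utils import get_param

    # CLIENT_SECRET_PARAM mirrors the env var added in deploy-lambda.ts above;
    # CLIENT_ID_PARAM is a hypothetical companion parameter, not part of this patch.
    CLIENT_SECRET_PARAM = os.environ.get("CLIENT_SECRET_PARAM", "/databricks/deploy/client-secret")
    CLIENT_ID_PARAM = os.environ.get("CLIENT_ID_PARAM", "/databricks/deploy/client-id")


    def get_workspace_client(workspace_url: str) -> WorkspaceClient:
        """Authenticate with an OAuth service principal instead of basic auth."""
        return WorkspaceClient(
            host=workspace_url,
            client_id=get_param(CLIENT_ID_PARAM, required=True),
            client_secret=get_param(CLIENT_SECRET_PARAM, required=True),
        )

This keeps the SSM-parameter indirection the lambda already uses for ACCOUNT_PARAM and HOST_PARAM intact while dropping the deprecated username/password pair.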