Remove depr. basic auth params deploy lambda
dan1elt0m committed Sep 23, 2024
1 parent cca9296 commit be1506e
Showing 2 changed files with 127 additions and 5 deletions.
124 changes: 124 additions & 0 deletions cluster_sdk.py
@@ -0,0 +1,124 @@
import os
import logging
from typing import Optional, Dict, List

from databricks.sdk import WorkspaceClient
from databricks.sdk.errors import NotFound
from databricks.sdk.service.compute import (
    AwsAttributes,
    AutoScale,
    ClusterLogConf,
    ClusterSource,
    DataSecurityMode,
    DockerImage,
    InitScriptInfo,
    RuntimeEngine,
    WorkloadType,
)
from pydantic import BaseModel

from databricks_cdk.utils import (
    get_account_id,
    get_deploy_user,
    get_param,
    PASS_PARAM,
    CnfResponse,
)


logger = logging.getLogger(__name__)

HOST_PARAM = os.environ.get("HOST_PARAM", "/databricks/workspace_url")


def get_host() -> str:
    return get_param(HOST_PARAM, required=True)


account_id = get_account_id()
host = get_host()
user = get_deploy_user()
password = get_param(PASS_PARAM, required=True)


class Cluster(BaseModel):
    spark_version: str
    apply_policy_default_values: Optional[bool] = None
    autoscale: Optional[AutoScale] = None
    autotermination_minutes: Optional[int] = None
    aws_attributes: Optional[AwsAttributes] = None
    cluster_log_conf: Optional[ClusterLogConf] = None
    cluster_name: Optional[str] = None
    cluster_source: Optional[ClusterSource] = None
    custom_tags: Optional[Dict[str, str]] = None
    data_security_mode: Optional[DataSecurityMode] = None
    docker_image: Optional[DockerImage] = None
    driver_instance_pool_id: Optional[str] = None
    driver_node_type_id: Optional[str] = None
    enable_elastic_disk: Optional[bool] = None
    enable_local_disk_encryption: Optional[bool] = None
    init_scripts: Optional[List[InitScriptInfo]] = None
    instance_pool_id: Optional[str] = None
    node_type_id: Optional[str] = None
    num_workers: Optional[int] = None
    policy_id: Optional[str] = None
    runtime_engine: Optional[RuntimeEngine] = None
    single_user_name: Optional[str] = None
    spark_conf: Optional[Dict[str, str]] = None
    spark_env_vars: Optional[Dict[str, str]] = None
    ssh_public_keys: Optional[List[str]] = None
    workload_type: Optional[WorkloadType] = None


class ClusterProperties(BaseModel):
    action: str = "cluster"
    workspace_url: str
    cluster: Cluster


def create_or_update_cluster(
    properties: ClusterProperties, physical_resource_id: Optional[str] = None
) -> CnfResponse:
    """Create or update a cluster at Databricks."""
    w = WorkspaceClient(
        account_id=account_id, username=user, password=password, host=properties.workspace_url
    )
    cluster = properties.cluster
    current = None
    if physical_resource_id:
        try:
            current = w.clusters.get(cluster_id=physical_resource_id)
        except NotFound:
            # The tracked cluster no longer exists; fall through and create a new one.
            current = None
    if not current:
        created = w.clusters.create(**cluster.dict())
        return CnfResponse(physical_resource_id=created.cluster_id)

    # edit does not return a cluster id, so reuse the known physical resource id.
    w.clusters.edit(cluster_id=physical_resource_id, **cluster.dict())
    return CnfResponse(physical_resource_id=physical_resource_id)


def delete_cluster(
    properties: ClusterProperties, physical_resource_id: Optional[str] = None
) -> CnfResponse:
    """Delete a cluster at Databricks."""
    w = WorkspaceClient(
        account_id=account_id, username=user, password=password, host=properties.workspace_url
    )
    try:
        w.clusters.get(cluster_id=physical_resource_id)
    except NotFound:
        # get raises NotFound for unknown ids, so the cluster is already gone.
        logger.warning("Cluster %s already removed", physical_resource_id)
        return CnfResponse(physical_resource_id=physical_resource_id)
    w.clusters.permanent_delete(cluster_id=physical_resource_id)
    return CnfResponse(physical_resource_id=physical_resource_id)


if __name__ == "__main__":
    # Ad-hoc smoke test for local debugging.
    cluster = Cluster(
        cluster_name="test-cluster",
        spark_version="11.3.x-scala2.12",
        num_workers=1,
        node_type_id="i3.xlarge",
    )

    cluster_properties = ClusterProperties(
        workspace_url=host,
        cluster=cluster,
    )

    cluster_id = "0825-131818-dne7ycvz"
    create_or_update_cluster(cluster_properties, physical_resource_id=cluster_id)
    delete_cluster(cluster_properties, physical_resource_id=cluster_id)
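
The lambda entry point itself is not part of this diff; the LAMBDA_METHOD: "cfn-deploy" environment variable set in the TypeScript file below hints at how the module is invoked. A minimal dispatch sketch, assuming the standard CloudFormation custom-resource event fields (RequestType, ResourceProperties, PhysicalResourceId); the handler name and routing are illustrative, not code from this repository:

def handler(event: dict, context: object) -> dict:
    # Hypothetical dispatch; the event fields follow the CloudFormation
    # custom-resource contract, not code from this repository.
    properties = ClusterProperties(**event["ResourceProperties"])
    resource_id = event.get("PhysicalResourceId")
    if event["RequestType"] == "Delete":
        response = delete_cluster(properties, physical_resource_id=resource_id)
    else:  # Create or Update
        response = create_or_update_cluster(properties, physical_resource_id=resource_id)
    return {"PhysicalResourceId": response.physical_resource_id}
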
8 changes: 3 additions & 5 deletions typescript/src/resources/deploy-lambda.ts
@@ -43,9 +43,8 @@ export interface CustomDeployLambdaProps {
   readonly accountId: string
   readonly region: string
   readonly lambdaVersion?: string
-  readonly databricksUserParam?: string
-  readonly databricksPassParam?: string
   readonly databricksAccountParam?: string
+  readonly clientSecretParam?: string
   readonly lambdaCode?: aws_lambda.DockerImageCode
   readonly lambdaName?: string
   readonly lambdaId?: string
@@ -337,9 +336,8 @@ export class DatabricksDeployLambda extends IDatabricksDeployLambda {
       memorySize: 512,
       environment: {
         LAMBDA_METHOD: "cfn-deploy",
-        USER_PARAM: props.databricksUserParam || "/databricks/deploy/user",
-        PASS_PARAM: props.databricksPassParam || "/databricks/deploy/password",
-        ACCOUNT_PARAM: props.databricksAccountParam || "/databricks/account-id"
+        ACCOUNT_PARAM: props.databricksAccountParam || "/databricks/account-id",
+        CLIENT_SECRET_PARAM: props.clientSecretParam || "/databricks/deploy/client-secret"
       },
       logRetention: aws_logs.RetentionDays.THREE_MONTHS,
     });
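
CLIENT_SECRET_PARAM now reaches the lambda environment, but cluster_sdk.py above still authenticates with the deprecated username/password pair. A minimal sketch of the OAuth service-principal counterpart on the Python side, assuming get_param is reused for the new path; the CLIENT_ID_PARAM name is hypothetical, the client-secret path comes from this commit:

CLIENT_SECRET_PARAM = os.environ.get("CLIENT_SECRET_PARAM", "/databricks/deploy/client-secret")
CLIENT_ID_PARAM = os.environ.get("CLIENT_ID_PARAM", "/databricks/deploy/client-id")  # hypothetical name


def get_oauth_client(workspace_url: str) -> WorkspaceClient:
    # The Databricks SDK accepts client_id/client_secret for OAuth M2M auth.
    return WorkspaceClient(
        host=workspace_url,
        client_id=get_param(CLIENT_ID_PARAM, required=True),
        client_secret=get_param(CLIENT_SECRET_PARAM, required=True),
    )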
