Commit: Remove depr. basic auth params deploy lambda
Showing 2 changed files with 127 additions and 5 deletions.
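The file below still builds the WorkspaceClient from the basic auth parameters (the deploy user plus PASS_PARAM). As a minimal sketch of the token-based construction the commit title points toward — where TOKEN_PARAM and its default parameter name are assumptions for illustration, not taken from this diff — the client could instead be built like this:

import os

from databricks.sdk import WorkspaceClient

from databricks_cdk.utils import get_param

# Assumed parameter name; not part of this diff
TOKEN_PARAM = os.environ.get("TOKEN_PARAM", "/databricks/deploy/token")


def get_workspace_client(workspace_url: str) -> WorkspaceClient:
    # Personal access token auth instead of the deprecated username/password
    token = get_param(TOKEN_PARAM, required=True)
    return WorkspaceClient(host=workspace_url, token=token)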
@@ -0,0 +1,124 @@
import os
import logging
from typing import Optional, Dict, List

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.compute import (
    AwsAttributes,
    AutoScale,
    ClusterLogConf,
    ClusterSource,
    DataSecurityMode,
    DockerImage,
    InitScriptInfo,
    RuntimeEngine,
    WorkloadType,
)
from pydantic import BaseModel

from databricks_cdk.utils import (
    get_account_id,
    get_deploy_user,
    get_param,
    PASS_PARAM,
    CnfResponse,
)

logger = logging.getLogger(__name__)

# Workspace URL is resolved from a named parameter rather than hardcoded
HOST_PARAM = os.environ.get("HOST_PARAM", "/databricks/workspace_url")


def get_host():
    return get_param(HOST_PARAM, required=True)


# Basic auth credentials for the deploy user, resolved once at import time
account_id = get_account_id()
host = get_host()
user = get_deploy_user()
password = get_param(PASS_PARAM, required=True)


class Cluster(BaseModel):
    """Cluster configuration; fields mirror the Databricks clusters API payload."""

    spark_version: str
    apply_policy_default_values: Optional[bool] = None
    autoscale: Optional[AutoScale] = None
    autotermination_minutes: Optional[int] = None
    aws_attributes: Optional[AwsAttributes] = None
    cluster_log_conf: Optional[ClusterLogConf] = None
    cluster_name: Optional[str] = None
    cluster_source: Optional[ClusterSource] = None
    custom_tags: Optional[Dict[str, str]] = None
    data_security_mode: Optional[DataSecurityMode] = None
    docker_image: Optional[DockerImage] = None
    driver_instance_pool_id: Optional[str] = None
    driver_node_type_id: Optional[str] = None
    enable_elastic_disk: Optional[bool] = None
    enable_local_disk_encryption: Optional[bool] = None
    init_scripts: Optional[List[InitScriptInfo]] = None
    instance_pool_id: Optional[str] = None
    node_type_id: Optional[str] = None
    num_workers: Optional[int] = None
    policy_id: Optional[str] = None
    runtime_engine: Optional[RuntimeEngine] = None
    single_user_name: Optional[str] = None
    spark_conf: Optional[Dict[str, str]] = None
    spark_env_vars: Optional[Dict[str, str]] = None
    ssh_public_keys: Optional[List[str]] = None
    workload_type: Optional[WorkloadType] = None


class ClusterProperties(BaseModel):
    action: str = "cluster"
    workspace_url: str
    cluster: Cluster


def create_or_update_cluster(
    properties: ClusterProperties, physical_resource_id: Optional[str] = None
) -> CnfResponse:
    """Create or update cluster at databricks"""
    w = WorkspaceClient(
        account_id=account_id, username=user, password=password, host=properties.workspace_url
    )
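    # Custom-resource upsert pattern: a physical_resource_id means an existing
    # resource is being updated, so edit it in place; otherwise create a new
    # cluster and return its id as the physical resource id.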
    cluster = properties.cluster
    current = None
    if physical_resource_id:
        current = w.clusters.get(cluster_id=physical_resource_id)
    if not current:
        clstr = w.clusters.create(**cluster.dict())
        return CnfResponse(physical_resource_id=clstr.cluster_id)
    clstr = w.clusters.edit(cluster_id=physical_resource_id, **cluster.dict())
    return CnfResponse(physical_resource_id=clstr.cluster_id)


def delete_cluster(
    properties: ClusterProperties, physical_resource_id: Optional[str] = None
) -> CnfResponse:
    """Delete cluster at databricks"""
    w = WorkspaceClient(
        account_id=account_id, username=user, password=password, host=properties.workspace_url
    )
    current = w.clusters.get(cluster_id=physical_resource_id)
    if current:
        w.clusters.permanent_delete(cluster_id=physical_resource_id)
    else:
        # Deleting an already-removed cluster is treated as success (idempotent delete)
        logger.warning("Already removed")
    return CnfResponse(physical_resource_id=physical_resource_id)


# Ad-hoc debug driver: exercises create/update and delete against a live workspace
cluster = Cluster(
    cluster_name="test-cluster",
    spark_version="11.3.x-scala2.12",
    num_workers=1,
    node_type_id="i3.xlarge",
)

cluster_properties = ClusterProperties(
    workspace_url=host,
    cluster=cluster,
)

cluster_id = "0825-131818-dne7ycvz"  # renamed from `id` to avoid shadowing the builtin
create_or_update_cluster(cluster_properties, physical_resource_id=cluster_id)
delete_cluster(cluster_properties, physical_resource_id=cluster_id)
pass  # for debugging: anchor line for a trailing breakpoint