From c3fbdc3aebe92270c8f1f55dfa9913c00cb6805c Mon Sep 17 00:00:00 2001
From: David Crook
Date: Fri, 18 Jan 2019 11:02:36 -0500
Subject: [PATCH] Create test_setup.py

---
 test_setup.py | 21 +++++++++++++++++++++
 1 file changed, 21 insertions(+)
 create mode 100644 test_setup.py

diff --git a/test_setup.py b/test_setup.py
new file mode 100644
index 0000000..ef81819
--- /dev/null
+++ b/test_setup.py
@@ -0,0 +1,21 @@
+# Databricks notebook source
+configs = {"fs.azure.account.auth.type": "OAuth",
+           "fs.azure.account.oauth.provider.type": "org.apache.hadoop.fs.azurebfs.oauth2.ClientCredsTokenProvider",
+           "fs.azure.account.oauth2.client.id": dbutils.secrets.get(scope = "data-lake", key = "sp-app-id"),  # service principal application (client) ID
+           "fs.azure.account.oauth2.client.secret": dbutils.secrets.get(scope = "data-lake", key = "sp-password"),  # service principal key
+           "fs.azure.account.oauth2.client.endpoint": dbutils.secrets.get(scope = "data-lake", key = "sp-token-endpoint")}  # https://login.microsoftonline.com/<directory-id>/oauth2/token
+
+# Optionally, you can add <directory-name> to the source URI of your mount point.
+dbutils.fs.mount(
+  source = "abfss://datalake@dacrookdbdevstorage.dfs.core.windows.net",  # abfss://<container-name>@<storage-account-name>.dfs.core.windows.net
+  mount_point = "/mnt/datalake",
+  extra_configs = configs)
+
+# COMMAND ----------
+
+census = sqlContext.read.format('csv').options(header='true', inferSchema='true').load("/mnt/datalake/AdultCensusIncome.csv")
+display(census)
+
+# COMMAND ----------
+
+dbutils.fs.unmount("/mnt/datalake")
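
A quick smoke test for this patch, run in the same notebook right after the mount cell: a minimal sketch assuming the "/mnt/datalake" mount point above (dbutils.fs.mounts() and dbutils.fs.ls() are standard Databricks utilities; this snippet is not part of the commit).

    # Confirm the mount is attached before reading from it
    mounted = any(m.mountPoint == "/mnt/datalake" for m in dbutils.fs.mounts())
    print("mounted:", mounted)

    # List the container contents; this fails fast if the service principal
    # lacks access or the OAuth settings in `configs` are wrong
    display(dbutils.fs.ls("/mnt/datalake"))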