diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index d712d827a6..d13d41a2b9 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -35,5 +35,5 @@ repos: args: [ --badwords, - "spark:Spark,dataframe:DataFrame,Dataframe:DataFrame,gem:Gem,prophecy:Prophecy,pipeline:Pipeline,fabric:Fabric,GIT:Git,git:Git,databricks:Databricks,DataBricks:Databricks,delta:Delta,fabric:Fabric,dataset:Dataset,sql:SQL,python:Python,scala:Scala,vaccum:vacuum,Vaccum:Vacuum,partion:partition,job:Job,api:API,airflow:Airflow", + "spark:Spark,dataframe:DataFrame,Dataframe:DataFrame,gem:Gem,prophecy:Prophecy,pipeline:Pipeline,GIT:Git,git:Git,databricks:Databricks,DataBricks:Databricks,delta:Delta,dataset:Dataset,sql:SQL,python:Python,scala:Scala,vaccum:vacuum,Vaccum:Vacuum,partion:partition,job:Job,api:API,airflow:Airflow", ] diff --git a/docs/getting-started/getting-help/getting-help.md b/docs/getting-started/getting-help/getting-help.md index fffecf4e90..62feb216de 100644 --- a/docs/getting-started/getting-help/getting-help.md +++ b/docs/getting-started/getting-help/getting-help.md @@ -1,8 +1,8 @@ --- -title: Getting Help with Prophecy +title: Getting help with Prophecy id: getting-help sidebar_position: 6 -description: Getting Help with Prophecy +description: Getting help with Prophecy sidebar_label: Getting Help tags: [] --- @@ -13,10 +13,20 @@ At Prophecy, we're dedicated to ensuring you have the support you need, whether - **Get Trial Support in Slack:** Have questions during your trial? Join the conversation in the #support channel on our Slack community. Our dynamic community is here to assist you. [Join #support on Slack](https://prophecy-io-support.slack.com/archives/C01P1PD7JJY). - If you'd like to see Prophecy in action, sign up [here](https://www.prophecy.io/request-a-demo) for a live demo with our incredible team. 
-- _If you're a Trial customer and unable to signup for our Slack Community, please reach out to Sales@Prophecy.io for quick assistance._ +- If you're a Trial customer and unable to sign up for our Slack Community, please reach out to Sales@Prophecy.io for quick assistance. ## For Existing Customers - **Log a Ticket in the Support Portal:** For our existing customers, the Customer Support Portal is your hub for logging tickets and getting personalized assistance. Log in to the Support Portal [here](https://prophecy.zendesk.com/). -- Please send Prophecy [logs](./logs/logs.md), Spark Cluster [Configuration](./logs/logs.md), and [Connectivity Check output](./logs/logs.md) (if applicable) via the Support Portal. -- _Trouble Logging In? If you're encountering issues accessing the Customer Support Portal, don't worry. Please reach out to Support@Prophecy.io, and we'll get you back on track!_ +- Trouble Logging In? If you're encountering issues accessing the Customer Support Portal, don't worry. Please reach out to Support@Prophecy.io, and we'll get you back on track! + +## Send details + +To assist with resolving an issue, gather logs and configurations from your Prophecy and Spark environments and send them to our Support team. Review the table below to learn which items to send in the [Support Portal](https://prophecy.zendesk.com/) according to your use case. + +| **Issue** | **Info to Send** | +| --------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| General Prophecy issues | Prophecy's support team can access [Prophecy logs](./prophecy-details.md) directly for customers using the SaaS platform. 
If you have self-managed Prophecy, please provide [Prophecy logs](./prophecy-details.md) from the [admin page](/docs/architecture/self-hosted/download-logs.md#navigate-to-the-download-logs-ui). | +| Attaching a Pipeline to a Spark cluster | Connection logs from [Prophecy](./prophecy-details.md), Spark cluster [configuration](./spark-cluster-details.md#spark-configurations) and [connectivity](./spark-cluster-details.md#connectivity-check) check. | +| Running a Pipeline | Prophecy [Pipeline](./prophecy-details.md) logs, Spark cluster [configuration](./spark-cluster-details.md#spark-configurations) and [connectivity](./spark-cluster-details.md#connectivity-check) check. | +| Spark application issues | Prophecy [Pipeline](./prophecy-details.md) logs and Spark [Driver](https://docs.databricks.com/en/compute/troubleshooting/debugging-spark-ui.html#driver-logs) logs. | diff --git a/docs/getting-started/getting-help/img/cluster_5.png b/docs/getting-started/getting-help/img/cluster_5.png new file mode 100644 index 0000000000..f87bbf775d Binary files /dev/null and b/docs/getting-started/getting-help/img/cluster_5.png differ diff --git a/docs/getting-started/getting-help/img/prophecy_connection_log.png b/docs/getting-started/getting-help/img/prophecy_connection_log.png new file mode 100644 index 0000000000..e93fa3083b Binary files /dev/null and b/docs/getting-started/getting-help/img/prophecy_connection_log.png differ diff --git a/docs/getting-started/getting-help/img/prophecy_logs.png b/docs/getting-started/getting-help/img/prophecy_logs.png index 7aae4e9a38..c86bc99244 100644 Binary files a/docs/getting-started/getting-help/img/prophecy_logs.png and b/docs/getting-started/getting-help/img/prophecy_logs.png differ diff --git a/docs/getting-started/getting-help/logs/cluster-config-notebook.md b/docs/getting-started/getting-help/logs/cluster-config-notebook.md deleted file mode 100644 index 6f4343d4fd..0000000000 --- 
a/docs/getting-started/getting-help/logs/cluster-config-notebook.md +++ /dev/null @@ -1,61 +0,0 @@ ---- -title: Use a notebook to collect Spark cluster configuration -id: config-sparknotebook -sidebar_position: 3 -description: How to access the Spark Cluster configuration using a notebook -sidebar_label: Use a notebook to collect Spark cluster configuration -tags: [help, connectivity] ---- - -Create a notebook as follows and send the output via the Prophecy [Support Portal](https://prophecy.zendesk.com/). - -:::info -Replace the workspace URL, personal access token, clusterID, and API token as appropriate. -::: - -``` -# Databricks notebook source -import requests - -#Get Databricks runtime of cluster -# Get the notebook context using dbutils -context = dbutils.notebook.entry_point.getDbutils().notebook().getContext() - -# Retrieve the Databricks runtime version from the context tags -runtime_version = context.tags().get("sparkVersion").get() - -# Print the runtime version -print(f"Databricks Runtime Version: {runtime_version}") - -# Get Spark version -spark_version = spark.version -print(f"Spark Version: {spark_version}") - - -#Get the installed libraries and access mode details of the cluster -# Replace with your Databricks workspace URL and token -workspace_url = "replace_with_workspace_url" -token = "replace_with_token" -cluster_id = "replace_with_cluster_id" - - -# API endpoint to get info of installed libraries -url = f"{workspace_url}/api/2.0/libraries/cluster-status" - -# Make the API request -response = requests.get(url, headers={"Authorization": f"Bearer {token}"}, params={"cluster_id": cluster_id}) - -library_info=response.json() -print("Libraries:") -for i in library_info['library_statuses']: - print(i) - -# API endpoint to get access mode details -url = f"{workspace_url}/api/2.1/clusters/get" - -# Make the API request -response = requests.get(url, headers={"Authorization": f"Bearer {token}"}, params={"cluster_id": cluster_id}) - 
-cluster_access_info=response.json() -print(f"Cluster Access Mode: {cluster_access_info['data_security_mode']}") -``` diff --git a/docs/getting-started/getting-help/logs/cluster-config-ui.md b/docs/getting-started/getting-help/logs/cluster-config-ui.md deleted file mode 100644 index 1022db723b..0000000000 --- a/docs/getting-started/getting-help/logs/cluster-config-ui.md +++ /dev/null @@ -1,24 +0,0 @@ ---- -title: Send Spark Cluster Configuration using the Spark UI -id: config-sparkui -sidebar_position: 2 -description: How to access the Spark Cluster configuration -sidebar_label: Use the SparkUI to collect Spark cluster configuration -tags: [help, connectivity] ---- - -Send the following via the Prophecy [Support Portal](https://prophecy.zendesk.com/) - -1. Overall Cluster config (what spark version, databricks runtime version, UC single or UC shared, etc). Please send a screenshot. - ![img](./../img/cluster_1.png) - -2. Please provide the json, edited to remove any private or sensitive information. - ![img](./../img/cluster_2.png) - -3. What libraries are installed on the cluster? Please send a screenshot. - ![img](./../img/cluster_3.png) - -4. Try attaching the same cluster in a notebook. You may need to duplicate the tab and try attaching the same cluster in the duplicate tab. Please send the error screenshot if any. - -5. What init scripts are run on the cluster? Having the script itself is helpful to debug. - ![img](./../img/cluster_4.png) diff --git a/docs/getting-started/getting-help/logs/logs.md b/docs/getting-started/getting-help/logs/logs.md deleted file mode 100644 index 105ec68700..0000000000 --- a/docs/getting-started/getting-help/logs/logs.md +++ /dev/null @@ -1,83 +0,0 @@ ---- -title: Send Details in the Support Portal -id: logs -sidebar_position: 1 -description: How to download logs and send for support -sidebar_label: Send Logs -tags: [] ---- - -Which logs to send in the [Support Portal](https://prophecy.zendesk.com/)? - -
- -|
I'm having an issue with...
| My Prophecy endpoint is app.prophecy.io | My Prophecy endpoint is custom.prophecy.io | -| ------------------------------------------------------------------ | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| ...attaching a Pipeline to a spark cluster, or running a Pipeline. | Spark cluster [configuration](./logs.md#configurations) and [connectivity](./logs.md#connectivity-check) check. | Spark cluster [configuration](./logs.md#configurations) and [connectivity](./logs.md#connectivity-check) check. | -| ...with a Spark application. | Prophecy [Pipeline](./logs.md#pipeline-logs) logs and Spark [Driver](https://docs.databricks.com/en/compute/troubleshooting/debugging-spark-ui.html#driver-logs) logs. | Prophecy [Pipeline](./logs.md#pipeline-logs) logs and Spark [Driver](https://docs.databricks.com/en/compute/troubleshooting/debugging-spark-ui.html#driver-logs) logs. | -| ...anything else. | Support team can access [Prophecy logs](./logs.md#prophecy-system-logs)) directly. | [Prophecy logs](./logs.md#prophecy-system-logs) from admin page. | - -
- -## Prophecy Issues - -### Prophecy System Logs - -:::info -This feature requires Prophecy 3.4.1.0 or later. -::: - -Use the log collection feature to download all Prophecy system logs from the [admin page.](/docs/architecture/self-hosted/download-logs.md#navigate-to-the-download-logs-ui) -![img](./../img/prophecy_logs.png) - -### Pipeline Logs - -Use the log download button inside any Pipeline to download logs related to that particular Pipeline. -![img](./../img/pipeline_logs.png) - -## Spark Cluster Issues - -When attaching a Spark cluster to a Pipeline, Prophecy uses the Fabric details. Check for error codes [here](/docs/Spark/fabrics/diagnostics.md) and send this information via the Support Portal if applicable. - -### Configurations - -Use the [Spark UI](./cluster-config-ui.md) or a [notebook](./cluster-config-notebook.md) to collect cluster configurations and send via the Support Portal. - -### Connectivity Check - -Open a notebook on the Spark cluster and run the following command, adjusting the Prophecy endpoint: - -````mdx-code-block -import Tabs from '@theme/Tabs'; -import TabItem from '@theme/TabItem'; - - - - - -```py -import subprocess - -command = 'curl -X GET "https://customer_prophecy_url/execution"' -output = subprocess.check_output(['/bin/bash', '-c', command], text=True) - -print(output) -``` - - - - -```scala -%scala -import sys.process._ -val command = """curl -X GET "https://customer_prophecy_url/execution"""" -Seq("/bin/bash", "-c", command).!! -``` - - - -```` - -This command tests the reverse websocket protocol required by Prophecy to execute Pipelines on Spark clusters. Please send the output from this command in the Support Portal. 
- -**We look forward to hearing from you!** diff --git a/docs/getting-started/getting-help/prophecy-details.md b/docs/getting-started/getting-help/prophecy-details.md new file mode 100644 index 0000000000..7c28b52701 --- /dev/null +++ b/docs/getting-started/getting-help/prophecy-details.md @@ -0,0 +1,60 @@ +--- +title: Send Prophecy details to Support +id: prophecy-details +sidebar_position: 1 +description: How to download logs and send for support +sidebar_label: Send info from Prophecy +tags: [] +--- + +Gather logs from your Prophecy environment and send them to the support team via the [Support Portal](https://prophecy.zendesk.com/). + + + + + + + + + + + + + + + + + + + + + + + +
Log to Send Example
Prophecy System logs +
+ Download Prophecy logs +
+
Pipeline logs +
+ Download Pipeline logs +
+
Connection logs from Prophecy with error code +
+ Connection logs from Prophecy +
+
+ +**We look forward to hearing from you!** diff --git a/docs/getting-started/getting-help/spark-cluster-details.md b/docs/getting-started/getting-help/spark-cluster-details.md new file mode 100644 index 0000000000..874ce2261b --- /dev/null +++ b/docs/getting-started/getting-help/spark-cluster-details.md @@ -0,0 +1,200 @@ +--- +title: Send Spark cluster details +id: spark-cluster-details +sidebar_position: 2 +description: Helpful Spark cluster configurations to send to Support +sidebar_label: Send info from the Spark cluster +tags: [help, connectivity] +--- + +There are helpful Spark cluster configurations and a connectivity check that you can send to us via the Prophecy [Support Portal](https://prophecy.zendesk.com/) for troubleshooting. + +## Spark configurations + +Two ways to access the configurations: + +- Browsing the Spark UI +- Running a notebook + +### Configurations in the UI {#configUI} + +You can access your Spark cluster configurations directly from the Spark UI. + +:::note +Please send screenshots of each configuration if possible. +::: + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Configuration to SendExample
Overall cluster configuration (e.g., Spark version, Databricks runtime version, UC single or UC shared) +
+ Cluster configuration example +
+
Cluster JSON (edited to remove any private or sensitive information) +
+ Cluster JSON example +
+
Libraries installed on the cluster +
+ Cluster libraries example +
+
Init scripts run on the cluster. Include the script itself if possible. +
+ Cluster init scripts example +
+
Output of attaching cluster in a notebook. You may need to duplicate the tab and try attaching the same cluster in the duplicate tab. +
+ Notebook attach to cluster example +
+
+ +### Run a notebook {#configNB} + +For those who prefer to use code, create a notebook (example below) and send the output via the Prophecy [Support Portal](https://prophecy.zendesk.com/). + +:::info +Replace the workspace URL, personal access token, clusterID, and API token as appropriate. +::: + +
Python +

+ +``` +# Databricks notebook source +import requests + +#Get Databricks runtime of cluster +# Get the notebook context using dbutils +context = dbutils.notebook.entry_point.getDbutils().notebook().getContext() + +# Retrieve the Databricks runtime version from the context tags +runtime_version = context.tags().get("sparkVersion").get() + +# Print the runtime version +print(f"Databricks Runtime Version: {runtime_version}") + +# Get Spark version +spark_version = spark.version +print(f"Spark Version: {spark_version}") + + +#Get the installed libraries and access mode details of the cluster +# Replace with your Databricks workspace URL and token +workspace_url = "replace_with_workspace_url" +token = "replace_with_token" +cluster_id = "replace_with_cluster_id" + + +# API endpoint to get info of installed libraries +url = f"{workspace_url}/api/2.0/libraries/cluster-status" + +# Make the API request +response = requests.get(url, headers={"Authorization": f"Bearer {token}"}, params={"cluster_id": cluster_id}) + +library_info=response.json() +print("Libraries:") +for i in library_info['library_statuses']: + print(i) + +# API endpoint to get access mode details +url = f"{workspace_url}/api/2.1/clusters/get" + +# Make the API request +response = requests.get(url, headers={"Authorization": f"Bearer {token}"}, params={"cluster_id": cluster_id}) + +cluster_access_info=response.json() +print(f"Cluster Access Mode: {cluster_access_info['data_security_mode']}") +``` + +

+
+ +## Connectivity Check + +Open a notebook on the Spark cluster and run the following command. + +:::info +Replace the Prophecy endpoint. +::: + +````mdx-code-block +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + + + + + +```py +import subprocess + +command = 'curl -X GET "https://customer_prophecy_url/execution"' +output = subprocess.check_output(['/bin/bash', '-c', command], text=True) + +print(output) +``` + + + + +```scala +%scala +import sys.process._ +val command = """curl -X GET "https://customer_prophecy_url/execution"""" +Seq("/bin/bash", "-c", command).!! +``` + + + +```` + +This command tests the reverse websocket protocol required by Prophecy to execute Pipelines on Spark clusters. Please send the output from this command in the Support Portal. + +**We look forward to hearing from you!** diff --git a/docusaurus.config.js b/docusaurus.config.js index c9aa902b98..b87c8d0ea5 100644 --- a/docusaurus.config.js +++ b/docusaurus.config.js @@ -131,6 +131,18 @@ const config = { to: "/getting-started", from: "/developer/videos/", }, + { + to: "/getting-started/getting-help/spark-cluster-details", + from: "/getting-started/getting-help/logs/config-sparkui", + }, + { + to: "/getting-started/getting-help/prophecy-details", + from: "/getting-started/getting-help/logs/", + }, + { + to: "/getting-started/getting-help/spark-cluster-details", + from: "/getting-started/getting-help/logs/config-sparknotebook", + }, { to: "/concepts/copilot/enable-data-copilot", from: "/architecture/deployment/enable-data-copilot",