[Synapse] - new cmdlets for Spark job definition (#15156)
* Save current status

* Save current status

* Upgrade artifacts to 1.0.0-preview.9

* Save current status

* Remove SQL script

* Add example files

* Update examples

* Update changelog

* Remove rename parameter set

* Fix changelog

* Fix online version

* Suppresses breaking change issues

* Remove serialized cmdlets config files for Synapse

Co-authored-by: Dongwei Wang <[email protected]>
idear1203 and Dongwei Wang authored Jun 10, 2021
1 parent eba1de6 commit dbf0b22
Showing 20 changed files with 949 additions and 144,333 deletions.
6 changes: 4 additions & 2 deletions src/Synapse/Synapse/Az.Synapse.psd1
@@ -171,7 +171,8 @@ CmdletsToExport = 'Get-AzSynapseSparkJob', 'Stop-AzSynapseSparkJob',
'New-AzSynapseWorkspaceKey', 'Get-AzSynapseWorkspaceKey',
'Remove-AzSynapseWorkspaceKey', 'Update-AzSynapseWorkspaceKey',
'New-AzSynapseManagedVirtualNetworkConfig',
'Update-AzSynapseManagedVirtualNetworkConfig'
'Update-AzSynapseManagedVirtualNetworkConfig',
'Get-AzSynapseSparkJobDefinition', 'Remove-AzSynapseSparkJobDefinition', 'Set-AzSynapseSparkJobDefinition'

# Variables to export from this module
# VariablesToExport = @()
@@ -189,7 +190,8 @@ AliasesToExport = 'New-AzSynapsePipeline', 'New-AzSynapseLinkedService',
'Clear-AzSynapseSqlPoolAdvancedThreatProtectionSetting',
'Clear-AzSynapseSqlPoolVulnerabilityAssessmentSetting',
'Enable-AzSynapseSqlAdvancedThreatProtection',
'Disable-AzSynapseSqlAdvancedThreatProtection'
'Disable-AzSynapseSqlAdvancedThreatProtection',
'New-AzSynapseSparkJobDefinition'

# DSC resources to export from this module
# DscResourcesToExport = @()
6 changes: 5 additions & 1 deletion src/Synapse/Synapse/ChangeLog.md
@@ -18,9 +18,13 @@
- Additional information about change #1
-->
## Upcoming Release
* Add support for Synapse Spark job definition
- Add `New-AzSynapseSparkJobDefinition` cmdlet (alias of `Set-AzSynapseSparkJobDefinition`)
- Add `Set-AzSynapseSparkJobDefinition` cmdlet
- Add `Get-AzSynapseSparkJobDefinition` cmdlet
- Add `Remove-AzSynapseSparkJobDefinition` cmdlet

## Version 0.12.0
Upgraded Azure.Analytics.Synapse.Artifacts to 1.0.0-preview.9
* Upgraded Azure.Analytics.Synapse.Artifacts to 1.0.0-preview.9

## Version 0.11.0
* Removed principaltype in Synapse Role-based access control
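As a quick illustration of the change log entry above, a hedged usage sketch of the new cmdlet surface; the workspace name, definition name, and file path are placeholders, and parameter names are taken from the cmdlet sources further down in this diff.

# Create or update a Spark job definition from a local JSON file
# (New-AzSynapseSparkJobDefinition is exported as an alias of Set-AzSynapseSparkJobDefinition).
Set-AzSynapseSparkJobDefinition -WorkspaceName ContosoWorkspace -Name ContosoSparkJob -DefinitionFile ./sparkJobDefinition.json

# List all Spark job definitions in the workspace, or fetch a single one by name.
Get-AzSynapseSparkJobDefinition -WorkspaceName ContosoWorkspace
Get-AzSynapseSparkJobDefinition -WorkspaceName ContosoWorkspace -Name ContosoSparkJob

# Remove a definition; -Force skips the confirmation prompt and -PassThru emits $true on success.
Remove-AzSynapseSparkJobDefinition -WorkspaceName ContosoWorkspace -Name ContosoSparkJob -Force -PassThru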
@@ -15,7 +15,7 @@ public class RemoveAzureSynapsePipeline : SynapseArtifactsCmdletBase
{
private const string RemoveByName = "RemoveByName";
private const string RemoveByObject = "RemoveByObject";
private const string RemoveByInputObject = "NewByInputObject";
private const string RemoveByInputObject = "RemoveByInputObject";

[Parameter(ValueFromPipelineByPropertyName = false, ParameterSetName = RemoveByName,
Mandatory = true, HelpMessage = HelpMessages.WorkspaceName)]
@@ -28,7 +28,8 @@ public class RemoveAzureSynapsePipeline : SynapseArtifactsCmdletBase
[ValidateNotNull]
public PSSynapseWorkspace WorkspaceObject { get; set; }

[Parameter(ValueFromPipelineByPropertyName = false, Mandatory = true, HelpMessage = HelpMessages.PipelineName)]
[Parameter(ValueFromPipelineByPropertyName = false, ParameterSetName = RemoveByName, Mandatory = true, HelpMessage = HelpMessages.PipelineName)]
[Parameter(ValueFromPipelineByPropertyName = false, ParameterSetName = RemoveByObject, Mandatory = true, HelpMessage = HelpMessages.PipelineName)]
[ValidateNotNullOrEmpty]
[Alias("PipelineName")]
public string Name { get; set; }
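The practical effect of the parameter-set fix above, sketched with placeholder names: -Name now belongs only to the RemoveByName and RemoveByObject sets, so the RemoveByInputObject set no longer demands it (this assumes the cmdlet's input-object parameter accepts pipeline input, as it does in the other Remove-* artifact cmdlets).

# RemoveByName: identify the pipeline by workspace and name.
Remove-AzSynapsePipeline -WorkspaceName ContosoWorkspace -Name ContosoPipeline

# RemoveByInputObject: pipe the pipeline object in; -Name is neither required nor accepted here.
Get-AzSynapsePipeline -WorkspaceName ContosoWorkspace -Name ContosoPipeline | Remove-AzSynapsePipeline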
@@ -0,0 +1,53 @@
using Microsoft.Azure.Commands.ResourceManager.Common.ArgumentCompleters;
using Microsoft.Azure.Commands.Synapse.Common;
using Microsoft.Azure.Commands.Synapse.Models;
using Microsoft.WindowsAzure.Commands.Utilities.Common;
using System.Linq;
using System.Management.Automation;

namespace Microsoft.Azure.Commands.Synapse
{
[Cmdlet(VerbsCommon.Get, ResourceManager.Common.AzureRMConstants.AzureRMPrefix + SynapseConstants.SynapsePrefix + SynapseConstants.SparkJobDefinition,
DefaultParameterSetName = GetByName)]
[OutputType(typeof(PSSparkJobDefinitionResource))]
public class GetAzureSynapseSparkJobDefinition : SynapseArtifactsCmdletBase
{
private const string GetByName = "GetByName";
private const string GetByObject = "GetByObject";

[Parameter(ValueFromPipelineByPropertyName = false, ParameterSetName = GetByName,
Mandatory = true, HelpMessage = HelpMessages.WorkspaceName)]
[ResourceNameCompleter(ResourceTypes.Workspace, "ResourceGroupName")]
[ValidateNotNullOrEmpty]
public override string WorkspaceName { get; set; }

[Parameter(ValueFromPipeline = true, ParameterSetName = GetByObject,
Mandatory = true, HelpMessage = HelpMessages.WorkspaceObject)]
[ValidateNotNull]
public PSSynapseWorkspace WorkspaceObject { get; set; }

[Parameter(ValueFromPipelineByPropertyName = false, Mandatory = false, HelpMessage = HelpMessages.SparkJobDefinitionName)]
[ValidateNotNullOrEmpty]
[Alias("SparkJobDefinitionName")]
public string Name { get; set; }

public override void ExecuteCmdlet()
{
if (this.IsParameterBound(c => c.WorkspaceObject))
{
this.WorkspaceName = this.WorkspaceObject.Name;
}

if (this.IsParameterBound(c => c.Name))
{
WriteObject(new PSSparkJobDefinitionResource(SynapseAnalyticsClient.GetSparkJobDefinition(this.Name)));
}
else
{
var sparkJobDefinitions = SynapseAnalyticsClient.GetSparkJobDefinitionsByWorkspace()
.Select(element => new PSSparkJobDefinitionResource(element));
WriteObject(sparkJobDefinitions, true);
}
}
}
}
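A hedged usage sketch for the cmdlet above (placeholder names): with -Name it returns a single definition, without it it enumerates the workspace, and the GetByObject set lets a workspace object arrive from the pipeline.

# List every Spark job definition in the workspace.
Get-AzSynapseSparkJobDefinition -WorkspaceName ContosoWorkspace

# Fetch one definition by name.
Get-AzSynapseSparkJobDefinition -WorkspaceName ContosoWorkspace -Name ContosoSparkJob

# GetByObject: pipe the workspace object in instead of passing -WorkspaceName.
Get-AzSynapseWorkspace -Name ContosoWorkspace | Get-AzSynapseSparkJobDefinition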
@@ -0,0 +1,81 @@
using Microsoft.Azure.Commands.ResourceManager.Common.ArgumentCompleters;
using Microsoft.Azure.Commands.Synapse.Common;
using Microsoft.Azure.Commands.Synapse.Models;
using Microsoft.Azure.Commands.Synapse.Properties;
using Microsoft.Azure.Management.Internal.Resources.Utilities.Models;
using Microsoft.WindowsAzure.Commands.Utilities.Common;
using System.Management.Automation;

namespace Microsoft.Azure.Commands.Synapse
{
[Cmdlet(VerbsCommon.Remove, ResourceManager.Common.AzureRMConstants.AzureRMPrefix + SynapseConstants.SynapsePrefix + SynapseConstants.SparkJobDefinition,
DefaultParameterSetName = RemoveByName, SupportsShouldProcess = true)]
[OutputType(typeof(bool))]
public class RemoveAzureSynapseSparkJobDefinition : SynapseArtifactsCmdletBase
{
private const string RemoveByName = "RemoveByName";
private const string RemoveByObject = "RemoveByObject";
private const string RemoveByInputObject = "RemoveByInputObject";

[Parameter(ValueFromPipelineByPropertyName = false, ParameterSetName = RemoveByName,
Mandatory = true, HelpMessage = HelpMessages.WorkspaceName)]
[ResourceNameCompleter(ResourceTypes.Workspace, "ResourceGroupName")]
[ValidateNotNullOrEmpty]
public override string WorkspaceName { get; set; }

[Parameter(ValueFromPipeline = true, ParameterSetName = RemoveByObject,
Mandatory = true, HelpMessage = HelpMessages.WorkspaceObject)]
[ValidateNotNull]
public PSSynapseWorkspace WorkspaceObject { get; set; }

[Parameter(ValueFromPipelineByPropertyName = false, ParameterSetName = RemoveByName, Mandatory = true, HelpMessage = HelpMessages.SparkJobDefinitionName)]
[Parameter(ValueFromPipelineByPropertyName = false, ParameterSetName = RemoveByObject, Mandatory = true, HelpMessage = HelpMessages.SparkJobDefinitionName)]
[ValidateNotNullOrEmpty]
[Alias("SparkJobDefinitionName")]
public string Name { get; set; }

[Parameter(ValueFromPipeline = true, ParameterSetName = RemoveByInputObject,
Mandatory = true, HelpMessage = HelpMessages.SparkJobDefinitionObject)]
[ValidateNotNull]
public PSSparkJobDefinitionResource InputObject { get; set; }

[Parameter(Mandatory = false, HelpMessage = HelpMessages.PassThru)]
public SwitchParameter PassThru { get; set; }

[Parameter(Mandatory = false, HelpMessage = HelpMessages.AsJob)]
public SwitchParameter AsJob { get; set; }

[Parameter(Mandatory = false, HelpMessage = HelpMessages.Force)]
public SwitchParameter Force { get; set; }

public override void ExecuteCmdlet()
{
if (this.IsParameterBound(c => c.WorkspaceObject))
{
this.WorkspaceName = this.WorkspaceObject.Name;
}

if (this.IsParameterBound(c => c.InputObject))
{
var resourceIdentifier = new ResourceIdentifier(this.InputObject.Id);
this.WorkspaceName = resourceIdentifier.ParentResource;
this.WorkspaceName = this.WorkspaceName.Substring(this.WorkspaceName.LastIndexOf('/') + 1);
this.Name = resourceIdentifier.ResourceName;
}

ConfirmAction(
Force.IsPresent,
string.Format(Resources.RemoveSynapseSparkJobDefinition, Name),
string.Format(Resources.RemovingSynapseSparkJobDefinition, this.Name, this.WorkspaceName),
Name,
() =>
{
SynapseAnalyticsClient.DeleteSparkJobDefinition(this.Name);
if (PassThru)
{
WriteObject(true);
}
});
}
}
}
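A hedged sketch of the three removal paths defined above (placeholder names): by workspace and name, by piped workspace object, or by piping the definition object itself, from whose resource ID the cmdlet re-derives the workspace and definition names.

# RemoveByName, with -Force to skip confirmation and -PassThru to emit $true on success.
Remove-AzSynapseSparkJobDefinition -WorkspaceName ContosoWorkspace -Name ContosoSparkJob -Force -PassThru

# RemoveByObject: pipe the workspace object and pass the definition name.
Get-AzSynapseWorkspace -Name ContosoWorkspace | Remove-AzSynapseSparkJobDefinition -Name ContosoSparkJob -Force

# RemoveByInputObject: pipe the definition returned by Get-AzSynapseSparkJobDefinition.
Get-AzSynapseSparkJobDefinition -WorkspaceName ContosoWorkspace -Name ContosoSparkJob |
    Remove-AzSynapseSparkJobDefinition -Force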
@@ -0,0 +1,60 @@
using Microsoft.Azure.Commands.Common.Exceptions;
using Microsoft.Azure.Commands.ResourceManager.Common.ArgumentCompleters;
using Microsoft.Azure.Commands.Synapse.Common;
using Microsoft.Azure.Commands.Synapse.Models;
using Microsoft.Azure.Commands.Synapse.Properties;
using Microsoft.WindowsAzure.Commands.Utilities.Common;
using System;
using System.Management.Automation;

namespace Microsoft.Azure.Commands.Synapse
{
[Cmdlet(VerbsCommon.Set, ResourceManager.Common.AzureRMConstants.AzureRMPrefix + SynapseConstants.SynapsePrefix + SynapseConstants.SparkJobDefinition,
DefaultParameterSetName = SetByName, SupportsShouldProcess = true)]
[Alias("New-" + ResourceManager.Common.AzureRMConstants.AzureRMPrefix + SynapseConstants.SynapsePrefix + SynapseConstants.SparkJobDefinition)]
[OutputType(typeof(PSSparkJobDefinitionResource))]
public class SetAzureSynapseSparkJobDefinition : SynapseArtifactsCmdletBase
{
private const string SetByName = "SetByName";
private const string SetByObject = "SetByObject";

[Parameter(ValueFromPipelineByPropertyName = false, ParameterSetName = SetByName,
Mandatory = true, HelpMessage = HelpMessages.WorkspaceName)]
[ResourceNameCompleter(ResourceTypes.Workspace, "ResourceGroupName")]
[ValidateNotNullOrEmpty]
public override string WorkspaceName { get; set; }

[Parameter(ValueFromPipeline = true, ParameterSetName = SetByObject,
Mandatory = true, HelpMessage = HelpMessages.WorkspaceObject)]
[ValidateNotNull]
public PSSynapseWorkspace WorkspaceObject { get; set; }

[Parameter(ValueFromPipelineByPropertyName = false, Mandatory = true, HelpMessage = HelpMessages.SparkJobDefinitionName)]
[ValidateNotNullOrEmpty]
[Alias("SparkJobDefinitionName")]
public string Name { get; set; }

[Parameter(ValueFromPipelineByPropertyName = false, ParameterSetName = SetByObject, Mandatory = true, HelpMessage = HelpMessages.JsonFilePath)]
[Parameter(ValueFromPipelineByPropertyName = false, ParameterSetName = SetByName, Mandatory = true, HelpMessage = HelpMessages.JsonFilePath)]
[ValidateNotNullOrEmpty]
[Alias("File")]
public string DefinitionFile { get; set; }

[Parameter(Mandatory = false, HelpMessage = HelpMessages.AsJob)]
public SwitchParameter AsJob { get; set; }

public override void ExecuteCmdlet()
{
if (this.IsParameterBound(c => c.WorkspaceObject))
{
this.WorkspaceName = this.WorkspaceObject.Name;
}

if (this.ShouldProcess(this.WorkspaceName, String.Format(Resources.SettingSynapseSparkJobDefinition, this.Name, this.WorkspaceName)))
{
string rawJsonContent = SynapseAnalyticsClient.ReadJsonFileContent(this.TryResolvePath(DefinitionFile));
WriteObject(new PSSparkJobDefinitionResource(SynapseAnalyticsClient.CreateOrUpdateSparkJobDefinition(this.Name, rawJsonContent)));
}
}
}
}
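A hedged sketch of creating a definition with the cmdlet above. The exact schema of the definition file is not shown in this diff; the JSON below is inferred from the PSSparkJobDefinition and PSSparkJobProperties models further down and the REST naming they mirror, so treat the property names and shape as assumptions, and the pool, storage path, and class names as placeholders.

# An assumed minimal definition file, written out locally for the example.
@'
{
  "targetBigDataPool": { "referenceName": "ContosoSparkPool", "type": "BigDataPoolReference" },
  "requiredSparkVersion": "2.4",
  "language": "scala",
  "jobProperties": {
    "name": "ContosoSparkJob",
    "file": "abfss://[email protected]/artifacts/wordcount.jar",
    "className": "WordCount",
    "args": [],
    "jars": [],
    "files": [],
    "archives": [],
    "conf": {},
    "driverMemory": "4g",
    "driverCores": 4,
    "executorMemory": "4g",
    "executorCores": 4,
    "numExecutors": 2
  }
}
'@ | Set-Content ./sparkJobDefinition.json

# SetByName: create or update the definition from the file; New-AzSynapseSparkJobDefinition is an alias.
Set-AzSynapseSparkJobDefinition -WorkspaceName ContosoWorkspace -Name ContosoSparkJob -DefinitionFile ./sparkJobDefinition.json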
4 changes: 4 additions & 0 deletions src/Synapse/Synapse/Common/HelpMessages.cs
@@ -426,5 +426,9 @@ SELECT on dbo.myTable by public
public const string KeyResourceId = "The resource identifier of Synapse SQL Pool.";

public const string KeyObject = "Workspace key input object, usually passed through the pipeline.";

public const string SparkJobDefinitionName = "The Spark job definition name.";

public const string SparkJobDefinitionObject = "The Spark job definition object.";
}
}
30 changes: 30 additions & 0 deletions src/Synapse/Synapse/Models/PSSparkJobDefinition.cs
@@ -0,0 +1,30 @@
using Azure.Analytics.Synapse.Artifacts.Models;

namespace Microsoft.Azure.Commands.Synapse.Models
{
public class PSSparkJobDefinition
{
public PSSparkJobDefinition(SparkJobDefinition properties)
{
Description = properties?.Description;
TargetBigDataPool = properties?.TargetBigDataPool != null ? new PSBigDataPoolReference(properties.TargetBigDataPool) : null;
RequiredSparkVersion = properties?.RequiredSparkVersion;
JobProperties = properties?.JobProperties != null ? new PSSparkJobProperties(properties.JobProperties) : null;
}

/// <summary> The description of the Spark job definition. </summary>
public string Description { get; set; }

/// <summary> Big data pool reference. </summary>
public PSBigDataPoolReference TargetBigDataPool { get; set; }

/// <summary> The required Spark version of the application. </summary>
public string RequiredSparkVersion { get; set; }

/// <summary> The language of the Spark application. </summary>
public string Language { get; set; }

/// <summary> The properties of the Spark job. </summary>
public PSSparkJobProperties JobProperties { get; set; }
}
}
16 changes: 16 additions & 0 deletions src/Synapse/Synapse/Models/PSSparkJobDefinitionResource.cs
@@ -0,0 +1,16 @@
using Azure.Analytics.Synapse.Artifacts.Models;

namespace Microsoft.Azure.Commands.Synapse.Models
{
public class PSSparkJobDefinitionResource : PSSubResource
{
public PSSparkJobDefinitionResource(SparkJobDefinitionResource sparkJobDefinition)
: base(sparkJobDefinition.Id, sparkJobDefinition.Name, sparkJobDefinition.Type, sparkJobDefinition.Etag)
{
Properties = sparkJobDefinition?.Properties != null ? new PSSparkJobDefinition(sparkJobDefinition.Properties) : null;
}

/// <summary> Properties of the Spark job definition. </summary>
public PSSparkJobDefinition Properties { get; set; }
}
}
63 changes: 63 additions & 0 deletions src/Synapse/Synapse/Models/PSSparkJobProperties.cs
@@ -0,0 +1,63 @@
using Azure.Analytics.Synapse.Artifacts.Models;
using System.Collections.Generic;

namespace Microsoft.Azure.Commands.Synapse.Models
{
public class PSSparkJobProperties
{
public PSSparkJobProperties(SparkJobProperties sparkJobProperties)
{
this.Name = sparkJobProperties.Name;
this.File = sparkJobProperties.File;
this.ClassName = sparkJobProperties.ClassName;
this.Configuration = sparkJobProperties.Conf;
this.Arguments = sparkJobProperties.Args;
this.Jars = sparkJobProperties.Jars;
this.Files = sparkJobProperties.Files;
this.Archives = sparkJobProperties.Archives;
this.DriverMemory = sparkJobProperties.DriverMemory;
this.DriverCores = sparkJobProperties.DriverCores;
this.ExecutorMemory = sparkJobProperties.ExecutorMemory;
this.ExecutorCores = sparkJobProperties.ExecutorCores;
this.NumberOfExecutors = sparkJobProperties.NumExecutors;
}

/// <summary> The name of the job. </summary>
public string Name { get; set; }

/// <summary> File containing the application to execute. </summary>
public string File { get; set; }

/// <summary> Main class for Java/Scala application. </summary>
public string ClassName { get; set; }

/// <summary> Spark configuration properties. </summary>
public object Configuration { get; set; }

/// <summary> Command line arguments for the application. </summary>
public IList<string> Arguments { get; }

/// <summary> Jars to be used in this job. </summary>
public IList<string> Jars { get; }

/// <summary> Files to be used in this job. </summary>
public IList<string> Files { get; }

/// <summary> Archives to be used in this job. </summary>
public IList<string> Archives { get; }

/// <summary> Amount of memory to use for the driver process. </summary>
public string DriverMemory { get; set; }

/// <summary> Number of cores to use for the driver. </summary>
public int DriverCores { get; set; }

/// <summary> Amount of memory to use per executor process. </summary>
public string ExecutorMemory { get; set; }

/// <summary> Number of cores to use for each executor. </summary>
public int ExecutorCores { get; set; }

/// <summary> Number of executors to launch for this job. </summary>
public int NumberOfExecutors { get; set; }
}
}
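To show how these wrapper models surface in PowerShell, a small hedged sketch (placeholder names) of drilling from the resource returned by Get-AzSynapseSparkJobDefinition down to the job properties defined above:

$sjd = Get-AzSynapseSparkJobDefinition -WorkspaceName ContosoWorkspace -Name ContosoSparkJob
$sjd.Properties.RequiredSparkVersion              # from PSSparkJobDefinition
$sjd.Properties.JobProperties.File                # from PSSparkJobProperties
$sjd.Properties.JobProperties.NumberOfExecutors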