
Commit dbf0b22

idear1203 and Dongwei Wang authored

[Synapse] - new cmdlets for Spark job definition (#15156)

* Save current status
* Save current status
* Upgrade artifacts to 1.0.0-preview.9
* Save current status
* Remove SQL script
* Add example files
* Update examples
* Update changelog
* Remove rename parameter set
* Fix changelog
* Fix online version
* Suppress breaking change issues
* Remove serialized cmdlets config files for Synapse

Co-authored-by: Dongwei Wang <[email protected]>
1 parent eba1de6 · commit dbf0b22

20 files changed (+949 −144,333 lines)

src/Synapse/Synapse/Az.Synapse.psd1

Lines changed: 4 additions & 2 deletions
@@ -171,7 +171,8 @@ CmdletsToExport = 'Get-AzSynapseSparkJob', 'Stop-AzSynapseSparkJob',
                    'New-AzSynapseWorkspaceKey', 'Get-AzSynapseWorkspaceKey',
                    'Remove-AzSynapseWorkspaceKey', 'Update-AzSynapseWorkspaceKey',
                    'New-AzSynapseManagedVirtualNetworkConfig',
-                   'Update-AzSynapseManagedVirtualNetworkConfig'
+                   'Update-AzSynapseManagedVirtualNetworkConfig',
+                   'Get-AzSynapseSparkJobDefinition', 'Remove-AzSynapseSparkJobDefinition', 'Set-AzSynapseSparkJobDefinition'

 # Variables to export from this module
 # VariablesToExport = @()

@@ -189,7 +190,8 @@ AliasesToExport = 'New-AzSynapsePipeline', 'New-AzSynapseLinkedService',
                   'Clear-AzSynapseSqlPoolAdvancedThreatProtectionSetting',
                   'Clear-AzSynapseSqlPoolVulnerabilityAssessmentSetting',
                   'Enable-AzSynapseSqlAdvancedThreatProtection',
-                  'Disable-AzSynapseSqlAdvancedThreatProtection'
+                  'Disable-AzSynapseSqlAdvancedThreatProtection',
+                  'New-AzSynapseSparkJobDefinition'

 # DSC resources to export from this module
 # DscResourcesToExport = @()

src/Synapse/Synapse/ChangeLog.md

Lines changed: 5 additions & 1 deletion
@@ -18,9 +18,13 @@
 - Additional information about change #1
 -->
 ## Upcoming Release
+* Add support for Synapse Spark job definition
+    - Add `New-AzSynapseSparkJobDefinition` cmdlet
+    - Add `Get-AzSynapseSparkJobDefinition` cmdlet
+    - Add `Remove-AzSynapseSparkJobDefinition` cmdlet

 ## Version 0.12.0
-Upgraded Azure.Analytics.Synapse.Artifacts to 1.0.0-preview.9
+* Upgraded Azure.Analytics.Synapse.Artifacts to 1.0.0-preview.9

 ## Version 0.11.0
 * Removed principaltype in Synapse Role-based access control
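
Taken together with the manifest entries above, the commit exposes Set-AzSynapseSparkJobDefinition (also exported under the New-AzSynapseSparkJobDefinition alias), Get-AzSynapseSparkJobDefinition and Remove-AzSynapseSparkJobDefinition. A rough end-to-end sketch of the intended workflow; the workspace name, definition name and file path below are placeholders, not values from the commit:

# Create or update a Spark job definition from a local JSON definition file
Set-AzSynapseSparkJobDefinition -WorkspaceName ContosoWorkspace -Name ContosoSparkJob -DefinitionFile "C:\artifacts\ContosoSparkJob.json"

# Inspect it, then remove it without a confirmation prompt
Get-AzSynapseSparkJobDefinition -WorkspaceName ContosoWorkspace -Name ContosoSparkJob
Remove-AzSynapseSparkJobDefinition -WorkspaceName ContosoWorkspace -Name ContosoSparkJob -Force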

src/Synapse/Synapse/Commands/DataPlaneCommands/Artifact/Pipelines/RemoveAzureSynapsePipeline.cs

Lines changed: 3 additions & 2 deletions
@@ -15,7 +15,7 @@ public class RemoveAzureSynapsePipeline : SynapseArtifactsCmdletBase
     {
         private const string RemoveByName = "RemoveByName";
         private const string RemoveByObject = "RemoveByObject";
-        private const string RemoveByInputObject = "NewByInputObject";
+        private const string RemoveByInputObject = "RemoveByInputObject";

         [Parameter(ValueFromPipelineByPropertyName = false, ParameterSetName = RemoveByName,
             Mandatory = true, HelpMessage = HelpMessages.WorkspaceName)]
@@ -28,7 +28,8 @@ public class RemoveAzureSynapsePipeline : SynapseArtifactsCmdletBase
         [ValidateNotNull]
         public PSSynapseWorkspace WorkspaceObject { get; set; }

-        [Parameter(ValueFromPipelineByPropertyName = false, Mandatory = true, HelpMessage = HelpMessages.PipelineName)]
+        [Parameter(ValueFromPipelineByPropertyName = false, ParameterSetName = RemoveByName, Mandatory = true, HelpMessage = HelpMessages.PipelineName)]
+        [Parameter(ValueFromPipelineByPropertyName = false, ParameterSetName = RemoveByObject, Mandatory = true, HelpMessage = HelpMessages.PipelineName)]
         [ValidateNotNullOrEmpty]
         [Alias("PipelineName")]
         public string Name { get; set; }
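
The fix above corrects the RemoveByInputObject parameter-set name (previously the stray "NewByInputObject") and scopes -Name to the RemoveByName and RemoveByObject sets, so piping a pipeline object no longer demands a separate -Name. A quick sketch of the three sets, using placeholder names:

# RemoveByName: address the pipeline by workspace and pipeline name
Remove-AzSynapsePipeline -WorkspaceName ContosoWorkspace -Name ContosoPipeline

# RemoveByObject: pipe the workspace object and still pass -Name
Get-AzSynapseWorkspace -Name ContosoWorkspace | Remove-AzSynapsePipeline -Name ContosoPipeline

# RemoveByInputObject: pipe the pipeline object itself; -Name is no longer required here
Get-AzSynapsePipeline -WorkspaceName ContosoWorkspace -Name ContosoPipeline | Remove-AzSynapsePipeline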
Lines changed: 53 additions & 0 deletions

@@ -0,0 +1,53 @@
using Microsoft.Azure.Commands.ResourceManager.Common.ArgumentCompleters;
using Microsoft.Azure.Commands.Synapse.Common;
using Microsoft.Azure.Commands.Synapse.Models;
using Microsoft.WindowsAzure.Commands.Utilities.Common;
using System.Linq;
using System.Management.Automation;

namespace Microsoft.Azure.Commands.Synapse
{
    [Cmdlet(VerbsCommon.Get, ResourceManager.Common.AzureRMConstants.AzureRMPrefix + SynapseConstants.SynapsePrefix + SynapseConstants.SparkJobDefinition,
        DefaultParameterSetName = GetByName)]
    [OutputType(typeof(PSSparkJobDefinitionResource))]
    public class GetAzureSynapseSparkJobDefinition : SynapseArtifactsCmdletBase
    {
        private const string GetByName = "GetByName";
        private const string GetByObject = "GetByObject";

        [Parameter(ValueFromPipelineByPropertyName = false, ParameterSetName = GetByName,
            Mandatory = true, HelpMessage = HelpMessages.WorkspaceName)]
        [ResourceNameCompleter(ResourceTypes.Workspace, "ResourceGroupName")]
        [ValidateNotNullOrEmpty]
        public override string WorkspaceName { get; set; }

        [Parameter(ValueFromPipeline = true, ParameterSetName = GetByObject,
            Mandatory = true, HelpMessage = HelpMessages.WorkspaceObject)]
        [ValidateNotNull]
        public PSSynapseWorkspace WorkspaceObject { get; set; }

        [Parameter(ValueFromPipelineByPropertyName = false, Mandatory = false, HelpMessage = HelpMessages.SparkJobDefinitionName)]
        [ValidateNotNullOrEmpty]
        [Alias("SparkJobDefinitionName")]
        public string Name { get; set; }

        public override void ExecuteCmdlet()
        {
            if (this.IsParameterBound(c => c.WorkspaceObject))
            {
                this.WorkspaceName = this.WorkspaceObject.Name;
            }

            if (this.IsParameterBound(c => c.Name))
            {
                WriteObject(new PSSparkJobDefinitionResource(SynapseAnalyticsClient.GetSparkJobDefinition(this.Name)));
            }
            else
            {
                var pipeline = SynapseAnalyticsClient.GetSparkJobDefinitionsByWorkspace()
                    .Select(element => new PSSparkJobDefinitionResource(element));
                WriteObject(pipeline, true);
            }
        }
    }
}
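
Since -Name is optional, the cmdlet above either returns a single PSSparkJobDefinitionResource or enumerates every definition in the workspace, and the workspace can be supplied by name or piped in as an object. A usage sketch with placeholder names:

# GetByName: list every Spark job definition in the workspace
Get-AzSynapseSparkJobDefinition -WorkspaceName ContosoWorkspace

# GetByName with -Name: fetch a single definition
Get-AzSynapseSparkJobDefinition -WorkspaceName ContosoWorkspace -Name ContosoSparkJob

# GetByObject: pipe the workspace object instead of naming it
Get-AzSynapseWorkspace -Name ContosoWorkspace | Get-AzSynapseSparkJobDefinition -Name ContosoSparkJob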
Lines changed: 81 additions & 0 deletions

@@ -0,0 +1,81 @@
using Microsoft.Azure.Commands.ResourceManager.Common.ArgumentCompleters;
using Microsoft.Azure.Commands.Synapse.Common;
using Microsoft.Azure.Commands.Synapse.Models;
using Microsoft.Azure.Commands.Synapse.Properties;
using Microsoft.Azure.Management.Internal.Resources.Utilities.Models;
using Microsoft.WindowsAzure.Commands.Utilities.Common;
using System.Management.Automation;

namespace Microsoft.Azure.Commands.Synapse
{
    [Cmdlet(VerbsCommon.Remove, ResourceManager.Common.AzureRMConstants.AzureRMPrefix + SynapseConstants.SynapsePrefix + SynapseConstants.SparkJobDefinition,
        DefaultParameterSetName = RemoveByName, SupportsShouldProcess = true)]
    [OutputType(typeof(bool))]
    public class RemoveAzureSynapseSparkJobDefinition : SynapseArtifactsCmdletBase
    {
        private const string RemoveByName = "RemoveByName";
        private const string RemoveByObject = "RemoveByObject";
        private const string RemoveByInputObject = "RemoveByInputObject";

        [Parameter(ValueFromPipelineByPropertyName = false, ParameterSetName = RemoveByName,
            Mandatory = true, HelpMessage = HelpMessages.WorkspaceName)]
        [ResourceNameCompleter(ResourceTypes.Workspace, "ResourceGroupName")]
        [ValidateNotNullOrEmpty]
        public override string WorkspaceName { get; set; }

        [Parameter(ValueFromPipeline = true, ParameterSetName = RemoveByObject,
            Mandatory = true, HelpMessage = HelpMessages.WorkspaceObject)]
        [ValidateNotNull]
        public PSSynapseWorkspace WorkspaceObject { get; set; }

        [Parameter(ValueFromPipelineByPropertyName = false, ParameterSetName = RemoveByName, Mandatory = true, HelpMessage = HelpMessages.SparkJobDefinitionName)]
        [Parameter(ValueFromPipelineByPropertyName = false, ParameterSetName = RemoveByObject, Mandatory = true, HelpMessage = HelpMessages.SparkJobDefinitionName)]
        [ValidateNotNullOrEmpty]
        [Alias("SparkJobDefinitionName")]
        public string Name { get; set; }

        [Parameter(ValueFromPipeline = true, ParameterSetName = RemoveByInputObject,
            Mandatory = true, HelpMessage = HelpMessages.SparkJobDefinitionObject)]
        [ValidateNotNull]
        public PSSparkJobDefinitionResource InputObject { get; set; }

        [Parameter(Mandatory = false, HelpMessage = HelpMessages.PassThru)]
        public SwitchParameter PassThru { get; set; }

        [Parameter(Mandatory = false, HelpMessage = HelpMessages.AsJob)]
        public SwitchParameter AsJob { get; set; }

        [Parameter(Mandatory = false, HelpMessage = HelpMessages.Force)]
        public SwitchParameter Force { get; set; }

        public override void ExecuteCmdlet()
        {
            if (this.IsParameterBound(c => c.WorkspaceObject))
            {
                this.WorkspaceName = this.WorkspaceObject.Name;
            }

            if (this.IsParameterBound(c => c.InputObject))
            {
                var resourceIdentifier = new ResourceIdentifier(this.InputObject.Id);
                this.WorkspaceName = resourceIdentifier.ParentResource;
                this.WorkspaceName = this.WorkspaceName.Substring(this.WorkspaceName.LastIndexOf('/') + 1);
                this.Name = resourceIdentifier.ResourceName;
            }

            ConfirmAction(
                Force.IsPresent,
                string.Format(Resources.RemoveSynapseSparkJobDefinition, Name),
                string.Format(Resources.RemovingSynapseSparkJobDefinition, this.Name, this.WorkspaceName),
                Name,
                () =>
                {
                    SynapseAnalyticsClient.DeleteSparkJobDefinition(this.Name);
                    if (PassThru)
                    {
                        WriteObject(true);
                    }
                });
        }
    }
}
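
The three parameter sets mirror Remove-AzSynapsePipeline: by names, by a piped workspace object, or by piping the definition resource itself, in which case the workspace and definition names are parsed back out of the resource Id. -Force skips the confirmation prompt and -PassThru emits $true on success. A sketch with placeholder names:

# RemoveByName, suppressing the prompt and returning $true on success
Remove-AzSynapseSparkJobDefinition -WorkspaceName ContosoWorkspace -Name ContosoSparkJob -Force -PassThru

# RemoveByInputObject: pipe the resource returned by Get-AzSynapseSparkJobDefinition
Get-AzSynapseSparkJobDefinition -WorkspaceName ContosoWorkspace -Name ContosoSparkJob |
    Remove-AzSynapseSparkJobDefinition -Force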
Lines changed: 60 additions & 0 deletions

@@ -0,0 +1,60 @@
using Microsoft.Azure.Commands.Common.Exceptions;
using Microsoft.Azure.Commands.ResourceManager.Common.ArgumentCompleters;
using Microsoft.Azure.Commands.Synapse.Common;
using Microsoft.Azure.Commands.Synapse.Models;
using Microsoft.Azure.Commands.Synapse.Properties;
using Microsoft.WindowsAzure.Commands.Utilities.Common;
using System;
using System.Management.Automation;

namespace Microsoft.Azure.Commands.Synapse
{
    [Cmdlet(VerbsCommon.Set, ResourceManager.Common.AzureRMConstants.AzureRMPrefix + SynapseConstants.SynapsePrefix + SynapseConstants.SparkJobDefinition,
        DefaultParameterSetName = SetByName, SupportsShouldProcess = true)]
    [Alias("New-" + ResourceManager.Common.AzureRMConstants.AzureRMPrefix + SynapseConstants.SynapsePrefix + SynapseConstants.SparkJobDefinition)]
    [OutputType(typeof(PSSparkJobDefinitionResource))]
    public class SetAzureSynapseSparkJobDefinition : SynapseArtifactsCmdletBase
    {
        private const string SetByName = "SetByName";
        private const string SetByObject = "SetByObject";

        [Parameter(ValueFromPipelineByPropertyName = false, ParameterSetName = SetByName,
            Mandatory = true, HelpMessage = HelpMessages.WorkspaceName)]
        [ResourceNameCompleter(ResourceTypes.Workspace, "ResourceGroupName")]
        [ValidateNotNullOrEmpty]
        public override string WorkspaceName { get; set; }

        [Parameter(ValueFromPipeline = true, ParameterSetName = SetByObject,
            Mandatory = true, HelpMessage = HelpMessages.WorkspaceObject)]
        [ValidateNotNull]
        public PSSynapseWorkspace WorkspaceObject { get; set; }

        [Parameter(ValueFromPipelineByPropertyName = false, Mandatory = true, HelpMessage = HelpMessages.SparkJobDefinitionName)]
        [ValidateNotNullOrEmpty]
        [Alias("SparkJobDefinitionName")]
        public string Name { get; set; }

        [Parameter(ValueFromPipelineByPropertyName = false, ParameterSetName = SetByObject, Mandatory = true, HelpMessage = HelpMessages.JsonFilePath)]
        [Parameter(ValueFromPipelineByPropertyName = false, ParameterSetName = SetByName, Mandatory = true, HelpMessage = HelpMessages.JsonFilePath)]
        [ValidateNotNullOrEmpty]
        [Alias("File")]
        public string DefinitionFile { get; set; }

        [Parameter(Mandatory = false, HelpMessage = HelpMessages.AsJob)]
        public SwitchParameter AsJob { get; set; }

        public override void ExecuteCmdlet()
        {
            if (this.IsParameterBound(c => c.WorkspaceObject))
            {
                this.WorkspaceName = this.WorkspaceObject.Name;
            }

            if (this.ShouldProcess(this.WorkspaceName, String.Format(Resources.SettingSynapseSparkJobDefinition, this.Name, this.WorkspaceName)))
            {
                string rawJsonContent = SynapseAnalyticsClient.ReadJsonFileContent(this.TryResolvePath(DefinitionFile));
                WriteObject(new PSSparkJobDefinitionResource(SynapseAnalyticsClient.CreateOrUpdateSparkJobDefinition(this.Name, rawJsonContent)));
            }
        }
    }
}
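
Because of the [Alias] attribute, the same cmdlet is also reachable as New-AzSynapseSparkJobDefinition, matching the AliasesToExport entry in the module manifest; the body of the definition comes from a JSON file passed via -DefinitionFile (aliased -File). A sketch with placeholder names and path:

# Create a new Spark job definition (New- is an alias of Set-)
New-AzSynapseSparkJobDefinition -WorkspaceName ContosoWorkspace -Name ContosoSparkJob -DefinitionFile "C:\artifacts\ContosoSparkJob.json"

# Update it later from a piped workspace object, optionally as a background job
Get-AzSynapseWorkspace -Name ContosoWorkspace |
    Set-AzSynapseSparkJobDefinition -Name ContosoSparkJob -DefinitionFile "C:\artifacts\ContosoSparkJob.json" -AsJob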

src/Synapse/Synapse/Common/HelpMessages.cs

Lines changed: 4 additions & 0 deletions
@@ -426,5 +426,9 @@ SELECT on dbo.myTable by public
         public const string KeyResourceId = "The resource identifier of Synapse SQL Pool.";

         public const string KeyObject = "Workspace key input object, usually passed through the pipeline.";
+
+        public const string SparkJobDefinitionName = "The Spark job definition name.";
+
+        public const string SparkJobDefinitionObject = "The Spark job definition object.";
     }
 }
Lines changed: 30 additions & 0 deletions

@@ -0,0 +1,30 @@
using Azure.Analytics.Synapse.Artifacts.Models;

namespace Microsoft.Azure.Commands.Synapse.Models
{
    public class PSSparkJobDefinition
    {
        public PSSparkJobDefinition(SparkJobDefinition properties)
        {
            Description = properties?.Description;
            TargetBigDataPool = properties?.TargetBigDataPool != null ? new PSBigDataPoolReference(properties.TargetBigDataPool) : null;
            RequiredSparkVersion = properties?.RequiredSparkVersion;
            JobProperties = properties?.JobProperties != null ? new PSSparkJobProperties(properties.JobProperties) : null;
        }

        /// <summary> The description of the Spark job definition. </summary>
        public string Description { get; set; }

        /// <summary> Big data pool reference. </summary>
        public PSBigDataPoolReference TargetBigDataPool { get; set; }

        /// <summary> The required Spark version of the application. </summary>
        public string RequiredSparkVersion { get; set; }

        /// <summary> The language of the Spark application. </summary>
        public string Language { get; set; }

        /// <summary> The properties of the Spark job. </summary>
        public PSSparkJobProperties JobProperties { get; set; }
    }
}
Lines changed: 16 additions & 0 deletions

@@ -0,0 +1,16 @@
using Azure.Analytics.Synapse.Artifacts.Models;

namespace Microsoft.Azure.Commands.Synapse.Models
{
    public class PSSparkJobDefinitionResource : PSSubResource
    {
        public PSSparkJobDefinitionResource(SparkJobDefinitionResource sparkJobDefinition)
            : base(sparkJobDefinition.Id, sparkJobDefinition.Name, sparkJobDefinition.Type, sparkJobDefinition.Etag)
        {
            Properties = sparkJobDefinition?.Properties != null ? new PSSparkJobDefinition(sparkJobDefinition.Properties) : null;
        }

        /// <summary> Properties of spark job definition. </summary>
        public PSSparkJobDefinition Properties { get; set; }
    }
}
Lines changed: 63 additions & 0 deletions

@@ -0,0 +1,63 @@
using Azure.Analytics.Synapse.Artifacts.Models;
using System.Collections.Generic;

namespace Microsoft.Azure.Commands.Synapse.Models
{
    public class PSSparkJobProperties
    {
        public PSSparkJobProperties(SparkJobProperties sparkJobProperties)
        {
            this.Name = sparkJobProperties.Name;
            this.File = sparkJobProperties.File;
            this.ClassName = sparkJobProperties.ClassName;
            this.Configuration = sparkJobProperties.Conf;
            this.Arguments = sparkJobProperties.Args;
            // Jars and Files are distinct collections on the SDK model and are mapped separately.
            this.Jars = sparkJobProperties.Jars;
            this.Files = sparkJobProperties.Files;
            this.Archives = sparkJobProperties.Archives;
            this.DriverMemory = sparkJobProperties.DriverMemory;
            this.DriverCores = sparkJobProperties.DriverCores;
            this.ExecutorMemory = sparkJobProperties.ExecutorMemory;
            this.ExecutorCores = sparkJobProperties.ExecutorCores;
            this.NumberOfExecutors = sparkJobProperties.NumExecutors;
        }

        /// <summary> The name of the job. </summary>
        public string Name { get; set; }

        /// <summary> File containing the application to execute. </summary>
        public string File { get; set; }

        /// <summary> Main class for Java/Scala application. </summary>
        public string ClassName { get; set; }

        /// <summary> Spark configuration properties. </summary>
        public object Configuration { get; set; }

        /// <summary> Command line arguments for the application. </summary>
        public IList<string> Arguments { get; }

        /// <summary> Jars to be used in this job. </summary>
        public IList<string> Jars { get; }

        /// <summary> Files to be used in this job. </summary>
        public IList<string> Files { get; }

        /// <summary> Archives to be used in this job. </summary>
        public IList<string> Archives { get; }

        /// <summary> Amount of memory to use for the driver process. </summary>
        public string DriverMemory { get; set; }

        /// <summary> Number of cores to use for the driver. </summary>
        public int DriverCores { get; set; }

        /// <summary> Amount of memory to use per executor process. </summary>
        public string ExecutorMemory { get; set; }

        /// <summary> Number of cores to use for each executor. </summary>
        public int ExecutorCores { get; set; }

        /// <summary> Number of executors to launch for this job. </summary>
        public int NumberOfExecutors { get; set; }
    }
}
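
These wrapper types are what Get-AzSynapseSparkJobDefinition emits, so the nested job settings can be read straight off the returned object; a small sketch with placeholder names:

$def = Get-AzSynapseSparkJobDefinition -WorkspaceName ContosoWorkspace -Name ContosoSparkJob

# Top-level metadata from PSSparkJobDefinition
$def.Properties.RequiredSparkVersion
$def.Properties.TargetBigDataPool

# Per-job settings from PSSparkJobProperties
$def.Properties.JobProperties.File
$def.Properties.JobProperties.ExecutorCores
$def.Properties.JobProperties.NumberOfExecutors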
