Skip to content

Commit

Permalink
SecretsAPI: Rename CreateScope to CreateDatabricksBackedScope; JobsAPI…
Browse files Browse the repository at this point in the history
…: return 'end_time' from response rather than calculating from other properties; Update runtime versions
  • Loading branch information
memoryz committed Dec 13, 2020
1 parent d321281 commit 2b5f7cd
Show file tree
Hide file tree
Showing 5 changed files with 159 additions and 82 deletions.
35 changes: 33 additions & 2 deletions csharp/Microsoft.Azure.Databricks.Client/ISecretsApi.cs
Original file line number Diff line number Diff line change
Expand Up @@ -8,17 +8,48 @@ namespace Microsoft.Azure.Databricks.Client
public interface ISecretsApi : IDisposable
{
/// <summary>
/// Creates a new secret scope.
/// The scope name must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters.The maximum number of scopes in a workspace is 100.
/// Creates a new Databricks-backed secret scope.
/// The scope name must be unique within a workspace, must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters. The maximum number of scopes in a workspace is 100.
/// </summary>
/// <remarks>
/// If initial_manage_principal is specified, the initial ACL applied to the scope is applied to the supplied principal (user or group) with MANAGE permissions. The only supported principal for this option is the group users, which contains all users in the workspace. If initial_manage_principal is not specified, the initial ACL with MANAGE permission applied to the scope is assigned to the API request issuer’s user identity.
/// Throws RESOURCE_ALREADY_EXISTS if a scope with the given name already exists. Throws RESOURCE_LIMIT_EXCEEDED if maximum number of scopes in the workspace is exceeded. Throws INVALID_PARAMETER_VALUE if the scope name is invalid.
/// </remarks>
/// <param name="scope">Scope name requested by the user. Scope names are unique. This field is required.</param>
/// <param name="initialManagePrincipal">The principal that is initially granted MANAGE permission to the created scope.</param>
[Obsolete("This method has been renamed to " + nameof(CreateDatabricksBackedScope) + ".")]
Task CreateScope(string scope, string initialManagePrincipal, CancellationToken cancellationToken = default);


/// <summary>
/// Creates a new Databricks-backed secret scope.
/// The scope name must be unique within a workspace, must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters. The maximum number of scopes in a workspace is 100.
/// </summary>
/// <remarks>
/// If initial_manage_principal is specified, the initial ACL applied to the scope is applied to the supplied principal (user or group) with MANAGE permissions. The only supported principal for this option is the group users, which contains all users in the workspace. If initial_manage_principal is not specified, the initial ACL with MANAGE permission applied to the scope is assigned to the API request issuer’s user identity.
/// Throws RESOURCE_ALREADY_EXISTS if a scope with the given name already exists. Throws RESOURCE_LIMIT_EXCEEDED if maximum number of scopes in the workspace is exceeded. Throws INVALID_PARAMETER_VALUE if the scope name is invalid.
/// </remarks>
/// <param name="scope">Scope name requested by the user. Scope names are unique. This field is required.</param>
/// <param name="initialManagePrincipal">The principal that is initially granted MANAGE permission to the created scope.</param>
Task CreateDatabricksBackedScope(string scope, string initialManagePrincipal, CancellationToken cancellationToken = default);

/*
This API call is currently not working per https://github.com/MicrosoftDocs/azure-docs/issues/65000. Comment out for now.
/// <summary>
/// Creates a new Azure Key Vault-backed secret scope.
/// The scope name must be unique within a workspace, must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters. The maximum number of scopes in a workspace is 100.
/// </summary>
/// <remarks>
/// If initial_manage_principal is specified, the initial ACL applied to the scope is applied to the supplied principal (user or group) with MANAGE permissions. The only supported principal for this option is the group users, which contains all users in the workspace. If initial_manage_principal is not specified, the initial ACL with MANAGE permission applied to the scope is assigned to the API request issuer’s user identity.
/// Throws RESOURCE_ALREADY_EXISTS if a scope with the given name already exists. Throws RESOURCE_LIMIT_EXCEEDED if maximum number of scopes in the workspace is exceeded. Throws INVALID_PARAMETER_VALUE if the scope name is invalid.
/// </remarks>
/// <param name="scope">Scope name requested by the user. Scope names are unique. This field is required.</param>
/// <param name="initialManagePrincipal">The principal that is initially granted MANAGE permission to the created scope.</param>
/// <param name="akvResourceId">The resource id of the backend Azure Key Vault. E.g. "/subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/azure-rg/providers/Microsoft.KeyVault/vaults/my-azure-kv"</param>
/// <param name="akvDnsName">The DNS name of the backend Azure Key Vault. E.g. "https://my-azure-kv.vault.azure.net/"</param>
Task CreateAkvBackedScope(string scope, string initialManagePrincipal, string akvResourceId, string akvDnsName, CancellationToken cancellationToken = default);
*/

/// <summary>
/// Deletes a secret scope.
/// </summary>
Expand Down
30 changes: 7 additions & 23 deletions csharp/Microsoft.Azure.Databricks.Client/Run.cs
Original file line number Diff line number Diff line change
Expand Up @@ -69,6 +69,13 @@ public class Run : RunIdentifier
[JsonProperty(PropertyName = "start_time")]
[JsonConverter(typeof(MillisecondEpochDateTimeConverter))]
public DateTimeOffset? StartTime { get; set; }

/// <summary>
/// The time at which this run ended in epoch milliseconds (milliseconds since 1/1/1970 UTC).
/// </summary>
[JsonProperty(PropertyName = "end_time")]
[JsonConverter(typeof(MillisecondEpochDateTimeConverter))]
public DateTimeOffset? EndTime { get; set; }

/// <summary>
/// The time it took to set up the cluster in milliseconds. For runs that run on new clusters this is the cluster creation time, for runs that run on existing clusters this time should be very short.
Expand Down Expand Up @@ -106,28 +113,5 @@ public class Run : RunIdentifier
/// </summary>
[JsonIgnore]
public bool IsCompleted => State?.ResultState != null;

/// <summary>
/// The time at which this run was finished in epoch milliseconds (milliseconds since 1/1/1970 UTC).
/// </summary>
[JsonIgnore]
public DateTimeOffset? EndTime
{
get
{
if (StartTime.HasValue == false || IsCompleted == false)
{
return null;
}

var setupDuration = TimeSpan.FromMilliseconds(SetupDuration);
var cleanupDuration = TimeSpan.FromMilliseconds(CleanupDuration);
var executionDuration = TimeSpan.FromMilliseconds(ExecutionDuration);

var jobExecution = setupDuration.Add(cleanupDuration).Add(executionDuration);

return StartTime.Value.Add(jobExecution);
}
}
}
}
127 changes: 80 additions & 47 deletions csharp/Microsoft.Azure.Databricks.Client/RuntimeVersions.cs
Original file line number Diff line number Diff line change
Expand Up @@ -24,120 +24,153 @@ public class RuntimeVersions
/// </summary>
public const string Runtime_5_5_CPU_ML = "5.5.x-cpu-ml-scala2.11";


/// <summary>
/// 6.2 (includes Apache Spark 2.4.4, Scala 2.11)
/// 6.4 (includes Apache Spark 2.4.5, Scala 2.11)
/// </summary>
public const string Runtime_6_2 = "6.2.x-scala2.11";
public const string Runtime_6_4 = "6.4.x-scala2.11";

/// <summary>
/// 6.2 ML (includes Apache Spark 2.4.4, Scala 2.11)
/// 6.4 ML (includes Apache Spark 2.4.5, Scala 2.11)
/// </summary>
public const string Runtime_6_2_CPU_ML = "6.2.x-cpu-ml-scala2.11";
public const string Runtime_6_4_CPU_ML = "6.4.x-cpu-ml-scala2.11";

/// <summary>
/// 6.2 ML (includes Apache Spark 2.4.4, GPU, Scala 2.11)
/// 6.4 ML (includes Apache Spark 2.4.5, GPU, Scala 2.11)
/// </summary>
public const string Runtime_6_2_GPU_ML = "6.2.x-gpu-ml-scala2.11";
public const string Runtime_6_4_GPU_ML = "6.4.x-gpu-ml-scala2.11";

/// <summary>
/// 6.2 Genomics (includes Apache Spark 2.4.4, Scala 2.11)
/// 6.4 Genomics (includes Apache Spark 2.4.5, Scala 2.11)
/// </summary>
public const string Runtime_6_2_HLS = "6.2.x-hls-scala2.11";
public const string Runtime_6_4_HLS = "6.4.x-hls-scala2.11";


/// <summary>
/// 6.3 (includes Apache Spark 2.4.4, Scala 2.11)
/// 7.0 (includes Apache Spark 3.0.0, Scala 2.12)
/// </summary>
public const string Runtime_6_3 = "6.3.x-scala2.11";
public const string Runtime_7_0 = "7.0.x-scala2.12";

/// <summary>
/// 6.3 ML (includes Apache Spark 2.4.4, Scala 2.11)
/// 7.0 ML (includes Apache Spark 3.0.0, Scala 2.12)
/// </summary>
public const string Runtime_6_3_CPU_ML = "6.3.x-cpu-ml-scala2.11";
public const string Runtime_7_0_CPU_ML = "7.0.x-cpu-ml-scala2.12";

/// <summary>
/// 6.3 ML (includes Apache Spark 2.4.4, GPU, Scala 2.11)
/// 7.0 ML (includes Apache Spark 3.0.0, GPU, Scala 2.12)
/// </summary>
public const string Runtime_6_3_GPU_ML = "6.3.x-gpu-ml-scala2.11";
public const string Runtime_7_0_GPU_ML = "7.0.x-gpu-ml-scala2.12";

/// <summary>
/// 6.3 Genomics (includes Apache Spark 2.4.4, Scala 2.11)
/// 7.0 Genomics (includes Apache Spark 3.0.0, Scala 2.12)
/// </summary>
public const string Runtime_6_3_HLS = "6.3.x-hls-scala2.11";
public const string Runtime_7_0_HLS = "7.0.x-hls-scala2.12";


/// <summary>
/// 6.4 (includes Apache Spark 2.4.5, Scala 2.11)
/// 7.1 (includes Apache Spark 3.0.0, Scala 2.12)
/// </summary>
public const string Runtime_6_4 = "6.4.x-scala2.11";
public const string Runtime_7_1 = "7.1.x-scala2.12";

/// <summary>
/// 6.4 ML (includes Apache Spark 2.4.5, Scala 2.11)
/// 7.1 ML (includes Apache Spark 3.0.0, Scala 2.12)
/// </summary>
public const string Runtime_6_4_CPU_ML = "6.4.x-cpu-ml-scala2.11";
public const string Runtime_7_1_CPU_ML = "7.1.x-cpu-ml-scala2.12";

/// <summary>
/// 6.4 ML (includes Apache Spark 2.4.5, GPU, Scala 2.11)
/// 7.1 ML (includes Apache Spark 3.0.0, GPU, Scala 2.12)
/// </summary>
public const string Runtime_6_4_GPU_ML = "6.4.x-gpu-ml-scala2.11";
public const string Runtime_7_1_GPU_ML = "7.1.x-gpu-ml-scala2.12";

/// <summary>
/// 6.4 Genomics (includes Apache Spark 2.4.5, Scala 2.11)
/// 7.1 Genomics (includes Apache Spark 3.0.0, Scala 2.12)
/// </summary>
public const string Runtime_6_4_HLS = "6.4.x-hls-scala2.11";

public const string Runtime_7_1_HLS = "7.1.x-hls-scala2.12";


/// <summary>
/// 6.5 (includes Apache Spark 2.4.5, Scala 2.11)
/// 7.2 (includes Apache Spark 3.0.0, Scala 2.12)
/// </summary>
public const string Runtime_6_5 = "6.5.x-scala2.11";
public const string Runtime_7_2 = "7.2.x-scala2.12";

/// <summary>
/// 6.5 ML (includes Apache Spark 2.4.5, Scala 2.11)
/// 7.2 ML (includes Apache Spark 3.0.0, Scala 2.12)
/// </summary>
public const string Runtime_6_5_CPU_ML = "6.5.x-cpu-ml-scala2.11";
public const string Runtime_7_2_CPU_ML = "7.2.x-cpu-ml-scala2.12";

/// <summary>
/// 6.5 ML (includes Apache Spark 2.4.5, GPU, Scala 2.11)
/// 7.2 ML (includes Apache Spark 3.0.0, GPU, Scala 2.12)
/// </summary>
public const string Runtime_6_5_GPU_ML = "6.5.x-gpu-ml-scala2.11";
public const string Runtime_7_2_GPU_ML = "7.2.x-gpu-ml-scala2.12";

/// <summary>
/// 6.5 Genomics (includes Apache Spark 2.4.5, Scala 2.11)
/// 7.2 Genomics (includes Apache Spark 3.0.0, Scala 2.12)
/// </summary>
public const string Runtime_6_5_HLS = "6.5.x-hls-scala2.11";
public const string Runtime_7_2_HLS = "7.2.x-hls-scala2.12";


/// <summary>
/// 6.6 (includes Apache Spark 2.4.5, Scala 2.11)
/// 7.3 LTS (includes Apache Spark 3.0.1, Scala 2.12)
/// </summary>
public const string Runtime_6_6 = "6.6.x-scala2.11";
public const string Runtime_7_3 = "7.3.x-scala2.12";

/// <summary>
/// 6.6 ML (includes Apache Spark 2.4.5, Scala 2.11)
/// 7.3 LTS ML (includes Apache Spark 3.0.1, Scala 2.12)
/// </summary>
public const string Runtime_6_6_CPU_ML = "6.6.x-cpu-ml-scala2.11";
public const string Runtime_7_3_CPU_ML = "7.3.x-cpu-ml-scala2.12";

/// <summary>
/// 6.6 ML (includes Apache Spark 2.4.5, GPU, Scala 2.11)
/// 7.3 LTS ML (includes Apache Spark 3.0.1, GPU, Scala 2.12)
/// </summary>
public const string Runtime_6_6_GPU_ML = "6.6.x-gpu-ml-scala2.11";
public const string Runtime_7_3_GPU_ML = "7.3.x-gpu-ml-scala2.12";

/// <summary>
/// 6.6 Genomics (includes Apache Spark 2.4.5, Scala 2.11)
/// 7.3 LTS Genomics (includes Apache Spark 3.0.1, Scala 2.12)
/// </summary>
public const string Runtime_6_6_HLS = "6.6.x-hls-scala2.11";
public const string Runtime_7_3_HLS = "7.3.x-hls-scala2.12";


/// <summary>
/// 7.0 Beta (includes Apache Spark 3.0.0-preview2, Scala 2.12)
/// 7.4 (includes Apache Spark 3.0.1, Scala 2.12)
/// </summary>
public const string Runtime_7_0 = "7.0.x-scala2.12";
public const string Runtime_7_4 = "7.4.x-scala2.12";

/// <summary>
/// 7.0 ML Beta (includes Apache Spark 3.0.0-preview2, Scala 2.12)
/// 7.4 ML (includes Apache Spark 3.0.1, Scala 2.12)
/// </summary>
public const string Runtime_7_0_CPU_ML = "7.0.x-cpu-ml-scala2.12";
public const string Runtime_7_4_CPU_ML = "7.4.x-cpu-ml-scala2.12";

/// <summary>
/// 7.0 Genomics Beta (includes Apache Spark 3.0.0-preview2, Scala 2.12)
/// 7.4 ML (includes Apache Spark 3.0.1, GPU, Scala 2.12)
/// </summary>
public const string Runtime_7_0_HLS = "7.0.x-hls-scala2.12";
public const string Runtime_7_4_GPU_ML = "7.4.x-gpu-ml-scala2.12";

/// <summary>
/// 7.4 Genomics (includes Apache Spark 3.0.1, Scala 2.12)
/// </summary>
public const string Runtime_7_4_HLS = "7.4.x-hls-scala2.12";


/// <summary>
/// 7.5 (includes Apache Spark 3.0.1, Scala 2.12)
/// </summary>
public const string Runtime_7_5 = "7.5.x-scala2.12";

/// <summary>
/// 7.5 ML (includes Apache Spark 3.0.1, Scala 2.12)
/// </summary>
public const string Runtime_7_5_CPU_ML = "7.5.x-cpu-ml-scala2.12";

/// <summary>
/// 7.5 ML (includes Apache Spark 3.0.1, GPU, Scala 2.12)
/// </summary>
public const string Runtime_7_5_GPU_ML = "7.5.x-gpu-ml-scala2.12";

/// <summary>
/// 7.5 Genomics (includes Apache Spark 3.0.1, Scala 2.12)
/// </summary>
public const string Runtime_7_5_HLS = "7.5.x-hls-scala2.12";


/// <summary>
/// Light 2.4 (includes Apache Spark 2.4, Scala 2.11)
Expand Down
33 changes: 31 additions & 2 deletions csharp/Microsoft.Azure.Databricks.Client/SecretsApiClient.cs
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
using System.Collections.Generic;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net.Http;
using System.Threading;
Expand All @@ -17,13 +18,41 @@ public SecretsApiClient(HttpClient httpClient) : base(httpClient)
{
}

[Obsolete("This method has been renamed to " + nameof(CreateDatabricksBackedScope) + ".")]
public async Task CreateScope(string scope, string initialManagePrincipal,
CancellationToken cancellationToken = default)
{
await CreateDatabricksBackedScope(scope, initialManagePrincipal, cancellationToken);
}

public async Task CreateScope(string scope, string initialManagePrincipal, CancellationToken cancellationToken = default)
public async Task CreateDatabricksBackedScope(string scope, string initialManagePrincipal,
CancellationToken cancellationToken = default)
{
var request = new {scope, initial_manage_principal = initialManagePrincipal};
await HttpPost(this.HttpClient, "secrets/scopes/create", request, cancellationToken).ConfigureAwait(false);
}

/*
This API call is currently not working per https://github.com/MicrosoftDocs/azure-docs/issues/65000. Comment out for now.
public async Task CreateAkvBackedScope(string scope, string initialManagePrincipal, string akvResourceId, string akvDnsName,
CancellationToken cancellationToken = default)
{
var request = new
{
scope,
initial_manage_principal = initialManagePrincipal,
scope_backend_type = "AZURE_KEYVAULT",
backend_azure_keyvault = new
{
resource_id = akvResourceId,
dns_name = akvDnsName
}
};
await HttpPost(this.HttpClient, "secrets/scopes/create", request, cancellationToken).ConfigureAwait(false);
}
*/

public async Task DeleteScope(string scope, CancellationToken cancellationToken = default)
{
var request = new {scope};
Expand Down
Loading

0 comments on commit 2b5f7cd

Please sign in to comment.