Synapse: regenerate package-2019-06-01-preview #17713

Merged: 3 commits, Nov 20, 2020
sdk/synapse/mgmt-v2019_06_01_preview/pom.xml (2 changes: 1 addition, 1 deletion)
@@ -15,7 +15,7 @@
<relativePath>../../parents/azure-arm-parent/pom.xml</relativePath>
</parent>
<artifactId>azure-mgmt-synapse</artifactId>
- <version>1.0.0-beta-3</version>
+ <version>1.0.0-beta-4</version>
<packaging>jar</packaging>
<name>Microsoft Azure SDK for Synapse Management</name>
<description>This package contains Microsoft Synapse Management SDK.</description>
AzureEntityResource.java

@@ -12,7 +12,8 @@
import com.microsoft.azure.ProxyResource;

/**
- * The resource model definition for a Azure Resource Manager resource with an
+ * Entity Resource.
+ * The resource model definition for an Azure Resource Manager resource with an
* etag.
*/
public class AzureEntityResource extends ProxyResource {
BigDataPoolResourceInfo.java

@@ -44,6 +44,11 @@ public interface BigDataPoolResourceInfo extends HasInner<BigDataPoolResourceInf
*/
String defaultSparkLogFolder();

/**
* @return the haveLibraryRequirementsChanged value.
*/
Boolean haveLibraryRequirementsChanged();

/**
* @return the id value.
*/
@@ -89,6 +94,16 @@ public interface BigDataPoolResourceInfo extends HasInner<BigDataPoolResourceInf
*/
String provisioningState();

/**
* @return the sessionLevelPackagesEnabled value.
*/
Boolean sessionLevelPackagesEnabled();

/**
* @return the sparkConfigProperties value.
*/
LibraryRequirements sparkConfigProperties();

/**
* @return the sparkEventsFolder value.
*/
@@ -210,6 +225,18 @@ interface WithDefaultSparkLogFolder {
WithCreate withDefaultSparkLogFolder(String defaultSparkLogFolder);
}

/**
* The stage of the bigdatapoolresourceinfo definition allowing to specify HaveLibraryRequirementsChanged.
*/
interface WithHaveLibraryRequirementsChanged {
/**
* Specifies haveLibraryRequirementsChanged.
* @param haveLibraryRequirementsChanged Whether library requirements changed
* @return the next definition stage
*/
WithCreate withHaveLibraryRequirementsChanged(Boolean haveLibraryRequirementsChanged);
}

/**
* The stage of the bigdatapoolresourceinfo definition allowing to specify IsComputeIsolationEnabled.
*/
@@ -252,7 +279,7 @@ interface WithNodeCount {
interface WithNodeSize {
/**
* Specifies nodeSize.
- * @param nodeSize The level of compute power that each node in the Big Data pool has. Possible values include: 'None', 'Small', 'Medium', 'Large', 'XLarge', 'XXLarge'
+ * @param nodeSize The level of compute power that each node in the Big Data pool has. Possible values include: 'None', 'Small', 'Medium', 'Large', 'XLarge', 'XXLarge', 'XXXLarge'
* @return the next definition stage
*/
WithCreate withNodeSize(NodeSize nodeSize);
@@ -282,6 +309,30 @@ interface WithProvisioningState {
WithCreate withProvisioningState(String provisioningState);
}

/**
* The stage of the bigdatapoolresourceinfo definition allowing to specify SessionLevelPackagesEnabled.
*/
interface WithSessionLevelPackagesEnabled {
/**
* Specifies sessionLevelPackagesEnabled.
* @param sessionLevelPackagesEnabled Whether session level packages enabled
* @return the next definition stage
*/
WithCreate withSessionLevelPackagesEnabled(Boolean sessionLevelPackagesEnabled);
}

/**
* The stage of the bigdatapoolresourceinfo definition allowing to specify SparkConfigProperties.
*/
interface WithSparkConfigProperties {
/**
* Specifies sparkConfigProperties.
* @param sparkConfigProperties Spark configuration file to specify additional properties
* @return the next definition stage
*/
WithCreate withSparkConfigProperties(LibraryRequirements sparkConfigProperties);
}

/**
* The stage of the bigdatapoolresourceinfo definition allowing to specify SparkEventsFolder.
*/
@@ -323,13 +374,13 @@ interface WithTags {
* the resource to be created (via {@link WithCreate#create()}), but also allows
* for any other optional settings to be specified.
*/
- interface WithCreate extends Creatable<BigDataPoolResourceInfo>, DefinitionStages.WithAutoPause, DefinitionStages.WithAutoScale, DefinitionStages.WithCreationDate, DefinitionStages.WithDefaultSparkLogFolder, DefinitionStages.WithIsComputeIsolationEnabled, DefinitionStages.WithLibraryRequirements, DefinitionStages.WithNodeCount, DefinitionStages.WithNodeSize, DefinitionStages.WithNodeSizeFamily, DefinitionStages.WithProvisioningState, DefinitionStages.WithSparkEventsFolder, DefinitionStages.WithSparkVersion, DefinitionStages.WithTags {
+ interface WithCreate extends Creatable<BigDataPoolResourceInfo>, DefinitionStages.WithAutoPause, DefinitionStages.WithAutoScale, DefinitionStages.WithCreationDate, DefinitionStages.WithDefaultSparkLogFolder, DefinitionStages.WithHaveLibraryRequirementsChanged, DefinitionStages.WithIsComputeIsolationEnabled, DefinitionStages.WithLibraryRequirements, DefinitionStages.WithNodeCount, DefinitionStages.WithNodeSize, DefinitionStages.WithNodeSizeFamily, DefinitionStages.WithProvisioningState, DefinitionStages.WithSessionLevelPackagesEnabled, DefinitionStages.WithSparkConfigProperties, DefinitionStages.WithSparkEventsFolder, DefinitionStages.WithSparkVersion, DefinitionStages.WithTags {
}
}
/**
* The template for a BigDataPoolResourceInfo update operation, containing all the settings that can be modified.
*/
- interface Update extends Appliable<BigDataPoolResourceInfo>, UpdateStages.WithForce, UpdateStages.WithAutoPause, UpdateStages.WithAutoScale, UpdateStages.WithCreationDate, UpdateStages.WithDefaultSparkLogFolder, UpdateStages.WithIsComputeIsolationEnabled, UpdateStages.WithLibraryRequirements, UpdateStages.WithNodeCount, UpdateStages.WithNodeSize, UpdateStages.WithNodeSizeFamily, UpdateStages.WithProvisioningState, UpdateStages.WithSparkEventsFolder, UpdateStages.WithSparkVersion, UpdateStages.WithTags {
+ interface Update extends Appliable<BigDataPoolResourceInfo>, UpdateStages.WithForce, UpdateStages.WithAutoPause, UpdateStages.WithAutoScale, UpdateStages.WithCreationDate, UpdateStages.WithDefaultSparkLogFolder, UpdateStages.WithHaveLibraryRequirementsChanged, UpdateStages.WithIsComputeIsolationEnabled, UpdateStages.WithLibraryRequirements, UpdateStages.WithNodeCount, UpdateStages.WithNodeSize, UpdateStages.WithNodeSizeFamily, UpdateStages.WithProvisioningState, UpdateStages.WithSessionLevelPackagesEnabled, UpdateStages.WithSparkConfigProperties, UpdateStages.WithSparkEventsFolder, UpdateStages.WithSparkVersion, UpdateStages.WithTags {
}

/**
@@ -396,6 +447,18 @@ interface WithDefaultSparkLogFolder {
Update withDefaultSparkLogFolder(String defaultSparkLogFolder);
}

/**
* The stage of the bigdatapoolresourceinfo update allowing to specify HaveLibraryRequirementsChanged.
*/
interface WithHaveLibraryRequirementsChanged {
/**
* Specifies haveLibraryRequirementsChanged.
* @param haveLibraryRequirementsChanged Whether library requirements changed
* @return the next update stage
*/
Update withHaveLibraryRequirementsChanged(Boolean haveLibraryRequirementsChanged);
}

/**
* The stage of the bigdatapoolresourceinfo update allowing to specify IsComputeIsolationEnabled.
*/
@@ -438,7 +501,7 @@ interface WithNodeCount {
interface WithNodeSize {
/**
* Specifies nodeSize.
- * @param nodeSize The level of compute power that each node in the Big Data pool has. Possible values include: 'None', 'Small', 'Medium', 'Large', 'XLarge', 'XXLarge'
+ * @param nodeSize The level of compute power that each node in the Big Data pool has. Possible values include: 'None', 'Small', 'Medium', 'Large', 'XLarge', 'XXLarge', 'XXXLarge'
* @return the next update stage
*/
Update withNodeSize(NodeSize nodeSize);
@@ -468,6 +531,30 @@ interface WithProvisioningState {
Update withProvisioningState(String provisioningState);
}

/**
* The stage of the bigdatapoolresourceinfo update allowing to specify SessionLevelPackagesEnabled.
*/
interface WithSessionLevelPackagesEnabled {
/**
* Specifies sessionLevelPackagesEnabled.
* @param sessionLevelPackagesEnabled Whether session level packages enabled
* @return the next update stage
*/
Update withSessionLevelPackagesEnabled(Boolean sessionLevelPackagesEnabled);
}

/**
* The stage of the bigdatapoolresourceinfo update allowing to specify SparkConfigProperties.
*/
interface WithSparkConfigProperties {
/**
* Specifies sparkConfigProperties.
* @param sparkConfigProperties Spark configuration file to specify additional properties
* @return the next update stage
*/
Update withSparkConfigProperties(LibraryRequirements sparkConfigProperties);
}

/**
* The stage of the bigdatapoolresourceinfo update allowing to specify SparkEventsFolder.
*/
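Below is a minimal usage sketch of the update stages added in this regeneration. It assumes an existing BigDataPoolResourceInfo obtained through the Synapse manager and the usual fluent update() entry point, neither of which is shown in this diff; the wrapper class and the LibraryRequirements argument are illustrative placeholders.

import com.microsoft.azure.management.synapse.v2019_06_01_preview.BigDataPoolResourceInfo;
import com.microsoft.azure.management.synapse.v2019_06_01_preview.LibraryRequirements;

class BigDataPoolUpdateSketch {
    // Hedged sketch: `pool` is assumed to be an existing Big Data pool retrieved through the
    // Synapse manager; only the stages introduced in this regeneration are exercised here.
    static BigDataPoolResourceInfo enableSessionPackages(BigDataPoolResourceInfo pool,
                                                         LibraryRequirements sparkConfig) {
        return pool.update()
            .withSessionLevelPackagesEnabled(true)        // new WithSessionLevelPackagesEnabled stage
            .withSparkConfigProperties(sparkConfig)       // new WithSparkConfigProperties stage
            .withHaveLibraryRequirementsChanged(false)    // new WithHaveLibraryRequirementsChanged stage
            .apply();                                     // Appliable#apply() applies the update
    }
}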
CustomerManagedKeyDetails.java (new file)
@@ -0,0 +1,58 @@
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/

package com.microsoft.azure.management.synapse.v2019_06_01_preview;

import com.fasterxml.jackson.annotation.JsonProperty;

/**
* Details of the customer managed key associated with the workspace.
*/
public class CustomerManagedKeyDetails {
/**
* The customer managed key status on the workspace.
*/
@JsonProperty(value = "status", access = JsonProperty.Access.WRITE_ONLY)
private String status;

/**
* The key object of the workspace.
*/
@JsonProperty(value = "key")
private WorkspaceKeyDetails key;

/**
* Get the customer managed key status on the workspace.
*
* @return the status value
*/
public String status() {
return this.status;
}

/**
* Get the key object of the workspace.
*
* @return the key value
*/
public WorkspaceKeyDetails key() {
return this.key;
}

/**
* Set the key object of the workspace.
*
* @param key the key value to set
* @return the CustomerManagedKeyDetails object itself.
*/
public CustomerManagedKeyDetails withKey(WorkspaceKeyDetails key) {
this.key = key;
return this;
}

}
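A short illustrative sketch for the new CustomerManagedKeyDetails model, assuming a WorkspaceKeyDetails instance supplied by the caller: key is the only settable property, while status has no setter and is populated by the service.

import com.microsoft.azure.management.synapse.v2019_06_01_preview.CustomerManagedKeyDetails;
import com.microsoft.azure.management.synapse.v2019_06_01_preview.WorkspaceKeyDetails;

class CustomerManagedKeySketch {
    static CustomerManagedKeyDetails buildDetails(WorkspaceKeyDetails workspaceKey) {
        // Only `key` has a setter; `status` is server-populated and read-only on the client.
        return new CustomerManagedKeyDetails().withKey(workspaceKey);
    }
}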
DataMaskingFunction.java (new file)
@@ -0,0 +1,65 @@
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/

package com.microsoft.azure.management.synapse.v2019_06_01_preview;

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonValue;

/**
* Defines values for DataMaskingFunction.
*/
public enum DataMaskingFunction {
/** Enum value Default. */
DEFAULT("Default"),

/** Enum value CCN. */
CCN("CCN"),

/** Enum value Email. */
EMAIL("Email"),

/** Enum value Number. */
NUMBER("Number"),

/** Enum value SSN. */
SSN("SSN"),

/** Enum value Text. */
TEXT("Text");

/** The actual serialized value for a DataMaskingFunction instance. */
private String value;

DataMaskingFunction(String value) {
this.value = value;
}

/**
* Parses a serialized value to a DataMaskingFunction instance.
*
* @param value the serialized value to parse.
* @return the parsed DataMaskingFunction object, or null if unable to parse.
*/
@JsonCreator
public static DataMaskingFunction fromString(String value) {
DataMaskingFunction[] items = DataMaskingFunction.values();
for (DataMaskingFunction item : items) {
if (item.toString().equalsIgnoreCase(value)) {
return item;
}
}
return null;
}

@JsonValue
@Override
public String toString() {
return this.value;
}
}
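A quick round trip through the generated enum, based only on the code above: fromString parses case-insensitively and returns null for unrecognized values, while toString yields the serialized wire value.

import com.microsoft.azure.management.synapse.v2019_06_01_preview.DataMaskingFunction;

class DataMaskingFunctionSketch {
    public static void main(String[] args) {
        DataMaskingFunction fn = DataMaskingFunction.fromString("email");
        System.out.println(fn);                                    // prints "Email", the wire value
        System.out.println(DataMaskingFunction.fromString("md5")); // prints "null": unrecognized values parse to null
    }
}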
DataMaskingPolicies.java (new file)
@@ -0,0 +1,31 @@
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/

package com.microsoft.azure.management.synapse.v2019_06_01_preview;

import com.microsoft.azure.arm.collection.SupportsCreating;
import rx.Observable;
import com.microsoft.azure.management.synapse.v2019_06_01_preview.implementation.DataMaskingPoliciesInner;
import com.microsoft.azure.arm.model.HasInner;

/**
* Type representing DataMaskingPolicies.
*/
public interface DataMaskingPolicies extends SupportsCreating<DataMaskingPolicy.DefinitionStages.Blank>, HasInner<DataMaskingPoliciesInner> {
/**
* Gets a Sql pool data masking policy.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param workspaceName The name of the workspace
* @param sqlPoolName SQL pool name
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the observable for the request
*/
Observable<DataMaskingPolicy> getAsync(String resourceGroupName, String workspaceName, String sqlPoolName);

}
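A hedged sketch of calling the new DataMaskingPolicies collection. The collection instance is assumed to come from the generated Synapse manager (for example a dataMaskingPolicies() accessor, not shown here), and the resource group, workspace, and SQL pool names are placeholders.

import com.microsoft.azure.management.synapse.v2019_06_01_preview.DataMaskingPolicies;
import com.microsoft.azure.management.synapse.v2019_06_01_preview.DataMaskingPolicy;

class DataMaskingPoliciesSketch {
    static void printPolicy(DataMaskingPolicies policies) {
        policies.getAsync("myResourceGroup", "myWorkspace", "mySqlPool")
                .subscribe(
                    (DataMaskingPolicy policy) -> System.out.println("Got data masking policy: " + policy),
                    error -> System.err.println("Failed to get data masking policy: " + error));
    }
}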