diff --git a/sdk/search/azure-search-documents/README.md b/sdk/search/azure-search-documents/README.md index 0b24ffe91769..41aa64191f9c 100644 --- a/sdk/search/azure-search-documents/README.md +++ b/sdk/search/azure-search-documents/README.md @@ -110,16 +110,16 @@ Create Index using `searchIndexClient` instantiated in [Create a SearchServiceCl ```java -Index newIndex = new Index() +SearchIndex newIndex = new SearchIndex() .setName("index_name") .setFields( - Arrays.asList(new Field() + Arrays.asList(new SearchField() .setName("Name") - .setType(DataType.EDM_STRING) + .setType(SearchFieldDataType.STRING) .setKey(Boolean.TRUE), - new Field() + new SearchField() .setName("Cuisine") - .setType(DataType.EDM_STRING))); + .setType(SearchFieldDataType.STRING))); // Create index. searchServiceClient.createIndex(newIndex); ``` diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/FieldBuilder.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/FieldBuilder.java index 003c6f76388d..a11ce4f0e768 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/FieldBuilder.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/FieldBuilder.java @@ -7,10 +7,10 @@ import com.azure.search.documents.indexes.FieldIgnore; import com.azure.search.documents.indexes.SearchableFieldProperty; import com.azure.search.documents.indexes.SimpleFieldProperty; -import com.azure.search.documents.models.AnalyzerName; -import com.azure.search.documents.models.DataType; -import com.azure.search.documents.models.Field; import com.azure.search.documents.models.GeoPoint; +import com.azure.search.documents.models.LexicalAnalyzerName; +import com.azure.search.documents.models.SearchField; +import com.azure.search.documents.models.SearchFieldDataType; import java.lang.reflect.ParameterizedType; import java.lang.reflect.Type; @@ -24,25 +24,25 @@ import java.util.stream.Collectors; /** - * 
Helper to convert model class to Search {@link Field fields}. + * Helper to convert model class to Search {@link SearchField fields}. */ public final class FieldBuilder { private static final int MAX_DEPTH = 10000; - private static final Map, DataType> SUPPORTED_NONE_PARAMETERIZED_TYPE = new HashMap<>(); + private static final Map, SearchFieldDataType> SUPPORTED_NONE_PARAMETERIZED_TYPE = new HashMap<>(); static { - SUPPORTED_NONE_PARAMETERIZED_TYPE.put(Integer.class, DataType.EDM_INT32); - SUPPORTED_NONE_PARAMETERIZED_TYPE.put(int.class, DataType.EDM_INT32); - SUPPORTED_NONE_PARAMETERIZED_TYPE.put(Long.class, DataType.EDM_INT64); - SUPPORTED_NONE_PARAMETERIZED_TYPE.put(long.class, DataType.EDM_INT64); - SUPPORTED_NONE_PARAMETERIZED_TYPE.put(Double.class, DataType.EDM_DOUBLE); - SUPPORTED_NONE_PARAMETERIZED_TYPE.put(double.class, DataType.EDM_DOUBLE); - SUPPORTED_NONE_PARAMETERIZED_TYPE.put(Boolean.class, DataType.EDM_BOOLEAN); - SUPPORTED_NONE_PARAMETERIZED_TYPE.put(boolean.class, DataType.EDM_BOOLEAN); - SUPPORTED_NONE_PARAMETERIZED_TYPE.put(String.class, DataType.EDM_STRING); - SUPPORTED_NONE_PARAMETERIZED_TYPE.put(Date.class, DataType.EDM_DATE_TIME_OFFSET); - SUPPORTED_NONE_PARAMETERIZED_TYPE.put(OffsetDateTime.class, DataType.EDM_DATE_TIME_OFFSET); - SUPPORTED_NONE_PARAMETERIZED_TYPE.put(GeoPoint.class, DataType.EDM_GEOGRAPHY_POINT); + SUPPORTED_NONE_PARAMETERIZED_TYPE.put(Integer.class, SearchFieldDataType.INT32); + SUPPORTED_NONE_PARAMETERIZED_TYPE.put(int.class, SearchFieldDataType.INT32); + SUPPORTED_NONE_PARAMETERIZED_TYPE.put(Long.class, SearchFieldDataType.INT64); + SUPPORTED_NONE_PARAMETERIZED_TYPE.put(long.class, SearchFieldDataType.INT64); + SUPPORTED_NONE_PARAMETERIZED_TYPE.put(Double.class, SearchFieldDataType.DOUBLE); + SUPPORTED_NONE_PARAMETERIZED_TYPE.put(double.class, SearchFieldDataType.DOUBLE); + SUPPORTED_NONE_PARAMETERIZED_TYPE.put(Boolean.class, SearchFieldDataType.BOOLEAN); + SUPPORTED_NONE_PARAMETERIZED_TYPE.put(boolean.class, 
SearchFieldDataType.BOOLEAN); + SUPPORTED_NONE_PARAMETERIZED_TYPE.put(String.class, SearchFieldDataType.STRING); + SUPPORTED_NONE_PARAMETERIZED_TYPE.put(Date.class, SearchFieldDataType.DATE_TIME_OFFSET); + SUPPORTED_NONE_PARAMETERIZED_TYPE.put(OffsetDateTime.class, SearchFieldDataType.DATE_TIME_OFFSET); + SUPPORTED_NONE_PARAMETERIZED_TYPE.put(GeoPoint.class, SearchFieldDataType.GEOGRAPHY_POINT); } private static final List> UNSUPPORTED_TYPES = Arrays.asList(Byte.class, @@ -55,13 +55,13 @@ public final class FieldBuilder { short.class); /** - * Creates a collection of {@link Field} objects corresponding to the properties of the type supplied. + * Creates a collection of {@link SearchField} objects corresponding to the properties of the type supplied. * * @param modelClass The class for which fields will be created, based on its properties. * @param The generic type of the model class. * @return A collection of fields. */ - public static List build(Class modelClass) { + public static List build(Class modelClass) { ClientLogger logger = new ClientLogger(FieldBuilder.class); return build(modelClass, new Stack<>(), logger); } @@ -72,9 +72,9 @@ public static List build(Class modelClass) { * @param currentClass Current class to be built. * @param classChain A class chain from {@code modelClass} to prior of {@code currentClass}. * @param logger {@link ClientLogger}. - * @return A list of {@link Field} that currentClass is built to. + * @return A list of {@link SearchField} that currentClass is built to. */ - private static List build(Class currentClass, Stack> classChain, ClientLogger logger) { + private static List build(Class currentClass, Stack> classChain, ClientLogger logger) { if (classChain.contains(currentClass)) { logger.warning(String.format("There is circular dependencies %s, %s", classChain, currentClass)); return null; @@ -84,7 +84,7 @@ private static List build(Class currentClass, Stack> classCha "The dependency graph is too deep. 
Please review your schema.")); } classChain.push(currentClass); - List searchFields = Arrays.stream(currentClass.getDeclaredFields()) + List searchFields = Arrays.stream(currentClass.getDeclaredFields()) .filter(classField -> !classField.isAnnotationPresent(FieldIgnore.class)) .map(classField -> buildField(classField, classChain, logger)) .collect(Collectors.toList()); @@ -92,7 +92,7 @@ private static List build(Class currentClass, Stack> classCha return searchFields; } - private static Field buildField(java.lang.reflect.Field classField, Stack> classChain, + private static SearchField buildField(java.lang.reflect.Field classField, Stack> classChain, ClientLogger logger) { Type type = classField.getGenericType(); @@ -102,15 +102,15 @@ private static Field buildField(java.lang.reflect.Field classField, Stack childFields = build((Class) type, classChain, logger); - Field searchField = convertToBasicSearchField(classField, logger); + List childFields = build((Class) type, classChain, logger); + SearchField searchField = convertToBasicSearchField(classField, logger); searchField.setFields(childFields); return searchField; } - private static Field buildNoneParameterizedType(java.lang.reflect.Field classField, + private static SearchField buildNoneParameterizedType(java.lang.reflect.Field classField, ClientLogger logger) { - Field searchField = convertToBasicSearchField(classField, logger); + SearchField searchField = convertToBasicSearchField(classField, logger); return enrichWithAnnotation(searchField, classField, logger); } @@ -128,16 +128,16 @@ private static boolean isList(Type type) { return List.class.isAssignableFrom((Class) rawType); } - private static Field buildCollectionField(java.lang.reflect.Field classField, + private static SearchField buildCollectionField(java.lang.reflect.Field classField, Stack> classChain, ClientLogger logger) { Type componentOrElementType = getComponentOrElementType(classField.getGenericType(), logger); 
validateType(componentOrElementType, true, logger); if (SUPPORTED_NONE_PARAMETERIZED_TYPE.containsKey(componentOrElementType)) { - Field searchField = convertToBasicSearchField(classField, logger); + SearchField searchField = convertToBasicSearchField(classField, logger); return enrichWithAnnotation(searchField, classField, logger); } - List childFields = build((Class) componentOrElementType, classChain, logger); - Field searchField = convertToBasicSearchField(classField, logger); + List childFields = build((Class) componentOrElementType, classChain, logger); + SearchField searchField = convertToBasicSearchField(classField, logger); searchField.setFields(childFields); return searchField; } @@ -154,11 +154,11 @@ private static Type getComponentOrElementType(Type arrayOrListType, ClientLogger "Collection type %s is not supported.", arrayOrListType.getTypeName()))); } - private static Field convertToBasicSearchField(java.lang.reflect.Field classField, + private static SearchField convertToBasicSearchField(java.lang.reflect.Field classField, ClientLogger logger) { - Field searchField = new Field(); + SearchField searchField = new SearchField(); searchField.setName(classField.getName()); - DataType dataType = covertToDataType(classField.getGenericType(), false, logger); + SearchFieldDataType dataType = covertToSearchFieldDataType(classField.getGenericType(), false, logger); searchField.setType(dataType) .setKey(false) .setSearchable(false) @@ -169,7 +169,7 @@ private static Field convertToBasicSearchField(java.lang.reflect.Field classFiel return searchField; } - private static Field enrichWithAnnotation(Field searchField, java.lang.reflect.Field classField, + private static SearchField enrichWithAnnotation(SearchField searchField, java.lang.reflect.Field classField, ClientLogger logger) { if (classField.isAnnotationPresent(SimpleFieldProperty.class) && classField.isAnnotationPresent(SearchableFieldProperty.class)) { @@ -187,8 +187,8 @@ private static Field 
enrichWithAnnotation(Field searchField, java.lang.reflect.F .setKey(simpleFieldPropertyAnnotation.isKey()) .setHidden(simpleFieldPropertyAnnotation.isHidden()); } else if (classField.isAnnotationPresent(SearchableFieldProperty.class)) { - if (!searchField.getType().equals(DataType.EDM_STRING) - && !searchField.getType().equals(DataType.collection(DataType.EDM_STRING))) { + if (!searchField.getType().equals(SearchFieldDataType.STRING) + && !searchField.getType().equals(SearchFieldDataType.collection(SearchFieldDataType.STRING))) { throw logger.logExceptionAsError(new RuntimeException(String.format("SearchFieldProperty can only" + " be used on string properties. Property %s returns a %s value.", classField.getName(), searchField.getType()))); @@ -209,13 +209,15 @@ private static Field enrichWithAnnotation(Field searchField, java.lang.reflect.F "Please specify either analyzer or both searchAnalyzer and indexAnalyzer.")); } if (!searchableFieldPropertyAnnotation.analyzer().isEmpty()) { - searchField.setAnalyzer(AnalyzerName.fromString((searchableFieldPropertyAnnotation.analyzer()))); + searchField.setAnalyzer(LexicalAnalyzerName.fromString((searchableFieldPropertyAnnotation.analyzer()))); } if (!searchableFieldPropertyAnnotation.searchAnalyzer().isEmpty()) { - searchField.setAnalyzer(AnalyzerName.fromString((searchableFieldPropertyAnnotation.searchAnalyzer()))); + searchField.setAnalyzer(LexicalAnalyzerName.fromString( + (searchableFieldPropertyAnnotation.searchAnalyzer()))); } if (!searchableFieldPropertyAnnotation.indexAnalyzer().isEmpty()) { - searchField.setAnalyzer(AnalyzerName.fromString((searchableFieldPropertyAnnotation.indexAnalyzer()))); + searchField.setAnalyzer(LexicalAnalyzerName.fromString( + (searchableFieldPropertyAnnotation.indexAnalyzer()))); } if (searchableFieldPropertyAnnotation.synonymMaps().length != 0) { List synonymMaps = Arrays.stream(searchableFieldPropertyAnnotation.synonymMaps()) @@ -248,15 +250,16 @@ private static void validateType(Type 
type, boolean hasArrayOrCollectionWrapped, } } - private static DataType covertToDataType(Type type, boolean hasArrayOrCollectionWrapped, ClientLogger logger) { + private static SearchFieldDataType covertToSearchFieldDataType(Type type, boolean hasArrayOrCollectionWrapped, + ClientLogger logger) { validateType(type, hasArrayOrCollectionWrapped, logger); if (SUPPORTED_NONE_PARAMETERIZED_TYPE.containsKey(type)) { return SUPPORTED_NONE_PARAMETERIZED_TYPE.get(type); } if (isArrayOrList(type)) { Type componentOrElementType = getComponentOrElementType(type, logger); - return DataType.collection(covertToDataType(componentOrElementType, true, logger)); + return SearchFieldDataType.collection(covertToSearchFieldDataType(componentOrElementType, true, logger)); } - return DataType.EDM_COMPLEX_TYPE; + return SearchFieldDataType.COMPLEX; } } diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/SearchIndexAsyncClient.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/SearchIndexAsyncClient.java index cdb623ec66f2..d145f591496f 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/SearchIndexAsyncClient.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/SearchIndexAsyncClient.java @@ -12,24 +12,24 @@ import com.azure.core.util.serializer.JacksonAdapter; import com.azure.core.util.serializer.SerializerAdapter; import com.azure.search.documents.implementation.SearchIndexRestClientImpl; +import com.azure.search.documents.implementation.models.AutocompleteRequest; import com.azure.search.documents.implementation.models.SearchContinuationToken; +import com.azure.search.documents.implementation.models.SearchRequest; +import com.azure.search.documents.implementation.models.SuggestRequest; import com.azure.search.documents.implementation.util.DocumentResponseConversions; import com.azure.search.documents.implementation.util.SuggestOptionsHandler; -import 
com.azure.search.documents.models.IndexBatchException; -import com.azure.search.documents.models.SearchRequest; import com.azure.search.documents.implementation.SearchIndexRestClientBuilder; import com.azure.search.documents.implementation.SerializationUtil; import com.azure.search.documents.models.AutocompleteOptions; -import com.azure.search.documents.models.AutocompleteRequest; import com.azure.search.documents.models.IndexAction; import com.azure.search.documents.models.IndexActionType; +import com.azure.search.documents.models.IndexBatchException; import com.azure.search.documents.models.IndexDocumentsBatch; import com.azure.search.documents.models.IndexDocumentsResult; import com.azure.search.documents.models.RequestOptions; import com.azure.search.documents.models.SearchOptions; import com.azure.search.documents.models.SearchResult; import com.azure.search.documents.models.SuggestOptions; -import com.azure.search.documents.models.SuggestRequest; import com.azure.search.documents.models.SuggestResult; import com.azure.search.documents.util.AutocompletePagedFlux; import com.azure.search.documents.util.AutocompletePagedResponse; diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/DataSources.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/SearchIndexerDataSources.java similarity index 72% rename from sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/DataSources.java rename to sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/SearchIndexerDataSources.java index fa47cee673cb..4bf962ca467f 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/DataSources.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/SearchIndexerDataSources.java @@ -5,20 +5,20 @@ import com.azure.core.util.CoreUtils; import com.azure.search.documents.models.DataChangeDetectionPolicy; -import 
com.azure.search.documents.models.DataContainer; import com.azure.search.documents.models.DataDeletionDetectionPolicy; -import com.azure.search.documents.models.DataSource; import com.azure.search.documents.models.DataSourceCredentials; -import com.azure.search.documents.models.DataSourceType; import com.azure.search.documents.models.HighWaterMarkChangeDetectionPolicy; +import com.azure.search.documents.models.SearchIndexerDataContainer; +import com.azure.search.documents.models.SearchIndexerDataSource; +import com.azure.search.documents.models.SearchIndexerDataSourceType; /** - * Utility class that aids in the creation of {@link DataSource DataSources}. + * Utility class that aids in the creation of {@link SearchIndexerDataSource SearchIndexerDataSources}. */ -public final class DataSources { +public final class SearchIndexerDataSources { /** - * Creates a new {@link DataSource} to connect to an Azure SQL database. + * Creates a new {@link SearchIndexerDataSource} to connect to an Azure SQL database. * * @param dataSourceName The name of the data source. * @param sqlConnectionString The connection string for the Azure SQL database. @@ -27,11 +27,11 @@ public final class DataSources { * @param changeDetectionPolicy The change detection policy for the data source. Note that only high watermark * change detection is allowed for Azure SQL when deletion detection is enabled. * @param deletionDetectionPolicy Optional. The data deletion detection policy for the data source. - * @return A new Azure SQL {@link DataSource} instance. + * @return A new Azure SQL {@link SearchIndexerDataSource} instance. * @throws IllegalArgumentException If {@code dataSourceName}, {@code sqlConnectionString}, or {@code * tableOrViewName} is null or empty. 
*/ - public static DataSource createFromAzureSql(String dataSourceName, String sqlConnectionString, + public static SearchIndexerDataSource createFromAzureSql(String dataSourceName, String sqlConnectionString, String tableOrViewName, String description, DataChangeDetectionPolicy changeDetectionPolicy, DataDeletionDetectionPolicy deletionDetectionPolicy) { if (CoreUtils.isNullOrEmpty(dataSourceName)) { @@ -44,27 +44,28 @@ public static DataSource createFromAzureSql(String dataSourceName, String sqlCon throw new IllegalArgumentException("'tableOrViewName' cannot be null or empty."); } - return createDataSource(dataSourceName, DataSourceType.AZURE_SQL, sqlConnectionString, tableOrViewName, null, + return createSearchIndexerDataSource(dataSourceName, SearchIndexerDataSourceType.AZURE_SQL, sqlConnectionString, + tableOrViewName, null, description, changeDetectionPolicy, deletionDetectionPolicy); } /** - * Creates a new {@link DataSource} to connect to an Azure SQL database. + * Creates a new {@link SearchIndexerDataSource} to connect to an Azure SQL database. * * @param dataSourceName The name of the data source. * @param sqlConnectionString The connection string for the Azure SQL database. * @param tableOrViewName The name of the table or view from which to read rows. - * @return A new Azure SQL {@link DataSource} instance. + * @return A new Azure SQL {@link SearchIndexerDataSource} instance. * @throws IllegalArgumentException If {@code dataSourceName}, {@code sqlConnectionString}, or {@code * tableOrViewName} is null or empty. */ - public static DataSource createFromAzureSql(String dataSourceName, String sqlConnectionString, + public static SearchIndexerDataSource createFromAzureSql(String dataSourceName, String sqlConnectionString, String tableOrViewName) { return createFromAzureSql(dataSourceName, sqlConnectionString, tableOrViewName, null, null, null); } /** - * Creates a new {@link DataSource} to connect to an Azure Blob container. 
+ * Creates a new {@link SearchIndexerDataSource} to connect to an Azure Blob container. * * @param dataSourceName The name of the data source. * @param storageConnectionString The connection string for the Azure Storage account. The Storage connection string @@ -78,12 +79,12 @@ public static DataSource createFromAzureSql(String dataSourceName, String sqlCon * is useful when blobs are organized into "virtual folders". * @param description Optional. Description of the data source * @param deletionDetectionPolicy Optional. The data deletion detection policy for the data source - * @return A new Azure Blob {@link DataSource} instance. + * @return A new Azure Blob {@link SearchIndexerDataSource} instance. * @throws IllegalArgumentException If {@code dataSourceName}, {@code containerName} or {@code * storageConnectionString} is null or empty. */ - public static DataSource createFromAzureBlobStorage(String dataSourceName, String storageConnectionString, - String containerName, String pathPrefix, String description, + public static SearchIndexerDataSource createFromAzureBlobStorage(String dataSourceName, + String storageConnectionString, String containerName, String pathPrefix, String description, DataDeletionDetectionPolicy deletionDetectionPolicy) { if (CoreUtils.isNullOrEmpty(dataSourceName)) { throw new IllegalArgumentException("'dataSourceName' cannot be null or empty."); @@ -95,12 +96,12 @@ public static DataSource createFromAzureBlobStorage(String dataSourceName, Strin throw new IllegalArgumentException("'containerName' cannot be null or empty."); } - return createDataSource(dataSourceName, DataSourceType.AZURE_BLOB, storageConnectionString, containerName, - pathPrefix, description, null, deletionDetectionPolicy); + return createSearchIndexerDataSource(dataSourceName, SearchIndexerDataSourceType.AZURE_BLOB, + storageConnectionString, containerName, pathPrefix, description, null, deletionDetectionPolicy); } /** - * Creates a new {@link DataSource} to connect to an 
Azure Blob container. + * Creates a new {@link SearchIndexerDataSource} to connect to an Azure Blob container. * * @param dataSourceName The name of the data source. * @param storageConnectionString The connection string for the Azure Storage account. The Storage connection string @@ -110,17 +111,17 @@ public static DataSource createFromAzureBlobStorage(String dataSourceName, Strin *

* Note: The connection string must use HTTPS. * @param containerName The name of the container from which to read blobs. - * @return A new Azure Blob {@link DataSource} instance. + * @return A new Azure Blob {@link SearchIndexerDataSource} instance. * @throws IllegalArgumentException If {@code dataSourceName}, {@code containerName} or {@code * storageConnectionString} is null or empty. */ - public static DataSource createFromAzureBlobStorage(String dataSourceName, String storageConnectionString, - String containerName) { + public static SearchIndexerDataSource createFromAzureBlobStorage(String dataSourceName, + String storageConnectionString, String containerName) { return createFromAzureBlobStorage(dataSourceName, storageConnectionString, containerName, null, null, null); } /** - * Creates a new {@link DataSource} to connect to an Azure Table. + * Creates a new {@link SearchIndexerDataSource} to connect to an Azure Table. * * @param dataSourceName The name of the data source. * @param storageConnectionString The connection string for the Azure Storage account. The Storage connection string @@ -133,12 +134,12 @@ public static DataSource createFromAzureBlobStorage(String dataSourceName, Strin * @param query Optional. A query that is applied to the table when reading rows. * @param description Optional. Description of the data source * @param deletionDetectionPolicy Optional. The data deletion detection policy for the data source. - * @return A new Azure Table {@link DataSource} instance. + * @return A new Azure Table {@link SearchIndexerDataSource} instance. * @throws IllegalArgumentException If {@code dataSourceName}, {@code tableName}, or {@code storageConnectionString} * is null or empty. 
*/ - public static DataSource createFromAzureTableStorage(String dataSourceName, String storageConnectionString, - String tableName, String query, String description, + public static SearchIndexerDataSource createFromAzureTableStorage(String dataSourceName, + String storageConnectionString, String tableName, String query, String description, DataDeletionDetectionPolicy deletionDetectionPolicy) { if (CoreUtils.isNullOrEmpty(dataSourceName)) { throw new IllegalArgumentException("'dataSourceName' cannot be null or empty."); @@ -150,12 +151,12 @@ public static DataSource createFromAzureTableStorage(String dataSourceName, Stri throw new IllegalArgumentException("'storageConnectionString' cannot be null or empty."); } - return createDataSource(dataSourceName, DataSourceType.AZURE_TABLE, storageConnectionString, tableName, query, - description, null, deletionDetectionPolicy); + return createSearchIndexerDataSource(dataSourceName, SearchIndexerDataSourceType.AZURE_TABLE, + storageConnectionString, tableName, query, description, null, deletionDetectionPolicy); } /** - * Creates a new {@link DataSource} to connect to an Azure Table. + * Creates a new {@link SearchIndexerDataSource} to connect to an Azure Table. * * @param dataSourceName The name of the data source. * @param storageConnectionString The connection string for the Azure Storage account. The Storage connection string @@ -165,17 +166,17 @@ public static DataSource createFromAzureTableStorage(String dataSourceName, Stri *

* Note: The connection string must use HTTPS. * @param tableName The name of the Azure table from which to read rows. - * @return A new Azure Table {@link DataSource} instance. + * @return A new Azure Table {@link SearchIndexerDataSource} instance. * @throws IllegalArgumentException If {@code dataSourceName}, {@code tableName}, or {@code storageConnectionString} * is null or empty. */ - public static DataSource createFromAzureTableStorage(String dataSourceName, String storageConnectionString, - String tableName) { + public static SearchIndexerDataSource createFromAzureTableStorage(String dataSourceName, + String storageConnectionString, String tableName) { return createFromAzureTableStorage(dataSourceName, storageConnectionString, tableName, null, null, null); } /** - * Creates a new {@link DataSource} to connect to a Cosmos database. + * Creates a new {@link SearchIndexerDataSource} to connect to a Cosmos database. * * @param dataSourceName The name of the data source. * @param cosmosConnectionString The connection string for the Cosmos database. It must follow this format: @@ -187,11 +188,11 @@ public static DataSource createFromAzureTableStorage(String dataSourceName, Stri * @param useChangeDetection Optional. Indicates whether to use change detection when indexing. Default is true. * @param description Optional. Description of the data source * @param deletionDetectionPolicy Optional. The data deletion detection policy for the data source. - * @return A new Cosmos {@link DataSource} instance. + * @return A new Cosmos {@link SearchIndexerDataSource} instance. * @throws IllegalArgumentException If {@code dataSourceName}, {@code collectionName}, or {@code * cosmosConnectionString} is null or empty. 
*/ - public static DataSource createFromCosmos(String dataSourceName, String cosmosConnectionString, + public static SearchIndexerDataSource createFromCosmos(String dataSourceName, String cosmosConnectionString, String collectionName, String query, Boolean useChangeDetection, String description, DataDeletionDetectionPolicy deletionDetectionPolicy) { if (CoreUtils.isNullOrEmpty(dataSourceName)) { @@ -208,12 +209,13 @@ public static DataSource createFromCosmos(String dataSourceName, String cosmosCo ? new HighWaterMarkChangeDetectionPolicy().setHighWaterMarkColumnName("_ts") : null; - return createDataSource(dataSourceName, DataSourceType.COSMOS, cosmosConnectionString, collectionName, query, + return createSearchIndexerDataSource(dataSourceName, SearchIndexerDataSourceType.COSMOS_DB, + cosmosConnectionString, collectionName, query, description, changeDetectionPolicy, deletionDetectionPolicy); } /** - * Creates a new {@link DataSource} to connect to a Cosmos database. + * Creates a new {@link SearchIndexerDataSource} to connect to a Cosmos database. * * @param dataSourceName The name of the data source. * @param cosmosConnectionString The connection string for the Cosmos database. It must follow this format: @@ -222,18 +224,18 @@ public static DataSource createFromCosmos(String dataSourceName, String cosmosCo * database name]"} * @param collectionName The name of the collection from which to read documents * @param useChangeDetection Optional. Indicates whether to use change detection when indexing. Default is true. - * @return A new Cosmos {@link DataSource} instance. + * @return A new Cosmos {@link SearchIndexerDataSource} instance. * @throws IllegalArgumentException If {@code dataSourceName}, {@code collectionName}, or {@code * cosmosConnectionString} is null or empty. 
*/ - public static DataSource createFromCosmos(String dataSourceName, String cosmosConnectionString, + public static SearchIndexerDataSource createFromCosmos(String dataSourceName, String cosmosConnectionString, String collectionName, Boolean useChangeDetection) { return createFromCosmos(dataSourceName, cosmosConnectionString, collectionName, null, useChangeDetection, null, null); } /** - * Creates a new {@link DataSource} to connect to a Cosmos database with change detection set to true. + * Creates a new {@link SearchIndexerDataSource} to connect to a Cosmos database with change detection set to true. * * @param dataSourceName The name of the data source. * @param cosmosConnectionString The connection string for the Cosmos database. It must follow this format: @@ -241,31 +243,31 @@ public static DataSource createFromCosmos(String dataSourceName, String cosmosCo * {@code AccountName|AccountEndpoint=[your account name or endpoint]; AccountKey=[your account key];Database=[your * database name]"} * @param collectionName The name of the collection from which to read documents - * @return A new Cosmos {@link DataSource} instance. + * @return A new Cosmos {@link SearchIndexerDataSource} instance. * @throws IllegalArgumentException If {@code dataSourceName}, {@code collectionName}, or {@code * cosmosConnectionString} is null or empty. */ - public static DataSource createFromCosmos(String dataSourceName, String cosmosConnectionString, + public static SearchIndexerDataSource createFromCosmos(String dataSourceName, String cosmosConnectionString, String collectionName) { return createFromCosmos(dataSourceName, cosmosConnectionString, collectionName, null, true, null, null); } /* - * Helper method that creates a generic DataSource. + * Helper method that creates a generic SearchIndexerDataSource. 
*/ - private static DataSource createDataSource(String name, DataSourceType type, String connectionString, - String dataSourceName, String dataSourceQuery, String description, + private static SearchIndexerDataSource createSearchIndexerDataSource(String name, SearchIndexerDataSourceType type, + String connectionString, String dataSourceName, String dataSourceQuery, String description, DataChangeDetectionPolicy dataChangeDetectionPolicy, DataDeletionDetectionPolicy dataDeletionDetectionPolicy) { - return new DataSource() + return new SearchIndexerDataSource() .setName(name) .setType(type) .setCredentials(new DataSourceCredentials().setConnectionString(connectionString)) - .setContainer(new DataContainer().setName(dataSourceName).setQuery(dataSourceQuery)) + .setContainer(new SearchIndexerDataContainer().setName(dataSourceName).setQuery(dataSourceQuery)) .setDescription(description) .setDataChangeDetectionPolicy(dataChangeDetectionPolicy) .setDataDeletionDetectionPolicy(dataDeletionDetectionPolicy); } - private DataSources() { + private SearchIndexerDataSources() { } } diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/SearchServiceAsyncClient.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/SearchServiceAsyncClient.java index f608b96138b4..cb21634ea065 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/SearchServiceAsyncClient.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/SearchServiceAsyncClient.java @@ -11,19 +11,19 @@ import com.azure.core.util.Context; import com.azure.core.util.FluxUtil; import com.azure.core.util.logging.ClientLogger; -import com.azure.search.documents.models.AnalyzeRequest; import com.azure.search.documents.implementation.SearchServiceRestClientBuilder; import com.azure.search.documents.implementation.SearchServiceRestClientImpl; -import com.azure.search.documents.models.DataSource; +import 
com.azure.search.documents.models.AnalyzeRequest; +import com.azure.search.documents.models.AnalyzedTokenInfo; import com.azure.search.documents.models.GetIndexStatisticsResult; -import com.azure.search.documents.models.Index; -import com.azure.search.documents.models.Indexer; -import com.azure.search.documents.models.IndexerExecutionInfo; import com.azure.search.documents.models.RequestOptions; +import com.azure.search.documents.models.SearchIndex; +import com.azure.search.documents.models.SearchIndexer; +import com.azure.search.documents.models.SearchIndexerDataSource; +import com.azure.search.documents.models.SearchIndexerSkillset; +import com.azure.search.documents.models.SearchIndexerStatus; import com.azure.search.documents.models.ServiceStatistics; -import com.azure.search.documents.models.Skillset; import com.azure.search.documents.models.SynonymMap; -import com.azure.search.documents.models.TokenInfo; import reactor.core.publisher.Mono; import java.util.Objects; @@ -117,30 +117,30 @@ public String getEndpoint() { /** * Creates a new Azure Cognitive Search data source or updates a data source if it already exists. * - * @param dataSource The definition of the {@link DataSource} to create or update. + * @param dataSource The definition of the {@link SearchIndexerDataSource} to create or update. * @return the data source that was created or updated. */ - public Mono createOrUpdateDataSource(DataSource dataSource) { + public Mono createOrUpdateDataSource(SearchIndexerDataSource dataSource) { return createOrUpdateDataSourceWithResponse(dataSource, false, null).map(Response::getValue); } /** * Creates a new Azure Cognitive Search data source or updates a data source if it already exists. * - * @param dataSource The definition of the {@link DataSource} to create or update. + * @param dataSource The definition of the {@link SearchIndexerDataSource} to create or update. 
* @param onlyIfUnchanged {@code true} to update if the {@code dataSource} is the same as the current service value. * {@code false} to always update existing value. * @param requestOptions additional parameters for the operation. Contains the tracking ID sent with the request to * help with debugging * @return a data source response. */ - public Mono> createOrUpdateDataSourceWithResponse(DataSource dataSource, - boolean onlyIfUnchanged, RequestOptions requestOptions) { + public Mono> createOrUpdateDataSourceWithResponse( + SearchIndexerDataSource dataSource, boolean onlyIfUnchanged, RequestOptions requestOptions) { return withContext(context -> createOrUpdateDataSourceWithResponse(dataSource, onlyIfUnchanged, requestOptions, context)); } - Mono> createOrUpdateDataSourceWithResponse(DataSource dataSource, + Mono> createOrUpdateDataSourceWithResponse(SearchIndexerDataSource dataSource, boolean onlyIfUnchanged, RequestOptions requestOptions, Context context) { Objects.requireNonNull(dataSource, "'DataSource' cannot be null."); String ifMatch = onlyIfUnchanged ? dataSource.getETag() : null; @@ -161,25 +161,25 @@ Mono> createOrUpdateDataSourceWithResponse(DataSource dataS * @param dataSource The definition of the dataSource to create. * @return a Mono which performs the network request upon subscription. */ - public Mono createDataSource(DataSource dataSource) { + public Mono createDataSource(SearchIndexerDataSource dataSource) { return createDataSourceWithResponse(dataSource, null).map(Response::getValue); } /** * Creates a new Azure Cognitive Search data source * - * @param dataSource The definition of the {@link DataSource} to create. + * @param dataSource The definition of the {@link SearchIndexerDataSource} to create. * @param requestOptions Additional parameters for the operation. Contains the tracking ID sent with the request to * help with debugging. * @return a Mono which performs the network request upon subscription. 
*/ - public Mono> createDataSourceWithResponse(DataSource dataSource, + public Mono> createDataSourceWithResponse(SearchIndexerDataSource dataSource, RequestOptions requestOptions) { return withContext(context -> this.createDataSourceWithResponse(dataSource, requestOptions, context)); } - Mono> createDataSourceWithResponse(DataSource dataSource, RequestOptions requestOptions, - Context context) { + Mono> createDataSourceWithResponse(SearchIndexerDataSource dataSource, + RequestOptions requestOptions, Context context) { try { return restClient.dataSources() .createWithRestResponseAsync(dataSource, requestOptions, context) @@ -192,27 +192,28 @@ Mono> createDataSourceWithResponse(DataSource dataSource, R /** * Retrieves a DataSource from an Azure Cognitive Search service. * - * @param dataSourceName the name of the {@link DataSource} to retrieve. + * @param dataSourceName the name of the {@link SearchIndexerDataSource} to retrieve. * @return the DataSource. */ - public Mono getDataSource(String dataSourceName) { + public Mono getDataSource(String dataSourceName) { return getDataSourceWithResponse(dataSourceName, null).map(Response::getValue); } /** * Retrieves a DataSource from an Azure Cognitive Search service. * - * @param dataSourceName the name of the {@link DataSource} to retrieve. + * @param dataSourceName the name of the {@link SearchIndexerDataSource} to retrieve. * @param requestOptions additional parameters for the operation. Contains the tracking ID sent with the request to * help with debugging. * @return a response containing the DataSource. 
*/ - public Mono> getDataSourceWithResponse(String dataSourceName, RequestOptions requestOptions) { + public Mono> getDataSourceWithResponse(String dataSourceName, + RequestOptions requestOptions) { return withContext(context -> getDataSourceWithResponse(dataSourceName, requestOptions, context)); } - Mono> getDataSourceWithResponse(String dataSourceName, RequestOptions requestOptions, - Context context) { + Mono> getDataSourceWithResponse(String dataSourceName, + RequestOptions requestOptions, Context context) { try { return restClient.dataSources() .getWithRestResponseAsync(dataSourceName, requestOptions, context) @@ -227,7 +228,7 @@ Mono> getDataSourceWithResponse(String dataSourceName, Requ * * @return a list of DataSources */ - public PagedFlux listDataSources() { + public PagedFlux listDataSources() { return listDataSources(null, null); } @@ -240,7 +241,7 @@ public PagedFlux listDataSources() { * help with debugging. * @return a list of DataSources */ - public PagedFlux listDataSources(String select, RequestOptions requestOptions) { + public PagedFlux listDataSources(String select, RequestOptions requestOptions) { try { return new PagedFlux<>(() -> withContext(context -> this.listDataSourcesWithResponse(select, requestOptions, context))); @@ -249,7 +250,7 @@ public PagedFlux listDataSources(String select, RequestOptions reque } } - PagedFlux listDataSources(String select, RequestOptions requestOptions, Context context) { + PagedFlux listDataSources(String select, RequestOptions requestOptions, Context context) { try { return new PagedFlux<>(() -> this.listDataSourcesWithResponse(select, requestOptions, context)); } catch (RuntimeException ex) { @@ -257,8 +258,8 @@ PagedFlux listDataSources(String select, RequestOptions requestOptio } } - private Mono> listDataSourcesWithResponse(String select, RequestOptions requestOptions, - Context context) { + private Mono> listDataSourcesWithResponse(String select, + RequestOptions requestOptions, Context context) { return 
restClient.dataSources() .listWithRestResponseAsync(select, requestOptions, context) .map(response -> new PagedResponseBase<>( @@ -273,7 +274,7 @@ private Mono> listDataSourcesWithResponse(String selec /** * Delete a DataSource * - * @param dataSourceName the name of the {@link DataSource} for deletion + * @param dataSourceName the name of the {@link SearchIndexerDataSource} for deletion * @return a void Mono */ public Mono deleteDataSource(String dataSourceName) { @@ -284,15 +285,15 @@ public Mono deleteDataSource(String dataSourceName) { /** * Deletes an Azure Cognitive Search data source. * - * @param dataSource The {@link DataSource} to delete. + * @param dataSource The {@link SearchIndexerDataSource} to delete. * @param onlyIfUnchanged {@code true} to delete if the {@code dataSource} is the same as the current service value. * {@code false} to always delete existing value. * @param requestOptions additional parameters for the operation. Contains the tracking ID sent with the request to * help with debugging * @return a mono response */ - public Mono> deleteDataSourceWithResponse(DataSource dataSource, boolean onlyIfUnchanged, - RequestOptions requestOptions) { + public Mono> deleteDataSourceWithResponse(SearchIndexerDataSource dataSource, + boolean onlyIfUnchanged, RequestOptions requestOptions) { Objects.requireNonNull(dataSource, "'DataSource' cannot be null"); String etag = onlyIfUnchanged ? dataSource.getETag() : null; return withContext(context -> @@ -319,7 +320,7 @@ Mono> deleteDataSourceWithResponse(String dataSourceName, String * @param indexer definition of the indexer to create. * @return the created Indexer. */ - public Mono createIndexer(Indexer indexer) { + public Mono createIndexer(SearchIndexer indexer) { return createIndexerWithResponse(indexer, null).map(Response::getValue); } @@ -331,11 +332,13 @@ public Mono createIndexer(Indexer indexer) { * help with debugging * @return a response containing the created Indexer. 
*/ - public Mono> createIndexerWithResponse(Indexer indexer, RequestOptions requestOptions) { + public Mono> createIndexerWithResponse(SearchIndexer indexer, + RequestOptions requestOptions) { return withContext(context -> createIndexerWithResponse(indexer, requestOptions, context)); } - Mono> createIndexerWithResponse(Indexer indexer, RequestOptions requestOptions, Context context) { + Mono> createIndexerWithResponse(SearchIndexer indexer, RequestOptions requestOptions, + Context context) { try { return restClient.indexers() .createWithRestResponseAsync(indexer, requestOptions, context) @@ -351,27 +354,27 @@ Mono> createIndexerWithResponse(Indexer indexer, RequestOption * @param indexer The definition of the indexer to create or update. * @return a response containing the created Indexer. */ - public Mono createOrUpdateIndexer(Indexer indexer) { + public Mono createOrUpdateIndexer(SearchIndexer indexer) { return createOrUpdateIndexerWithResponse(indexer, false, null).map(Response::getValue); } /** * Creates a new Azure Cognitive Search indexer or updates an indexer if it already exists. * - * @param indexer the definition of the {@link Indexer} to create or update + * @param indexer the definition of the {@link SearchIndexer} to create or update * @param onlyIfUnchanged {@code true} to update if the {@code indexer} is the same as the current service value. * {@code false} to always update existing value. * @param requestOptions additional parameters for the operation Contains the tracking ID sent with the request to * help with debugging * @return a response containing the created Indexer. 
*/ - public Mono> createOrUpdateIndexerWithResponse(Indexer indexer, boolean onlyIfUnchanged, - RequestOptions requestOptions) { + public Mono> createOrUpdateIndexerWithResponse(SearchIndexer indexer, + boolean onlyIfUnchanged, RequestOptions requestOptions) { return withContext(context -> createOrUpdateIndexerWithResponse(indexer, onlyIfUnchanged, requestOptions, context)); } - Mono> createOrUpdateIndexerWithResponse(Indexer indexer, boolean onlyIfUnchanged, + Mono> createOrUpdateIndexerWithResponse(SearchIndexer indexer, boolean onlyIfUnchanged, RequestOptions requestOptions, Context context) { Objects.requireNonNull(indexer, "'Indexer' cannot be 'null'"); String ifMatch = onlyIfUnchanged ? indexer.getETag() : null; @@ -391,7 +394,7 @@ Mono> createOrUpdateIndexerWithResponse(Indexer indexer, boole * @param indexerName the name of the indexer to retrieve * @return the indexer. */ - public Mono getIndexer(String indexerName) { + public Mono getIndexer(String indexerName) { return getIndexerWithResponse(indexerName, null).map(Response::getValue); } @@ -403,11 +406,12 @@ public Mono getIndexer(String indexerName) { * help with debugging * @return a response containing the indexer. */ - public Mono> getIndexerWithResponse(String indexerName, RequestOptions requestOptions) { + public Mono> getIndexerWithResponse(String indexerName, RequestOptions requestOptions) { return withContext(context -> getIndexerWithResponse(indexerName, requestOptions, context)); } - Mono> getIndexerWithResponse(String indexerName, RequestOptions requestOptions, Context context) { + Mono> getIndexerWithResponse(String indexerName, RequestOptions requestOptions, + Context context) { try { return restClient.indexers() .getWithRestResponseAsync(indexerName, requestOptions, context) @@ -420,7 +424,7 @@ Mono> getIndexerWithResponse(String indexerName, RequestOption /** * @return all Indexers from the Search service. 
 */
-    public PagedFlux<Indexer> listIndexers() {
+    public PagedFlux<SearchIndexer> listIndexers() {
         return listIndexers(null, null);
     }
 
@@ -432,7 +436,7 @@ public PagedFlux<Indexer> listIndexers() {
      * @param requestOptions Additional parameters for the operation.
      * @return a response containing all Indexers from the Search service.
      */
-    public PagedFlux<Indexer> listIndexers(String select, RequestOptions requestOptions) {
+    public PagedFlux<SearchIndexer> listIndexers(String select, RequestOptions requestOptions) {
         try {
             return new PagedFlux<>(() ->
                 withContext(context -> this.listIndexersWithResponse(select, requestOptions, context)));
@@ -441,7 +445,7 @@ public PagedFlux<Indexer> listIndexers(String select, RequestOptions requestOpti
         }
     }
 
-    PagedFlux<Indexer> listIndexers(String select, RequestOptions requestOptions, Context context) {
+    PagedFlux<SearchIndexer> listIndexers(String select, RequestOptions requestOptions, Context context) {
         try {
             return new PagedFlux<>(() -> this.listIndexersWithResponse(select, requestOptions, context));
         } catch (RuntimeException ex) {
@@ -449,7 +453,7 @@ PagedFlux<Indexer> listIndexers(String select, RequestOptions requestOptions, Co
         }
     }
 
-    private Mono<PagedResponse<Indexer>> listIndexersWithResponse(String select, RequestOptions requestOptions,
+    private Mono<PagedResponse<SearchIndexer>> listIndexersWithResponse(String select, RequestOptions requestOptions,
         Context context) {
         return restClient.indexers()
             .listWithRestResponseAsync(select, requestOptions, context)
@@ -476,14 +480,14 @@ public Mono<Void> deleteIndexer(String indexerName) {
     /**
      * Deletes an Azure Cognitive Search indexer.
      *
-     * @param indexer the {@link Indexer} to delete
+     * @param indexer the {@link SearchIndexer} to delete
      * @param onlyIfUnchanged {@code true} to delete if the {@code indexer} is the same as the current service value.
      * {@code false} to always delete existing value.
      * @param requestOptions additional parameters for the operation. Contains the tracking ID sent with the request to
      * help with debugging
      * @return a response signalling completion.
 */
-    public Mono<Response<Void>> deleteIndexerWithResponse(Indexer indexer, boolean onlyIfUnchanged,
+    public Mono<Response<Void>> deleteIndexerWithResponse(SearchIndexer indexer, boolean onlyIfUnchanged,
         RequestOptions requestOptions) {
         Objects.requireNonNull(indexer, "'Indexer' cannot be null");
         String etag = onlyIfUnchanged ? indexer.getETag() : null;
@@ -580,7 +584,7 @@ Mono<Response<Void>> runIndexerWithResponse(String indexerName, RequestOptions r
      * @param indexerName the name of the indexer for which to retrieve status
      * @return the indexer execution info.
      */
-    public Mono<IndexerExecutionInfo> getIndexerStatus(String indexerName) {
+    public Mono<SearchIndexerStatus> getIndexerStatus(String indexerName) {
         return getIndexerStatusWithResponse(indexerName, null).map(Response::getValue);
     }
 
@@ -592,12 +596,12 @@ public Mono<IndexerExecutionInfo> getIndexerStatus(String indexerName) {
      * help with debugging
      * @return a response with the indexer execution info.
      */
-    public Mono<Response<IndexerExecutionInfo>> getIndexerStatusWithResponse(String indexerName,
+    public Mono<Response<SearchIndexerStatus>> getIndexerStatusWithResponse(String indexerName,
         RequestOptions requestOptions) {
         return withContext(context -> getIndexerStatusWithResponse(indexerName, requestOptions, context));
     }
 
-    Mono<Response<IndexerExecutionInfo>> getIndexerStatusWithResponse(String indexerName, RequestOptions requestOptions,
+    Mono<Response<SearchIndexerStatus>> getIndexerStatusWithResponse(String indexerName, RequestOptions requestOptions,
         Context context) {
         try {
             return restClient.indexers()
@@ -614,7 +618,7 @@ Mono<Response<IndexerExecutionInfo>> getIndexerStatusWithResponse(String indexer
      * @param index definition of the index to create.
      * @return the created Index.
      */
-    public Mono<Index> createIndex(Index index) {
+    public Mono<SearchIndex> createIndex(SearchIndex index) {
         return createIndexWithResponse(index, null).map(Response::getValue);
     }
 
@@ -626,11 +630,12 @@ public Mono<Index> createIndex(Index index) {
      * help with debugging
      * @return a response containing the created Index.
 */
-    public Mono<Response<Index>> createIndexWithResponse(Index index, RequestOptions requestOptions) {
+    public Mono<Response<SearchIndex>> createIndexWithResponse(SearchIndex index, RequestOptions requestOptions) {
         return withContext(context -> createIndexWithResponse(index, requestOptions, context));
     }
 
-    Mono<Response<Index>> createIndexWithResponse(Index index, RequestOptions requestOptions, Context context) {
+    Mono<Response<SearchIndex>> createIndexWithResponse(SearchIndex index, RequestOptions requestOptions,
+        Context context) {
         Objects.requireNonNull(index, "'Index' cannot be null");
         try {
             return restClient.indexes()
@@ -647,7 +652,7 @@ Mono<Response<Index>> createIndexWithResponse(Index index, RequestOptions reques
      * @param indexName The name of the index to retrieve
      * @return the Index.
      */
-    public Mono<Index> getIndex(String indexName) {
+    public Mono<SearchIndex> getIndex(String indexName) {
         return getIndexWithResponse(indexName, null).map(Response::getValue);
     }
 
@@ -659,11 +664,11 @@ public Mono<Index> getIndex(String indexName) {
      * help with debugging
      * @return a response containing the Index.
      */
-    public Mono<Response<Index>> getIndexWithResponse(String indexName, RequestOptions requestOptions) {
+    public Mono<Response<SearchIndex>> getIndexWithResponse(String indexName, RequestOptions requestOptions) {
         return withContext(context -> getIndexWithResponse(indexName, requestOptions, context));
     }
 
-    Mono<Response<Index>> getIndexWithResponse(String indexName, RequestOptions requestOptions, Context context) {
+    Mono<Response<SearchIndex>> getIndexWithResponse(String indexName, RequestOptions requestOptions, Context context) {
         try {
             return restClient.indexes()
                 .getWithRestResponseAsync(indexName, requestOptions, context)
@@ -712,7 +717,7 @@ Mono<Response<GetIndexStatisticsResult>> getIndexStatisticsWithResponse(String i
      *
      * @return a reactive response emitting the list of indexes.
      */
-    public PagedFlux<Index> listIndexes() {
+    public PagedFlux<SearchIndex> listIndexes() {
         return listIndexes(null, null);
     }
 
@@ -725,7 +730,7 @@ public PagedFlux<Index> listIndexes() {
      * help with debugging
      * @return a reactive response emitting the list of indexes.
 */
-    public PagedFlux<Index> listIndexes(String select, RequestOptions requestOptions) {
+    public PagedFlux<SearchIndex> listIndexes(String select, RequestOptions requestOptions) {
         try {
             return new PagedFlux<>(() ->
                 withContext(context -> this.listIndexesWithResponse(select, requestOptions, context)));
@@ -734,7 +739,7 @@ public PagedFlux<Index> listIndexes(String select, RequestOptions requestOptions
         }
     }
 
-    PagedFlux<Index> listIndexes(String select, RequestOptions requestOptions, Context context) {
+    PagedFlux<SearchIndex> listIndexes(String select, RequestOptions requestOptions, Context context) {
         try {
             return new PagedFlux<>(() -> this.listIndexesWithResponse(select, requestOptions, context));
         } catch (RuntimeException ex) {
@@ -742,7 +747,7 @@ PagedFlux<Index> listIndexes(String select, RequestOptions requestOptions, Conte
         }
     }
 
-    private Mono<PagedResponse<Index>> listIndexesWithResponse(String select, RequestOptions requestOptions,
+    private Mono<PagedResponse<SearchIndex>> listIndexesWithResponse(String select, RequestOptions requestOptions,
         Context context) {
         return restClient.indexes()
             .listWithRestResponseAsync(select, requestOptions, context)
@@ -758,10 +763,10 @@ private Mono<PagedResponse<Index>> listIndexesWithResponse(String select, Reques
     /**
     * Creates a new Azure Cognitive Search index or updates an index if it already exists.
      *
-     * @param index the definition of the {@link Index} to create or update.
+     * @param index the definition of the {@link SearchIndex} to create or update.
      * @return the index that was created or updated.
*/ - public Mono createOrUpdateIndex(Index index) { + public Mono createOrUpdateIndex(SearchIndex index) { return createOrUpdateIndexWithResponse(index, false, false, null).map(Response::getValue); } @@ -779,13 +784,13 @@ public Mono createOrUpdateIndex(Index index) { * help with debugging * @return a response containing the index that was created or updated */ - public Mono> createOrUpdateIndexWithResponse(Index index, boolean allowIndexDowntime, + public Mono> createOrUpdateIndexWithResponse(SearchIndex index, boolean allowIndexDowntime, boolean onlyIfUnchanged, RequestOptions requestOptions) { return withContext(context -> createOrUpdateIndexWithResponse(index, allowIndexDowntime, onlyIfUnchanged, requestOptions, context)); } - Mono> createOrUpdateIndexWithResponse(Index index, boolean allowIndexDowntime, + Mono> createOrUpdateIndexWithResponse(SearchIndex index, boolean allowIndexDowntime, boolean onlyIfUnchanged, RequestOptions requestOptions, Context context) { try { Objects.requireNonNull(index, "'Index' cannot null."); @@ -812,14 +817,14 @@ public Mono deleteIndex(String indexName) { /** * Deletes an Azure Cognitive Search index and all the documents it contains. * - * @param index the {@link Index} to delete. + * @param index the {@link SearchIndex} to delete. * @param onlyIfUnchanged {@code true} to delete if the {@code index} is the same as the current service value. * {@code false} to always delete existing value. * @param requestOptions additional parameters for the operation. Contains the tracking ID sent with the request to * help with debugging * @return a response signalling completion. */ - public Mono> deleteIndexWithResponse(Index index, boolean onlyIfUnchanged, + public Mono> deleteIndexWithResponse(SearchIndex index, boolean onlyIfUnchanged, RequestOptions requestOptions) { Objects.requireNonNull(index, "'Index' cannot be null."); String etag = onlyIfUnchanged ? 
index.getETag() : null; @@ -844,7 +849,7 @@ Mono> deleteIndexWithResponse(String indexName, String etag, Requ * @param analyzeRequest the text and analyzer or analysis components to test * @return analyze result. */ - public PagedFlux analyzeText(String indexName, AnalyzeRequest analyzeRequest) { + public PagedFlux analyzeText(String indexName, AnalyzeRequest analyzeRequest) { return analyzeText(indexName, analyzeRequest, null); } @@ -857,7 +862,7 @@ public PagedFlux analyzeText(String indexName, AnalyzeRequest analyze * help with debugging * @return a response containing analyze result. */ - public PagedFlux analyzeText(String indexName, AnalyzeRequest analyzeRequest, + public PagedFlux analyzeText(String indexName, AnalyzeRequest analyzeRequest, RequestOptions requestOptions) { try { return new PagedFlux<>(() -> @@ -867,8 +872,8 @@ public PagedFlux analyzeText(String indexName, AnalyzeRequest analyze } } - PagedFlux analyzeText(String indexName, AnalyzeRequest analyzeRequest, RequestOptions requestOptions, - Context context) { + PagedFlux analyzeText(String indexName, AnalyzeRequest analyzeRequest, + RequestOptions requestOptions, Context context) { try { return new PagedFlux<>(() -> analyzeTextWithResponse(indexName, analyzeRequest, requestOptions, context)); } catch (RuntimeException ex) { @@ -876,8 +881,8 @@ PagedFlux analyzeText(String indexName, AnalyzeRequest analyzeRequest } } - private Mono> analyzeTextWithResponse(String indexName, AnalyzeRequest analyzeRequest, - RequestOptions requestOptions, Context context) { + private Mono> analyzeTextWithResponse(String indexName, + AnalyzeRequest analyzeRequest, RequestOptions requestOptions, Context context) { return restClient.indexes() .analyzeWithRestResponseAsync(indexName, analyzeRequest, requestOptions, context) .map(response -> new PagedResponseBase<>( @@ -895,7 +900,7 @@ private Mono> analyzeTextWithResponse(String indexName, * @param skillset definition of the skillset containing one or more cognitive 
skills * @return the created Skillset. */ - public Mono createSkillset(Skillset skillset) { + public Mono createSkillset(SearchIndexerSkillset skillset) { return createSkillsetWithResponse(skillset, null).map(Response::getValue); } @@ -907,11 +912,13 @@ public Mono createSkillset(Skillset skillset) { * help with debugging * @return a response containing the created Skillset. */ - public Mono> createSkillsetWithResponse(Skillset skillset, RequestOptions requestOptions) { + public Mono> createSkillsetWithResponse(SearchIndexerSkillset skillset, + RequestOptions requestOptions) { return withContext(context -> createSkillsetWithResponse(skillset, requestOptions, context)); } - Mono> createSkillsetWithResponse(Skillset skillset, RequestOptions requestOptions, + Mono> createSkillsetWithResponse(SearchIndexerSkillset skillset, + RequestOptions requestOptions, Context context) { Objects.requireNonNull(skillset, "'Skillset' cannot be null."); try { @@ -929,7 +936,7 @@ Mono> createSkillsetWithResponse(Skillset skillset, RequestOp * @param skillsetName the name of the skillset to retrieve * @return the Skillset. */ - public Mono getSkillset(String skillsetName) { + public Mono getSkillset(String skillsetName) { return getSkillsetWithResponse(skillsetName, null).map(Response::getValue); } @@ -941,11 +948,12 @@ public Mono getSkillset(String skillsetName) { * help with debugging * @return a response containing the Skillset. 
*/ - public Mono> getSkillsetWithResponse(String skillsetName, RequestOptions requestOptions) { + public Mono> getSkillsetWithResponse(String skillsetName, + RequestOptions requestOptions) { return withContext(context -> getSkillsetWithResponse(skillsetName, requestOptions, context)); } - Mono> getSkillsetWithResponse(String skillsetName, RequestOptions requestOptions, + Mono> getSkillsetWithResponse(String skillsetName, RequestOptions requestOptions, Context context) { try { return this.restClient.skillsets() @@ -961,7 +969,7 @@ Mono> getSkillsetWithResponse(String skillsetName, RequestOpt * * @return a reactive response emitting the list of skillsets. */ - public PagedFlux listSkillsets() { + public PagedFlux listSkillsets() { return listSkillsets(null, null); } @@ -974,7 +982,7 @@ public PagedFlux listSkillsets() { * help with debugging * @return a reactive response emitting the list of skillsets. */ - public PagedFlux listSkillsets(String select, RequestOptions requestOptions) { + public PagedFlux listSkillsets(String select, RequestOptions requestOptions) { try { return new PagedFlux<>(() -> withContext(context -> listSkillsetsWithResponse(select, requestOptions, context))); @@ -983,7 +991,7 @@ public PagedFlux listSkillsets(String select, RequestOptions requestOp } } - PagedFlux listSkillsets(String select, RequestOptions requestOptions, Context context) { + PagedFlux listSkillsets(String select, RequestOptions requestOptions, Context context) { try { return new PagedFlux<>(() -> listSkillsetsWithResponse(select, requestOptions, context)); } catch (RuntimeException ex) { @@ -991,7 +999,7 @@ PagedFlux listSkillsets(String select, RequestOptions requestOptions, } } - private Mono> listSkillsetsWithResponse(String select, + private Mono> listSkillsetsWithResponse(String select, RequestOptions requestOptions, Context context) { return this.restClient.skillsets() @@ -1011,7 +1019,7 @@ private Mono> listSkillsetsWithResponse(String select, * @param skillset the 
definition of the skillset to create or update * @return the skillset that was created or updated. */ - public Mono createOrUpdateSkillset(Skillset skillset) { + public Mono createOrUpdateSkillset(SearchIndexerSkillset skillset) { return createOrUpdateSkillsetWithResponse(skillset, false, null).map(Response::getValue); } @@ -1025,14 +1033,14 @@ public Mono createOrUpdateSkillset(Skillset skillset) { * help with debugging * @return a response containing the skillset that was created or updated. */ - public Mono> createOrUpdateSkillsetWithResponse(Skillset skillset, + public Mono> createOrUpdateSkillsetWithResponse(SearchIndexerSkillset skillset, boolean onlyIfUnchanged, RequestOptions requestOptions) { return withContext(context -> createOrUpdateSkillsetWithResponse(skillset, onlyIfUnchanged, requestOptions, context)); } - Mono> createOrUpdateSkillsetWithResponse(Skillset skillset, boolean onlyIfUnchanged, - RequestOptions requestOptions, Context context) { + Mono> createOrUpdateSkillsetWithResponse(SearchIndexerSkillset skillset, + boolean onlyIfUnchanged, RequestOptions requestOptions, Context context) { Objects.requireNonNull(skillset, "'Skillset' cannot be null."); String ifMatch = onlyIfUnchanged ? skillset.getETag() : null; try { @@ -1059,14 +1067,14 @@ public Mono deleteSkillset(String skillsetName) { /** * Deletes a cognitive skillset in an Azure Cognitive Search service. * - * @param skillset the {@link Skillset} to delete. + * @param skillset the {@link SearchIndexerSkillset} to delete. * @param onlyIfUnchanged {@code true} to delete if the {@code skillset} is the same as the current service value. * {@code false} to always delete existing value. * @param requestOptions additional parameters for the operation. Contains the tracking ID sent with the request to * help with debugging * @return a response signalling completion. 
*/ - public Mono> deleteSkillsetWithResponse(Skillset skillset, boolean onlyIfUnchanged, + public Mono> deleteSkillsetWithResponse(SearchIndexerSkillset skillset, boolean onlyIfUnchanged, RequestOptions requestOptions) { Objects.requireNonNull(skillset, "'Skillset' cannot be null."); String etag = onlyIfUnchanged ? skillset.getETag() : null; diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/SearchServiceClient.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/SearchServiceClient.java index cfda27e5bda5..b96f5b2b65dd 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/SearchServiceClient.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/SearchServiceClient.java @@ -8,16 +8,16 @@ import com.azure.core.http.rest.Response; import com.azure.core.util.Context; import com.azure.search.documents.models.AnalyzeRequest; -import com.azure.search.documents.models.DataSource; +import com.azure.search.documents.models.AnalyzedTokenInfo; import com.azure.search.documents.models.GetIndexStatisticsResult; -import com.azure.search.documents.models.Index; -import com.azure.search.documents.models.Indexer; -import com.azure.search.documents.models.IndexerExecutionInfo; import com.azure.search.documents.models.RequestOptions; +import com.azure.search.documents.models.SearchIndex; +import com.azure.search.documents.models.SearchIndexer; +import com.azure.search.documents.models.SearchIndexerDataSource; +import com.azure.search.documents.models.SearchIndexerSkillset; +import com.azure.search.documents.models.SearchIndexerStatus; import com.azure.search.documents.models.ServiceStatistics; -import com.azure.search.documents.models.Skillset; import com.azure.search.documents.models.SynonymMap; -import com.azure.search.documents.models.TokenInfo; /** * Synchronous Client to manage and query indexes, as well as manage other resources, on a Cognitive 
Search service @@ -74,14 +74,14 @@ public String getEndpoint() { * @param dataSource The definition of the data source to create or update. * @return the data source that was created or updated. */ - public DataSource createOrUpdateDataSource(DataSource dataSource) { + public SearchIndexerDataSource createOrUpdateDataSource(SearchIndexerDataSource dataSource) { return createOrUpdateDataSourceWithResponse(dataSource, false, null, Context.NONE).getValue(); } /** * Creates a new Azure Cognitive Search data source or updates a data source if it already exists. * - * @param dataSource the {@link DataSource} to create or update + * @param dataSource the {@link SearchIndexerDataSource} to create or update * @param onlyIfUnchanged {@code true} to update if the {@code dataSource} is the same as the current service value. * {@code false} to always update existing value. * @param requestOptions additional parameters for the operation. Contains the tracking ID sent with the request to @@ -89,8 +89,8 @@ public DataSource createOrUpdateDataSource(DataSource dataSource) { * @param context additional context that is passed through the HTTP pipeline during the service call * @return a response containing data source that was created or updated. */ - public Response createOrUpdateDataSourceWithResponse(DataSource dataSource, boolean onlyIfUnchanged, - RequestOptions requestOptions, Context context) { + public Response createOrUpdateDataSourceWithResponse(SearchIndexerDataSource dataSource, + boolean onlyIfUnchanged, RequestOptions requestOptions, Context context) { return asyncClient.createOrUpdateDataSourceWithResponse(dataSource, onlyIfUnchanged, requestOptions, context) .block(); } @@ -101,7 +101,7 @@ public Response createOrUpdateDataSourceWithResponse(DataSource data * @param dataSource The definition of the data source to create * @return the data source that was created. 
*/ - public DataSource createDataSource(DataSource dataSource) { + public SearchIndexerDataSource createDataSource(SearchIndexerDataSource dataSource) { return createDataSourceWithResponse(dataSource, null, Context.NONE).getValue(); } @@ -114,8 +114,8 @@ public DataSource createDataSource(DataSource dataSource) { * @param context additional context that is passed through the HTTP pipeline during the service call * @return a response containing data source that was created. */ - public Response createDataSourceWithResponse(DataSource dataSource, RequestOptions requestOptions, - Context context) { + public Response createDataSourceWithResponse(SearchIndexerDataSource dataSource, + RequestOptions requestOptions, Context context) { return asyncClient.createDataSourceWithResponse(dataSource, requestOptions, context).block(); } @@ -125,7 +125,7 @@ public Response createDataSourceWithResponse(DataSource dataSource, * @param dataSourceName the name of the data source to retrieve * @return the DataSource. */ - public DataSource getDataSource(String dataSourceName) { + public SearchIndexerDataSource getDataSource(String dataSourceName) { return getDataSourceWithResponse(dataSourceName, null, Context.NONE).getValue(); } @@ -138,8 +138,8 @@ public DataSource getDataSource(String dataSourceName) { * @param context additional context that is passed through the HTTP pipeline during the service call * @return a response containing the DataSource. 
*/ - public Response getDataSourceWithResponse(String dataSourceName, RequestOptions requestOptions, - Context context) { + public Response getDataSourceWithResponse(String dataSourceName, + RequestOptions requestOptions, Context context) { return asyncClient.getDataSourceWithResponse(dataSourceName, requestOptions, context).block(); } @@ -148,7 +148,7 @@ public Response getDataSourceWithResponse(String dataSourceName, Req * * @return a list of DataSources */ - public PagedIterable listDataSources() { + public PagedIterable listDataSources() { return listDataSources(null, null, Context.NONE); } @@ -162,7 +162,8 @@ public PagedIterable listDataSources() { * @param context Additional context that is passed through the HTTP pipeline during the service call. * @return a response containing the list of DataSources. */ - public PagedIterable listDataSources(String select, RequestOptions requestOptions, Context context) { + public PagedIterable listDataSources(String select, RequestOptions requestOptions, + Context context) { return new PagedIterable<>(asyncClient.listDataSources(select, requestOptions, context)); } @@ -172,13 +173,13 @@ public PagedIterable listDataSources(String select, RequestOptions r * @param dataSourceName the name of the data source to be deleted */ public void deleteDataSource(String dataSourceName) { - deleteDataSourceWithResponse(new DataSource().setName(dataSourceName), false, null, Context.NONE); + deleteDataSourceWithResponse(new SearchIndexerDataSource().setName(dataSourceName), false, null, Context.NONE); } /** * Delete a DataSource with Response * - * @param dataSource the {@link DataSource} to be deleted. + * @param dataSource the {@link SearchIndexerDataSource} to be deleted. * @param onlyIfUnchanged {@code true} to delete if the {@code dataSource} is the same as the current service value. * {@code false} to always delete existing value. * @param requestOptions additional parameters for the operation. 
Contains the tracking ID sent with the request to @@ -186,7 +187,7 @@ public void deleteDataSource(String dataSourceName) { * @param context additional context that is passed through the HTTP pipeline during the service call * @return an empty response */ - public Response deleteDataSourceWithResponse(DataSource dataSource, boolean onlyIfUnchanged, + public Response deleteDataSourceWithResponse(SearchIndexerDataSource dataSource, boolean onlyIfUnchanged, RequestOptions requestOptions, Context context) { String etag = onlyIfUnchanged ? dataSource.getETag() : null; return asyncClient.deleteDataSourceWithResponse(dataSource.getName(), etag, requestOptions, context).block(); @@ -198,7 +199,7 @@ public Response deleteDataSourceWithResponse(DataSource dataSource, boolea * @param indexer definition of the indexer to create. * @return the created Indexer. */ - public Indexer createIndexer(Indexer indexer) { + public SearchIndexer createIndexer(SearchIndexer indexer) { return createIndexerWithResponse(indexer, null, Context.NONE).getValue(); } @@ -211,7 +212,7 @@ public Indexer createIndexer(Indexer indexer) { * @param context additional context that is passed through the HTTP pipeline during the service call * @return a response containing the created Indexer. */ - public Response createIndexerWithResponse(Indexer indexer, RequestOptions requestOptions, + public Response createIndexerWithResponse(SearchIndexer indexer, RequestOptions requestOptions, Context context) { return asyncClient.createIndexerWithResponse(indexer, requestOptions, context).block(); } @@ -222,14 +223,14 @@ public Response createIndexerWithResponse(Indexer indexer, RequestOptio * @param indexer The definition of the indexer to create or update. * @return a response containing the created Indexer. 
*/ - public Indexer createOrUpdateIndexer(Indexer indexer) { + public SearchIndexer createOrUpdateIndexer(SearchIndexer indexer) { return createOrUpdateIndexerWithResponse(indexer, false, null, Context.NONE).getValue(); } /** * Creates a new Azure Cognitive Search indexer or updates an indexer if it already exists. * - * @param indexer The {@link Indexer} to create or update. + * @param indexer The {@link SearchIndexer} to create or update. * @param onlyIfUnchanged {@code true} to update if the {@code indexer} is the same as the current service value. * {@code false} to always update existing value. * @param requestOptions additional parameters for the operation. Contains the tracking ID sent with the request to @@ -237,7 +238,7 @@ public Indexer createOrUpdateIndexer(Indexer indexer) { * @param context additional context that is passed through the HTTP pipeline during the service call * @return A response object containing the Indexer. */ - public Response createOrUpdateIndexerWithResponse(Indexer indexer, boolean onlyIfUnchanged, + public Response createOrUpdateIndexerWithResponse(SearchIndexer indexer, boolean onlyIfUnchanged, RequestOptions requestOptions, Context context) { return asyncClient.createOrUpdateIndexerWithResponse(indexer, onlyIfUnchanged, requestOptions, context).block(); } @@ -247,7 +248,7 @@ public Response createOrUpdateIndexerWithResponse(Indexer indexer, bool * * @return all Indexers from the Search service. */ - public PagedIterable listIndexers() { + public PagedIterable listIndexers() { return listIndexers(null, null, Context.NONE); } @@ -260,7 +261,7 @@ public PagedIterable listIndexers() { * @param context additional context that is passed through the HTTP pipeline during the service call * @return all Indexers from the Search service. 
*/ - public PagedIterable listIndexers(String select, RequestOptions requestOptions, Context context) { + public PagedIterable listIndexers(String select, RequestOptions requestOptions, Context context) { return new PagedIterable<>(asyncClient.listIndexers(select, requestOptions, context)); } @@ -270,7 +271,7 @@ public PagedIterable listIndexers(String select, RequestOptions request * @param indexerName the name of the indexer to retrieve * @return the indexer. */ - public Indexer getIndexer(String indexerName) { + public SearchIndexer getIndexer(String indexerName) { return getIndexerWithResponse(indexerName, null, Context.NONE).getValue(); } @@ -283,7 +284,7 @@ public Indexer getIndexer(String indexerName) { * @param context additional context that is passed through the HTTP pipeline during the service call * @return a response containing the indexer. */ - public Response getIndexerWithResponse(String indexerName, RequestOptions requestOptions, + public Response getIndexerWithResponse(String indexerName, RequestOptions requestOptions, Context context) { return asyncClient.getIndexerWithResponse(indexerName, requestOptions, context).block(); } @@ -294,13 +295,13 @@ public Response getIndexerWithResponse(String indexerName, RequestOptio * @param indexerName the name of the indexer to delete */ public void deleteIndexer(String indexerName) { - deleteIndexerWithResponse(new Indexer().setName(indexerName), false, null, Context.NONE); + deleteIndexerWithResponse(new SearchIndexer().setName(indexerName), false, null, Context.NONE); } /** * Deletes an Azure Cognitive Search indexer. * - * @param indexer the search {@link Indexer} + * @param indexer the search {@link SearchIndexer} * @param onlyIfUnchanged {@code true} to delete if the {@code indexer} is the same as the current service value. * {@code false} to always delete existing value. * @param requestOptions additional parameters for the operation. 
Contains the tracking ID sent with the request to @@ -308,7 +309,7 @@ public void deleteIndexer(String indexerName) { * @param context the context * @return a response signalling completion. */ - public Response deleteIndexerWithResponse(Indexer indexer, boolean onlyIfUnchanged, + public Response deleteIndexerWithResponse(SearchIndexer indexer, boolean onlyIfUnchanged, RequestOptions requestOptions, Context context) { String etag = onlyIfUnchanged ? indexer.getETag() : null; return asyncClient.deleteIndexerWithResponse(indexer.getName(), etag, requestOptions, context).block(); @@ -364,7 +365,7 @@ public Response runIndexerWithResponse(String indexerName, RequestOptions * @param indexerName the name of the indexer for which to retrieve status * @return a response with the indexer execution info. */ - public IndexerExecutionInfo getIndexerStatus(String indexerName) { + public SearchIndexerStatus getIndexerStatus(String indexerName) { return getIndexerStatusWithResponse(indexerName, null, Context.NONE).getValue(); } @@ -377,7 +378,7 @@ public IndexerExecutionInfo getIndexerStatus(String indexerName) { * @param context additional context that is passed through the HTTP pipeline during the service call * @return a response with the indexer execution info. */ - public Response getIndexerStatusWithResponse(String indexerName, + public Response getIndexerStatusWithResponse(String indexerName, RequestOptions requestOptions, Context context) { return asyncClient.getIndexerStatusWithResponse(indexerName, requestOptions, context).block(); } @@ -388,7 +389,7 @@ public Response getIndexerStatusWithResponse(String indexe * @param index definition of the index to create * @return the created Index. 
*/ - public Index createIndex(Index index) { + public SearchIndex createIndex(SearchIndex index) { return createIndexWithResponse(index, null, Context.NONE).getValue(); } @@ -401,7 +402,8 @@ public Index createIndex(Index index) { * @param context additional context that is passed through the HTTP pipeline during the service call * @return a response containing the created Index. */ - public Response createIndexWithResponse(Index index, RequestOptions requestOptions, Context context) { + public Response createIndexWithResponse(SearchIndex index, RequestOptions requestOptions, + Context context) { return asyncClient.createIndexWithResponse(index, requestOptions, context).block(); } @@ -411,7 +413,7 @@ public Response createIndexWithResponse(Index index, RequestOptions reque * @param indexName the name of the index to retrieve * @return the Index. */ - public Index getIndex(String indexName) { + public SearchIndex getIndex(String indexName) { return getIndexWithResponse(indexName, null, Context.NONE).getValue(); } @@ -424,7 +426,8 @@ public Index getIndex(String indexName) { * @param context additional context that is passed through the HTTP pipeline during the service call * @return a response containing the Index. */ - public Response getIndexWithResponse(String indexName, RequestOptions requestOptions, Context context) { + public Response getIndexWithResponse(String indexName, RequestOptions requestOptions, + Context context) { return asyncClient.getIndexWithResponse(indexName, requestOptions, context).block(); } @@ -457,7 +460,7 @@ public Response getIndexStatisticsWithResponse(String * * @return the list of indexes. */ - public PagedIterable listIndexes() { + public PagedIterable listIndexes() { return listIndexes(null, null, Context.NONE); } @@ -471,7 +474,7 @@ public PagedIterable listIndexes() { * @param context additional context that is passed through the HTTP pipeline during the service call * @return the list of indexes. 
*/ - public PagedIterable listIndexes(String select, RequestOptions requestOptions, Context context) { + public PagedIterable listIndexes(String select, RequestOptions requestOptions, Context context) { return new PagedIterable<>(asyncClient.listIndexes(select, requestOptions, context)); } @@ -481,14 +484,14 @@ public PagedIterable listIndexes(String select, RequestOptions requestOpt * @param index the definition of the index to create or update * @return the index that was created or updated. */ - public Index createOrUpdateIndex(Index index) { + public SearchIndex createOrUpdateIndex(SearchIndex index) { return createOrUpdateIndexWithResponse(index, false, false, null, Context.NONE).getValue(); } /** * Creates a new Azure Cognitive Search index or updates an index if it already exists. * - * @param index the {@link Index} to create or update + * @param index the {@link SearchIndex} to create or update * @param allowIndexDowntime allows new analyzers, tokenizers, token filters, or char filters to be added to an * index by taking the index offline for at least a few seconds. This temporarily causes indexing and query requests * to fail. Performance and write availability of the index can be impaired for several minutes after the index is @@ -500,7 +503,7 @@ public Index createOrUpdateIndex(Index index) { * @param context additional context that is passed through the HTTP pipeline during the service call * @return a response containing the Index that was created or updated. 
*/ - public Response createOrUpdateIndexWithResponse(Index index, boolean allowIndexDowntime, + public Response createOrUpdateIndexWithResponse(SearchIndex index, boolean allowIndexDowntime, boolean onlyIfUnchanged, RequestOptions requestOptions, Context context) { return asyncClient.createOrUpdateIndexWithResponse(index, allowIndexDowntime, onlyIfUnchanged, requestOptions, context).block(); @@ -512,13 +515,13 @@ public Response createOrUpdateIndexWithResponse(Index index, boolean allo * @param indexName the name of the index to delete */ public void deleteIndex(String indexName) { - deleteIndexWithResponse(new Index().setName(indexName), false, null, Context.NONE); + deleteIndexWithResponse(new SearchIndex().setName(indexName), false, null, Context.NONE); } /** * Deletes an Azure Cognitive Search index and all the documents it contains. * - * @param index the Search {@link Index} to delete. + * @param index the Search {@link SearchIndex} to delete. * @param onlyIfUnchanged {@code true} to delete if the {@code index} is the same as the current service value. * {@code false} to always delete existing value. * @param requestOptions additional parameters for the operation. Contains the tracking ID sent with the request to @@ -526,7 +529,7 @@ public void deleteIndex(String indexName) { * @param context additional context that is passed through the Http pipeline during the service call * @return a response signalling completion. */ - public Response deleteIndexWithResponse(Index index, boolean onlyIfUnchanged, + public Response deleteIndexWithResponse(SearchIndex index, boolean onlyIfUnchanged, RequestOptions requestOptions, Context context) { String etag = onlyIfUnchanged ? 
index.getETag() : null; return asyncClient.deleteIndexWithResponse(index.getName(), etag, requestOptions, context).block(); @@ -539,7 +542,7 @@ public Response deleteIndexWithResponse(Index index, boolean onlyIfUnchang * @param analyzeRequest the text and analyzer or analysis components to test * @return analyze result. */ - public PagedIterable analyzeText(String indexName, AnalyzeRequest analyzeRequest) { + public PagedIterable analyzeText(String indexName, AnalyzeRequest analyzeRequest) { return analyzeText(indexName, analyzeRequest, null, Context.NONE); } @@ -553,7 +556,7 @@ public PagedIterable analyzeText(String indexName, AnalyzeRequest ana * @param context additional context that is passed through the HTTP pipeline during the service call * @return analyze result. */ - public PagedIterable analyzeText(String indexName, AnalyzeRequest analyzeRequest, + public PagedIterable analyzeText(String indexName, AnalyzeRequest analyzeRequest, RequestOptions requestOptions, Context context) { return new PagedIterable<>(asyncClient.analyzeText(indexName, analyzeRequest, requestOptions, context)); } @@ -562,9 +565,9 @@ public PagedIterable analyzeText(String indexName, AnalyzeRequest ana * Creates a new skillset in an Azure Cognitive Search service. * * @param skillset definition of the skillset containing one or more cognitive skills - * @return the created Skillset. + * @return the created SearchIndexerSkillset. */ - public Skillset createSkillset(Skillset skillset) { + public SearchIndexerSkillset createSkillset(SearchIndexerSkillset skillset) { return createSkillsetWithResponse(skillset, null, Context.NONE).getValue(); } @@ -575,9 +578,10 @@ public Skillset createSkillset(Skillset skillset) { * @param requestOptions additional parameters for the operation. 
Contains the tracking ID sent with the request to * help with debugging * @param context additional context that is passed through the HTTP pipeline during the service call - * @return a response containing the created Skillset. + * @return a response containing the created SearchIndexerSkillset. */ - public Response createSkillsetWithResponse(Skillset skillset, RequestOptions requestOptions, + public Response createSkillsetWithResponse(SearchIndexerSkillset skillset, + RequestOptions requestOptions, Context context) { return asyncClient.createSkillsetWithResponse(skillset, requestOptions, context).block(); } @@ -586,9 +590,9 @@ public Response createSkillsetWithResponse(Skillset skillset, RequestO * Retrieves a skillset definition. * * @param skillsetName the name of the skillset to retrieve - * @return the Skillset. + * @return the SearchIndexerSkillset. */ - public Skillset getSkillset(String skillsetName) { + public SearchIndexerSkillset getSkillset(String skillsetName) { return getSkillsetWithResponse(skillsetName, null, Context.NONE).getValue(); } @@ -599,9 +603,9 @@ public Skillset getSkillset(String skillsetName) { * @param requestOptions additional parameters for the operation. Contains the tracking ID sent with the request to * help with debugging * @param context additional context that is passed through the HTTP pipeline during the service call - * @return a response containing the Skillset. + * @return a response containing the SearchIndexerSkillset. */ - public Response getSkillsetWithResponse(String skillsetName, RequestOptions requestOptions, + public Response getSkillsetWithResponse(String skillsetName, RequestOptions requestOptions, Context context) { return asyncClient.getSkillsetWithResponse(skillsetName, requestOptions, context).block(); } @@ -611,7 +615,7 @@ public Response getSkillsetWithResponse(String skillsetName, RequestOp * * @return the list of skillsets. 
*/ - public PagedIterable listSkillsets() { + public PagedIterable listSkillsets() { return listSkillsets(null, null, Context.NONE); } @@ -625,24 +629,25 @@ public PagedIterable listSkillsets() { * @param context additional context that is passed through the HTTP pipeline during the service call * @return the list of skillsets. */ - public PagedIterable listSkillsets(String select, RequestOptions requestOptions, Context context) { + public PagedIterable listSkillsets(String select, RequestOptions requestOptions, + Context context) { return new PagedIterable<>(asyncClient.listSkillsets(select, requestOptions, context)); } /** * Creates a new Azure Cognitive Search skillset or updates a skillset if it already exists. * - * @param skillset the {@link Skillset} to create or update. + * @param skillset the {@link SearchIndexerSkillset} to create or update. * @return the skillset that was created or updated. */ - public Skillset createOrUpdateSkillset(Skillset skillset) { + public SearchIndexerSkillset createOrUpdateSkillset(SearchIndexerSkillset skillset) { return createOrUpdateSkillsetWithResponse(skillset, false, null, Context.NONE).getValue(); } /** * Creates a new Azure Cognitive Search skillset or updates a skillset if it already exists. * - * @param skillset the {@link Skillset} to create or update. + * @param skillset the {@link SearchIndexerSkillset} to create or update. * @param onlyIfUnchanged {@code true} to update if the {@code skillset} is the same as the current service value. * {@code false} to always update existing value. * @param requestOptions additional parameters for the operation. Contains the tracking ID sent with the request to @@ -650,8 +655,8 @@ public Skillset createOrUpdateSkillset(Skillset skillset) { * @param context additional context that is passed through the HTTP pipeline during the service call * @return a response containing the skillset that was created or updated. 
*/ - public Response createOrUpdateSkillsetWithResponse(Skillset skillset, boolean onlyIfUnchanged, - RequestOptions requestOptions, Context context) { + public Response createOrUpdateSkillsetWithResponse(SearchIndexerSkillset skillset, + boolean onlyIfUnchanged, RequestOptions requestOptions, Context context) { return asyncClient.createOrUpdateSkillsetWithResponse(skillset, onlyIfUnchanged, requestOptions, context) .block(); } @@ -662,13 +667,13 @@ public Response createOrUpdateSkillsetWithResponse(Skillset skillset, * @param skillsetName the name of the skillset to delete */ public void deleteSkillset(String skillsetName) { - deleteSkillsetWithResponse(new Skillset().setName(skillsetName), false, null, Context.NONE); + deleteSkillsetWithResponse(new SearchIndexerSkillset().setName(skillsetName), false, null, Context.NONE); } /** * Deletes a cognitive skillset in an Azure Cognitive Search service. * - * @param skillset the {@link Skillset} to delete. + * @param skillset the {@link SearchIndexerSkillset} to delete. * @param onlyIfUnchanged {@code true} to delete if the {@code skillset} is the same as the current service value. * {@code false} to always delete existing value. * @param requestOptions additional parameters for the operation. Contains the tracking ID sent with the request to @@ -676,7 +681,7 @@ public void deleteSkillset(String skillsetName) { * @param context additional context that is passed through the HTTP pipeline during the service call * @return a response signalling completion. */ - public Response deleteSkillsetWithResponse(Skillset skillset, boolean onlyIfUnchanged, + public Response deleteSkillsetWithResponse(SearchIndexerSkillset skillset, boolean onlyIfUnchanged, RequestOptions requestOptions, Context context) { String etag = onlyIfUnchanged ? 
skillset.getETag() : null; return asyncClient.deleteSkillsetWithResponse(skillset.getName(), etag, requestOptions, context).block(); diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/DataSourcesImpl.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/DataSourcesImpl.java index b2f670f22bc6..3382f1b13648 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/DataSourcesImpl.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/DataSourcesImpl.java @@ -26,9 +26,9 @@ import com.azure.core.http.rest.SimpleResponse; import com.azure.core.util.Context; import com.azure.search.documents.implementation.models.ListDataSourcesResult; -import com.azure.search.documents.models.DataSource; import com.azure.search.documents.models.RequestOptions; import com.azure.search.documents.models.SearchErrorException; +import com.azure.search.documents.models.SearchIndexerDataSource; import java.util.UUID; import reactor.core.publisher.Mono; @@ -68,7 +68,7 @@ private interface DataSourcesService { @Put("datasources('{dataSourceName}')") @ExpectedResponses({200, 201}) @UnexpectedResponseExceptionType(SearchErrorException.class) - Mono> createOrUpdate(@PathParam("dataSourceName") String dataSourceName, @HostParam("endpoint") String endpoint, @BodyParam("application/json; charset=utf-8") DataSource dataSource, @HeaderParam("If-Match") String ifMatch, @HeaderParam("If-None-Match") String ifNoneMatch, @HeaderParam("Prefer") String prefer, @QueryParam("api-version") String apiVersion, @HeaderParam("accept") String accept, @HeaderParam("x-ms-client-request-id") UUID xMsClientRequestId, Context context); + Mono> createOrUpdate(@PathParam("dataSourceName") String dataSourceName, @HostParam("endpoint") String endpoint, @BodyParam("application/json; charset=utf-8") SearchIndexerDataSource dataSource, 
@HeaderParam("If-Match") String ifMatch, @HeaderParam("If-None-Match") String ifNoneMatch, @HeaderParam("Prefer") String prefer, @QueryParam("api-version") String apiVersion, @HeaderParam("accept") String accept, @HeaderParam("x-ms-client-request-id") UUID xMsClientRequestId, Context context); @Delete("datasources('{dataSourceName}')") @ExpectedResponses({204, 404}) @@ -78,7 +78,7 @@ private interface DataSourcesService { @Get("datasources('{dataSourceName}')") @ExpectedResponses({200}) @UnexpectedResponseExceptionType(SearchErrorException.class) - Mono> get(@PathParam("dataSourceName") String dataSourceName, @HostParam("endpoint") String endpoint, @QueryParam("api-version") String apiVersion, @HeaderParam("accept") String accept, @HeaderParam("x-ms-client-request-id") UUID xMsClientRequestId, Context context); + Mono> get(@PathParam("dataSourceName") String dataSourceName, @HostParam("endpoint") String endpoint, @QueryParam("api-version") String apiVersion, @HeaderParam("accept") String accept, @HeaderParam("x-ms-client-request-id") UUID xMsClientRequestId, Context context); @Get("datasources") @ExpectedResponses({200}) @@ -88,7 +88,7 @@ private interface DataSourcesService { @Post("datasources") @ExpectedResponses({201}) @UnexpectedResponseExceptionType(SearchErrorException.class) - Mono> create(@HostParam("endpoint") String endpoint, @BodyParam("application/json; charset=utf-8") DataSource dataSource, @QueryParam("api-version") String apiVersion, @HeaderParam("accept") String accept, @HeaderParam("x-ms-client-request-id") UUID xMsClientRequestId, Context context); + Mono> create(@HostParam("endpoint") String endpoint, @BodyParam("application/json; charset=utf-8") SearchIndexerDataSource dataSource, @QueryParam("api-version") String apiVersion, @HeaderParam("accept") String accept, @HeaderParam("x-ms-client-request-id") UUID xMsClientRequestId, Context context); } /** @@ -101,7 +101,7 @@ private interface DataSourcesService { * @return a Mono which performs the 
network request upon subscription. */ @ServiceMethod(returns = ReturnType.SINGLE) - public Mono> createOrUpdateWithRestResponseAsync(String dataSourceName, DataSource dataSource, Context context) { + public Mono> createOrUpdateWithRestResponseAsync(String dataSourceName, SearchIndexerDataSource dataSource, Context context) { final String accept = "application/json;odata.metadata=minimal"; final String ifMatch = null; @@ -124,7 +124,7 @@ public Mono> createOrUpdateWithRestResponseAsync(Stri * @return a Mono which performs the network request upon subscription. */ @ServiceMethod(returns = ReturnType.SINGLE) - public Mono> createOrUpdateWithRestResponseAsync(String dataSourceName, DataSource dataSource, String ifMatch, String ifNoneMatch, RequestOptions requestOptions, Context context) { + public Mono> createOrUpdateWithRestResponseAsync(String dataSourceName, SearchIndexerDataSource dataSource, String ifMatch, String ifNoneMatch, RequestOptions requestOptions, Context context) { final String accept = "application/json;odata.metadata=minimal"; final String prefer = "return=representation"; @@ -184,7 +184,7 @@ public Mono> deleteWithRestResponseAsync(String dataSourceName, S * @return a Mono which performs the network request upon subscription. */ @ServiceMethod(returns = ReturnType.SINGLE) - public Mono> getWithRestResponseAsync(String dataSourceName, Context context) { + public Mono> getWithRestResponseAsync(String dataSourceName, Context context) { final String accept = "application/json;odata.metadata=minimal"; final UUID xMsClientRequestId = null; @@ -201,7 +201,7 @@ public Mono> getWithRestResponseAsync(String dataSour * @return a Mono which performs the network request upon subscription. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) - public Mono> getWithRestResponseAsync(String dataSourceName, RequestOptions requestOptions, Context context) { + public Mono> getWithRestResponseAsync(String dataSourceName, RequestOptions requestOptions, Context context) { final String accept = "application/json;odata.metadata=minimal"; UUID xMsClientRequestId = null; @@ -256,7 +256,7 @@ public Mono> listWithRestResponseAsync(Str * @return a Mono which performs the network request upon subscription. */ @ServiceMethod(returns = ReturnType.SINGLE) - public Mono> createWithRestResponseAsync(DataSource dataSource, Context context) { + public Mono> createWithRestResponseAsync(SearchIndexerDataSource dataSource, Context context) { final String accept = "application/json;odata.metadata=minimal"; final UUID xMsClientRequestId = null; @@ -273,7 +273,7 @@ public Mono> createWithRestResponseAsync(DataSource d * @return a Mono which performs the network request upon subscription. */ @ServiceMethod(returns = ReturnType.SINGLE) - public Mono> createWithRestResponseAsync(DataSource dataSource, RequestOptions requestOptions, Context context) { + public Mono> createWithRestResponseAsync(SearchIndexerDataSource dataSource, RequestOptions requestOptions, Context context) { final String accept = "application/json;odata.metadata=minimal"; UUID xMsClientRequestId = null; diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/DocumentsImpl.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/DocumentsImpl.java index dc025ae8b7e7..c11b1f6dfe7a 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/DocumentsImpl.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/DocumentsImpl.java @@ -25,11 +25,13 @@ import com.azure.core.util.serializer.CollectionFormat; import 
com.azure.core.util.serializer.JacksonAdapter; import com.azure.core.util.serializer.SerializerAdapter; +import com.azure.search.documents.implementation.models.AutocompleteRequest; import com.azure.search.documents.implementation.models.SearchDocumentsResult; +import com.azure.search.documents.implementation.models.SearchRequest; import com.azure.search.documents.implementation.models.SuggestDocumentsResult; +import com.azure.search.documents.implementation.models.SuggestRequest; import com.azure.search.documents.models.AutocompleteMode; import com.azure.search.documents.models.AutocompleteOptions; -import com.azure.search.documents.models.AutocompleteRequest; import com.azure.search.documents.models.AutocompleteResult; import com.azure.search.documents.models.IndexBatchBase; import com.azure.search.documents.models.IndexDocumentsResult; @@ -39,9 +41,7 @@ import com.azure.search.documents.models.SearchErrorException; import com.azure.search.documents.models.SearchMode; import com.azure.search.documents.models.SearchOptions; -import com.azure.search.documents.models.SearchRequest; import com.azure.search.documents.models.SuggestOptions; -import com.azure.search.documents.models.SuggestRequest; import java.util.List; import java.util.Map; import java.util.UUID; diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/IndexersImpl.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/IndexersImpl.java index a718b508744d..7030277bf29e 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/IndexersImpl.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/IndexersImpl.java @@ -26,10 +26,10 @@ import com.azure.core.http.rest.SimpleResponse; import com.azure.core.util.Context; import com.azure.search.documents.implementation.models.ListIndexersResult; -import 
com.azure.search.documents.models.Indexer; -import com.azure.search.documents.models.IndexerExecutionInfo; import com.azure.search.documents.models.RequestOptions; import com.azure.search.documents.models.SearchErrorException; +import com.azure.search.documents.models.SearchIndexer; +import com.azure.search.documents.models.SearchIndexerStatus; import java.util.UUID; import reactor.core.publisher.Mono; @@ -79,7 +79,7 @@ private interface IndexersService { @Put("indexers('{indexerName}')") @ExpectedResponses({200, 201}) @UnexpectedResponseExceptionType(SearchErrorException.class) - Mono> createOrUpdate(@PathParam("indexerName") String indexerName, @HostParam("endpoint") String endpoint, @BodyParam("application/json; charset=utf-8") Indexer indexer, @HeaderParam("If-Match") String ifMatch, @HeaderParam("If-None-Match") String ifNoneMatch, @HeaderParam("Prefer") String prefer, @QueryParam("api-version") String apiVersion, @HeaderParam("accept") String accept, @HeaderParam("x-ms-client-request-id") UUID xMsClientRequestId, Context context); + Mono> createOrUpdate(@PathParam("indexerName") String indexerName, @HostParam("endpoint") String endpoint, @BodyParam("application/json; charset=utf-8") SearchIndexer indexer, @HeaderParam("If-Match") String ifMatch, @HeaderParam("If-None-Match") String ifNoneMatch, @HeaderParam("Prefer") String prefer, @QueryParam("api-version") String apiVersion, @HeaderParam("accept") String accept, @HeaderParam("x-ms-client-request-id") UUID xMsClientRequestId, Context context); @Delete("indexers('{indexerName}')") @ExpectedResponses({204, 404}) @@ -89,7 +89,7 @@ private interface IndexersService { @Get("indexers('{indexerName}')") @ExpectedResponses({200}) @UnexpectedResponseExceptionType(SearchErrorException.class) - Mono> get(@PathParam("indexerName") String indexerName, @HostParam("endpoint") String endpoint, @QueryParam("api-version") String apiVersion, @HeaderParam("accept") String accept, @HeaderParam("x-ms-client-request-id") UUID 
xMsClientRequestId, Context context); + Mono> get(@PathParam("indexerName") String indexerName, @HostParam("endpoint") String endpoint, @QueryParam("api-version") String apiVersion, @HeaderParam("accept") String accept, @HeaderParam("x-ms-client-request-id") UUID xMsClientRequestId, Context context); @Get("indexers") @ExpectedResponses({200}) @@ -99,12 +99,12 @@ private interface IndexersService { @Post("indexers") @ExpectedResponses({201}) @UnexpectedResponseExceptionType(SearchErrorException.class) - Mono> create(@HostParam("endpoint") String endpoint, @BodyParam("application/json; charset=utf-8") Indexer indexer, @QueryParam("api-version") String apiVersion, @HeaderParam("accept") String accept, @HeaderParam("x-ms-client-request-id") UUID xMsClientRequestId, Context context); + Mono> create(@HostParam("endpoint") String endpoint, @BodyParam("application/json; charset=utf-8") SearchIndexer indexer, @QueryParam("api-version") String apiVersion, @HeaderParam("accept") String accept, @HeaderParam("x-ms-client-request-id") UUID xMsClientRequestId, Context context); @Get("indexers('{indexerName}')/search.status") @ExpectedResponses({200}) @UnexpectedResponseExceptionType(SearchErrorException.class) - Mono> getStatus(@PathParam("indexerName") String indexerName, @HostParam("endpoint") String endpoint, @QueryParam("api-version") String apiVersion, @HeaderParam("accept") String accept, @HeaderParam("x-ms-client-request-id") UUID xMsClientRequestId, Context context); + Mono> getStatus(@PathParam("indexerName") String indexerName, @HostParam("endpoint") String endpoint, @QueryParam("api-version") String apiVersion, @HeaderParam("accept") String accept, @HeaderParam("x-ms-client-request-id") UUID xMsClientRequestId, Context context); } /** @@ -189,7 +189,7 @@ public Mono> runWithRestResponseAsync(String indexerName, Request * @return a Mono which performs the network request upon subscription. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) - public Mono> createOrUpdateWithRestResponseAsync(String indexerName, Indexer indexer, Context context) { + public Mono> createOrUpdateWithRestResponseAsync(String indexerName, SearchIndexer indexer, Context context) { final String accept = "application/json;odata.metadata=minimal"; final String ifMatch = null; @@ -212,7 +212,7 @@ public Mono> createOrUpdateWithRestResponseAsync(String * @return a Mono which performs the network request upon subscription. */ @ServiceMethod(returns = ReturnType.SINGLE) - public Mono> createOrUpdateWithRestResponseAsync(String indexerName, Indexer indexer, String ifMatch, String ifNoneMatch, RequestOptions requestOptions, Context context) { + public Mono> createOrUpdateWithRestResponseAsync(String indexerName, SearchIndexer indexer, String ifMatch, String ifNoneMatch, RequestOptions requestOptions, Context context) { final String accept = "application/json;odata.metadata=minimal"; final String prefer = "return=representation"; @@ -272,7 +272,7 @@ public Mono> deleteWithRestResponseAsync(String indexerName, Stri * @return a Mono which performs the network request upon subscription. */ @ServiceMethod(returns = ReturnType.SINGLE) - public Mono> getWithRestResponseAsync(String indexerName, Context context) { + public Mono> getWithRestResponseAsync(String indexerName, Context context) { final String accept = "application/json;odata.metadata=minimal"; final UUID xMsClientRequestId = null; @@ -289,7 +289,7 @@ public Mono> getWithRestResponseAsync(String indexerName * @return a Mono which performs the network request upon subscription. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) - public Mono> getWithRestResponseAsync(String indexerName, RequestOptions requestOptions, Context context) { + public Mono> getWithRestResponseAsync(String indexerName, RequestOptions requestOptions, Context context) { final String accept = "application/json;odata.metadata=minimal"; UUID xMsClientRequestId = null; @@ -344,7 +344,7 @@ public Mono> listWithRestResponseAsync(String * @return a Mono which performs the network request upon subscription. */ @ServiceMethod(returns = ReturnType.SINGLE) - public Mono> createWithRestResponseAsync(Indexer indexer, Context context) { + public Mono> createWithRestResponseAsync(SearchIndexer indexer, Context context) { final String accept = "application/json;odata.metadata=minimal"; final UUID xMsClientRequestId = null; @@ -361,7 +361,7 @@ public Mono> createWithRestResponseAsync(Indexer indexer * @return a Mono which performs the network request upon subscription. */ @ServiceMethod(returns = ReturnType.SINGLE) - public Mono> createWithRestResponseAsync(Indexer indexer, RequestOptions requestOptions, Context context) { + public Mono> createWithRestResponseAsync(SearchIndexer indexer, RequestOptions requestOptions, Context context) { final String accept = "application/json;odata.metadata=minimal"; UUID xMsClientRequestId = null; @@ -380,7 +380,7 @@ public Mono> createWithRestResponseAsync(Indexer indexer * @return a Mono which performs the network request upon subscription. */ @ServiceMethod(returns = ReturnType.SINGLE) - public Mono> getStatusWithRestResponseAsync(String indexerName, Context context) { + public Mono> getStatusWithRestResponseAsync(String indexerName, Context context) { final String accept = "application/json;odata.metadata=minimal"; final UUID xMsClientRequestId = null; @@ -397,7 +397,7 @@ public Mono> getStatusWithRestResponseAsync * @return a Mono which performs the network request upon subscription. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) - public Mono> getStatusWithRestResponseAsync(String indexerName, RequestOptions requestOptions, Context context) { + public Mono> getStatusWithRestResponseAsync(String indexerName, RequestOptions requestOptions, Context context) { final String accept = "application/json;odata.metadata=minimal"; UUID xMsClientRequestId = null; diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/IndexesImpl.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/IndexesImpl.java index 8aa9b80b1f2f..882ca42d28cf 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/IndexesImpl.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/IndexesImpl.java @@ -29,9 +29,9 @@ import com.azure.search.documents.implementation.models.ListIndexesResult; import com.azure.search.documents.models.AnalyzeRequest; import com.azure.search.documents.models.GetIndexStatisticsResult; -import com.azure.search.documents.models.Index; import com.azure.search.documents.models.RequestOptions; import com.azure.search.documents.models.SearchErrorException; +import com.azure.search.documents.models.SearchIndex; import java.util.UUID; import reactor.core.publisher.Mono; @@ -71,7 +71,7 @@ private interface IndexesService { @Post("indexes") @ExpectedResponses({201}) @UnexpectedResponseExceptionType(SearchErrorException.class) - Mono> create(@HostParam("endpoint") String endpoint, @BodyParam("application/json; charset=utf-8") Index index, @QueryParam("api-version") String apiVersion, @HeaderParam("accept") String accept, @HeaderParam("x-ms-client-request-id") UUID xMsClientRequestId, Context context); + Mono> create(@HostParam("endpoint") String endpoint, @BodyParam("application/json; charset=utf-8") SearchIndex index, @QueryParam("api-version") String apiVersion, @HeaderParam("accept") 
String accept, @HeaderParam("x-ms-client-request-id") UUID xMsClientRequestId, Context context); @Get("indexes") @ExpectedResponses({200}) @@ -81,7 +81,7 @@ private interface IndexesService { @Put("indexes('{indexName}')") @ExpectedResponses({200, 201}) @UnexpectedResponseExceptionType(SearchErrorException.class) - Mono> createOrUpdate(@PathParam("indexName") String indexName, @HostParam("endpoint") String endpoint, @BodyParam("application/json; charset=utf-8") Index index, @QueryParam("allowIndexDowntime") Boolean allowIndexDowntime, @HeaderParam("If-Match") String ifMatch, @HeaderParam("If-None-Match") String ifNoneMatch, @HeaderParam("Prefer") String prefer, @QueryParam("api-version") String apiVersion, @HeaderParam("accept") String accept, @HeaderParam("x-ms-client-request-id") UUID xMsClientRequestId, Context context); + Mono> createOrUpdate(@PathParam("indexName") String indexName, @HostParam("endpoint") String endpoint, @BodyParam("application/json; charset=utf-8") SearchIndex index, @QueryParam("allowIndexDowntime") Boolean allowIndexDowntime, @HeaderParam("If-Match") String ifMatch, @HeaderParam("If-None-Match") String ifNoneMatch, @HeaderParam("Prefer") String prefer, @QueryParam("api-version") String apiVersion, @HeaderParam("accept") String accept, @HeaderParam("x-ms-client-request-id") UUID xMsClientRequestId, Context context); @Delete("indexes('{indexName}')") @ExpectedResponses({204, 404}) @@ -91,7 +91,7 @@ private interface IndexesService { @Get("indexes('{indexName}')") @ExpectedResponses({200}) @UnexpectedResponseExceptionType(SearchErrorException.class) - Mono> get(@PathParam("indexName") String indexName, @HostParam("endpoint") String endpoint, @QueryParam("api-version") String apiVersion, @HeaderParam("accept") String accept, @HeaderParam("x-ms-client-request-id") UUID xMsClientRequestId, Context context); + Mono> get(@PathParam("indexName") String indexName, @HostParam("endpoint") String endpoint, @QueryParam("api-version") String apiVersion, 
@HeaderParam("accept") String accept, @HeaderParam("x-ms-client-request-id") UUID xMsClientRequestId, Context context); @Get("indexes('{indexName}')/search.stats") @ExpectedResponses({200}) @@ -113,7 +113,7 @@ private interface IndexesService { * @return a Mono which performs the network request upon subscription. */ @ServiceMethod(returns = ReturnType.SINGLE) - public Mono> createWithRestResponseAsync(Index index, Context context) { + public Mono> createWithRestResponseAsync(SearchIndex index, Context context) { final String accept = "application/json;odata.metadata=minimal"; final UUID xMsClientRequestId = null; @@ -130,7 +130,7 @@ public Mono> createWithRestResponseAsync(Index index, Cont * @return a Mono which performs the network request upon subscription. */ @ServiceMethod(returns = ReturnType.SINGLE) - public Mono> createWithRestResponseAsync(Index index, RequestOptions requestOptions, Context context) { + public Mono> createWithRestResponseAsync(SearchIndex index, RequestOptions requestOptions, Context context) { final String accept = "application/json;odata.metadata=minimal"; UUID xMsClientRequestId = null; @@ -186,7 +186,7 @@ public Mono> listWithRestResponseAsync(String * @return a Mono which performs the network request upon subscription. */ @ServiceMethod(returns = ReturnType.SINGLE) - public Mono> createOrUpdateWithRestResponseAsync(String indexName, Index index, Context context) { + public Mono> createOrUpdateWithRestResponseAsync(String indexName, SearchIndex index, Context context) { final String accept = "application/json;odata.metadata=minimal"; final Boolean allowIndexDowntime = null; @@ -211,7 +211,7 @@ public Mono> createOrUpdateWithRestResponseAsync(String in * @return a Mono which performs the network request upon subscription. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) - public Mono> createOrUpdateWithRestResponseAsync(String indexName, Index index, Boolean allowIndexDowntime, String ifMatch, String ifNoneMatch, RequestOptions requestOptions, Context context) { + public Mono> createOrUpdateWithRestResponseAsync(String indexName, SearchIndex index, Boolean allowIndexDowntime, String ifMatch, String ifNoneMatch, RequestOptions requestOptions, Context context) { final String accept = "application/json;odata.metadata=minimal"; final String prefer = "return=representation"; @@ -223,7 +223,7 @@ public Mono> createOrUpdateWithRestResponseAsync(String in } /** - * Deletes a search index and all the documents it contains. + * Deletes a search index and all the documents it contains. This operation is permanent, with no recovery option. Make sure you have a master copy of your index definition, data ingestion code, and a backup of the primary data source in case you need to re-build the index. * * @param indexName The name of the index to delete. * @param context The context to associate with this operation. @@ -241,7 +241,7 @@ public Mono> deleteWithRestResponseAsync(String indexName, Contex } /** - * Deletes a search index and all the documents it contains. + * Deletes a search index and all the documents it contains. This operation is permanent, with no recovery option. Make sure you have a master copy of your index definition, data ingestion code, and a backup of the primary data source in case you need to re-build the index. * * @param indexName The name of the index to delete. * @param ifMatch Defines the If-Match condition. The operation will be performed only if the ETag on the server matches this value. @@ -271,7 +271,7 @@ public Mono> deleteWithRestResponseAsync(String indexName, String * @return a Mono which performs the network request upon subscription. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) - public Mono> getWithRestResponseAsync(String indexName, Context context) { + public Mono> getWithRestResponseAsync(String indexName, Context context) { final String accept = "application/json;odata.metadata=minimal"; final UUID xMsClientRequestId = null; @@ -288,7 +288,7 @@ public Mono> getWithRestResponseAsync(String indexName, Co * @return a Mono which performs the network request upon subscription. */ @ServiceMethod(returns = ReturnType.SINGLE) - public Mono> getWithRestResponseAsync(String indexName, RequestOptions requestOptions, Context context) { + public Mono> getWithRestResponseAsync(String indexName, RequestOptions requestOptions, Context context) { final String accept = "application/json;odata.metadata=minimal"; UUID xMsClientRequestId = null; diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/SkillsetsImpl.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/SkillsetsImpl.java index 06bbb8195a9c..a06b421d8c5a 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/SkillsetsImpl.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/SkillsetsImpl.java @@ -28,7 +28,7 @@ import com.azure.search.documents.implementation.models.ListSkillsetsResult; import com.azure.search.documents.models.RequestOptions; import com.azure.search.documents.models.SearchErrorException; -import com.azure.search.documents.models.Skillset; +import com.azure.search.documents.models.SearchIndexerSkillset; import java.util.UUID; import reactor.core.publisher.Mono; @@ -68,7 +68,7 @@ private interface SkillsetsService { @Put("skillsets('{skillsetName}')") @ExpectedResponses({200, 201}) @UnexpectedResponseExceptionType(SearchErrorException.class) - Mono> createOrUpdate(@PathParam("skillsetName") String skillsetName, @HostParam("endpoint") String 
endpoint, @BodyParam("application/json; charset=utf-8") Skillset skillset, @HeaderParam("If-Match") String ifMatch, @HeaderParam("If-None-Match") String ifNoneMatch, @HeaderParam("Prefer") String prefer, @QueryParam("api-version") String apiVersion, @HeaderParam("accept") String accept, @HeaderParam("x-ms-client-request-id") UUID xMsClientRequestId, Context context); + Mono> createOrUpdate(@PathParam("skillsetName") String skillsetName, @HostParam("endpoint") String endpoint, @BodyParam("application/json; charset=utf-8") SearchIndexerSkillset skillset, @HeaderParam("If-Match") String ifMatch, @HeaderParam("If-None-Match") String ifNoneMatch, @HeaderParam("Prefer") String prefer, @QueryParam("api-version") String apiVersion, @HeaderParam("accept") String accept, @HeaderParam("x-ms-client-request-id") UUID xMsClientRequestId, Context context); @Delete("skillsets('{skillsetName}')") @ExpectedResponses({204, 404}) @@ -78,7 +78,7 @@ private interface SkillsetsService { @Get("skillsets('{skillsetName}')") @ExpectedResponses({200}) @UnexpectedResponseExceptionType(SearchErrorException.class) - Mono> get(@PathParam("skillsetName") String skillsetName, @HostParam("endpoint") String endpoint, @QueryParam("api-version") String apiVersion, @HeaderParam("accept") String accept, @HeaderParam("x-ms-client-request-id") UUID xMsClientRequestId, Context context); + Mono> get(@PathParam("skillsetName") String skillsetName, @HostParam("endpoint") String endpoint, @QueryParam("api-version") String apiVersion, @HeaderParam("accept") String accept, @HeaderParam("x-ms-client-request-id") UUID xMsClientRequestId, Context context); @Get("skillsets") @ExpectedResponses({200}) @@ -88,7 +88,7 @@ private interface SkillsetsService { @Post("skillsets") @ExpectedResponses({201}) @UnexpectedResponseExceptionType(SearchErrorException.class) - Mono> create(@HostParam("endpoint") String endpoint, @BodyParam("application/json; charset=utf-8") Skillset skillset, @QueryParam("api-version") String 
apiVersion, @HeaderParam("accept") String accept, @HeaderParam("x-ms-client-request-id") UUID xMsClientRequestId, Context context); + Mono> create(@HostParam("endpoint") String endpoint, @BodyParam("application/json; charset=utf-8") SearchIndexerSkillset skillset, @QueryParam("api-version") String apiVersion, @HeaderParam("accept") String accept, @HeaderParam("x-ms-client-request-id") UUID xMsClientRequestId, Context context); } /** @@ -101,7 +101,7 @@ private interface SkillsetsService { * @return a Mono which performs the network request upon subscription. */ @ServiceMethod(returns = ReturnType.SINGLE) - public Mono> createOrUpdateWithRestResponseAsync(String skillsetName, Skillset skillset, Context context) { + public Mono> createOrUpdateWithRestResponseAsync(String skillsetName, SearchIndexerSkillset skillset, Context context) { final String accept = "application/json;odata.metadata=minimal"; final String ifMatch = null; @@ -124,7 +124,7 @@ public Mono> createOrUpdateWithRestResponseAsync(String * @return a Mono which performs the network request upon subscription. */ @ServiceMethod(returns = ReturnType.SINGLE) - public Mono> createOrUpdateWithRestResponseAsync(String skillsetName, Skillset skillset, String ifMatch, String ifNoneMatch, RequestOptions requestOptions, Context context) { + public Mono> createOrUpdateWithRestResponseAsync(String skillsetName, SearchIndexerSkillset skillset, String ifMatch, String ifNoneMatch, RequestOptions requestOptions, Context context) { final String accept = "application/json;odata.metadata=minimal"; final String prefer = "return=representation"; @@ -184,7 +184,7 @@ public Mono> deleteWithRestResponseAsync(String skillsetName, Str * @return a Mono which performs the network request upon subscription. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) - public Mono> getWithRestResponseAsync(String skillsetName, Context context) { + public Mono> getWithRestResponseAsync(String skillsetName, Context context) { final String accept = "application/json;odata.metadata=minimal"; final UUID xMsClientRequestId = null; @@ -201,7 +201,7 @@ public Mono> getWithRestResponseAsync(String skillsetNa * @return a Mono which performs the network request upon subscription. */ @ServiceMethod(returns = ReturnType.SINGLE) - public Mono> getWithRestResponseAsync(String skillsetName, RequestOptions requestOptions, Context context) { + public Mono> getWithRestResponseAsync(String skillsetName, RequestOptions requestOptions, Context context) { final String accept = "application/json;odata.metadata=minimal"; UUID xMsClientRequestId = null; @@ -256,7 +256,7 @@ public Mono> listWithRestResponseAsync(Strin * @return a Mono which performs the network request upon subscription. */ @ServiceMethod(returns = ReturnType.SINGLE) - public Mono> createWithRestResponseAsync(Skillset skillset, Context context) { + public Mono> createWithRestResponseAsync(SearchIndexerSkillset skillset, Context context) { final String accept = "application/json;odata.metadata=minimal"; final UUID xMsClientRequestId = null; @@ -273,7 +273,7 @@ public Mono> createWithRestResponseAsync(Skillset skill * @return a Mono which performs the network request upon subscription. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) - public Mono> createWithRestResponseAsync(Skillset skillset, RequestOptions requestOptions, Context context) { + public Mono> createWithRestResponseAsync(SearchIndexerSkillset skillset, RequestOptions requestOptions, Context context) { final String accept = "application/json;odata.metadata=minimal"; UUID xMsClientRequestId = null; diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/models/AccessCondition.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/models/AccessCondition.java deleted file mode 100644 index 75ae810a1b02..000000000000 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/models/AccessCondition.java +++ /dev/null @@ -1,76 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. -// Changes may cause incorrect behavior and will be lost if the code is -// regenerated. - -package com.azure.search.documents.implementation.models; - -import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; - -/** - * Additional parameters for a set of operations. - */ -@Fluent -public final class AccessCondition { - /* - * Defines the If-Match condition. The operation will be performed only if - * the ETag on the server matches this value. - */ - @JsonProperty(value = "") - private String ifMatch; - - /* - * Defines the If-None-Match condition. The operation will be performed - * only if the ETag on the server does not match this value. - */ - @JsonProperty(value = "") - private String ifNoneMatch; - - /** - * Get the ifMatch property: Defines the If-Match condition. The operation - * will be performed only if the ETag on the server matches this value. - * - * @return the ifMatch value. 
- */ - public String getIfMatch() { - return this.ifMatch; - } - - /** - * Set the ifMatch property: Defines the If-Match condition. The operation - * will be performed only if the ETag on the server matches this value. - * - * @param ifMatch the ifMatch value to set. - * @return the AccessCondition object itself. - */ - public AccessCondition setIfMatch(String ifMatch) { - this.ifMatch = ifMatch; - return this; - } - - /** - * Get the ifNoneMatch property: Defines the If-None-Match condition. The - * operation will be performed only if the ETag on the server does not - * match this value. - * - * @return the ifNoneMatch value. - */ - public String getIfNoneMatch() { - return this.ifNoneMatch; - } - - /** - * Set the ifNoneMatch property: Defines the If-None-Match condition. The - * operation will be performed only if the ETag on the server does not - * match this value. - * - * @param ifNoneMatch the ifNoneMatch value to set. - * @return the AccessCondition object itself. - */ - public AccessCondition setIfNoneMatch(String ifNoneMatch) { - this.ifNoneMatch = ifNoneMatch; - return this; - } -} diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/models/AnalyzeResult.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/models/AnalyzeResult.java index 20774a9d8ace..5e07fb435ecd 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/models/AnalyzeResult.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/models/AnalyzeResult.java @@ -7,7 +7,7 @@ package com.azure.search.documents.implementation.models; import com.azure.core.annotation.Fluent; -import com.azure.search.documents.models.TokenInfo; +import com.azure.search.documents.models.AnalyzedTokenInfo; import com.fasterxml.jackson.annotation.JsonProperty; import java.util.List; @@ -20,7 +20,7 @@ public final class AnalyzeResult 
{ * The list of tokens returned by the analyzer specified in the request. */ @JsonProperty(value = "tokens", required = true) - private List tokens; + private List tokens; /** * Get the tokens property: The list of tokens returned by the analyzer @@ -28,7 +28,7 @@ public final class AnalyzeResult { * * @return the tokens value. */ - public List getTokens() { + public List getTokens() { return this.tokens; } @@ -39,7 +39,7 @@ public List getTokens() { * @param tokens the tokens value to set. * @return the AnalyzeResult object itself. */ - public AnalyzeResult setTokens(List tokens) { + public AnalyzeResult setTokens(List tokens) { this.tokens = tokens; return this; } diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/AutocompleteRequest.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/models/AutocompleteRequest.java similarity index 96% rename from sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/AutocompleteRequest.java rename to sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/models/AutocompleteRequest.java index a85a2bbdbc42..b946b98ede53 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/AutocompleteRequest.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/models/AutocompleteRequest.java @@ -4,9 +4,10 @@ // Changes may cause incorrect behavior and will be lost if the code is // regenerated. -package com.azure.search.documents.models; +package com.azure.search.documents.implementation.models; import com.azure.core.annotation.Fluent; +import com.azure.search.documents.models.AutocompleteMode; import com.fasterxml.jackson.annotation.JsonProperty; /** @@ -24,7 +25,7 @@ public final class AutocompleteRequest { * Specifies the mode for Autocomplete. The default is 'oneTerm'. 
Use * 'twoTerms' to get shingles and 'oneTermWithContext' to use the current * context while producing auto-completed terms. Possible values include: - * 'oneTerm', 'twoTerms', 'oneTermWithContext' + * 'OneTerm', 'TwoTerms', 'OneTermWithContext' */ @JsonProperty(value = "autocompleteMode") private AutocompleteMode autocompleteMode; @@ -118,8 +119,8 @@ public AutocompleteRequest setSearchText(String searchText) { * Get the autocompleteMode property: Specifies the mode for Autocomplete. * The default is 'oneTerm'. Use 'twoTerms' to get shingles and * 'oneTermWithContext' to use the current context while producing - * auto-completed terms. Possible values include: 'oneTerm', 'twoTerms', - * 'oneTermWithContext'. + * auto-completed terms. Possible values include: 'OneTerm', 'TwoTerms', + * 'OneTermWithContext'. * * @return the autocompleteMode value. */ @@ -131,8 +132,8 @@ public AutocompleteMode getAutocompleteMode() { * Set the autocompleteMode property: Specifies the mode for Autocomplete. * The default is 'oneTerm'. Use 'twoTerms' to get shingles and * 'oneTermWithContext' to use the current context while producing - * auto-completed terms. Possible values include: 'oneTerm', 'twoTerms', - * 'oneTermWithContext'. + * auto-completed terms. Possible values include: 'OneTerm', 'TwoTerms', + * 'OneTermWithContext'. * * @param autocompleteMode the autocompleteMode value to set. * @return the AutocompleteRequest object itself. 
diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/models/ListDataSourcesResult.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/models/ListDataSourcesResult.java index 616ea16fef3c..23121bc1d2cd 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/models/ListDataSourcesResult.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/models/ListDataSourcesResult.java @@ -7,7 +7,7 @@ package com.azure.search.documents.implementation.models; import com.azure.core.annotation.Fluent; -import com.azure.search.documents.models.DataSource; +import com.azure.search.documents.models.SearchIndexerDataSource; import com.fasterxml.jackson.annotation.JsonProperty; import java.util.List; @@ -21,14 +21,14 @@ public final class ListDataSourcesResult { * The datasources in the Search service. */ @JsonProperty(value = "value", required = true, access = JsonProperty.Access.WRITE_ONLY) - private List dataSources; + private List dataSources; /** * Get the dataSources property: The datasources in the Search service. * * @return the dataSources value. 
*/ - public List getDataSources() { + public List getDataSources() { return this.dataSources; } } diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/models/ListIndexersResult.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/models/ListIndexersResult.java index abd6b8c1649e..b4767ccc076f 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/models/ListIndexersResult.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/models/ListIndexersResult.java @@ -7,7 +7,7 @@ package com.azure.search.documents.implementation.models; import com.azure.core.annotation.Fluent; -import com.azure.search.documents.models.Indexer; +import com.azure.search.documents.models.SearchIndexer; import com.fasterxml.jackson.annotation.JsonProperty; import java.util.List; @@ -21,14 +21,14 @@ public final class ListIndexersResult { * The indexers in the Search service. */ @JsonProperty(value = "value", required = true, access = JsonProperty.Access.WRITE_ONLY) - private List indexers; + private List indexers; /** * Get the indexers property: The indexers in the Search service. * * @return the indexers value. 
*/ - public List getIndexers() { + public List getIndexers() { return this.indexers; } } diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/models/ListIndexesResult.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/models/ListIndexesResult.java index a8be29cf934b..ceb8a6710795 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/models/ListIndexesResult.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/models/ListIndexesResult.java @@ -7,7 +7,7 @@ package com.azure.search.documents.implementation.models; import com.azure.core.annotation.Fluent; -import com.azure.search.documents.models.Index; +import com.azure.search.documents.models.SearchIndex; import com.fasterxml.jackson.annotation.JsonProperty; import java.util.List; @@ -21,14 +21,14 @@ public final class ListIndexesResult { * The indexes in the Search service. */ @JsonProperty(value = "value", required = true, access = JsonProperty.Access.WRITE_ONLY) - private List indexes; + private List indexes; /** * Get the indexes property: The indexes in the Search service. * * @return the indexes value. 
*/ - public List getIndexes() { + public List getIndexes() { return this.indexes; } } diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/models/ListSkillsetsResult.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/models/ListSkillsetsResult.java index bba71f8c5f55..9c44fd682a1d 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/models/ListSkillsetsResult.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/models/ListSkillsetsResult.java @@ -7,12 +7,12 @@ package com.azure.search.documents.implementation.models; import com.azure.core.annotation.Fluent; -import com.azure.search.documents.models.Skillset; +import com.azure.search.documents.models.SearchIndexerSkillset; import com.fasterxml.jackson.annotation.JsonProperty; import java.util.List; /** - * Response from a list Skillset request. If successful, it includes the full + * Response from a list skillset request. If successful, it includes the full * definitions of all skillsets. */ @Fluent @@ -21,14 +21,14 @@ public final class ListSkillsetsResult { * The skillsets defined in the Search service. */ @JsonProperty(value = "value", required = true, access = JsonProperty.Access.WRITE_ONLY) - private List skillsets; + private List skillsets; /** * Get the skillsets property: The skillsets defined in the Search service. * * @return the skillsets value. 
*/ - public List getSkillsets() { + public List getSkillsets() { return this.skillsets; } } diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/models/SearchContinuationToken.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/models/SearchContinuationToken.java index 51cb26f95767..caf88ef67158 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/models/SearchContinuationToken.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/models/SearchContinuationToken.java @@ -5,7 +5,6 @@ import com.azure.core.util.serializer.JacksonAdapter; import com.azure.core.util.serializer.SerializerEncoding; -import com.azure.search.documents.models.SearchRequest; import com.azure.search.documents.util.SearchPagedResponse; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.ObjectNode; diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/models/SearchDocumentsResult.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/models/SearchDocumentsResult.java index 43769a6cf8c6..70f04eda4d1a 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/models/SearchDocumentsResult.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/models/SearchDocumentsResult.java @@ -8,7 +8,6 @@ import com.azure.core.annotation.Fluent; import com.azure.search.documents.models.FacetResult; -import com.azure.search.documents.models.SearchRequest; import com.azure.search.documents.models.SearchResult; import com.fasterxml.jackson.annotation.JsonProperty; import java.util.List; diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchRequest.java 
b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/models/SearchRequest.java similarity index 97% rename from sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchRequest.java rename to sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/models/SearchRequest.java index 2e867ae07982..c83a719db9c8 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchRequest.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/models/SearchRequest.java @@ -4,9 +4,12 @@ // Changes may cause incorrect behavior and will be lost if the code is // regenerated. -package com.azure.search.documents.models; +package com.azure.search.documents.implementation.models; import com.azure.core.annotation.Fluent; +import com.azure.search.documents.models.QueryType; +import com.azure.search.documents.models.ScoringParameter; +import com.azure.search.documents.models.SearchMode; import com.fasterxml.jackson.annotation.JsonProperty; import java.util.List; @@ -85,7 +88,7 @@ public final class SearchRequest { /* * A value that specifies the syntax of the search query. The default is * 'simple'. Use 'full' if your query uses the Lucene query syntax. - * Possible values include: 'simple', 'full' + * Possible values include: 'Simple', 'Full' */ @JsonProperty(value = "queryType") private QueryType queryType; @@ -126,7 +129,7 @@ public final class SearchRequest { /* * A value that specifies whether any or all of the search terms must be * matched in order to count the document as a match. Possible values - * include: 'any', 'all' + * include: 'Any', 'All' */ @JsonProperty(value = "searchMode") private SearchMode searchMode; @@ -366,7 +369,7 @@ public SearchRequest setOrderBy(String orderBy) { /** * Get the queryType property: A value that specifies the syntax of the * search query. 
The default is 'simple'. Use 'full' if your query uses the - * Lucene query syntax. Possible values include: 'simple', 'full'. + * Lucene query syntax. Possible values include: 'Simple', 'Full'. * * @return the queryType value. */ @@ -377,7 +380,7 @@ public QueryType getQueryType() { /** * Set the queryType property: A value that specifies the syntax of the * search query. The default is 'simple'. Use 'full' if your query uses the - * Lucene query syntax. Possible values include: 'simple', 'full'. + * Lucene query syntax. Possible values include: 'Simple', 'Full'. * * @param queryType the queryType value to set. * @return the SearchRequest object itself. @@ -492,7 +495,7 @@ public SearchRequest setSearchFields(String searchFields) { /** * Get the searchMode property: A value that specifies whether any or all * of the search terms must be matched in order to count the document as a - * match. Possible values include: 'any', 'all'. + * match. Possible values include: 'Any', 'All'. * * @return the searchMode value. */ @@ -503,7 +506,7 @@ public SearchMode getSearchMode() { /** * Set the searchMode property: A value that specifies whether any or all * of the search terms must be matched in order to count the document as a - * match. Possible values include: 'any', 'all'. + * match. Possible values include: 'Any', 'All'. * * @param searchMode the searchMode value to set. * @return the SearchRequest object itself. 
diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SuggestRequest.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/models/SuggestRequest.java similarity index 99% rename from sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SuggestRequest.java rename to sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/models/SuggestRequest.java index 6cd06ca5a355..8700337daf8e 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SuggestRequest.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/models/SuggestRequest.java @@ -4,7 +4,7 @@ // Changes may cause incorrect behavior and will be lost if the code is // regenerated. -package com.azure.search.documents.models; +package com.azure.search.documents.implementation.models; import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/util/CustomPatternAnalyzerDeserializer.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/util/CustomPatternAnalyzerDeserializer.java index 425d36e1d98f..b2f1d65f9a4b 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/util/CustomPatternAnalyzerDeserializer.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/util/CustomPatternAnalyzerDeserializer.java @@ -4,7 +4,6 @@ package com.azure.search.documents.implementation.util; import com.azure.search.documents.models.PatternAnalyzer; -import com.azure.search.documents.models.PatternTokenizer; import com.azure.search.documents.models.RegexFlags; import com.fasterxml.jackson.core.JsonParser; import 
com.fasterxml.jackson.databind.DeserializationContext; diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/util/CustomPatternAnalyzerSerializer.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/util/CustomPatternAnalyzerSerializer.java index df62df7d614c..f96a5a8b9923 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/util/CustomPatternAnalyzerSerializer.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/implementation/util/CustomPatternAnalyzerSerializer.java @@ -4,7 +4,6 @@ package com.azure.search.documents.implementation.util; import com.azure.search.documents.models.PatternAnalyzer; -import com.azure.search.documents.models.PatternTokenizer; import com.azure.search.documents.models.RegexFlags; import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.databind.JsonSerializer; diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/indexes/SearchableFieldProperty.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/indexes/SearchableFieldProperty.java index 69a5546d1498..8f45e555f276 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/indexes/SearchableFieldProperty.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/indexes/SearchableFieldProperty.java @@ -3,7 +3,7 @@ package com.azure.search.documents.indexes; -import com.azure.search.documents.models.AnalyzerName; +import com.azure.search.documents.models.LexicalAnalyzerName; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; @@ -55,21 +55,21 @@ /** * Optional arguments defines the name of the analyzer used for the field. * - * @return {@link AnalyzerName} String value. Or default to "null" String type. 
+ * @return {@link LexicalAnalyzerName} String value. Or default to "null" String type. */ String analyzer() default ""; /** * Optional arguments defines the name of the search analyzer used for the field. * - * @return {@link AnalyzerName} String value. Or default to an empty String. + * @return {@link LexicalAnalyzerName} String value. Or default to an empty String. */ String searchAnalyzer() default ""; /** * Optional arguments defines the name of the analyzer used for the field. * - * @return {@link AnalyzerName} String value. Or default to an empty String. + * @return {@link LexicalAnalyzerName} String value. Or default to an empty String. */ String indexAnalyzer() default ""; diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/indexes/SimpleFieldProperty.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/indexes/SimpleFieldProperty.java index c8d7db1a541a..6c0fe8fdf59e 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/indexes/SimpleFieldProperty.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/indexes/SimpleFieldProperty.java @@ -3,7 +3,7 @@ package com.azure.search.documents.indexes; -import com.azure.search.documents.models.Field; +import com.azure.search.documents.models.SearchField; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; @@ -12,7 +12,7 @@ /** * The annotation is to indicate whether the field is a simple field. This annotation can only set boolean field of - * {@link Field}. {@code isSearchable} will set to {@code false}. + * {@link SearchField}. {@code isSearchable} will set to {@code false}. 
*/ @Target({ElementType.FIELD}) @Retention(RetentionPolicy.RUNTIME) diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/AnalyzeRequest.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/AnalyzeRequest.java index cb93715e2d07..389760003461 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/AnalyzeRequest.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/AnalyzeRequest.java @@ -48,7 +48,7 @@ public final class AnalyzeRequest { * 'Simple', 'Stop', 'Whitespace' */ @JsonProperty(value = "analyzer") - private AnalyzerName analyzer; + private LexicalAnalyzerName analyzer; /* * The name of the tokenizer to use to break the given text. If this @@ -60,7 +60,7 @@ public final class AnalyzeRequest { * 'Pattern', 'Standard', 'UaxUrlEmail', 'Whitespace' */ @JsonProperty(value = "tokenizer") - private TokenizerName tokenizer; + private LexicalTokenizerName tokenizer; /* * An optional list of token filters to use when breaking the given text. @@ -124,7 +124,7 @@ public AnalyzeRequest setText(String text) { * * @return the analyzer value. */ - public AnalyzerName getAnalyzer() { + public LexicalAnalyzerName getAnalyzer() { return this.analyzer; } @@ -157,7 +157,7 @@ public AnalyzerName getAnalyzer() { * @param analyzer the analyzer value to set. * @return the AnalyzeRequest object itself. */ - public AnalyzeRequest setAnalyzer(AnalyzerName analyzer) { + public AnalyzeRequest setAnalyzer(LexicalAnalyzerName analyzer) { this.analyzer = analyzer; return this; } @@ -173,7 +173,7 @@ public AnalyzeRequest setAnalyzer(AnalyzerName analyzer) { * * @return the tokenizer value. */ - public TokenizerName getTokenizer() { + public LexicalTokenizerName getTokenizer() { return this.tokenizer; } @@ -189,7 +189,7 @@ public TokenizerName getTokenizer() { * @param tokenizer the tokenizer value to set. 
* @return the AnalyzeRequest object itself. */ - public AnalyzeRequest setTokenizer(TokenizerName tokenizer) { + public AnalyzeRequest setTokenizer(LexicalTokenizerName tokenizer) { this.tokenizer = tokenizer; return this; } diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/TokenInfo.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/AnalyzedTokenInfo.java similarity index 98% rename from sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/TokenInfo.java rename to sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/AnalyzedTokenInfo.java index 894dc4bd5e6c..66b6343923f1 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/TokenInfo.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/AnalyzedTokenInfo.java @@ -13,7 +13,7 @@ * Information about a token returned by an analyzer. */ @Fluent -public final class TokenInfo { +public final class AnalyzedTokenInfo { /* * The token returned by the analyzer. */ diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/AnalyzerName.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/AnalyzerName.java deleted file mode 100644 index 86a0d0395043..000000000000 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/AnalyzerName.java +++ /dev/null @@ -1,499 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. -// Changes may cause incorrect behavior and will be lost if the code is -// regenerated. 
- -package com.azure.search.documents.models; - -import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; -import java.util.Collection; - -/** - * Defines values for AnalyzerName. - */ -public final class AnalyzerName extends ExpandableStringEnum { - /** - * Static value ar.microsoft for AnalyzerName. - */ - public static final AnalyzerName AR_MICROSOFT = fromString("ar.microsoft"); - - /** - * Static value ar.lucene for AnalyzerName. - */ - public static final AnalyzerName AR_LUCENE = fromString("ar.lucene"); - - /** - * Static value hy.lucene for AnalyzerName. - */ - public static final AnalyzerName HY_LUCENE = fromString("hy.lucene"); - - /** - * Static value bn.microsoft for AnalyzerName. - */ - public static final AnalyzerName BN_MICROSOFT = fromString("bn.microsoft"); - - /** - * Static value eu.lucene for AnalyzerName. - */ - public static final AnalyzerName EU_LUCENE = fromString("eu.lucene"); - - /** - * Static value bg.microsoft for AnalyzerName. - */ - public static final AnalyzerName BG_MICROSOFT = fromString("bg.microsoft"); - - /** - * Static value bg.lucene for AnalyzerName. - */ - public static final AnalyzerName BG_LUCENE = fromString("bg.lucene"); - - /** - * Static value ca.microsoft for AnalyzerName. - */ - public static final AnalyzerName CA_MICROSOFT = fromString("ca.microsoft"); - - /** - * Static value ca.lucene for AnalyzerName. - */ - public static final AnalyzerName CA_LUCENE = fromString("ca.lucene"); - - /** - * Static value zh-Hans.microsoft for AnalyzerName. - */ - public static final AnalyzerName ZH_HANS_MICROSOFT = fromString("zh-Hans.microsoft"); - - /** - * Static value zh-Hans.lucene for AnalyzerName. - */ - public static final AnalyzerName ZH_HANS_LUCENE = fromString("zh-Hans.lucene"); - - /** - * Static value zh-Hant.microsoft for AnalyzerName. 
- */ - public static final AnalyzerName ZH_HANT_MICROSOFT = fromString("zh-Hant.microsoft"); - - /** - * Static value zh-Hant.lucene for AnalyzerName. - */ - public static final AnalyzerName ZH_HANT_LUCENE = fromString("zh-Hant.lucene"); - - /** - * Static value hr.microsoft for AnalyzerName. - */ - public static final AnalyzerName HR_MICROSOFT = fromString("hr.microsoft"); - - /** - * Static value cs.microsoft for AnalyzerName. - */ - public static final AnalyzerName CS_MICROSOFT = fromString("cs.microsoft"); - - /** - * Static value cs.lucene for AnalyzerName. - */ - public static final AnalyzerName CS_LUCENE = fromString("cs.lucene"); - - /** - * Static value da.microsoft for AnalyzerName. - */ - public static final AnalyzerName DA_MICROSOFT = fromString("da.microsoft"); - - /** - * Static value da.lucene for AnalyzerName. - */ - public static final AnalyzerName DA_LUCENE = fromString("da.lucene"); - - /** - * Static value nl.microsoft for AnalyzerName. - */ - public static final AnalyzerName NL_MICROSOFT = fromString("nl.microsoft"); - - /** - * Static value nl.lucene for AnalyzerName. - */ - public static final AnalyzerName NL_LUCENE = fromString("nl.lucene"); - - /** - * Static value en.microsoft for AnalyzerName. - */ - public static final AnalyzerName EN_MICROSOFT = fromString("en.microsoft"); - - /** - * Static value en.lucene for AnalyzerName. - */ - public static final AnalyzerName EN_LUCENE = fromString("en.lucene"); - - /** - * Static value et.microsoft for AnalyzerName. - */ - public static final AnalyzerName ET_MICROSOFT = fromString("et.microsoft"); - - /** - * Static value fi.microsoft for AnalyzerName. - */ - public static final AnalyzerName FI_MICROSOFT = fromString("fi.microsoft"); - - /** - * Static value fi.lucene for AnalyzerName. - */ - public static final AnalyzerName FI_LUCENE = fromString("fi.lucene"); - - /** - * Static value fr.microsoft for AnalyzerName. 
- */ - public static final AnalyzerName FR_MICROSOFT = fromString("fr.microsoft"); - - /** - * Static value fr.lucene for AnalyzerName. - */ - public static final AnalyzerName FR_LUCENE = fromString("fr.lucene"); - - /** - * Static value gl.lucene for AnalyzerName. - */ - public static final AnalyzerName GL_LUCENE = fromString("gl.lucene"); - - /** - * Static value de.microsoft for AnalyzerName. - */ - public static final AnalyzerName DE_MICROSOFT = fromString("de.microsoft"); - - /** - * Static value de.lucene for AnalyzerName. - */ - public static final AnalyzerName DE_LUCENE = fromString("de.lucene"); - - /** - * Static value el.microsoft for AnalyzerName. - */ - public static final AnalyzerName EL_MICROSOFT = fromString("el.microsoft"); - - /** - * Static value el.lucene for AnalyzerName. - */ - public static final AnalyzerName EL_LUCENE = fromString("el.lucene"); - - /** - * Static value gu.microsoft for AnalyzerName. - */ - public static final AnalyzerName GU_MICROSOFT = fromString("gu.microsoft"); - - /** - * Static value he.microsoft for AnalyzerName. - */ - public static final AnalyzerName HE_MICROSOFT = fromString("he.microsoft"); - - /** - * Static value hi.microsoft for AnalyzerName. - */ - public static final AnalyzerName HI_MICROSOFT = fromString("hi.microsoft"); - - /** - * Static value hi.lucene for AnalyzerName. - */ - public static final AnalyzerName HI_LUCENE = fromString("hi.lucene"); - - /** - * Static value hu.microsoft for AnalyzerName. - */ - public static final AnalyzerName HU_MICROSOFT = fromString("hu.microsoft"); - - /** - * Static value hu.lucene for AnalyzerName. - */ - public static final AnalyzerName HU_LUCENE = fromString("hu.lucene"); - - /** - * Static value is.microsoft for AnalyzerName. - */ - public static final AnalyzerName IS_MICROSOFT = fromString("is.microsoft"); - - /** - * Static value id.microsoft for AnalyzerName. 
- */ - public static final AnalyzerName ID_MICROSOFT = fromString("id.microsoft"); - - /** - * Static value id.lucene for AnalyzerName. - */ - public static final AnalyzerName ID_LUCENE = fromString("id.lucene"); - - /** - * Static value ga.lucene for AnalyzerName. - */ - public static final AnalyzerName GA_LUCENE = fromString("ga.lucene"); - - /** - * Static value it.microsoft for AnalyzerName. - */ - public static final AnalyzerName IT_MICROSOFT = fromString("it.microsoft"); - - /** - * Static value it.lucene for AnalyzerName. - */ - public static final AnalyzerName IT_LUCENE = fromString("it.lucene"); - - /** - * Static value ja.microsoft for AnalyzerName. - */ - public static final AnalyzerName JA_MICROSOFT = fromString("ja.microsoft"); - - /** - * Static value ja.lucene for AnalyzerName. - */ - public static final AnalyzerName JA_LUCENE = fromString("ja.lucene"); - - /** - * Static value kn.microsoft for AnalyzerName. - */ - public static final AnalyzerName KN_MICROSOFT = fromString("kn.microsoft"); - - /** - * Static value ko.microsoft for AnalyzerName. - */ - public static final AnalyzerName KO_MICROSOFT = fromString("ko.microsoft"); - - /** - * Static value ko.lucene for AnalyzerName. - */ - public static final AnalyzerName KO_LUCENE = fromString("ko.lucene"); - - /** - * Static value lv.microsoft for AnalyzerName. - */ - public static final AnalyzerName LV_MICROSOFT = fromString("lv.microsoft"); - - /** - * Static value lv.lucene for AnalyzerName. - */ - public static final AnalyzerName LV_LUCENE = fromString("lv.lucene"); - - /** - * Static value lt.microsoft for AnalyzerName. - */ - public static final AnalyzerName LT_MICROSOFT = fromString("lt.microsoft"); - - /** - * Static value ml.microsoft for AnalyzerName. - */ - public static final AnalyzerName ML_MICROSOFT = fromString("ml.microsoft"); - - /** - * Static value ms.microsoft for AnalyzerName. 
- */ - public static final AnalyzerName MS_MICROSOFT = fromString("ms.microsoft"); - - /** - * Static value mr.microsoft for AnalyzerName. - */ - public static final AnalyzerName MR_MICROSOFT = fromString("mr.microsoft"); - - /** - * Static value nb.microsoft for AnalyzerName. - */ - public static final AnalyzerName NB_MICROSOFT = fromString("nb.microsoft"); - - /** - * Static value no.lucene for AnalyzerName. - */ - public static final AnalyzerName NO_LUCENE = fromString("no.lucene"); - - /** - * Static value fa.lucene for AnalyzerName. - */ - public static final AnalyzerName FA_LUCENE = fromString("fa.lucene"); - - /** - * Static value pl.microsoft for AnalyzerName. - */ - public static final AnalyzerName PL_MICROSOFT = fromString("pl.microsoft"); - - /** - * Static value pl.lucene for AnalyzerName. - */ - public static final AnalyzerName PL_LUCENE = fromString("pl.lucene"); - - /** - * Static value pt-BR.microsoft for AnalyzerName. - */ - public static final AnalyzerName PT_BR_MICROSOFT = fromString("pt-BR.microsoft"); - - /** - * Static value pt-BR.lucene for AnalyzerName. - */ - public static final AnalyzerName PT_BR_LUCENE = fromString("pt-BR.lucene"); - - /** - * Static value pt-PT.microsoft for AnalyzerName. - */ - public static final AnalyzerName PT_PT_MICROSOFT = fromString("pt-PT.microsoft"); - - /** - * Static value pt-PT.lucene for AnalyzerName. - */ - public static final AnalyzerName PT_PT_LUCENE = fromString("pt-PT.lucene"); - - /** - * Static value pa.microsoft for AnalyzerName. - */ - public static final AnalyzerName PA_MICROSOFT = fromString("pa.microsoft"); - - /** - * Static value ro.microsoft for AnalyzerName. - */ - public static final AnalyzerName RO_MICROSOFT = fromString("ro.microsoft"); - - /** - * Static value ro.lucene for AnalyzerName. - */ - public static final AnalyzerName RO_LUCENE = fromString("ro.lucene"); - - /** - * Static value ru.microsoft for AnalyzerName. 
- */ - public static final AnalyzerName RU_MICROSOFT = fromString("ru.microsoft"); - - /** - * Static value ru.lucene for AnalyzerName. - */ - public static final AnalyzerName RU_LUCENE = fromString("ru.lucene"); - - /** - * Static value sr-cyrillic.microsoft for AnalyzerName. - */ - public static final AnalyzerName SR_CYRILLIC_MICROSOFT = fromString("sr-cyrillic.microsoft"); - - /** - * Static value sr-latin.microsoft for AnalyzerName. - */ - public static final AnalyzerName SR_LATIN_MICROSOFT = fromString("sr-latin.microsoft"); - - /** - * Static value sk.microsoft for AnalyzerName. - */ - public static final AnalyzerName SK_MICROSOFT = fromString("sk.microsoft"); - - /** - * Static value sl.microsoft for AnalyzerName. - */ - public static final AnalyzerName SL_MICROSOFT = fromString("sl.microsoft"); - - /** - * Static value es.microsoft for AnalyzerName. - */ - public static final AnalyzerName ES_MICROSOFT = fromString("es.microsoft"); - - /** - * Static value es.lucene for AnalyzerName. - */ - public static final AnalyzerName ES_LUCENE = fromString("es.lucene"); - - /** - * Static value sv.microsoft for AnalyzerName. - */ - public static final AnalyzerName SV_MICROSOFT = fromString("sv.microsoft"); - - /** - * Static value sv.lucene for AnalyzerName. - */ - public static final AnalyzerName SV_LUCENE = fromString("sv.lucene"); - - /** - * Static value ta.microsoft for AnalyzerName. - */ - public static final AnalyzerName TA_MICROSOFT = fromString("ta.microsoft"); - - /** - * Static value te.microsoft for AnalyzerName. - */ - public static final AnalyzerName TE_MICROSOFT = fromString("te.microsoft"); - - /** - * Static value th.microsoft for AnalyzerName. - */ - public static final AnalyzerName TH_MICROSOFT = fromString("th.microsoft"); - - /** - * Static value th.lucene for AnalyzerName. - */ - public static final AnalyzerName TH_LUCENE = fromString("th.lucene"); - - /** - * Static value tr.microsoft for AnalyzerName. 
- */ - public static final AnalyzerName TR_MICROSOFT = fromString("tr.microsoft"); - - /** - * Static value tr.lucene for AnalyzerName. - */ - public static final AnalyzerName TR_LUCENE = fromString("tr.lucene"); - - /** - * Static value uk.microsoft for AnalyzerName. - */ - public static final AnalyzerName UK_MICROSOFT = fromString("uk.microsoft"); - - /** - * Static value ur.microsoft for AnalyzerName. - */ - public static final AnalyzerName UR_MICROSOFT = fromString("ur.microsoft"); - - /** - * Static value vi.microsoft for AnalyzerName. - */ - public static final AnalyzerName VI_MICROSOFT = fromString("vi.microsoft"); - - /** - * Static value standard.lucene for AnalyzerName. - */ - public static final AnalyzerName STANDARD_LUCENE = fromString("standard.lucene"); - - /** - * Static value standardasciifolding.lucene for AnalyzerName. - */ - public static final AnalyzerName STANDARD_ASCII_FOLDING_LUCENE = fromString("standardasciifolding.lucene"); - - /** - * Static value keyword for AnalyzerName. - */ - public static final AnalyzerName KEYWORD = fromString("keyword"); - - /** - * Static value pattern for AnalyzerName. - */ - public static final AnalyzerName PATTERN = fromString("pattern"); - - /** - * Static value simple for AnalyzerName. - */ - public static final AnalyzerName SIMPLE = fromString("simple"); - - /** - * Static value stop for AnalyzerName. - */ - public static final AnalyzerName STOP = fromString("stop"); - - /** - * Static value whitespace for AnalyzerName. - */ - public static final AnalyzerName WHITESPACE = fromString("whitespace"); - - /** - * Creates or finds a AnalyzerName from its string representation. - * - * @param name a name to look for. - * @return the corresponding AnalyzerName. - */ - @JsonCreator - public static AnalyzerName fromString(String name) { - return fromString(name, AnalyzerName.class); - } - - /** - * @return known AnalyzerName values. 
- */ - public static Collection values() { - return values(AnalyzerName.class); - } -} diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/AutocompleteOptions.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/AutocompleteOptions.java index b4ce9934efa0..553e8ccfdf36 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/AutocompleteOptions.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/AutocompleteOptions.java @@ -20,7 +20,7 @@ public final class AutocompleteOptions { * Specifies the mode for Autocomplete. The default is 'oneTerm'. Use * 'twoTerms' to get shingles and 'oneTermWithContext' to use the current * context while producing auto-completed terms. Possible values include: - * 'oneTerm', 'twoTerms', 'oneTermWithContext' + * 'OneTerm', 'TwoTerms', 'OneTermWithContext' */ @JsonProperty(value = "") private AutocompleteMode autocompleteMode; @@ -84,8 +84,8 @@ public final class AutocompleteOptions { * Get the autocompleteMode property: Specifies the mode for Autocomplete. * The default is 'oneTerm'. Use 'twoTerms' to get shingles and * 'oneTermWithContext' to use the current context while producing - * auto-completed terms. Possible values include: 'oneTerm', 'twoTerms', - * 'oneTermWithContext'. + * auto-completed terms. Possible values include: 'OneTerm', 'TwoTerms', + * 'OneTermWithContext'. * * @return the autocompleteMode value. */ @@ -97,8 +97,8 @@ public AutocompleteMode getAutocompleteMode() { * Set the autocompleteMode property: Specifies the mode for Autocomplete. * The default is 'oneTerm'. Use 'twoTerms' to get shingles and * 'oneTermWithContext' to use the current context while producing - * auto-completed terms. Possible values include: 'oneTerm', 'twoTerms', - * 'oneTermWithContext'. + * auto-completed terms. Possible values include: 'OneTerm', 'TwoTerms', + * 'OneTermWithContext'. 
* * @param autocompleteMode the autocompleteMode value to set. * @return the AutocompleteOptions object itself. diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/CjkBigramTokenFilter.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/CjkBigramTokenFilter.java index d508794c66df..6bc03a81f7a4 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/CjkBigramTokenFilter.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/CjkBigramTokenFilter.java @@ -13,8 +13,8 @@ import java.util.List; /** - * Forms bigrams of CJK terms that are generated from StandardTokenizer. This - * token filter is implemented using Apache Lucene. + * Forms bigrams of CJK terms that are generated from the standard tokenizer. + * This token filter is implemented using Apache Lucene. */ @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "@odata.type") @JsonTypeName("#Microsoft.Azure.Search.CjkBigramTokenFilter") diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/ClassicTokenizer.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/ClassicTokenizer.java index 87e6af4cce3f..16e2e6122de3 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/ClassicTokenizer.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/ClassicTokenizer.java @@ -19,7 +19,7 @@ @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "@odata.type") @JsonTypeName("#Microsoft.Azure.Search.ClassicTokenizer") @Fluent -public final class ClassicTokenizer extends Tokenizer { +public final class ClassicTokenizer extends LexicalTokenizer { /* * The maximum token length. Default is 255. Tokens longer than the maximum * length are split. 
The maximum token length that can be used is 300 diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/ComplexField.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/ComplexSearchField.java similarity index 54% rename from sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/ComplexField.java rename to sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/ComplexSearchField.java index 1c9297785a17..52115495b851 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/ComplexField.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/ComplexSearchField.java @@ -6,49 +6,50 @@ import java.util.List; /** - * A helper Field model to build a complex field which uses {@code DataType.EDM_COMPLEX_TYPE} or collection of - * {@code DataType.EDM_COMPLEX_TYPE}. + * A helper Field model to build a complex field which uses {@code SearchFieldDataType.EDM_COMPLEX_TYPE} or collection of + * {@code SearchFieldDataType.EDM_COMPLEX_TYPE}. */ -public class ComplexField extends FieldBase { - private List fields; +public class ComplexSearchField extends SearchFieldBase { + private List fields; /** - * Initializes a new instance of the {@link ComplexField} class. + * Initializes a new instance of the {@link ComplexSearchField} class. * * @param name The name of the field, which must be unique within the index or parent field. * @param collection Whether the field is a collection of strings. */ - public ComplexField(String name, boolean collection) { - super(name, collection ? DataType.collection(DataType.EDM_COMPLEX_TYPE) : DataType.EDM_COMPLEX_TYPE); + public ComplexSearchField(String name, boolean collection) { + super(name, collection ? 
SearchFieldDataType.collection(SearchFieldDataType.COMPLEX) : + SearchFieldDataType.COMPLEX); } /** - * Gets a collection of {@link SimpleField} or {@link ComplexField} child fields. + * Gets a collection of {@link SimpleField} or {@link ComplexSearchField} child fields. * * @return The list of sub-fields. */ - public List getFields() { + public List getFields() { return fields; } /** - * Sets a collection of {@link SimpleField} or {@link ComplexField} child fields. + * Sets a collection of {@link SimpleField} or {@link ComplexSearchField} child fields. * * @param fields The list of sub-fields. - * @return The {@link ComplexField} object itself. + * @return The {@link ComplexSearchField} object itself. */ - public ComplexField setFields(List fields) { + public ComplexSearchField setFields(List fields) { this.fields = fields; return this; } /** - * Convert ComplexField to {@link Field}. + * Convert ComplexField to {@link SearchField}. * - * @return The {@link Field} object. + * @return The {@link SearchField} object. 
*/ - public Field build() { - return new Field().setName(super.getName()) + public SearchField build() { + return new SearchField().setName(super.getName()) .setType(super.getDataType()) .setFields(fields) .setKey(false) diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/ConditionalSkill.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/ConditionalSkill.java index 3aa356d043d2..626f5db67744 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/ConditionalSkill.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/ConditionalSkill.java @@ -17,5 +17,5 @@ @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "@odata.type") @JsonTypeName("#Microsoft.Skills.Util.ConditionalSkill") @Fluent -public final class ConditionalSkill extends Skill { +public final class ConditionalSkill extends SearchIndexerSkill { } diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/CustomAnalyzer.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/CustomAnalyzer.java index 5e86b5b2d978..306b99b27b31 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/CustomAnalyzer.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/CustomAnalyzer.java @@ -22,7 +22,7 @@ @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "@odata.type") @JsonTypeName("#Microsoft.Azure.Search.CustomAnalyzer") @Fluent -public final class CustomAnalyzer extends Analyzer { +public final class CustomAnalyzer extends LexicalAnalyzer { /* * The name of the tokenizer to use to divide continuous text into a * sequence of tokens, such as breaking a sentence into words. 
Possible @@ -32,7 +32,7 @@ public final class CustomAnalyzer extends Analyzer { * 'Pattern', 'Standard', 'UaxUrlEmail', 'Whitespace' */ @JsonProperty(value = "tokenizer", required = true) - private TokenizerName tokenizer; + private LexicalTokenizerName tokenizer; /* * A list of token filters used to filter out or modify the tokens @@ -62,7 +62,7 @@ public final class CustomAnalyzer extends Analyzer { * * @return the tokenizer value. */ - public TokenizerName getTokenizer() { + public LexicalTokenizerName getTokenizer() { return this.tokenizer; } @@ -77,7 +77,7 @@ public TokenizerName getTokenizer() { * @param tokenizer the tokenizer value to set. * @return the CustomAnalyzer object itself. */ - public CustomAnalyzer setTokenizer(TokenizerName tokenizer) { + public CustomAnalyzer setTokenizer(LexicalTokenizerName tokenizer) { this.tokenizer = tokenizer; return this; } diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/DataSourceType.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/DataSourceType.java deleted file mode 100644 index e7ac0a12c21b..000000000000 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/DataSourceType.java +++ /dev/null @@ -1,59 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. -// Changes may cause incorrect behavior and will be lost if the code is -// regenerated. - -package com.azure.search.documents.models; - -import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; -import java.util.Collection; - -/** - * Defines values for DataSourceType. - */ -public final class DataSourceType extends ExpandableStringEnum { - /** - * Static value azuresql for DataSourceType. 
- */ - public static final DataSourceType AZURE_SQL = fromString("azuresql"); - - /** - * Static value cosmosdb for DataSourceType. - */ - public static final DataSourceType COSMOS = fromString("cosmosdb"); - - /** - * Static value azureblob for DataSourceType. - */ - public static final DataSourceType AZURE_BLOB = fromString("azureblob"); - - /** - * Static value azuretable for DataSourceType. - */ - public static final DataSourceType AZURE_TABLE = fromString("azuretable"); - - /** - * Static value mysql for DataSourceType. - */ - public static final DataSourceType MY_SQL = fromString("mysql"); - - /** - * Creates or finds a DataSourceType from its string representation. - * - * @param name a name to look for. - * @return the corresponding DataSourceType. - */ - @JsonCreator - public static DataSourceType fromString(String name) { - return fromString(name, DataSourceType.class); - } - - /** - * @return known DataSourceType values. - */ - public static Collection values() { - return values(DataSourceType.class); - } -} diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/DataType.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/DataType.java deleted file mode 100644 index 7c8025befd3c..000000000000 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/DataType.java +++ /dev/null @@ -1,84 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. -// Changes may cause incorrect behavior and will be lost if the code is -// regenerated. - -package com.azure.search.documents.models; - -import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; -import java.util.Collection; - -/** - * Defines values for DataType. 
- */ -public final class DataType extends ExpandableStringEnum { - /** - * Static value Edm.String for DataType. - */ - public static final DataType EDM_STRING = fromString("Edm.String"); - - /** - * Static value Edm.Int32 for DataType. - */ - public static final DataType EDM_INT32 = fromString("Edm.Int32"); - - /** - * Static value Edm.Int64 for DataType. - */ - public static final DataType EDM_INT64 = fromString("Edm.Int64"); - - /** - * Static value Edm.Double for DataType. - */ - public static final DataType EDM_DOUBLE = fromString("Edm.Double"); - - /** - * Static value Edm.Boolean for DataType. - */ - public static final DataType EDM_BOOLEAN = fromString("Edm.Boolean"); - - /** - * Static value Edm.DateTimeOffset for DataType. - */ - public static final DataType EDM_DATE_TIME_OFFSET = fromString("Edm.DateTimeOffset"); - - /** - * Static value Edm.GeographyPoint for DataType. - */ - public static final DataType EDM_GEOGRAPHY_POINT = fromString("Edm.GeographyPoint"); - - /** - * Static value Edm.ComplexType for DataType. - */ - public static final DataType EDM_COMPLEX_TYPE = fromString("Edm.ComplexType"); - - /** - * Returns a collection of a specific DataType - * @param dataType the corresponding DataType - * @return a Collection of the corresponding DataType - */ - @JsonCreator - public static DataType collection(DataType dataType) { - return fromString(String.format("Collection(%s)", dataType.toString())); - } - - /** - * Creates or finds a DataType from its string representation. - * - * @param name a name to look for. - * @return the corresponding DataType. - */ - @JsonCreator - public static DataType fromString(String name) { - return fromString(name, DataType.class); - } - - /** - * @return known DataType values. 
- */ - public static Collection values() { - return values(DataType.class); - } -} diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/EdgeNGramTokenFilter.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/EdgeNGramTokenFilter.java index 28b345acea73..4d95601707ea 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/EdgeNGramTokenFilter.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/EdgeNGramTokenFilter.java @@ -34,7 +34,7 @@ public final class EdgeNGramTokenFilter extends TokenFilter { /* * Specifies which side of the input the n-gram should be generated from. - * Default is "front". Possible values include: 'front', 'back' + * Default is "front". Possible values include: 'Front', 'Back' */ @JsonProperty(value = "side") private EdgeNGramTokenFilterSide side; @@ -84,7 +84,7 @@ public EdgeNGramTokenFilter setMaxGram(Integer maxGram) { /** * Get the side property: Specifies which side of the input the n-gram * should be generated from. Default is "front". Possible values include: - * 'front', 'back'. + * 'Front', 'Back'. * * @return the side value. */ @@ -95,7 +95,7 @@ public EdgeNGramTokenFilterSide getSide() { /** * Set the side property: Specifies which side of the input the n-gram * should be generated from. Default is "front". Possible values include: - * 'front', 'back'. + * 'Front', 'Back'. * * @param side the side value to set. * @return the EdgeNGramTokenFilter object itself. 
diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/EdgeNGramTokenFilterV2.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/EdgeNGramTokenFilterV2.java index 4a712897c01b..9e851770f264 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/EdgeNGramTokenFilterV2.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/EdgeNGramTokenFilterV2.java @@ -34,7 +34,7 @@ public final class EdgeNGramTokenFilterV2 extends TokenFilter { /* * Specifies which side of the input the n-gram should be generated from. - * Default is "front". Possible values include: 'front', 'back' + * Default is "front". Possible values include: 'Front', 'Back' */ @JsonProperty(value = "side") private EdgeNGramTokenFilterSide side; @@ -86,7 +86,7 @@ public EdgeNGramTokenFilterV2 setMaxGram(Integer maxGram) { /** * Get the side property: Specifies which side of the input the n-gram * should be generated from. Default is "front". Possible values include: - * 'front', 'back'. + * 'Front', 'Back'. * * @return the side value. */ @@ -97,7 +97,7 @@ public EdgeNGramTokenFilterSide getSide() { /** * Set the side property: Specifies which side of the input the n-gram * should be generated from. Default is "front". Possible values include: - * 'front', 'back'. + * 'Front', 'Back'. * * @param side the side value to set. * @return the EdgeNGramTokenFilterV2 object itself. 
diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/EdgeNGramTokenizer.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/EdgeNGramTokenizer.java index 0914f5b93e21..ed7a07a02142 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/EdgeNGramTokenizer.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/EdgeNGramTokenizer.java @@ -19,7 +19,7 @@ @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "@odata.type") @JsonTypeName("#Microsoft.Azure.Search.EdgeNGramTokenizer") @Fluent -public final class EdgeNGramTokenizer extends Tokenizer { +public final class EdgeNGramTokenizer extends LexicalTokenizer { /* * The minimum n-gram length. Default is 1. Maximum is 300. Must be less * than the value of maxGram. diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/EntityRecognitionSkill.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/EntityRecognitionSkill.java index 5c6629f6e344..524a6818bc08 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/EntityRecognitionSkill.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/EntityRecognitionSkill.java @@ -18,7 +18,7 @@ @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "@odata.type") @JsonTypeName("#Microsoft.Skills.Text.EntityRecognitionSkill") @Fluent -public final class EntityRecognitionSkill extends Skill { +public final class EntityRecognitionSkill extends SearchIndexerSkill { /* * A list of entity categories that should be extracted. 
*/ diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/ImageAnalysisSkill.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/ImageAnalysisSkill.java index 4420e786e07d..40efce99380b 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/ImageAnalysisSkill.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/ImageAnalysisSkill.java @@ -19,7 +19,7 @@ @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "@odata.type") @JsonTypeName("#Microsoft.Skills.Vision.ImageAnalysisSkill") @Fluent -public final class ImageAnalysisSkill extends Skill { +public final class ImageAnalysisSkill extends SearchIndexerSkill { /* * A value indicating which language code to use. Default is en. Possible * values include: 'en', 'es', 'ja', 'pt', 'zh' diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/IndexAction.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/IndexAction.java index 3e393b683158..92ab65aafb13 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/IndexAction.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/IndexAction.java @@ -35,7 +35,7 @@ public Map getParamMap() { /* * The operation to perform on a document in an indexing batch. Possible - * values include: 'upload', 'merge', 'mergeOrUpload', 'delete' + * values include: 'Upload', 'Merge', 'MergeOrUpload', 'Delete' */ @JsonProperty(value = "@search.action") private IndexActionType actionType; @@ -69,8 +69,8 @@ public IndexAction setDocument(T document) { /** * Get the actionType property: The operation to perform on a document in - * an indexing batch. Possible values include: 'upload', 'merge', - * 'mergeOrUpload', 'delete'. + * an indexing batch. 
Possible values include: 'Upload', 'Merge', + * 'MergeOrUpload', 'Delete'. * * @return the actionType value. */ @@ -80,8 +80,8 @@ public IndexActionType getActionType() { /** * Set the actionType property: The operation to perform on a document in - * an indexing batch. Possible values include: 'upload', 'merge', - * 'mergeOrUpload', 'delete'. + * an indexing batch. Possible values include: 'Upload', 'Merge', + * 'MergeOrUpload', 'Delete'. * * @param actionType the actionType value to set. * @return the IndexAction object itself. diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/IndexerExecutionResult.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/IndexerExecutionResult.java index 18de01579f12..2091822bfcaf 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/IndexerExecutionResult.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/IndexerExecutionResult.java @@ -18,7 +18,7 @@ public final class IndexerExecutionResult { /* * The outcome of this indexer execution. Possible values include: - * 'transientFailure', 'success', 'inProgress', 'reset' + * 'TransientFailure', 'Success', 'InProgress', 'Reset' */ @JsonProperty(value = "status", required = true, access = JsonProperty.Access.WRITE_ONLY) private IndexerExecutionStatus status; @@ -46,13 +46,13 @@ public final class IndexerExecutionResult { * The item-level indexing errors. */ @JsonProperty(value = "errors", required = true, access = JsonProperty.Access.WRITE_ONLY) - private List errors; + private List errors; /* * The item-level indexing warnings. */ @JsonProperty(value = "warnings", required = true, access = JsonProperty.Access.WRITE_ONLY) - private List warnings; + private List warnings; /* * The number of items that were processed during this indexer execution. 
@@ -83,7 +83,7 @@ public final class IndexerExecutionResult { /** * Get the status property: The outcome of this indexer execution. Possible - * values include: 'transientFailure', 'success', 'inProgress', 'reset'. + * values include: 'TransientFailure', 'Success', 'InProgress', 'Reset'. * * @return the status value. */ @@ -125,7 +125,7 @@ public OffsetDateTime getEndTime() { * * @return the errors value. */ - public List getErrors() { + public List getErrors() { return this.errors; } @@ -134,7 +134,7 @@ public List getErrors() { * * @return the warnings value. */ - public List getWarnings() { + public List getWarnings() { return this.warnings; } diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/KeyPhraseExtractionSkill.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/KeyPhraseExtractionSkill.java index 9594ecaa8105..282e7720cecd 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/KeyPhraseExtractionSkill.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/KeyPhraseExtractionSkill.java @@ -17,7 +17,7 @@ @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "@odata.type") @JsonTypeName("#Microsoft.Skills.Text.KeyPhraseExtractionSkill") @Fluent -public final class KeyPhraseExtractionSkill extends Skill { +public final class KeyPhraseExtractionSkill extends SearchIndexerSkill { /* * A value indicating which language code to use. Default is en. 
Possible * values include: 'da', 'nl', 'en', 'fi', 'fr', 'de', 'it', 'ja', 'ko', diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/KeywordTokenizer.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/KeywordTokenizer.java index a465ad94f154..6fc939f54759 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/KeywordTokenizer.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/KeywordTokenizer.java @@ -18,7 +18,7 @@ @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "@odata.type") @JsonTypeName("#Microsoft.Azure.Search.KeywordTokenizer") @Fluent -public final class KeywordTokenizer extends Tokenizer { +public final class KeywordTokenizer extends LexicalTokenizer { /* * The read buffer size in bytes. Default is 256. */ diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/KeywordTokenizerV2.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/KeywordTokenizerV2.java index c4d1fbea12cf..e0f3c1c4dd68 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/KeywordTokenizerV2.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/KeywordTokenizerV2.java @@ -18,7 +18,7 @@ @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "@odata.type") @JsonTypeName("#Microsoft.Azure.Search.KeywordTokenizerV2") @Fluent -public final class KeywordTokenizerV2 extends Tokenizer { +public final class KeywordTokenizerV2 extends LexicalTokenizer { /* * The maximum token length. Default is 256. Tokens longer than the maximum * length are split. 
The maximum token length that can be used is 300 diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/LanguageDetectionSkill.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/LanguageDetectionSkill.java index 92088f2d351a..3ae754b54a95 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/LanguageDetectionSkill.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/LanguageDetectionSkill.java @@ -18,5 +18,5 @@ @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "@odata.type") @JsonTypeName("#Microsoft.Skills.Text.LanguageDetectionSkill") @Fluent -public final class LanguageDetectionSkill extends Skill { +public final class LanguageDetectionSkill extends SearchIndexerSkill { } diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/LengthTokenFilter.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/LengthTokenFilter.java index c7c8e809df8d..4af9c781e2ab 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/LengthTokenFilter.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/LengthTokenFilter.java @@ -24,55 +24,55 @@ public final class LengthTokenFilter extends TokenFilter { * less than the value of max. */ @JsonProperty(value = "min") - private Integer min; + private Integer minLength; /* * The maximum length in characters. Default and maximum is 300. */ @JsonProperty(value = "max") - private Integer max; + private Integer maxLength; /** - * Get the min property: The minimum length in characters. Default is 0. - * Maximum is 300. Must be less than the value of max. + * Get the minLength property: The minimum length in characters. Default is + * 0. Maximum is 300. Must be less than the value of max. 
* - * @return the min value. + * @return the minLength value. */ - public Integer getMin() { - return this.min; + public Integer getMinLength() { + return this.minLength; } /** - * Set the min property: The minimum length in characters. Default is 0. - * Maximum is 300. Must be less than the value of max. + * Set the minLength property: The minimum length in characters. Default is + * 0. Maximum is 300. Must be less than the value of max. * - * @param min the min value to set. + * @param minLength the minLength value to set. * @return the LengthTokenFilter object itself. */ - public LengthTokenFilter setMin(Integer min) { - this.min = min; + public LengthTokenFilter setMinLength(Integer minLength) { + this.minLength = minLength; return this; } /** - * Get the max property: The maximum length in characters. Default and - * maximum is 300. + * Get the maxLength property: The maximum length in characters. Default + * and maximum is 300. * - * @return the max value. + * @return the maxLength value. */ - public Integer getMax() { - return this.max; + public Integer getMaxLength() { + return this.maxLength; } /** - * Set the max property: The maximum length in characters. Default and - * maximum is 300. + * Set the maxLength property: The maximum length in characters. Default + * and maximum is 300. * - * @param max the max value to set. + * @param maxLength the maxLength value to set. * @return the LengthTokenFilter object itself. 
*/ - public LengthTokenFilter setMax(Integer max) { - this.max = max; + public LengthTokenFilter setMaxLength(Integer maxLength) { + this.maxLength = maxLength; return this; } } diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/Analyzer.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/LexicalAnalyzer.java similarity index 87% rename from sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/Analyzer.java rename to sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/LexicalAnalyzer.java index b502dbd8ac39..d9317cd74cc1 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/Analyzer.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/LexicalAnalyzer.java @@ -15,16 +15,16 @@ /** * Base type for analyzers. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "@odata.type", defaultImpl = Analyzer.class) -@JsonTypeName("Analyzer") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "@odata.type", defaultImpl = LexicalAnalyzer.class) +@JsonTypeName("LexicalAnalyzer") @JsonSubTypes({ @JsonSubTypes.Type(name = "#Microsoft.Azure.Search.CustomAnalyzer", value = CustomAnalyzer.class), @JsonSubTypes.Type(name = "#Microsoft.Azure.Search.PatternAnalyzer", value = PatternAnalyzer.class), - @JsonSubTypes.Type(name = "#Microsoft.Azure.Search.StandardAnalyzer", value = StandardAnalyzer.class), + @JsonSubTypes.Type(name = "#Microsoft.Azure.Search.StandardAnalyzer", value = LuceneStandardAnalyzer.class), @JsonSubTypes.Type(name = "#Microsoft.Azure.Search.StopAnalyzer", value = StopAnalyzer.class) }) @Fluent -public abstract class Analyzer { +public class LexicalAnalyzer { /* * The name of the analyzer. 
It must only contain letters, digits, spaces, * dashes or underscores, can only start and end with alphanumeric @@ -50,9 +50,9 @@ public String getName() { * with alphanumeric characters, and is limited to 128 characters. * * @param name the name value to set. - * @return the Analyzer object itself. + * @return the LexicalAnalyzer object itself. */ - public Analyzer setName(String name) { + public LexicalAnalyzer setName(String name) { this.name = name; return this; } diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/LexicalAnalyzerName.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/LexicalAnalyzerName.java new file mode 100644 index 000000000000..eda2a1cb386a --- /dev/null +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/LexicalAnalyzerName.java @@ -0,0 +1,499 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is +// regenerated. + +package com.azure.search.documents.models; + +import com.azure.core.util.ExpandableStringEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import java.util.Collection; + +/** + * Defines values for LexicalAnalyzerName. + */ +public final class LexicalAnalyzerName extends ExpandableStringEnum { + /** + * Static value ar.microsoft for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName AR_MICROSOFT = fromString("ar.microsoft"); + + /** + * Static value ar.lucene for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName AR_LUCENE = fromString("ar.lucene"); + + /** + * Static value hy.lucene for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName HY_LUCENE = fromString("hy.lucene"); + + /** + * Static value bn.microsoft for LexicalAnalyzerName. 
+ */ + public static final LexicalAnalyzerName BN_MICROSOFT = fromString("bn.microsoft"); + + /** + * Static value eu.lucene for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName EU_LUCENE = fromString("eu.lucene"); + + /** + * Static value bg.microsoft for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName BG_MICROSOFT = fromString("bg.microsoft"); + + /** + * Static value bg.lucene for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName BG_LUCENE = fromString("bg.lucene"); + + /** + * Static value ca.microsoft for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName CA_MICROSOFT = fromString("ca.microsoft"); + + /** + * Static value ca.lucene for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName CA_LUCENE = fromString("ca.lucene"); + + /** + * Static value zh-Hans.microsoft for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName ZH_HANS_MICROSOFT = fromString("zh-Hans.microsoft"); + + /** + * Static value zh-Hans.lucene for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName ZH_HANS_LUCENE = fromString("zh-Hans.lucene"); + + /** + * Static value zh-Hant.microsoft for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName ZH_HANT_MICROSOFT = fromString("zh-Hant.microsoft"); + + /** + * Static value zh-Hant.lucene for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName ZH_HANT_LUCENE = fromString("zh-Hant.lucene"); + + /** + * Static value hr.microsoft for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName HR_MICROSOFT = fromString("hr.microsoft"); + + /** + * Static value cs.microsoft for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName CS_MICROSOFT = fromString("cs.microsoft"); + + /** + * Static value cs.lucene for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName CS_LUCENE = fromString("cs.lucene"); + + /** + * Static value da.microsoft for LexicalAnalyzerName. 
+ */ + public static final LexicalAnalyzerName DA_MICROSOFT = fromString("da.microsoft"); + + /** + * Static value da.lucene for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName DA_LUCENE = fromString("da.lucene"); + + /** + * Static value nl.microsoft for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName NL_MICROSOFT = fromString("nl.microsoft"); + + /** + * Static value nl.lucene for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName NL_LUCENE = fromString("nl.lucene"); + + /** + * Static value en.microsoft for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName EN_MICROSOFT = fromString("en.microsoft"); + + /** + * Static value en.lucene for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName EN_LUCENE = fromString("en.lucene"); + + /** + * Static value et.microsoft for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName ET_MICROSOFT = fromString("et.microsoft"); + + /** + * Static value fi.microsoft for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName FI_MICROSOFT = fromString("fi.microsoft"); + + /** + * Static value fi.lucene for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName FI_LUCENE = fromString("fi.lucene"); + + /** + * Static value fr.microsoft for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName FR_MICROSOFT = fromString("fr.microsoft"); + + /** + * Static value fr.lucene for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName FR_LUCENE = fromString("fr.lucene"); + + /** + * Static value gl.lucene for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName GL_LUCENE = fromString("gl.lucene"); + + /** + * Static value de.microsoft for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName DE_MICROSOFT = fromString("de.microsoft"); + + /** + * Static value de.lucene for LexicalAnalyzerName. 
+ */ + public static final LexicalAnalyzerName DE_LUCENE = fromString("de.lucene"); + + /** + * Static value el.microsoft for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName EL_MICROSOFT = fromString("el.microsoft"); + + /** + * Static value el.lucene for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName EL_LUCENE = fromString("el.lucene"); + + /** + * Static value gu.microsoft for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName GU_MICROSOFT = fromString("gu.microsoft"); + + /** + * Static value he.microsoft for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName HE_MICROSOFT = fromString("he.microsoft"); + + /** + * Static value hi.microsoft for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName HI_MICROSOFT = fromString("hi.microsoft"); + + /** + * Static value hi.lucene for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName HI_LUCENE = fromString("hi.lucene"); + + /** + * Static value hu.microsoft for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName HU_MICROSOFT = fromString("hu.microsoft"); + + /** + * Static value hu.lucene for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName HU_LUCENE = fromString("hu.lucene"); + + /** + * Static value is.microsoft for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName IS_MICROSOFT = fromString("is.microsoft"); + + /** + * Static value id.microsoft for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName ID_MICROSOFT = fromString("id.microsoft"); + + /** + * Static value id.lucene for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName ID_LUCENE = fromString("id.lucene"); + + /** + * Static value ga.lucene for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName GA_LUCENE = fromString("ga.lucene"); + + /** + * Static value it.microsoft for LexicalAnalyzerName. 
+ */ + public static final LexicalAnalyzerName IT_MICROSOFT = fromString("it.microsoft"); + + /** + * Static value it.lucene for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName IT_LUCENE = fromString("it.lucene"); + + /** + * Static value ja.microsoft for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName JA_MICROSOFT = fromString("ja.microsoft"); + + /** + * Static value ja.lucene for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName JA_LUCENE = fromString("ja.lucene"); + + /** + * Static value kn.microsoft for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName KN_MICROSOFT = fromString("kn.microsoft"); + + /** + * Static value ko.microsoft for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName KO_MICROSOFT = fromString("ko.microsoft"); + + /** + * Static value ko.lucene for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName KO_LUCENE = fromString("ko.lucene"); + + /** + * Static value lv.microsoft for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName LV_MICROSOFT = fromString("lv.microsoft"); + + /** + * Static value lv.lucene for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName LV_LUCENE = fromString("lv.lucene"); + + /** + * Static value lt.microsoft for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName LT_MICROSOFT = fromString("lt.microsoft"); + + /** + * Static value ml.microsoft for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName ML_MICROSOFT = fromString("ml.microsoft"); + + /** + * Static value ms.microsoft for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName MS_MICROSOFT = fromString("ms.microsoft"); + + /** + * Static value mr.microsoft for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName MR_MICROSOFT = fromString("mr.microsoft"); + + /** + * Static value nb.microsoft for LexicalAnalyzerName. 
+ */ + public static final LexicalAnalyzerName NB_MICROSOFT = fromString("nb.microsoft"); + + /** + * Static value no.lucene for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName NO_LUCENE = fromString("no.lucene"); + + /** + * Static value fa.lucene for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName FA_LUCENE = fromString("fa.lucene"); + + /** + * Static value pl.microsoft for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName PL_MICROSOFT = fromString("pl.microsoft"); + + /** + * Static value pl.lucene for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName PL_LUCENE = fromString("pl.lucene"); + + /** + * Static value pt-BR.microsoft for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName PT_BR_MICROSOFT = fromString("pt-BR.microsoft"); + + /** + * Static value pt-BR.lucene for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName PT_BR_LUCENE = fromString("pt-BR.lucene"); + + /** + * Static value pt-PT.microsoft for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName PT_PT_MICROSOFT = fromString("pt-PT.microsoft"); + + /** + * Static value pt-PT.lucene for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName PT_PT_LUCENE = fromString("pt-PT.lucene"); + + /** + * Static value pa.microsoft for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName PA_MICROSOFT = fromString("pa.microsoft"); + + /** + * Static value ro.microsoft for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName RO_MICROSOFT = fromString("ro.microsoft"); + + /** + * Static value ro.lucene for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName RO_LUCENE = fromString("ro.lucene"); + + /** + * Static value ru.microsoft for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName RU_MICROSOFT = fromString("ru.microsoft"); + + /** + * Static value ru.lucene for LexicalAnalyzerName. 
+ */ + public static final LexicalAnalyzerName RU_LUCENE = fromString("ru.lucene"); + + /** + * Static value sr-cyrillic.microsoft for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName SR_CYRILLIC_MICROSOFT = fromString("sr-cyrillic.microsoft"); + + /** + * Static value sr-latin.microsoft for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName SR_LATIN_MICROSOFT = fromString("sr-latin.microsoft"); + + /** + * Static value sk.microsoft for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName SK_MICROSOFT = fromString("sk.microsoft"); + + /** + * Static value sl.microsoft for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName SL_MICROSOFT = fromString("sl.microsoft"); + + /** + * Static value es.microsoft for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName ES_MICROSOFT = fromString("es.microsoft"); + + /** + * Static value es.lucene for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName ES_LUCENE = fromString("es.lucene"); + + /** + * Static value sv.microsoft for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName SV_MICROSOFT = fromString("sv.microsoft"); + + /** + * Static value sv.lucene for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName SV_LUCENE = fromString("sv.lucene"); + + /** + * Static value ta.microsoft for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName TA_MICROSOFT = fromString("ta.microsoft"); + + /** + * Static value te.microsoft for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName TE_MICROSOFT = fromString("te.microsoft"); + + /** + * Static value th.microsoft for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName TH_MICROSOFT = fromString("th.microsoft"); + + /** + * Static value th.lucene for LexicalAnalyzerName. 
+ */ + public static final LexicalAnalyzerName TH_LUCENE = fromString("th.lucene"); + + /** + * Static value tr.microsoft for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName TR_MICROSOFT = fromString("tr.microsoft"); + + /** + * Static value tr.lucene for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName TR_LUCENE = fromString("tr.lucene"); + + /** + * Static value uk.microsoft for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName UK_MICROSOFT = fromString("uk.microsoft"); + + /** + * Static value ur.microsoft for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName UR_MICROSOFT = fromString("ur.microsoft"); + + /** + * Static value vi.microsoft for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName VI_MICROSOFT = fromString("vi.microsoft"); + + /** + * Static value standard.lucene for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName STANDARD_LUCENE = fromString("standard.lucene"); + + /** + * Static value standardasciifolding.lucene for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName STANDARD_ASCII_FOLDING_LUCENE = fromString("standardasciifolding.lucene"); + + /** + * Static value keyword for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName KEYWORD = fromString("keyword"); + + /** + * Static value pattern for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName PATTERN = fromString("pattern"); + + /** + * Static value simple for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName SIMPLE = fromString("simple"); + + /** + * Static value stop for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName STOP = fromString("stop"); + + /** + * Static value whitespace for LexicalAnalyzerName. + */ + public static final LexicalAnalyzerName WHITESPACE = fromString("whitespace"); + + /** + * Creates or finds a LexicalAnalyzerName from its string representation. 
+ *
+ * @param name a name to look for.
+ * @return the corresponding LexicalAnalyzerName.
+ */
+ @JsonCreator
+ public static LexicalAnalyzerName fromString(String name) {
+ return fromString(name, LexicalAnalyzerName.class);
+ }
+
+ /**
+ * @return known LexicalAnalyzerName values.
+ */
+ public static Collection<LexicalAnalyzerName> values() {
+ return values(LexicalAnalyzerName.class);
+ }
+}
diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/Tokenizer.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/LexicalTokenizer.java
similarity index 89%
rename from sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/Tokenizer.java
rename to sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/LexicalTokenizer.java
index b7704391b14f..dc3a7e1d29a7 100644
--- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/Tokenizer.java
+++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/LexicalTokenizer.java
@@ -15,8 +15,8 @@

 /**
  * Base type for tokenizers.
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "@odata.type", defaultImpl = Tokenizer.class) -@JsonTypeName("Tokenizer") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "@odata.type", defaultImpl = LexicalTokenizer.class) +@JsonTypeName("LexicalTokenizer") @JsonSubTypes({ @JsonSubTypes.Type(name = "#Microsoft.Azure.Search.ClassicTokenizer", value = ClassicTokenizer.class), @JsonSubTypes.Type(name = "#Microsoft.Azure.Search.EdgeNGramTokenizer", value = EdgeNGramTokenizer.class), @@ -27,12 +27,12 @@ @JsonSubTypes.Type(name = "#Microsoft.Azure.Search.NGramTokenizer", value = NGramTokenizer.class), @JsonSubTypes.Type(name = "#Microsoft.Azure.Search.PathHierarchyTokenizerV2", value = PathHierarchyTokenizerV2.class), @JsonSubTypes.Type(name = "#Microsoft.Azure.Search.PatternTokenizer", value = PatternTokenizer.class), - @JsonSubTypes.Type(name = "#Microsoft.Azure.Search.StandardTokenizer", value = StandardTokenizer.class), - @JsonSubTypes.Type(name = "#Microsoft.Azure.Search.StandardTokenizerV2", value = StandardTokenizerV2.class), + @JsonSubTypes.Type(name = "#Microsoft.Azure.Search.StandardTokenizer", value = LuceneStandardTokenizer.class), + @JsonSubTypes.Type(name = "#Microsoft.Azure.Search.StandardTokenizerV2", value = LuceneStandardTokenizerV2.class), @JsonSubTypes.Type(name = "#Microsoft.Azure.Search.UaxUrlEmailTokenizer", value = UaxUrlEmailTokenizer.class) }) @Fluent -public abstract class Tokenizer { +public class LexicalTokenizer { /* * The name of the tokenizer. It must only contain letters, digits, spaces, * dashes or underscores, can only start and end with alphanumeric @@ -58,9 +58,9 @@ public String getName() { * with alphanumeric characters, and is limited to 128 characters. * * @param name the name value to set. - * @return the Tokenizer object itself. + * @return the LexicalTokenizer object itself. 
*/
- public Tokenizer setName(String name) {
+ public LexicalTokenizer setName(String name) {
 this.name = name;
 return this;
 }
diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/LexicalTokenizerName.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/LexicalTokenizerName.java
new file mode 100644
index 000000000000..a8c1b3051ab7
--- /dev/null
+++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/LexicalTokenizerName.java
@@ -0,0 +1,99 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+// Code generated by Microsoft (R) AutoRest Code Generator.
+// Changes may cause incorrect behavior and will be lost if the code is
+// regenerated.
+
+package com.azure.search.documents.models;
+
+import com.azure.core.util.ExpandableStringEnum;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import java.util.Collection;
+
+/**
+ * Defines values for LexicalTokenizerName.
+ */
+public final class LexicalTokenizerName extends ExpandableStringEnum<LexicalTokenizerName> {
+ /**
+ * Static value classic for LexicalTokenizerName.
+ */
+ public static final LexicalTokenizerName CLASSIC = fromString("classic");
+
+ /**
+ * Static value edgeNGram for LexicalTokenizerName.
+ */
+ public static final LexicalTokenizerName EDGE_NGRAM = fromString("edgeNGram");
+
+ /**
+ * Static value keyword_v2 for LexicalTokenizerName.
+ */
+ public static final LexicalTokenizerName KEYWORD = fromString("keyword_v2");
+
+ /**
+ * Static value letter for LexicalTokenizerName.
+ */
+ public static final LexicalTokenizerName LETTER = fromString("letter");
+
+ /**
+ * Static value lowercase for LexicalTokenizerName.
+ */
+ public static final LexicalTokenizerName LOWERCASE = fromString("lowercase");
+
+ /**
+ * Static value microsoft_language_tokenizer for LexicalTokenizerName.
+ */ + public static final LexicalTokenizerName MICROSOFT_LANGUAGE_TOKENIZER = fromString("microsoft_language_tokenizer"); + + /** + * Static value microsoft_language_stemming_tokenizer for LexicalTokenizerName. + */ + public static final LexicalTokenizerName MICROSOFT_LANGUAGE_STEMMING_TOKENIZER = fromString("microsoft_language_stemming_tokenizer"); + + /** + * Static value nGram for LexicalTokenizerName. + */ + public static final LexicalTokenizerName NGRAM = fromString("nGram"); + + /** + * Static value path_hierarchy_v2 for LexicalTokenizerName. + */ + public static final LexicalTokenizerName PATH_HIERARCHY = fromString("path_hierarchy_v2"); + + /** + * Static value pattern for LexicalTokenizerName. + */ + public static final LexicalTokenizerName PATTERN = fromString("pattern"); + + /** + * Static value standard_v2 for LexicalTokenizerName. + */ + public static final LexicalTokenizerName STANDARD = fromString("standard_v2"); + + /** + * Static value uax_url_email for LexicalTokenizerName. + */ + public static final LexicalTokenizerName UAX_URL_EMAIL = fromString("uax_url_email"); + + /** + * Static value whitespace for LexicalTokenizerName. + */ + public static final LexicalTokenizerName WHITESPACE = fromString("whitespace"); + + /** + * Creates or finds a LexicalTokenizerName from its string representation. + * + * @param name a name to look for. + * @return the corresponding LexicalTokenizerName. + */ + @JsonCreator + public static LexicalTokenizerName fromString(String name) { + return fromString(name, LexicalTokenizerName.class); + } + + /** + * @return known LexicalTokenizerName values. 
+ */
+ public static Collection<LexicalTokenizerName> values() {
+ return values(LexicalTokenizerName.class);
+ }
+}
diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/StandardAnalyzer.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/LuceneStandardAnalyzer.java
similarity index 87%
rename from sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/StandardAnalyzer.java
rename to sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/LuceneStandardAnalyzer.java
index e1c843d86bb3..8dbefea4078e 100644
--- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/StandardAnalyzer.java
+++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/LuceneStandardAnalyzer.java
@@ -19,7 +19,7 @@
 @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "@odata.type")
 @JsonTypeName("#Microsoft.Azure.Search.StandardAnalyzer")
 @Fluent
-public final class StandardAnalyzer extends Analyzer {
+public final class LuceneStandardAnalyzer extends LexicalAnalyzer {
 /*
 * The maximum token length. Default is 255. Tokens longer than the maximum
 * length are split. The maximum token length that can be used is 300
@@ -51,9 +51,9 @@ public Integer getMaxTokenLength() {
 * length that can be used is 300 characters.
 *
 * @param maxTokenLength the maxTokenLength value to set.
- * @return the StandardAnalyzer object itself.
+ * @return the LuceneStandardAnalyzer object itself.
 */
- public StandardAnalyzer setMaxTokenLength(Integer maxTokenLength) {
+ public LuceneStandardAnalyzer setMaxTokenLength(Integer maxTokenLength) {
 this.maxTokenLength = maxTokenLength;
 return this;
 }
@@ -71,9 +71,9 @@ public List<String> getStopwords() {
 * Set the stopwords property: A list of stopwords.
 *
 * @param stopwords the stopwords value to set.
- * @return the StandardAnalyzer object itself.
+ * @return the LuceneStandardAnalyzer object itself.
 */
- public StandardAnalyzer setStopwords(List<String> stopwords) {
+ public LuceneStandardAnalyzer setStopwords(List<String> stopwords) {
 this.stopwords = stopwords;
 return this;
 }
diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/StandardTokenizer.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/LuceneStandardTokenizer.java
similarity index 88%
rename from sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/StandardTokenizer.java
rename to sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/LuceneStandardTokenizer.java
index 2a30a7d51a77..262ea362f23a 100644
--- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/StandardTokenizer.java
+++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/LuceneStandardTokenizer.java
@@ -18,7 +18,7 @@
 @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "@odata.type")
 @JsonTypeName("#Microsoft.Azure.Search.StandardTokenizer")
 @Fluent
-public final class StandardTokenizer extends Tokenizer {
+public final class LuceneStandardTokenizer extends LexicalTokenizer {
 /*
 * The maximum token length. Default is 255. Tokens longer than the maximum
 * length are split.
@@ -41,9 +41,9 @@ public Integer getMaxTokenLength() {
 * 255. Tokens longer than the maximum length are split.
 *
 * @param maxTokenLength the maxTokenLength value to set.
- * @return the StandardTokenizer object itself.
+ * @return the LuceneStandardTokenizer object itself.
*/ - public StandardTokenizer setMaxTokenLength(Integer maxTokenLength) { + public LuceneStandardTokenizer setMaxTokenLength(Integer maxTokenLength) { this.maxTokenLength = maxTokenLength; return this; } diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/StandardTokenizerV2.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/LuceneStandardTokenizerV2.java similarity index 89% rename from sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/StandardTokenizerV2.java rename to sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/LuceneStandardTokenizerV2.java index 66c8f6d75bd1..c6fb7f111f1b 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/StandardTokenizerV2.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/LuceneStandardTokenizerV2.java @@ -18,7 +18,7 @@ @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "@odata.type") @JsonTypeName("#Microsoft.Azure.Search.StandardTokenizerV2") @Fluent -public final class StandardTokenizerV2 extends Tokenizer { +public final class LuceneStandardTokenizerV2 extends LexicalTokenizer { /* * The maximum token length. Default is 255. Tokens longer than the maximum * length are split. The maximum token length that can be used is 300 @@ -44,9 +44,9 @@ public Integer getMaxTokenLength() { * length that can be used is 300 characters. * * @param maxTokenLength the maxTokenLength value to set. - * @return the StandardTokenizerV2 object itself. + * @return the LuceneStandardTokenizerV2 object itself. 
*/ - public StandardTokenizerV2 setMaxTokenLength(Integer maxTokenLength) { + public LuceneStandardTokenizerV2 setMaxTokenLength(Integer maxTokenLength) { this.maxTokenLength = maxTokenLength; return this; } diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/MergeSkill.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/MergeSkill.java index 370f403dc7be..4739fe50fe1a 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/MergeSkill.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/MergeSkill.java @@ -18,7 +18,7 @@ @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "@odata.type") @JsonTypeName("#Microsoft.Skills.Text.MergeSkill") @Fluent -public final class MergeSkill extends Skill { +public final class MergeSkill extends SearchIndexerSkill { /* * The tag indicates the start of the merged text. By default, the tag is * an empty space. diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/MicrosoftLanguageStemmingTokenizer.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/MicrosoftLanguageStemmingTokenizer.java index 3d563a8c2edf..16cd0e17219d 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/MicrosoftLanguageStemmingTokenizer.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/MicrosoftLanguageStemmingTokenizer.java @@ -18,7 +18,7 @@ @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "@odata.type") @JsonTypeName("#Microsoft.Azure.Search.MicrosoftLanguageStemmingTokenizer") @Fluent -public final class MicrosoftLanguageStemmingTokenizer extends Tokenizer { +public final class MicrosoftLanguageStemmingTokenizer extends LexicalTokenizer { /* * The maximum token length. 
Tokens longer than the maximum length are * split. Maximum token length that can be used is 300 characters. Tokens @@ -39,14 +39,14 @@ public final class MicrosoftLanguageStemmingTokenizer extends Tokenizer { /* * The language to use. The default is English. Possible values include: - * 'arabic', 'bangla', 'bulgarian', 'catalan', 'croatian', 'czech', - * 'danish', 'dutch', 'english', 'estonian', 'finnish', 'french', 'german', - * 'greek', 'gujarati', 'hebrew', 'hindi', 'hungarian', 'icelandic', - * 'indonesian', 'italian', 'kannada', 'latvian', 'lithuanian', 'malay', - * 'malayalam', 'marathi', 'norwegianBokmaal', 'polish', 'portuguese', - * 'portugueseBrazilian', 'punjabi', 'romanian', 'russian', - * 'serbianCyrillic', 'serbianLatin', 'slovak', 'slovenian', 'spanish', - * 'swedish', 'tamil', 'telugu', 'turkish', 'ukrainian', 'urdu' + * 'Arabic', 'Bangla', 'Bulgarian', 'Catalan', 'Croatian', 'Czech', + * 'Danish', 'Dutch', 'English', 'Estonian', 'Finnish', 'French', 'German', + * 'Greek', 'Gujarati', 'Hebrew', 'Hindi', 'Hungarian', 'Icelandic', + * 'Indonesian', 'Italian', 'Kannada', 'Latvian', 'Lithuanian', 'Malay', + * 'Malayalam', 'Marathi', 'NorwegianBokmaal', 'Polish', 'Portuguese', + * 'PortugueseBrazilian', 'Punjabi', 'Romanian', 'Russian', + * 'SerbianCyrillic', 'SerbianLatin', 'Slovak', 'Slovenian', 'Spanish', + * 'Swedish', 'Tamil', 'Telugu', 'Turkish', 'Ukrainian', 'Urdu' */ @JsonProperty(value = "language") private MicrosoftStemmingTokenizerLanguage language; @@ -105,14 +105,14 @@ public MicrosoftLanguageStemmingTokenizer setIsSearchTokenizer(Boolean isSearchT /** * Get the language property: The language to use. The default is English. 
- * Possible values include: 'arabic', 'bangla', 'bulgarian', 'catalan', - * 'croatian', 'czech', 'danish', 'dutch', 'english', 'estonian', - * 'finnish', 'french', 'german', 'greek', 'gujarati', 'hebrew', 'hindi', - * 'hungarian', 'icelandic', 'indonesian', 'italian', 'kannada', 'latvian', - * 'lithuanian', 'malay', 'malayalam', 'marathi', 'norwegianBokmaal', - * 'polish', 'portuguese', 'portugueseBrazilian', 'punjabi', 'romanian', - * 'russian', 'serbianCyrillic', 'serbianLatin', 'slovak', 'slovenian', - * 'spanish', 'swedish', 'tamil', 'telugu', 'turkish', 'ukrainian', 'urdu'. + * Possible values include: 'Arabic', 'Bangla', 'Bulgarian', 'Catalan', + * 'Croatian', 'Czech', 'Danish', 'Dutch', 'English', 'Estonian', + * 'Finnish', 'French', 'German', 'Greek', 'Gujarati', 'Hebrew', 'Hindi', + * 'Hungarian', 'Icelandic', 'Indonesian', 'Italian', 'Kannada', 'Latvian', + * 'Lithuanian', 'Malay', 'Malayalam', 'Marathi', 'NorwegianBokmaal', + * 'Polish', 'Portuguese', 'PortugueseBrazilian', 'Punjabi', 'Romanian', + * 'Russian', 'SerbianCyrillic', 'SerbianLatin', 'Slovak', 'Slovenian', + * 'Spanish', 'Swedish', 'Tamil', 'Telugu', 'Turkish', 'Ukrainian', 'Urdu'. * * @return the language value. */ @@ -122,14 +122,14 @@ public MicrosoftStemmingTokenizerLanguage getLanguage() { /** * Set the language property: The language to use. The default is English. - * Possible values include: 'arabic', 'bangla', 'bulgarian', 'catalan', - * 'croatian', 'czech', 'danish', 'dutch', 'english', 'estonian', - * 'finnish', 'french', 'german', 'greek', 'gujarati', 'hebrew', 'hindi', - * 'hungarian', 'icelandic', 'indonesian', 'italian', 'kannada', 'latvian', - * 'lithuanian', 'malay', 'malayalam', 'marathi', 'norwegianBokmaal', - * 'polish', 'portuguese', 'portugueseBrazilian', 'punjabi', 'romanian', - * 'russian', 'serbianCyrillic', 'serbianLatin', 'slovak', 'slovenian', - * 'spanish', 'swedish', 'tamil', 'telugu', 'turkish', 'ukrainian', 'urdu'. 
+ * Possible values include: 'Arabic', 'Bangla', 'Bulgarian', 'Catalan', + * 'Croatian', 'Czech', 'Danish', 'Dutch', 'English', 'Estonian', + * 'Finnish', 'French', 'German', 'Greek', 'Gujarati', 'Hebrew', 'Hindi', + * 'Hungarian', 'Icelandic', 'Indonesian', 'Italian', 'Kannada', 'Latvian', + * 'Lithuanian', 'Malay', 'Malayalam', 'Marathi', 'NorwegianBokmaal', + * 'Polish', 'Portuguese', 'PortugueseBrazilian', 'Punjabi', 'Romanian', + * 'Russian', 'SerbianCyrillic', 'SerbianLatin', 'Slovak', 'Slovenian', + * 'Spanish', 'Swedish', 'Tamil', 'Telugu', 'Turkish', 'Ukrainian', 'Urdu'. * * @param language the language value to set. * @return the MicrosoftLanguageStemmingTokenizer object itself. diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/MicrosoftLanguageTokenizer.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/MicrosoftLanguageTokenizer.java index 245003b97102..07d69c7a12db 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/MicrosoftLanguageTokenizer.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/MicrosoftLanguageTokenizer.java @@ -17,7 +17,7 @@ @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "@odata.type") @JsonTypeName("#Microsoft.Azure.Search.MicrosoftLanguageTokenizer") @Fluent -public final class MicrosoftLanguageTokenizer extends Tokenizer { +public final class MicrosoftLanguageTokenizer extends LexicalTokenizer { /* * The maximum token length. Tokens longer than the maximum length are * split. Maximum token length that can be used is 300 characters. Tokens @@ -38,14 +38,14 @@ public final class MicrosoftLanguageTokenizer extends Tokenizer { /* * The language to use. The default is English. 
Possible values include: - * 'bangla', 'bulgarian', 'catalan', 'chineseSimplified', - * 'chineseTraditional', 'croatian', 'czech', 'danish', 'dutch', 'english', - * 'french', 'german', 'greek', 'gujarati', 'hindi', 'icelandic', - * 'indonesian', 'italian', 'japanese', 'kannada', 'korean', 'malay', - * 'malayalam', 'marathi', 'norwegianBokmaal', 'polish', 'portuguese', - * 'portugueseBrazilian', 'punjabi', 'romanian', 'russian', - * 'serbianCyrillic', 'serbianLatin', 'slovenian', 'spanish', 'swedish', - * 'tamil', 'telugu', 'thai', 'ukrainian', 'urdu', 'vietnamese' + * 'Bangla', 'Bulgarian', 'Catalan', 'ChineseSimplified', + * 'ChineseTraditional', 'Croatian', 'Czech', 'Danish', 'Dutch', 'English', + * 'French', 'German', 'Greek', 'Gujarati', 'Hindi', 'Icelandic', + * 'Indonesian', 'Italian', 'Japanese', 'Kannada', 'Korean', 'Malay', + * 'Malayalam', 'Marathi', 'NorwegianBokmaal', 'Polish', 'Portuguese', + * 'PortugueseBrazilian', 'Punjabi', 'Romanian', 'Russian', + * 'SerbianCyrillic', 'SerbianLatin', 'Slovenian', 'Spanish', 'Swedish', + * 'Tamil', 'Telugu', 'Thai', 'Ukrainian', 'Urdu', 'Vietnamese' */ @JsonProperty(value = "language") private MicrosoftTokenizerLanguage language; @@ -104,14 +104,14 @@ public MicrosoftLanguageTokenizer setIsSearchTokenizer(Boolean isSearchTokenizer /** * Get the language property: The language to use. The default is English. - * Possible values include: 'bangla', 'bulgarian', 'catalan', - * 'chineseSimplified', 'chineseTraditional', 'croatian', 'czech', - * 'danish', 'dutch', 'english', 'french', 'german', 'greek', 'gujarati', - * 'hindi', 'icelandic', 'indonesian', 'italian', 'japanese', 'kannada', - * 'korean', 'malay', 'malayalam', 'marathi', 'norwegianBokmaal', 'polish', - * 'portuguese', 'portugueseBrazilian', 'punjabi', 'romanian', 'russian', - * 'serbianCyrillic', 'serbianLatin', 'slovenian', 'spanish', 'swedish', - * 'tamil', 'telugu', 'thai', 'ukrainian', 'urdu', 'vietnamese'. 
+ * Possible values include: 'Bangla', 'Bulgarian', 'Catalan', + * 'ChineseSimplified', 'ChineseTraditional', 'Croatian', 'Czech', + * 'Danish', 'Dutch', 'English', 'French', 'German', 'Greek', 'Gujarati', + * 'Hindi', 'Icelandic', 'Indonesian', 'Italian', 'Japanese', 'Kannada', + * 'Korean', 'Malay', 'Malayalam', 'Marathi', 'NorwegianBokmaal', 'Polish', + * 'Portuguese', 'PortugueseBrazilian', 'Punjabi', 'Romanian', 'Russian', + * 'SerbianCyrillic', 'SerbianLatin', 'Slovenian', 'Spanish', 'Swedish', + * 'Tamil', 'Telugu', 'Thai', 'Ukrainian', 'Urdu', 'Vietnamese'. * * @return the language value. */ @@ -121,14 +121,14 @@ public MicrosoftTokenizerLanguage getLanguage() { /** * Set the language property: The language to use. The default is English. - * Possible values include: 'bangla', 'bulgarian', 'catalan', - * 'chineseSimplified', 'chineseTraditional', 'croatian', 'czech', - * 'danish', 'dutch', 'english', 'french', 'german', 'greek', 'gujarati', - * 'hindi', 'icelandic', 'indonesian', 'italian', 'japanese', 'kannada', - * 'korean', 'malay', 'malayalam', 'marathi', 'norwegianBokmaal', 'polish', - * 'portuguese', 'portugueseBrazilian', 'punjabi', 'romanian', 'russian', - * 'serbianCyrillic', 'serbianLatin', 'slovenian', 'spanish', 'swedish', - * 'tamil', 'telugu', 'thai', 'ukrainian', 'urdu', 'vietnamese'. + * Possible values include: 'Bangla', 'Bulgarian', 'Catalan', + * 'ChineseSimplified', 'ChineseTraditional', 'Croatian', 'Czech', + * 'Danish', 'Dutch', 'English', 'French', 'German', 'Greek', 'Gujarati', + * 'Hindi', 'Icelandic', 'Indonesian', 'Italian', 'Japanese', 'Kannada', + * 'Korean', 'Malay', 'Malayalam', 'Marathi', 'NorwegianBokmaal', 'Polish', + * 'Portuguese', 'PortugueseBrazilian', 'Punjabi', 'Romanian', 'Russian', + * 'SerbianCyrillic', 'SerbianLatin', 'Slovenian', 'Spanish', 'Swedish', + * 'Tamil', 'Telugu', 'Thai', 'Ukrainian', 'Urdu', 'Vietnamese'. * * @param language the language value to set. 
* @return the MicrosoftLanguageTokenizer object itself. diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/NGramTokenizer.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/NGramTokenizer.java index 97aa3156a3d0..20247e739b4f 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/NGramTokenizer.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/NGramTokenizer.java @@ -19,7 +19,7 @@ @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "@odata.type") @JsonTypeName("#Microsoft.Azure.Search.NGramTokenizer") @Fluent -public final class NGramTokenizer extends Tokenizer { +public final class NGramTokenizer extends LexicalTokenizer { /* * The minimum n-gram length. Default is 1. Maximum is 300. Must be less * than the value of maxGram. diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/NamedEntityRecognitionSkill.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/NamedEntityRecognitionSkill.java deleted file mode 100644 index 4213aeeeb897..000000000000 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/NamedEntityRecognitionSkill.java +++ /dev/null @@ -1,108 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. -// Changes may cause incorrect behavior and will be lost if the code is -// regenerated. - -package com.azure.search.documents.models; - -import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; -import java.util.List; - -/** - * Text analytics named entity recognition. 
This skill is deprecated in favor - * of EntityRecognitionSkill. - */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "@odata.type") -@JsonTypeName("#Microsoft.Skills.Text.NamedEntityRecognitionSkill") -@Fluent -public final class NamedEntityRecognitionSkill extends Skill { - /* - * A list of named entity categories. - */ - @JsonProperty(value = "categories") - private List categories; - - /* - * A value indicating which language code to use. Default is en. Possible - * values include: 'ar', 'cs', 'da', 'de', 'en', 'es', 'fi', 'fr', 'he', - * 'hu', 'it', 'ko', 'pt-br', 'pt' - */ - @JsonProperty(value = "defaultLanguageCode") - private NamedEntityRecognitionSkillLanguage defaultLanguageCode; - - /* - * A value between 0 and 1 to indicate the confidence of the results. - */ - @JsonProperty(value = "minimumPrecision") - private Double minimumPrecision; - - /** - * Get the categories property: A list of named entity categories. - * - * @return the categories value. - */ - public List getCategories() { - return this.categories; - } - - /** - * Set the categories property: A list of named entity categories. - * - * @param categories the categories value to set. - * @return the NamedEntityRecognitionSkill object itself. - */ - public NamedEntityRecognitionSkill setCategories(List categories) { - this.categories = categories; - return this; - } - - /** - * Get the defaultLanguageCode property: A value indicating which language - * code to use. Default is en. Possible values include: 'ar', 'cs', 'da', - * 'de', 'en', 'es', 'fi', 'fr', 'he', 'hu', 'it', 'ko', 'pt-br', 'pt'. - * - * @return the defaultLanguageCode value. - */ - public NamedEntityRecognitionSkillLanguage getDefaultLanguageCode() { - return this.defaultLanguageCode; - } - - /** - * Set the defaultLanguageCode property: A value indicating which language - * code to use. Default is en. 
Possible values include: 'ar', 'cs', 'da', - * 'de', 'en', 'es', 'fi', 'fr', 'he', 'hu', 'it', 'ko', 'pt-br', 'pt'. - * - * @param defaultLanguageCode the defaultLanguageCode value to set. - * @return the NamedEntityRecognitionSkill object itself. - */ - public NamedEntityRecognitionSkill setDefaultLanguageCode(NamedEntityRecognitionSkillLanguage defaultLanguageCode) { - this.defaultLanguageCode = defaultLanguageCode; - return this; - } - - /** - * Get the minimumPrecision property: A value between 0 and 1 to indicate - * the confidence of the results. - * - * @return the minimumPrecision value. - */ - public Double getMinimumPrecision() { - return this.minimumPrecision; - } - - /** - * Set the minimumPrecision property: A value between 0 and 1 to indicate - * the confidence of the results. - * - * @param minimumPrecision the minimumPrecision value to set. - * @return the NamedEntityRecognitionSkill object itself. - */ - public NamedEntityRecognitionSkill setMinimumPrecision(Double minimumPrecision) { - this.minimumPrecision = minimumPrecision; - return this; - } -} diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/NamedEntityRecognitionSkillLanguage.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/NamedEntityRecognitionSkillLanguage.java deleted file mode 100644 index 7709a8e2ec23..000000000000 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/NamedEntityRecognitionSkillLanguage.java +++ /dev/null @@ -1,117 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. -// Changes may cause incorrect behavior and will be lost if the code is -// regenerated. 
- -package com.azure.search.documents.models; - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonValue; - -/** - * Defines values for NamedEntityRecognitionSkillLanguage. - */ -public enum NamedEntityRecognitionSkillLanguage { - /** - * Enum value ar. - */ - AR("ar"), - - /** - * Enum value cs. - */ - CS("cs"), - - /** - * Enum value da. - */ - DA("da"), - - /** - * Enum value de. - */ - DE("de"), - - /** - * Enum value en. - */ - EN("en"), - - /** - * Enum value es. - */ - ES("es"), - - /** - * Enum value fi. - */ - FI("fi"), - - /** - * Enum value fr. - */ - FR("fr"), - - /** - * Enum value he. - */ - HE("he"), - - /** - * Enum value hu. - */ - HU("hu"), - - /** - * Enum value it. - */ - IT("it"), - - /** - * Enum value ko. - */ - KO("ko"), - - /** - * Enum value pt-br. - */ - PT_BR("pt-br"), - - /** - * Enum value pt. - */ - PT("pt"); - - /** - * The actual serialized value for a NamedEntityRecognitionSkillLanguage instance. - */ - private final String value; - - NamedEntityRecognitionSkillLanguage(String value) { - this.value = value; - } - - /** - * Parses a serialized value to a NamedEntityRecognitionSkillLanguage instance. - * - * @param value the serialized value to parse. - * @return the parsed NamedEntityRecognitionSkillLanguage object, or null if unable to parse. 
- */ - @JsonCreator - public static NamedEntityRecognitionSkillLanguage fromString(String value) { - NamedEntityRecognitionSkillLanguage[] items = NamedEntityRecognitionSkillLanguage.values(); - for (NamedEntityRecognitionSkillLanguage item : items) { - if (item.toString().equalsIgnoreCase(value)) { - return item; - } - } - return null; - } - - @JsonValue - @Override - public String toString() { - return this.value; - } -} diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/OcrSkill.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/OcrSkill.java index be67d55cb502..a15a5a51a24b 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/OcrSkill.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/OcrSkill.java @@ -17,10 +17,10 @@ @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "@odata.type") @JsonTypeName("#Microsoft.Skills.Vision.OcrSkill") @Fluent -public final class OcrSkill extends Skill { +public final class OcrSkill extends SearchIndexerSkill { /* * A value indicating which algorithm to use for extracting text. Default - * is printed. Possible values include: 'printed', 'handwritten' + * is printed. Possible values include: 'Printed', 'Handwritten' */ @JsonProperty(value = "textExtractionAlgorithm") private TextExtractionAlgorithm textExtractionAlgorithm; @@ -44,7 +44,7 @@ public final class OcrSkill extends Skill { /** * Get the textExtractionAlgorithm property: A value indicating which * algorithm to use for extracting text. Default is printed. Possible - * values include: 'printed', 'handwritten'. + * values include: 'Printed', 'Handwritten'. * * @return the textExtractionAlgorithm value. 
*/ @@ -55,7 +55,7 @@ public TextExtractionAlgorithm getTextExtractionAlgorithm() { /** * Set the textExtractionAlgorithm property: A value indicating which * algorithm to use for extracting text. Default is printed. Possible - * values include: 'printed', 'handwritten'. + * values include: 'Printed', 'Handwritten'. * * @param textExtractionAlgorithm the textExtractionAlgorithm value to set. * @return the OcrSkill object itself. diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/PathHierarchyTokenizer.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/PathHierarchyTokenizer.java deleted file mode 100644 index c3c72678c954..000000000000 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/PathHierarchyTokenizer.java +++ /dev/null @@ -1,160 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. -// Changes may cause incorrect behavior and will be lost if the code is -// regenerated. - -package com.azure.search.documents.models; - -import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; - -/** - * Tokenizer for path-like hierarchies. This tokenizer is implemented using - * Apache Lucene. - */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "@odata.type") -@JsonTypeName("#Microsoft.Azure.Search.PathHierarchyTokenizer") -@Fluent -public final class PathHierarchyTokenizer extends Tokenizer { - /* - * The delimiter character to use. Default is "/". - */ - @JsonProperty(value = "delimiter") - private String delimiter; - - /* - * A value that, if set, replaces the delimiter character. Default is "/". 
- */ - @JsonProperty(value = "replacement") - private String replacement; - - /* - * The buffer size. Default is 1024. - */ - @JsonProperty(value = "bufferSize") - private Integer bufferSize; - - /* - * A value indicating whether to generate tokens in reverse order. Default - * is false. - */ - @JsonProperty(value = "reverse") - private Boolean reverseTokenOrder; - - /* - * The number of initial tokens to skip. Default is 0. - */ - @JsonProperty(value = "skip") - private Integer numberOfTokensToSkip; - - /** - * Get the delimiter property: The delimiter character to use. Default is - * "/". - * - * @return the delimiter value. - */ - public String getDelimiter() { - return this.delimiter; - } - - /** - * Set the delimiter property: The delimiter character to use. Default is - * "/". - * - * @param delimiter the delimiter value to set. - * @return the PathHierarchyTokenizer object itself. - */ - public PathHierarchyTokenizer setDelimiter(String delimiter) { - this.delimiter = delimiter; - return this; - } - - /** - * Get the replacement property: A value that, if set, replaces the - * delimiter character. Default is "/". - * - * @return the replacement value. - */ - public String getReplacement() { - return this.replacement; - } - - /** - * Set the replacement property: A value that, if set, replaces the - * delimiter character. Default is "/". - * - * @param replacement the replacement value to set. - * @return the PathHierarchyTokenizer object itself. - */ - public PathHierarchyTokenizer setReplacement(String replacement) { - this.replacement = replacement; - return this; - } - - /** - * Get the bufferSize property: The buffer size. Default is 1024. - * - * @return the bufferSize value. - */ - public Integer getBufferSize() { - return this.bufferSize; - } - - /** - * Set the bufferSize property: The buffer size. Default is 1024. - * - * @param bufferSize the bufferSize value to set. - * @return the PathHierarchyTokenizer object itself. 
- */ - public PathHierarchyTokenizer setBufferSize(Integer bufferSize) { - this.bufferSize = bufferSize; - return this; - } - - /** - * Get the reverseTokenOrder property: A value indicating whether to - * generate tokens in reverse order. Default is false. - * - * @return the reverseTokenOrder value. - */ - public Boolean isReverseTokenOrder() { - return this.reverseTokenOrder; - } - - /** - * Set the reverseTokenOrder property: A value indicating whether to - * generate tokens in reverse order. Default is false. - * - * @param reverseTokenOrder the reverseTokenOrder value to set. - * @return the PathHierarchyTokenizer object itself. - */ - public PathHierarchyTokenizer setReverseTokenOrder(Boolean reverseTokenOrder) { - this.reverseTokenOrder = reverseTokenOrder; - return this; - } - - /** - * Get the numberOfTokensToSkip property: The number of initial tokens to - * skip. Default is 0. - * - * @return the numberOfTokensToSkip value. - */ - public Integer getNumberOfTokensToSkip() { - return this.numberOfTokensToSkip; - } - - /** - * Set the numberOfTokensToSkip property: The number of initial tokens to - * skip. Default is 0. - * - * @param numberOfTokensToSkip the numberOfTokensToSkip value to set. - * @return the PathHierarchyTokenizer object itself. 
- */ - public PathHierarchyTokenizer setNumberOfTokensToSkip(Integer numberOfTokensToSkip) { - this.numberOfTokensToSkip = numberOfTokensToSkip; - return this; - } -} diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/PathHierarchyTokenizerV2.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/PathHierarchyTokenizerV2.java index 90d2f5d01876..c3f00bd4c403 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/PathHierarchyTokenizerV2.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/PathHierarchyTokenizerV2.java @@ -18,7 +18,7 @@ @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "@odata.type") @JsonTypeName("#Microsoft.Azure.Search.PathHierarchyTokenizerV2") @Fluent -public final class PathHierarchyTokenizerV2 extends Tokenizer { +public final class PathHierarchyTokenizerV2 extends LexicalTokenizer { /* * The delimiter character to use. Default is "/". */ diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/PatternAnalyzer.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/PatternAnalyzer.java index a0819801c7ba..9b3c37dd7dfc 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/PatternAnalyzer.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/PatternAnalyzer.java @@ -25,7 +25,7 @@ @Fluent @JsonSerialize(using = CustomPatternAnalyzerSerializer.class) @JsonDeserialize(using = CustomPatternAnalyzerDeserializer.class) -public final class PatternAnalyzer extends Analyzer { +public final class PatternAnalyzer extends LexicalAnalyzer { /* * A value indicating whether terms should be lower-cased. Default is true. 
*/ @@ -34,7 +34,7 @@ public final class PatternAnalyzer extends Analyzer { /* * A regular expression pattern to match token separators. Default is an - * expression that matches one or more whitespace characters. + * expression that matches one or more non-word characters. */ @JsonProperty(value = "pattern") private String pattern; @@ -75,7 +75,7 @@ public PatternAnalyzer setLowerCaseTerms(Boolean lowerCaseTerms) { /** * Get the pattern property: A regular expression pattern to match token - * separators. Default is an expression that matches one or more whitespace + * separators. Default is an expression that matches one or more non-word * characters. * * @return the pattern value. @@ -86,7 +86,7 @@ public String getPattern() { /** * Set the pattern property: A regular expression pattern to match token - * separators. Default is an expression that matches one or more whitespace + * separators. Default is an expression that matches one or more non-word * characters. * * @param pattern the pattern value to set. diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/PatternTokenizer.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/PatternTokenizer.java index 50ca676a66c0..95519c9c04d7 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/PatternTokenizer.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/PatternTokenizer.java @@ -25,10 +25,10 @@ @Fluent @JsonSerialize(using = CustomPatternTokenizerSerializer.class) @JsonDeserialize(using = CustomPatternTokenizerDeserializer.class) -public final class PatternTokenizer extends Tokenizer { +public final class PatternTokenizer extends LexicalTokenizer { /* * A regular expression pattern to match token separators. Default is an - * expression that matches one or more whitespace characters. + * expression that matches one or more non-word characters. 
*/ @JsonProperty(value = "pattern") private String pattern; @@ -50,7 +50,7 @@ public final class PatternTokenizer extends Tokenizer { /** * Get the pattern property: A regular expression pattern to match token - * separators. Default is an expression that matches one or more whitespace + * separators. Default is an expression that matches one or more non-word * characters. * * @return the pattern value. @@ -61,7 +61,7 @@ public String getPattern() { /** * Set the pattern property: A regular expression pattern to match token - * separators. Default is an expression that matches one or more whitespace + * separators. Default is an expression that matches one or more non-word * characters. * * @param pattern the pattern value to set. diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/PhoneticTokenFilter.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/PhoneticTokenFilter.java index 88f01e95299b..8edba4a93eed 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/PhoneticTokenFilter.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/PhoneticTokenFilter.java @@ -21,9 +21,9 @@ public final class PhoneticTokenFilter extends TokenFilter { /* * The phonetic encoder to use. Default is "metaphone". Possible values - * include: 'metaphone', 'doubleMetaphone', 'soundex', 'refinedSoundex', - * 'caverphone1', 'caverphone2', 'cologne', 'nysiis', 'koelnerPhonetik', - * 'haasePhonetik', 'beiderMorse' + * include: 'Metaphone', 'DoubleMetaphone', 'Soundex', 'RefinedSoundex', + * 'Caverphone1', 'Caverphone2', 'Cologne', 'Nysiis', 'KoelnerPhonetik', + * 'HaasePhonetik', 'BeiderMorse' */ @JsonProperty(value = "encoder") private PhoneticEncoder encoder; @@ -37,9 +37,9 @@ public final class PhoneticTokenFilter extends TokenFilter { /** * Get the encoder property: The phonetic encoder to use. Default is - * "metaphone". 
Possible values include: 'metaphone', 'doubleMetaphone', - * 'soundex', 'refinedSoundex', 'caverphone1', 'caverphone2', 'cologne', - * 'nysiis', 'koelnerPhonetik', 'haasePhonetik', 'beiderMorse'. + * "metaphone". Possible values include: 'Metaphone', 'DoubleMetaphone', + * 'Soundex', 'RefinedSoundex', 'Caverphone1', 'Caverphone2', 'Cologne', + * 'Nysiis', 'KoelnerPhonetik', 'HaasePhonetik', 'BeiderMorse'. * * @return the encoder value. */ @@ -49,9 +49,9 @@ public PhoneticEncoder getEncoder() { /** * Set the encoder property: The phonetic encoder to use. Default is - * "metaphone". Possible values include: 'metaphone', 'doubleMetaphone', - * 'soundex', 'refinedSoundex', 'caverphone1', 'caverphone2', 'cologne', - * 'nysiis', 'koelnerPhonetik', 'haasePhonetik', 'beiderMorse'. + * "metaphone". Possible values include: 'Metaphone', 'DoubleMetaphone', + * 'Soundex', 'RefinedSoundex', 'Caverphone1', 'Caverphone2', 'Cologne', + * 'Nysiis', 'KoelnerPhonetik', 'HaasePhonetik', 'BeiderMorse'. * * @param encoder the encoder value to set. * @return the PhoneticTokenFilter object itself. diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/RegexFlags.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/RegexFlags.java index cecbfe9dc841..d8f63311d7c7 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/RegexFlags.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/RegexFlags.java @@ -32,7 +32,7 @@ public final class RegexFlags extends ExpandableStringEnum { /** * Static value DOTALL for RegexFlags. */ - public static final RegexFlags DOTALL = fromString("DOTALL"); + public static final RegexFlags DOT_ALL = fromString("DOTALL"); /** * Static value LITERAL for RegexFlags. 
diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/ScoringFunction.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/ScoringFunction.java index c29a2ab8c6ac..ca2112094b96 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/ScoringFunction.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/ScoringFunction.java @@ -40,8 +40,8 @@ public abstract class ScoringFunction { /* * A value indicating how boosting will be interpolated across document - * scores; defaults to "Linear". Possible values include: 'linear', - * 'constant', 'quadratic', 'logarithmic' + * scores; defaults to "Linear". Possible values include: 'Linear', + * 'Constant', 'Quadratic', 'Logarithmic' */ @JsonProperty(value = "interpolation") private ScoringFunctionInterpolation interpolation; @@ -93,7 +93,7 @@ public ScoringFunction setBoost(double boost) { /** * Get the interpolation property: A value indicating how boosting will be * interpolated across document scores; defaults to "Linear". Possible - * values include: 'linear', 'constant', 'quadratic', 'logarithmic'. + * values include: 'Linear', 'Constant', 'Quadratic', 'Logarithmic'. * * @return the interpolation value. */ @@ -104,7 +104,7 @@ public ScoringFunctionInterpolation getInterpolation() { /** * Set the interpolation property: A value indicating how boosting will be * interpolated across document scores; defaults to "Linear". Possible - * values include: 'linear', 'constant', 'quadratic', 'logarithmic'. + * values include: 'Linear', 'Constant', 'Quadratic', 'Logarithmic'. * * @param interpolation the interpolation value to set. * @return the ScoringFunction object itself. 
diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/ScoringProfile.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/ScoringProfile.java index f42dbbf787ae..2c8891d53371 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/ScoringProfile.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/ScoringProfile.java @@ -38,8 +38,8 @@ public final class ScoringProfile { /* * A value indicating how the results of individual scoring functions * should be combined. Defaults to "Sum". Ignored if there are no scoring - * functions. Possible values include: 'sum', 'average', 'minimum', - * 'maximum', 'firstMatching' + * functions. Possible values include: 'Sum', 'Average', 'Minimum', + * 'Maximum', 'FirstMatching' */ @JsonProperty(value = "functionAggregation") private ScoringFunctionAggregation functionAggregation; @@ -112,7 +112,7 @@ public ScoringProfile setFunctions(List functions) { * Get the functionAggregation property: A value indicating how the results * of individual scoring functions should be combined. Defaults to "Sum". * Ignored if there are no scoring functions. Possible values include: - * 'sum', 'average', 'minimum', 'maximum', 'firstMatching'. + * 'Sum', 'Average', 'Minimum', 'Maximum', 'FirstMatching'. * * @return the functionAggregation value. */ @@ -124,7 +124,7 @@ public ScoringFunctionAggregation getFunctionAggregation() { * Set the functionAggregation property: A value indicating how the results * of individual scoring functions should be combined. Defaults to "Sum". * Ignored if there are no scoring functions. Possible values include: - * 'sum', 'average', 'minimum', 'maximum', 'firstMatching'. + * 'Sum', 'Average', 'Minimum', 'Maximum', 'FirstMatching'. * * @param functionAggregation the functionAggregation value to set. * @return the ScoringProfile object itself. 
diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/Field.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchField.java similarity index 94% rename from sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/Field.java rename to sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchField.java index f7c9ae51692d..b9dffb5dd944 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/Field.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchField.java @@ -16,7 +16,7 @@ * type, and search behavior of a field. */ @Fluent -public final class Field { +public final class SearchField { /* * The name of the field, which must be unique within the fields collection * of the index or parent field. @@ -25,12 +25,12 @@ public final class Field { private String name; /* - * The data type of the field. Possible values include: 'Edm.String', - * 'Edm.Int32', 'Edm.Int64', 'Edm.Double', 'Edm.Boolean', - * 'Edm.DateTimeOffset', 'Edm.GeographyPoint', 'Edm.ComplexType' + * The data type of the field. Possible values include: 'String', 'Int32', + * 'Int64', 'Double', 'Boolean', 'DateTimeOffset', 'GeographyPoint', + * 'Complex' */ @JsonProperty(value = "type", required = true) - private DataType type; + private SearchFieldDataType type; /* * A value indicating whether the field uniquely identifies documents in @@ -142,7 +142,7 @@ public final class Field { * 'Simple', 'Stop', 'Whitespace' */ @JsonProperty(value = "analyzer") - private AnalyzerName analyzer; + private LexicalAnalyzerName analyzer; /* * The name of the analyzer used at search time for the field. 
This option @@ -173,7 +173,7 @@ public final class Field { * 'Simple', 'Stop', 'Whitespace' */ @JsonProperty(value = "searchAnalyzer") - private AnalyzerName searchAnalyzer; + private LexicalAnalyzerName searchAnalyzer; /* * The name of the analyzer used at indexing time for the field. This @@ -205,7 +205,7 @@ public final class Field { * 'Whitespace' */ @JsonProperty(value = "indexAnalyzer") - private AnalyzerName indexAnalyzer; + private LexicalAnalyzerName indexAnalyzer; /* * A list of the names of synonym maps to associate with this field. This @@ -223,7 +223,7 @@ public final class Field { * Collection(Edm.ComplexType). Must be null or empty for simple fields. */ @JsonProperty(value = "fields") - private List fields; + private List fields; /* * A value indicating whether the field will be returned in a search @@ -251,35 +251,33 @@ public String getName() { * within the fields collection of the index or parent field. * * @param name the name value to set. - * @return the Field object itself. + * @return the SearchField object itself. */ - public Field setName(String name) { + public SearchField setName(String name) { this.name = name; return this; } /** * Get the type property: The data type of the field. Possible values - * include: 'Edm.String', 'Edm.Int32', 'Edm.Int64', 'Edm.Double', - * 'Edm.Boolean', 'Edm.DateTimeOffset', 'Edm.GeographyPoint', - * 'Edm.ComplexType'. + * include: 'String', 'Int32', 'Int64', 'Double', 'Boolean', + * 'DateTimeOffset', 'GeographyPoint', 'Complex'. * * @return the type value. */ - public DataType getType() { + public SearchFieldDataType getType() { return this.type; } /** * Set the type property: The data type of the field. Possible values - * include: 'Edm.String', 'Edm.Int32', 'Edm.Int64', 'Edm.Double', - * 'Edm.Boolean', 'Edm.DateTimeOffset', 'Edm.GeographyPoint', - * 'Edm.ComplexType'. + * include: 'String', 'Int32', 'Int64', 'Double', 'Boolean', + * 'DateTimeOffset', 'GeographyPoint', 'Complex'. 
* * @param type the type value to set. - * @return the Field object itself. + * @return the SearchField object itself. */ - public Field setType(DataType type) { + public SearchField setType(SearchFieldDataType type) { this.type = type; return this; } @@ -307,9 +305,9 @@ public Boolean isKey() { * for complex fields. * * @param key the key value to set. - * @return the Field object itself. + * @return the SearchField object itself. */ - public Field setKey(Boolean key) { + public SearchField setKey(Boolean key) { this.key = key; return this; } @@ -341,9 +339,9 @@ private Boolean isRetrievable() { * Default is true for simple fields and null for complex fields. * * @param retrievable the retrievable value to set. - * @return the Field object itself. + * @return the SearchField object itself. */ - private Field setRetrievable(Boolean retrievable) { + private SearchField setRetrievable(Boolean retrievable) { this.retrievable = retrievable; return this; } @@ -385,9 +383,9 @@ public Boolean isSearchable() { * false. * * @param searchable the searchable value to set. - * @return the Field object itself. + * @return the SearchField object itself. */ - public Field setSearchable(Boolean searchable) { + public SearchField setSearchable(Boolean searchable) { this.searchable = searchable; return this; } @@ -419,9 +417,9 @@ public Boolean isFilterable() { * fields. Default is true for simple fields and null for complex fields. * * @param filterable the filterable value to set. - * @return the Field object itself. + * @return the SearchField object itself. */ - public Field setFilterable(Boolean filterable) { + public SearchField setFilterable(Boolean filterable) { this.filterable = filterable; return this; } @@ -463,9 +461,9 @@ public Boolean isSortable() { * and null for complex fields. * * @param sortable the sortable value to set. - * @return the Field object itself. + * @return the SearchField object itself. 
*/ - public Field setSortable(Boolean sortable) { + public SearchField setSortable(Boolean sortable) { this.sortable = sortable; return this; } @@ -497,9 +495,9 @@ public Boolean isFacetable() { * all other simple fields. * * @param facetable the facetable value to set. - * @return the Field object itself. + * @return the SearchField object itself. */ - public Field setFacetable(Boolean facetable) { + public SearchField setFacetable(Boolean facetable) { this.facetable = facetable; return this; } @@ -533,7 +531,7 @@ public Field setFacetable(Boolean facetable) { * * @return the analyzer value. */ - public AnalyzerName getAnalyzer() { + public LexicalAnalyzerName getAnalyzer() { return this.analyzer; } @@ -565,9 +563,9 @@ public AnalyzerName getAnalyzer() { * 'Whitespace'. * * @param analyzer the analyzer value to set. - * @return the Field object itself. + * @return the SearchField object itself. */ - public Field setAnalyzer(AnalyzerName analyzer) { + public SearchField setAnalyzer(LexicalAnalyzerName analyzer) { this.analyzer = analyzer; return this; } @@ -603,7 +601,7 @@ public Field setAnalyzer(AnalyzerName analyzer) { * * @return the searchAnalyzer value. */ - public AnalyzerName getSearchAnalyzer() { + public LexicalAnalyzerName getSearchAnalyzer() { return this.searchAnalyzer; } @@ -637,9 +635,9 @@ public AnalyzerName getSearchAnalyzer() { * 'Simple', 'Stop', 'Whitespace'. * * @param searchAnalyzer the searchAnalyzer value to set. - * @return the Field object itself. + * @return the SearchField object itself. */ - public Field setSearchAnalyzer(AnalyzerName searchAnalyzer) { + public SearchField setSearchAnalyzer(LexicalAnalyzerName searchAnalyzer) { this.searchAnalyzer = searchAnalyzer; return this; } @@ -675,7 +673,7 @@ public Field setSearchAnalyzer(AnalyzerName searchAnalyzer) { * * @return the indexAnalyzer value. 
*/ - public AnalyzerName getIndexAnalyzer() { + public LexicalAnalyzerName getIndexAnalyzer() { return this.indexAnalyzer; } @@ -709,9 +707,9 @@ public AnalyzerName getIndexAnalyzer() { * 'Simple', 'Stop', 'Whitespace'. * * @param indexAnalyzer the indexAnalyzer value to set. - * @return the Field object itself. + * @return the SearchField object itself. */ - public Field setIndexAnalyzer(AnalyzerName indexAnalyzer) { + public SearchField setIndexAnalyzer(LexicalAnalyzerName indexAnalyzer) { this.indexAnalyzer = indexAnalyzer; return this; } @@ -741,9 +739,9 @@ public List getSynonymMaps() { * collection for complex fields. * * @param synonymMaps the synonymMaps value to set. - * @return the Field object itself. + * @return the SearchField object itself. */ - public Field setSynonymMaps(List synonymMaps) { + public SearchField setSynonymMaps(List synonymMaps) { this.synonymMaps = synonymMaps; return this; } @@ -755,7 +753,7 @@ public Field setSynonymMaps(List synonymMaps) { * * @return the fields value. */ - public List getFields() { + public List getFields() { return this.fields; } @@ -765,9 +763,9 @@ public List getFields() { * for simple fields. * * @param fields the fields value to set. - * @return the Field object itself. + * @return the SearchField object itself. */ - public Field setFields(List fields) { + public SearchField setFields(List fields) { this.fields = fields; return this; } @@ -795,9 +793,9 @@ public Boolean isHidden() { * enabling it does not cause an increase in index storage requirements. * * @param hidden the hidden value to set. - * @return the Field object itself. + * @return the SearchField object itself. */ - public Field setHidden(Boolean hidden) { + public SearchField setHidden(Boolean hidden) { this.hidden = hidden; retrievable = this.hidden == null ? 
null : !this.hidden; return this; diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/FieldBase.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchFieldBase.java similarity index 70% rename from sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/FieldBase.java rename to sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchFieldBase.java index 9970a31ab33c..599cb38ac279 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/FieldBase.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchFieldBase.java @@ -9,19 +9,19 @@ import java.util.Objects; /** - * Base field type for helper classes to more easily create a {@link Index}. + * Base field type for helper classes to more easily create a {@link SearchIndex}. */ -public abstract class FieldBase { - private final ClientLogger logger = new ClientLogger(FieldBase.class); +public abstract class SearchFieldBase { + private final ClientLogger logger = new ClientLogger(SearchFieldBase.class); private final String name; - private final DataType dataType; + private final SearchFieldDataType dataType; /** - * Initializes a new instance of the {@link FieldBase} class. + * Initializes a new instance of the {@link SearchFieldBase} class. * @param name The name of the field, which must be unique within the index or parent field. * @param dataType The data type of the field. */ - protected FieldBase(String name, DataType dataType) { + protected SearchFieldBase(String name, SearchFieldDataType dataType) { if (CoreUtils.isNullOrEmpty(name)) { throw logger.logExceptionAsError(new IllegalArgumentException("The name of the field cannot be null")); } @@ -39,11 +39,11 @@ public String getName() { } /** - * Get the {@link DataType} of the field. + * Get the {@link SearchFieldDataType} of the field. 
* * @return The data type of the field. */ - public DataType getDataType() { + public SearchFieldDataType getDataType() { return dataType; } } diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchFieldDataType.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchFieldDataType.java new file mode 100644 index 000000000000..3de6ca774d4b --- /dev/null +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchFieldDataType.java @@ -0,0 +1,84 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is +// regenerated. + +package com.azure.search.documents.models; + +import com.azure.core.util.ExpandableStringEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import java.util.Collection; + +/** + * Defines values for SearchFieldDataType. + */ +public final class SearchFieldDataType extends ExpandableStringEnum { + /** + * Static value Edm.String for SearchFieldDataType. + */ + public static final SearchFieldDataType STRING = fromString("Edm.String"); + + /** + * Static value Edm.Int32 for SearchFieldDataType. + */ + public static final SearchFieldDataType INT32 = fromString("Edm.Int32"); + + /** + * Static value Edm.Int64 for SearchFieldDataType. + */ + public static final SearchFieldDataType INT64 = fromString("Edm.Int64"); + + /** + * Static value Edm.Double for SearchFieldDataType. + */ + public static final SearchFieldDataType DOUBLE = fromString("Edm.Double"); + + /** + * Static value Edm.Boolean for SearchFieldDataType. + */ + public static final SearchFieldDataType BOOLEAN = fromString("Edm.Boolean"); + + /** + * Static value Edm.DateTimeOffset for SearchFieldDataType. 
+ */ + public static final SearchFieldDataType DATE_TIME_OFFSET = fromString("Edm.DateTimeOffset"); + + /** + * Static value Edm.GeographyPoint for SearchFieldDataType. + */ + public static final SearchFieldDataType GEOGRAPHY_POINT = fromString("Edm.GeographyPoint"); + + /** + * Static value Edm.ComplexType for SearchFieldDataType. + */ + public static final SearchFieldDataType COMPLEX = fromString("Edm.ComplexType"); + + /** + * Returns a collection of a specific SearchFieldDataType + * @param dataType the corresponding SearchFieldDataType + * @return a Collection of the corresponding SearchFieldDataType + */ + @JsonCreator + public static SearchFieldDataType collection(SearchFieldDataType dataType) { + return fromString(String.format("Collection(%s)", dataType.toString())); + } + + /** + * Creates or finds a SearchFieldDataType from its string representation. + * + * @param name a name to look for. + * @return the corresponding SearchFieldDataType. + */ + @JsonCreator + public static SearchFieldDataType fromString(String name) { + return fromString(name, SearchFieldDataType.class); + } + + /** + * @return known SearchFieldDataType values. + */ + public static Collection values() { + return values(SearchFieldDataType.class); + } +} diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/Index.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchIndex.java similarity index 85% rename from sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/Index.java rename to sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchIndex.java index e720f6e511cb..c48afeef091a 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/Index.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchIndex.java @@ -15,7 +15,7 @@ * behavior of an index. 
*/ @Fluent -public final class Index { +public final class SearchIndex { /* * The name of the index. */ @@ -26,7 +26,7 @@ public final class Index { * The fields of the index. */ @JsonProperty(value = "fields", required = true) - private List fields; + private List fields; /* * The scoring profiles for the index. @@ -58,13 +58,13 @@ public final class Index { * The analyzers for the index. */ @JsonProperty(value = "analyzers") - private List analyzers; + private List analyzers; /* * The tokenizers for the index. */ @JsonProperty(value = "tokenizers") - private List tokenizers; + private List tokenizers; /* * The token filters for the index. @@ -91,7 +91,7 @@ public final class Index { * paid services created on or after January 1, 2019. */ @JsonProperty(value = "encryptionKey") - private EncryptionKey encryptionKey; + private SearchResourceEncryptionKey encryptionKey; /* * The type of similarity algorithm to be used when scoring and ranking the @@ -121,9 +121,9 @@ public String getName() { * Set the name property: The name of the index. * * @param name the name value to set. - * @return the Index object itself. + * @return the SearchIndex object itself. */ - public Index setName(String name) { + public SearchIndex setName(String name) { this.name = name; return this; } @@ -133,7 +133,7 @@ public Index setName(String name) { * * @return the fields value. */ - public List getFields() { + public List getFields() { return this.fields; } @@ -141,9 +141,9 @@ public List getFields() { * Set the fields property: The fields of the index. * * @param fields the fields value to set. - * @return the Index object itself. + * @return the SearchIndex object itself. */ - public Index setFields(List fields) { + public SearchIndex setFields(List fields) { this.fields = fields; return this; } @@ -161,9 +161,9 @@ public List getScoringProfiles() { * Set the scoringProfiles property: The scoring profiles for the index. * * @param scoringProfiles the scoringProfiles value to set. 
- * @return the Index object itself. + * @return the SearchIndex object itself. */ - public Index setScoringProfiles(List scoringProfiles) { + public SearchIndex setScoringProfiles(List scoringProfiles) { this.scoringProfiles = scoringProfiles; return this; } @@ -187,9 +187,9 @@ public String getDefaultScoringProfile() { * (tf-idf) will be used. * * @param defaultScoringProfile the defaultScoringProfile value to set. - * @return the Index object itself. + * @return the SearchIndex object itself. */ - public Index setDefaultScoringProfile(String defaultScoringProfile) { + public SearchIndex setDefaultScoringProfile(String defaultScoringProfile) { this.defaultScoringProfile = defaultScoringProfile; return this; } @@ -209,9 +209,9 @@ public CorsOptions getCorsOptions() { * Sharing (CORS) for the index. * * @param corsOptions the corsOptions value to set. - * @return the Index object itself. + * @return the SearchIndex object itself. */ - public Index setCorsOptions(CorsOptions corsOptions) { + public SearchIndex setCorsOptions(CorsOptions corsOptions) { this.corsOptions = corsOptions; return this; } @@ -229,9 +229,9 @@ public List getSuggesters() { * Set the suggesters property: The suggesters for the index. * * @param suggesters the suggesters value to set. - * @return the Index object itself. + * @return the SearchIndex object itself. */ - public Index setSuggesters(List suggesters) { + public SearchIndex setSuggesters(List suggesters) { this.suggesters = suggesters; return this; } @@ -241,7 +241,7 @@ public Index setSuggesters(List suggesters) { * * @return the analyzers value. */ - public List getAnalyzers() { + public List getAnalyzers() { return this.analyzers; } @@ -249,9 +249,9 @@ public List getAnalyzers() { * Set the analyzers property: The analyzers for the index. * * @param analyzers the analyzers value to set. - * @return the Index object itself. + * @return the SearchIndex object itself. 
*/ - public Index setAnalyzers(List analyzers) { + public SearchIndex setAnalyzers(List analyzers) { this.analyzers = analyzers; return this; } @@ -261,7 +261,7 @@ public Index setAnalyzers(List analyzers) { * * @return the tokenizers value. */ - public List getTokenizers() { + public List getTokenizers() { return this.tokenizers; } @@ -269,9 +269,9 @@ public List getTokenizers() { * Set the tokenizers property: The tokenizers for the index. * * @param tokenizers the tokenizers value to set. - * @return the Index object itself. + * @return the SearchIndex object itself. */ - public Index setTokenizers(List tokenizers) { + public SearchIndex setTokenizers(List tokenizers) { this.tokenizers = tokenizers; return this; } @@ -289,9 +289,9 @@ public List getTokenFilters() { * Set the tokenFilters property: The token filters for the index. * * @param tokenFilters the tokenFilters value to set. - * @return the Index object itself. + * @return the SearchIndex object itself. */ - public Index setTokenFilters(List tokenFilters) { + public SearchIndex setTokenFilters(List tokenFilters) { this.tokenFilters = tokenFilters; return this; } @@ -309,9 +309,9 @@ public List getCharFilters() { * Set the charFilters property: The character filters for the index. * * @param charFilters the charFilters value to set. - * @return the Index object itself. + * @return the SearchIndex object itself. */ - public Index setCharFilters(List charFilters) { + public SearchIndex setCharFilters(List charFilters) { this.charFilters = charFilters; return this; } @@ -331,7 +331,7 @@ public Index setCharFilters(List charFilters) { * * @return the encryptionKey value. */ - public EncryptionKey getEncryptionKey() { + public SearchResourceEncryptionKey getEncryptionKey() { return this.encryptionKey; } @@ -349,9 +349,9 @@ public EncryptionKey getEncryptionKey() { * January 1, 2019. * * @param encryptionKey the encryptionKey value to set. - * @return the Index object itself. 
+ * @return the SearchIndex object itself. */ - public Index setEncryptionKey(EncryptionKey encryptionKey) { + public SearchIndex setEncryptionKey(SearchResourceEncryptionKey encryptionKey) { this.encryptionKey = encryptionKey; return this; } @@ -377,9 +377,9 @@ public Similarity getSimilarity() { * algorithm is used. * * @param similarity the similarity value to set. - * @return the Index object itself. + * @return the SearchIndex object itself. */ - public Index setSimilarity(Similarity similarity) { + public SearchIndex setSimilarity(Similarity similarity) { this.similarity = similarity; return this; } @@ -397,9 +397,9 @@ public String getETag() { * Set the eTag property: The ETag of the index. * * @param eTag the eTag value to set. - * @return the Index object itself. + * @return the SearchIndex object itself. */ - public Index setETag(String eTag) { + public SearchIndex setETag(String eTag) { this.eTag = eTag; return this; } diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/Indexer.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchIndexer.java similarity index 83% rename from sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/Indexer.java rename to sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchIndexer.java index 43bce731b146..3195dd410100 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/Indexer.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchIndexer.java @@ -14,7 +14,7 @@ * Represents an indexer. */ @Fluent -public final class Indexer { +public final class SearchIndexer { /* * The name of the indexer. */ @@ -78,7 +78,7 @@ public final class Indexer { private Boolean isDisabled; /* - * The ETag of the Indexer. + * The ETag of the indexer. 
*/ @JsonProperty(value = "@odata.etag") private String eTag; @@ -96,9 +96,9 @@ public String getName() { * Set the name property: The name of the indexer. * * @param name the name value to set. - * @return the Indexer object itself. + * @return the SearchIndexer object itself. */ - public Indexer setName(String name) { + public SearchIndexer setName(String name) { this.name = name; return this; } @@ -116,9 +116,9 @@ public String getDescription() { * Set the description property: The description of the indexer. * * @param description the description value to set. - * @return the Indexer object itself. + * @return the SearchIndexer object itself. */ - public Indexer setDescription(String description) { + public SearchIndexer setDescription(String description) { this.description = description; return this; } @@ -138,9 +138,9 @@ public String getDataSourceName() { * this indexer reads data. * * @param dataSourceName the dataSourceName value to set. - * @return the Indexer object itself. + * @return the SearchIndexer object itself. */ - public Indexer setDataSourceName(String dataSourceName) { + public SearchIndexer setDataSourceName(String dataSourceName) { this.dataSourceName = dataSourceName; return this; } @@ -160,9 +160,9 @@ public String getSkillsetName() { * this indexer. * * @param skillsetName the skillsetName value to set. - * @return the Indexer object itself. + * @return the SearchIndexer object itself. */ - public Indexer setSkillsetName(String skillsetName) { + public SearchIndexer setSkillsetName(String skillsetName) { this.skillsetName = skillsetName; return this; } @@ -182,9 +182,9 @@ public String getTargetIndexName() { * indexer writes data. * * @param targetIndexName the targetIndexName value to set. - * @return the Indexer object itself. + * @return the SearchIndexer object itself. 
*/ - public Indexer setTargetIndexName(String targetIndexName) { + public SearchIndexer setTargetIndexName(String targetIndexName) { this.targetIndexName = targetIndexName; return this; } @@ -202,9 +202,9 @@ public IndexingSchedule getSchedule() { * Set the schedule property: The schedule for this indexer. * * @param schedule the schedule value to set. - * @return the Indexer object itself. + * @return the SearchIndexer object itself. */ - public Indexer setSchedule(IndexingSchedule schedule) { + public SearchIndexer setSchedule(IndexingSchedule schedule) { this.schedule = schedule; return this; } @@ -222,9 +222,9 @@ public IndexingParameters getParameters() { * Set the parameters property: Parameters for indexer execution. * * @param parameters the parameters value to set. - * @return the Indexer object itself. + * @return the SearchIndexer object itself. */ - public Indexer setParameters(IndexingParameters parameters) { + public SearchIndexer setParameters(IndexingParameters parameters) { this.parameters = parameters; return this; } @@ -244,9 +244,9 @@ public List getFieldMappings() { * data source and corresponding target fields in the index. * * @param fieldMappings the fieldMappings value to set. - * @return the Indexer object itself. + * @return the SearchIndexer object itself. */ - public Indexer setFieldMappings(List fieldMappings) { + public SearchIndexer setFieldMappings(List fieldMappings) { this.fieldMappings = fieldMappings; return this; } @@ -266,9 +266,9 @@ public List getOutputFieldMappings() { * after enrichment and immediately before indexing. * * @param outputFieldMappings the outputFieldMappings value to set. - * @return the Indexer object itself. + * @return the SearchIndexer object itself. 
*/ - public Indexer setOutputFieldMappings(List outputFieldMappings) { + public SearchIndexer setOutputFieldMappings(List outputFieldMappings) { this.outputFieldMappings = outputFieldMappings; return this; } @@ -288,15 +288,15 @@ public Boolean isDisabled() { * disabled. Default is false. * * @param isDisabled the isDisabled value to set. - * @return the Indexer object itself. + * @return the SearchIndexer object itself. */ - public Indexer setIsDisabled(Boolean isDisabled) { + public SearchIndexer setIsDisabled(Boolean isDisabled) { this.isDisabled = isDisabled; return this; } /** - * Get the eTag property: The ETag of the Indexer. + * Get the eTag property: The ETag of the indexer. * * @return the eTag value. */ @@ -305,12 +305,12 @@ public String getETag() { } /** - * Set the eTag property: The ETag of the Indexer. + * Set the eTag property: The ETag of the indexer. * * @param eTag the eTag value to set. - * @return the Indexer object itself. + * @return the SearchIndexer object itself. */ - public Indexer setETag(String eTag) { + public SearchIndexer setETag(String eTag) { this.eTag = eTag; return this; } diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/DataContainer.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchIndexerDataContainer.java similarity index 88% rename from sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/DataContainer.java rename to sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchIndexerDataContainer.java index dc9f7e54380b..7820c15491e2 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/DataContainer.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchIndexerDataContainer.java @@ -14,7 +14,7 @@ * collection) that will be indexed. 
*/ @Fluent -public final class DataContainer { +public final class SearchIndexerDataContainer { /* * The name of the table or view (for Azure SQL data source) or collection * (for CosmosDB data source) that will be indexed. @@ -45,9 +45,9 @@ public String getName() { * source) or collection (for CosmosDB data source) that will be indexed. * * @param name the name value to set. - * @return the DataContainer object itself. + * @return the SearchIndexerDataContainer object itself. */ - public DataContainer setName(String name) { + public SearchIndexerDataContainer setName(String name) { this.name = name; return this; } @@ -69,9 +69,9 @@ public String getQuery() { * supported by Azure SQL datasources. * * @param query the query value to set. - * @return the DataContainer object itself. + * @return the SearchIndexerDataContainer object itself. */ - public DataContainer setQuery(String query) { + public SearchIndexerDataContainer setQuery(String query) { this.query = query; return this; } diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/DataSource.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchIndexerDataSource.java similarity index 77% rename from sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/DataSource.java rename to sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchIndexerDataSource.java index eccfdd0393a9..5e4f3452fe88 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/DataSource.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchIndexerDataSource.java @@ -14,7 +14,7 @@ * indexer. */ @Fluent -public final class DataSource { +public final class SearchIndexerDataSource { /* * The name of the datasource. 
*/ @@ -32,7 +32,7 @@ public final class DataSource { * 'CosmosDb', 'AzureBlob', 'AzureTable', 'MySql' */ @JsonProperty(value = "type", required = true) - private DataSourceType type; + private SearchIndexerDataSourceType type; /* * Credentials for the datasource. @@ -44,7 +44,7 @@ public final class DataSource { * The data container for the datasource. */ @JsonProperty(value = "container", required = true) - private DataContainer container; + private SearchIndexerDataContainer container; /* * The data change detection policy for the datasource. @@ -59,7 +59,7 @@ public final class DataSource { private DataDeletionDetectionPolicy dataDeletionDetectionPolicy; /* - * The ETag of the DataSource. + * The ETag of the data source. */ @JsonProperty(value = "@odata.etag") private String eTag; @@ -77,9 +77,9 @@ public String getName() { * Set the name property: The name of the datasource. * * @param name the name value to set. - * @return the DataSource object itself. + * @return the SearchIndexerDataSource object itself. */ - public DataSource setName(String name) { + public SearchIndexerDataSource setName(String name) { this.name = name; return this; } @@ -97,9 +97,9 @@ public String getDescription() { * Set the description property: The description of the datasource. * * @param description the description value to set. - * @return the DataSource object itself. + * @return the SearchIndexerDataSource object itself. */ - public DataSource setDescription(String description) { + public SearchIndexerDataSource setDescription(String description) { this.description = description; return this; } @@ -110,7 +110,7 @@ public DataSource setDescription(String description) { * * @return the type value. */ - public DataSourceType getType() { + public SearchIndexerDataSourceType getType() { return this.type; } @@ -119,9 +119,9 @@ public DataSourceType getType() { * include: 'AzureSql', 'CosmosDb', 'AzureBlob', 'AzureTable', 'MySql'. * * @param type the type value to set. 
- * @return the DataSource object itself. + * @return the SearchIndexerDataSource object itself. */ - public DataSource setType(DataSourceType type) { + public SearchIndexerDataSource setType(SearchIndexerDataSourceType type) { this.type = type; return this; } @@ -139,9 +139,9 @@ public DataSourceCredentials getCredentials() { * Set the credentials property: Credentials for the datasource. * * @param credentials the credentials value to set. - * @return the DataSource object itself. + * @return the SearchIndexerDataSource object itself. */ - public DataSource setCredentials(DataSourceCredentials credentials) { + public SearchIndexerDataSource setCredentials(DataSourceCredentials credentials) { this.credentials = credentials; return this; } @@ -151,7 +151,7 @@ public DataSource setCredentials(DataSourceCredentials credentials) { * * @return the container value. */ - public DataContainer getContainer() { + public SearchIndexerDataContainer getContainer() { return this.container; } @@ -159,9 +159,9 @@ public DataContainer getContainer() { * Set the container property: The data container for the datasource. * * @param container the container value to set. - * @return the DataSource object itself. + * @return the SearchIndexerDataSource object itself. */ - public DataSource setContainer(DataContainer container) { + public SearchIndexerDataSource setContainer(SearchIndexerDataContainer container) { this.container = container; return this; } @@ -182,9 +182,9 @@ public DataChangeDetectionPolicy getDataChangeDetectionPolicy() { * * @param dataChangeDetectionPolicy the dataChangeDetectionPolicy value to * set. - * @return the DataSource object itself. + * @return the SearchIndexerDataSource object itself. 
*/ - public DataSource setDataChangeDetectionPolicy(DataChangeDetectionPolicy dataChangeDetectionPolicy) { + public SearchIndexerDataSource setDataChangeDetectionPolicy(DataChangeDetectionPolicy dataChangeDetectionPolicy) { this.dataChangeDetectionPolicy = dataChangeDetectionPolicy; return this; } @@ -205,15 +205,15 @@ public DataDeletionDetectionPolicy getDataDeletionDetectionPolicy() { * * @param dataDeletionDetectionPolicy the dataDeletionDetectionPolicy value * to set. - * @return the DataSource object itself. + * @return the SearchIndexerDataSource object itself. */ - public DataSource setDataDeletionDetectionPolicy(DataDeletionDetectionPolicy dataDeletionDetectionPolicy) { + public SearchIndexerDataSource setDataDeletionDetectionPolicy(DataDeletionDetectionPolicy dataDeletionDetectionPolicy) { this.dataDeletionDetectionPolicy = dataDeletionDetectionPolicy; return this; } /** - * Get the eTag property: The ETag of the DataSource. + * Get the eTag property: The ETag of the data source. * * @return the eTag value. */ @@ -222,12 +222,12 @@ public String getETag() { } /** - * Set the eTag property: The ETag of the DataSource. + * Set the eTag property: The ETag of the data source. * * @param eTag the eTag value to set. - * @return the DataSource object itself. + * @return the SearchIndexerDataSource object itself. */ - public DataSource setETag(String eTag) { + public SearchIndexerDataSource setETag(String eTag) { this.eTag = eTag; return this; } diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchIndexerDataSourceType.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchIndexerDataSourceType.java new file mode 100644 index 000000000000..18d054d973fe --- /dev/null +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchIndexerDataSourceType.java @@ -0,0 +1,59 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. 
+// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is +// regenerated. + +package com.azure.search.documents.models; + +import com.azure.core.util.ExpandableStringEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import java.util.Collection; + +/** + * Defines values for SearchIndexerDataSourceType. + */ +public final class SearchIndexerDataSourceType extends ExpandableStringEnum { + /** + * Static value azuresql for SearchIndexerDataSourceType. + */ + public static final SearchIndexerDataSourceType AZURE_SQL = fromString("azuresql"); + + /** + * Static value cosmosdb for SearchIndexerDataSourceType. + */ + public static final SearchIndexerDataSourceType COSMOS_DB = fromString("cosmosdb"); + + /** + * Static value azureblob for SearchIndexerDataSourceType. + */ + public static final SearchIndexerDataSourceType AZURE_BLOB = fromString("azureblob"); + + /** + * Static value azuretable for SearchIndexerDataSourceType. + */ + public static final SearchIndexerDataSourceType AZURE_TABLE = fromString("azuretable"); + + /** + * Static value mysql for SearchIndexerDataSourceType. + */ + public static final SearchIndexerDataSourceType MY_SQL = fromString("mysql"); + + /** + * Creates or finds a SearchIndexerDataSourceType from its string representation. + * + * @param name a name to look for. + * @return the corresponding SearchIndexerDataSourceType. + */ + @JsonCreator + public static SearchIndexerDataSourceType fromString(String name) { + return fromString(name, SearchIndexerDataSourceType.class); + } + + /** + * @return known SearchIndexerDataSourceType values. 
+ */ + public static Collection values() { + return values(SearchIndexerDataSourceType.class); + } +} diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/ItemError.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchIndexerError.java similarity index 98% rename from sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/ItemError.java rename to sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchIndexerError.java index 7361356d9950..c9a0691d3444 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/ItemError.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchIndexerError.java @@ -13,7 +13,7 @@ * Represents an item- or document-level indexing error. */ @Fluent -public final class ItemError { +public final class SearchIndexerError { /* * The key of the item for which indexing failed. */ diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/IndexerLimits.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchIndexerLimits.java similarity index 96% rename from sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/IndexerLimits.java rename to sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchIndexerLimits.java index 33bd7cc3b6fa..108d2f23ab78 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/IndexerLimits.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchIndexerLimits.java @@ -11,10 +11,10 @@ import java.time.Duration; /** - * The IndexerLimits model. + * The SearchIndexerLimits model. 
*/ @Fluent -public final class IndexerLimits { +public final class SearchIndexerLimits { /* * The maximum duration that the indexer is permitted to run for one * execution. diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/Skill.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchIndexerSkill.java similarity index 89% rename from sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/Skill.java rename to sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchIndexerSkill.java index 90e08e846e54..ff474b39b14b 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/Skill.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchIndexerSkill.java @@ -16,8 +16,8 @@ /** * Base type for skills. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "@odata.type", defaultImpl = Skill.class) -@JsonTypeName("Skill") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "@odata.type", defaultImpl = SearchIndexerSkill.class) +@JsonTypeName("SearchIndexerSkill") @JsonSubTypes({ @JsonSubTypes.Type(name = "#Microsoft.Skills.Util.ConditionalSkill", value = ConditionalSkill.class), @JsonSubTypes.Type(name = "#Microsoft.Skills.Text.KeyPhraseExtractionSkill", value = KeyPhraseExtractionSkill.class), @@ -33,7 +33,7 @@ @JsonSubTypes.Type(name = "#Microsoft.Skills.Custom.WebApiSkill", value = WebApiSkill.class) }) @Fluent -public abstract class Skill { +public class SearchIndexerSkill { /* * The name of the skill which uniquely identifies it within the skillset. * A skill with no name defined will be given a default name of its 1-based @@ -90,9 +90,9 @@ public String getName() { * character '#'. * * @param name the name value to set. - * @return the Skill object itself. 
+ * @return the SearchIndexerSkill object itself. */ - public Skill setName(String name) { + public SearchIndexerSkill setName(String name) { this.name = name; return this; } @@ -112,9 +112,9 @@ public String getDescription() { * describes the inputs, outputs, and usage of the skill. * * @param description the description value to set. - * @return the Skill object itself. + * @return the SearchIndexerSkill object itself. */ - public Skill setDescription(String description) { + public SearchIndexerSkill setDescription(String description) { this.description = description; return this; } @@ -136,9 +136,9 @@ public String getContext() { * /document or /document/content). The default is /document. * * @param context the context value to set. - * @return the Skill object itself. + * @return the SearchIndexerSkill object itself. */ - public Skill setContext(String context) { + public SearchIndexerSkill setContext(String context) { this.context = context; return this; } @@ -158,9 +158,9 @@ public List getInputs() { * source data set, or the output of an upstream skill. * * @param inputs the inputs value to set. - * @return the Skill object itself. + * @return the SearchIndexerSkill object itself. */ - public Skill setInputs(List inputs) { + public SearchIndexerSkill setInputs(List inputs) { this.inputs = inputs; return this; } @@ -182,9 +182,9 @@ public List getOutputs() { * skill. * * @param outputs the outputs value to set. - * @return the Skill object itself. + * @return the SearchIndexerSkill object itself. 
*/ - public Skill setOutputs(List outputs) { + public SearchIndexerSkill setOutputs(List outputs) { this.outputs = outputs; return this; } diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/Skillset.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchIndexerSkillset.java similarity index 80% rename from sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/Skillset.java rename to sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchIndexerSkillset.java index c600cd74bfcc..1d690457c6bc 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/Skillset.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchIndexerSkillset.java @@ -14,7 +14,7 @@ * A list of skills. */ @Fluent -public final class Skillset { +public final class SearchIndexerSkillset { /* * The name of the skillset. */ @@ -31,7 +31,7 @@ public final class Skillset { * A list of skills in the skillset. */ @JsonProperty(value = "skills", required = true) - private List skills; + private List skills; /* * Details about cognitive services to be used when running skills. @@ -58,9 +58,9 @@ public String getName() { * Set the name property: The name of the skillset. * * @param name the name value to set. - * @return the Skillset object itself. + * @return the SearchIndexerSkillset object itself. */ - public Skillset setName(String name) { + public SearchIndexerSkillset setName(String name) { this.name = name; return this; } @@ -78,9 +78,9 @@ public String getDescription() { * Set the description property: The description of the skillset. * * @param description the description value to set. - * @return the Skillset object itself. + * @return the SearchIndexerSkillset object itself. 
*/ - public Skillset setDescription(String description) { + public SearchIndexerSkillset setDescription(String description) { this.description = description; return this; } @@ -90,7 +90,7 @@ public Skillset setDescription(String description) { * * @return the skills value. */ - public List getSkills() { + public List getSkills() { return this.skills; } @@ -98,9 +98,9 @@ public List getSkills() { * Set the skills property: A list of skills in the skillset. * * @param skills the skills value to set. - * @return the Skillset object itself. + * @return the SearchIndexerSkillset object itself. */ - public Skillset setSkills(List skills) { + public SearchIndexerSkillset setSkills(List skills) { this.skills = skills; return this; } @@ -121,9 +121,9 @@ public CognitiveServicesAccount getCognitiveServicesAccount() { * * @param cognitiveServicesAccount the cognitiveServicesAccount value to * set. - * @return the Skillset object itself. + * @return the SearchIndexerSkillset object itself. */ - public Skillset setCognitiveServicesAccount(CognitiveServicesAccount cognitiveServicesAccount) { + public SearchIndexerSkillset setCognitiveServicesAccount(CognitiveServicesAccount cognitiveServicesAccount) { this.cognitiveServicesAccount = cognitiveServicesAccount; return this; } @@ -141,9 +141,9 @@ public String getETag() { * Set the eTag property: The ETag of the skillset. * * @param eTag the eTag value to set. - * @return the Skillset object itself. + * @return the SearchIndexerSkillset object itself. 
*/ - public Skillset setETag(String eTag) { + public SearchIndexerSkillset setETag(String eTag) { this.eTag = eTag; return this; } diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/IndexerExecutionInfo.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchIndexerStatus.java similarity index 89% rename from sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/IndexerExecutionInfo.java rename to sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchIndexerStatus.java index 1462fafee4d0..953fe6cc936c 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/IndexerExecutionInfo.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchIndexerStatus.java @@ -14,10 +14,10 @@ * Represents the current status and execution history of an indexer. */ @Fluent -public final class IndexerExecutionInfo { +public final class SearchIndexerStatus { /* - * Overall indexer status. Possible values include: 'unknown', 'error', - * 'running' + * Overall indexer status. Possible values include: 'Unknown', 'Error', + * 'Running' */ @JsonProperty(value = "status", required = true, access = JsonProperty.Access.WRITE_ONLY) private IndexerStatus status; @@ -39,11 +39,11 @@ public final class IndexerExecutionInfo { * The execution limits for the indexer. */ @JsonProperty(value = "limits", required = true, access = JsonProperty.Access.WRITE_ONLY) - private IndexerLimits limits; + private SearchIndexerLimits limits; /** * Get the status property: Overall indexer status. Possible values - * include: 'unknown', 'error', 'running'. + * include: 'Unknown', 'Error', 'Running'. * * @return the status value. */ @@ -76,7 +76,7 @@ public List getExecutionHistory() { * * @return the limits value. 
*/ - public IndexerLimits getLimits() { + public SearchIndexerLimits getLimits() { return this.limits; } } diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/ItemWarning.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchIndexerWarning.java similarity index 98% rename from sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/ItemWarning.java rename to sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchIndexerWarning.java index 8f0f8c3619f3..8bc3466e6687 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/ItemWarning.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchIndexerWarning.java @@ -13,7 +13,7 @@ * Represents an item-level warning. */ @Fluent -public final class ItemWarning { +public final class SearchIndexerWarning { /* * The key of the item which generated a warning. */ diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchOptions.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchOptions.java index f612b2cbff53..e42dac78575f 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchOptions.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchOptions.java @@ -85,7 +85,7 @@ public final class SearchOptions { /* * A value that specifies the syntax of the search query. The default is * 'simple'. Use 'full' if your query uses the Lucene query syntax. 
- * Possible values include: 'simple', 'full' + * Possible values include: 'Simple', 'Full' */ @JsonProperty(value = "") private QueryType queryType; @@ -119,7 +119,7 @@ public final class SearchOptions { /* * A value that specifies whether any or all of the search terms must be * matched in order to count the document as a match. Possible values - * include: 'any', 'all' + * include: 'Any', 'All' */ @JsonProperty(value = "") private SearchMode searchMode; @@ -357,7 +357,7 @@ public SearchOptions setOrderBy(String... orderBy) { /** * Get the queryType property: A value that specifies the syntax of the * search query. The default is 'simple'. Use 'full' if your query uses the - * Lucene query syntax. Possible values include: 'simple', 'full'. + * Lucene query syntax. Possible values include: 'Simple', 'Full'. * * @return the queryType value. */ @@ -368,7 +368,7 @@ public QueryType getQueryType() { /** * Set the queryType property: A value that specifies the syntax of the * search query. The default is 'simple'. Use 'full' if your query uses the - * Lucene query syntax. Possible values include: 'simple', 'full'. + * Lucene query syntax. Possible values include: 'Simple', 'Full'. * * @param queryType the queryType value to set. * @return the SearchOptions object itself. @@ -461,7 +461,7 @@ public SearchOptions setSearchFields(String... searchFields) { /** * Get the searchMode property: A value that specifies whether any or all * of the search terms must be matched in order to count the document as a - * match. Possible values include: 'any', 'all'. + * match. Possible values include: 'Any', 'All'. * * @return the searchMode value. */ @@ -472,7 +472,7 @@ public SearchMode getSearchMode() { /** * Set the searchMode property: A value that specifies whether any or all * of the search terms must be matched in order to count the document as a - * match. Possible values include: 'any', 'all'. + * match. Possible values include: 'Any', 'All'. 
* * @param searchMode the searchMode value to set. * @return the SearchOptions object itself. diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/EncryptionKey.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchResourceEncryptionKey.java similarity index 58% rename from sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/EncryptionKey.java rename to sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchResourceEncryptionKey.java index 90570fa99568..24c3e51531d9 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/EncryptionKey.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchResourceEncryptionKey.java @@ -15,20 +15,20 @@ * Search, such as indexes and synonym maps. */ @Fluent -public final class EncryptionKey { +public final class SearchResourceEncryptionKey { /* * The name of your Azure Key Vault key to be used to encrypt your data at * rest. */ @JsonProperty(value = "keyVaultKeyName", required = true) - private String keyVaultKeyName; + private String keyName; /* * The version of your Azure Key Vault key to be used to encrypt your data * at rest. */ @JsonProperty(value = "keyVaultKeyVersion", required = true) - private String keyVaultKeyVersion; + private String keyVersion; /* * The URI of your Azure Key Vault, also referred to as DNS name, that @@ -36,7 +36,7 @@ public final class EncryptionKey { * might be https://my-keyvault-name.vault.azure.net. 
*/ @JsonProperty(value = "keyVaultUri", required = true) - private String keyVaultUri; + private String vaultUri; /* * Optional Azure Active Directory credentials used for accessing your @@ -46,72 +46,72 @@ public final class EncryptionKey { private AzureActiveDirectoryApplicationCredentials accessCredentials; /** - * Get the keyVaultKeyName property: The name of your Azure Key Vault key - * to be used to encrypt your data at rest. + * Get the keyName property: The name of your Azure Key Vault key to be + * used to encrypt your data at rest. * - * @return the keyVaultKeyName value. + * @return the keyName value. */ - public String getKeyVaultKeyName() { - return this.keyVaultKeyName; + public String getKeyName() { + return this.keyName; } /** - * Set the keyVaultKeyName property: The name of your Azure Key Vault key - * to be used to encrypt your data at rest. + * Set the keyName property: The name of your Azure Key Vault key to be + * used to encrypt your data at rest. * - * @param keyVaultKeyName the keyVaultKeyName value to set. - * @return the EncryptionKey object itself. + * @param keyName the keyName value to set. + * @return the SearchResourceEncryptionKey object itself. */ - public EncryptionKey setKeyVaultKeyName(String keyVaultKeyName) { - this.keyVaultKeyName = keyVaultKeyName; + public SearchResourceEncryptionKey setKeyName(String keyName) { + this.keyName = keyName; return this; } /** - * Get the keyVaultKeyVersion property: The version of your Azure Key Vault - * key to be used to encrypt your data at rest. + * Get the keyVersion property: The version of your Azure Key Vault key to + * be used to encrypt your data at rest. * - * @return the keyVaultKeyVersion value. + * @return the keyVersion value. 
*/ - public String getKeyVaultKeyVersion() { - return this.keyVaultKeyVersion; + public String getKeyVersion() { + return this.keyVersion; } /** - * Set the keyVaultKeyVersion property: The version of your Azure Key Vault - * key to be used to encrypt your data at rest. + * Set the keyVersion property: The version of your Azure Key Vault key to + * be used to encrypt your data at rest. * - * @param keyVaultKeyVersion the keyVaultKeyVersion value to set. - * @return the EncryptionKey object itself. + * @param keyVersion the keyVersion value to set. + * @return the SearchResourceEncryptionKey object itself. */ - public EncryptionKey setKeyVaultKeyVersion(String keyVaultKeyVersion) { - this.keyVaultKeyVersion = keyVaultKeyVersion; + public SearchResourceEncryptionKey setKeyVersion(String keyVersion) { + this.keyVersion = keyVersion; return this; } /** - * Get the keyVaultUri property: The URI of your Azure Key Vault, also + * Get the vaultUri property: The URI of your Azure Key Vault, also * referred to as DNS name, that contains the key to be used to encrypt * your data at rest. An example URI might be * https://my-keyvault-name.vault.azure.net. * - * @return the keyVaultUri value. + * @return the vaultUri value. */ - public String getKeyVaultUri() { - return this.keyVaultUri; + public String getVaultUri() { + return this.vaultUri; } /** - * Set the keyVaultUri property: The URI of your Azure Key Vault, also + * Set the vaultUri property: The URI of your Azure Key Vault, also * referred to as DNS name, that contains the key to be used to encrypt * your data at rest. An example URI might be * https://my-keyvault-name.vault.azure.net. * - * @param keyVaultUri the keyVaultUri value to set. - * @return the EncryptionKey object itself. + * @param vaultUri the vaultUri value to set. + * @return the SearchResourceEncryptionKey object itself. 
*/ - public EncryptionKey setKeyVaultUri(String keyVaultUri) { - this.keyVaultUri = keyVaultUri; + public SearchResourceEncryptionKey setVaultUri(String vaultUri) { + this.vaultUri = vaultUri; return this; } @@ -132,9 +132,9 @@ public AzureActiveDirectoryApplicationCredentials getAccessCredentials() { * using managed identity instead. * * @param accessCredentials the accessCredentials value to set. - * @return the EncryptionKey object itself. + * @return the SearchResourceEncryptionKey object itself. */ - public EncryptionKey setAccessCredentials(AzureActiveDirectoryApplicationCredentials accessCredentials) { + public SearchResourceEncryptionKey setAccessCredentials(AzureActiveDirectoryApplicationCredentials accessCredentials) { this.accessCredentials = accessCredentials; return this; } diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchableField.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchableField.java index 42fa06b254a1..662dc3ee207d 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchableField.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SearchableField.java @@ -6,12 +6,12 @@ import java.util.List; /** - * A helper Field model to build a searchable {@link Field}. + * A helper Field model to build a searchable {@link SearchField}. */ public class SearchableField extends SimpleField { - private AnalyzerName analyzer; - private AnalyzerName searchAnalyzer; - private AnalyzerName indexAnalyzer; + private LexicalAnalyzerName analyzer; + private LexicalAnalyzerName searchAnalyzer; + private LexicalAnalyzerName indexAnalyzer; private List synonymMapNames; /** @@ -22,16 +22,16 @@ public class SearchableField extends SimpleField { * @throws NullPointerException when {@code name} is null. 
*/ public SearchableField(String name, boolean collection) { - super(name, DataType.EDM_STRING, collection); + super(name, SearchFieldDataType.STRING, collection); } /** * Gets the name of the language analyzer. This property cannot be set when either {@code searchAnalyzer} or * {@code indexAnalyzer} are set. Once the analyzer is chosen, it cannot be changed for the field in the index. * - * @return The {@link AnalyzerName} used for analyzer. + * @return The {@link LexicalAnalyzerName} used for analyzer. */ - public AnalyzerName getAnalyzer() { + public LexicalAnalyzerName getAnalyzer() { return analyzer; } @@ -39,10 +39,10 @@ public AnalyzerName getAnalyzer() { * Sets the name of the language analyzer. This property cannot be set when either {@code searchAnalyzer} or * {@code indexAnalyzer} are set. Once the analyzer is chosen, it cannot be changed for the field in the index. * - * @param analyzer The {@link AnalyzerName} used for analyzer. + * @param analyzer The {@link LexicalAnalyzerName} used for analyzer. * @return The SearchableField object itself. */ - public SearchableField setAnalyzer(AnalyzerName analyzer) { + public SearchableField setAnalyzer(LexicalAnalyzerName analyzer) { this.analyzer = analyzer; return this; } @@ -52,9 +52,9 @@ public SearchableField setAnalyzer(AnalyzerName analyzer) { * {@code indexAnalyzer}, and cannot be set when {@code analyzer} is set. Once the analyzer is chosen, it cannot be * changed for the field in the index. * - * @return The {@link AnalyzerName} used for search analyzer. + * @return The {@link LexicalAnalyzerName} used for search analyzer. */ - public AnalyzerName getSearchAnalyzer() { + public LexicalAnalyzerName getSearchAnalyzer() { return searchAnalyzer; } @@ -63,10 +63,10 @@ public AnalyzerName getSearchAnalyzer() { * {@code indexAnalyzer}, and cannot be set when {@code analyzer} is set. Once the analyzer is chosen, it cannot be * changed for the field in the index. 
* - * @param searchAnalyzer The {@link AnalyzerName} used for search analyzer. + * @param searchAnalyzer The {@link LexicalAnalyzerName} used for search analyzer. * @return The SearchableField object itself. */ - public SearchableField setSearchAnalyzer(AnalyzerName searchAnalyzer) { + public SearchableField setSearchAnalyzer(LexicalAnalyzerName searchAnalyzer) { this.searchAnalyzer = searchAnalyzer; return this; } @@ -76,9 +76,9 @@ public SearchableField setSearchAnalyzer(AnalyzerName searchAnalyzer) { * {@code searchAnalyzer}, and cannot be set when {@code analyzer} is set. Once the analyzer is chosen, it cannot be * changed for the field in the index. * - * @return The {@link AnalyzerName} used for index analyzer. + * @return The {@link LexicalAnalyzerName} used for index analyzer. */ - public AnalyzerName getIndexAnalyzer() { + public LexicalAnalyzerName getIndexAnalyzer() { return indexAnalyzer; } @@ -87,10 +87,10 @@ public AnalyzerName getIndexAnalyzer() { * {@code searchAnalyzer}, and cannot be set when {@code analyzer} is set. Once the analyzer is chosen, it cannot be * changed for the field in the index. * - * @param indexAnalyzer The {@link AnalyzerName} used for index analyzer. + * @param indexAnalyzer The {@link LexicalAnalyzerName} used for index analyzer. * @return The SearchableField object itself. */ - public SearchableField setIndexAnalyzer(AnalyzerName indexAnalyzer) { + public SearchableField setIndexAnalyzer(LexicalAnalyzerName indexAnalyzer) { this.indexAnalyzer = indexAnalyzer; return this; } @@ -124,12 +124,12 @@ public SearchableField setSynonymMapNames(List synonymMapNames) { } /** - * Convert SearchableField to {@link Field}. + * Convert SearchableField to {@link SearchField}. * - * @return The {@link Field} object. + * @return The {@link SearchField} object. 
*/ - public Field build() { - return new Field() + public SearchField build() { + return new SearchField() .setName(super.getName()) .setType(super.getDataType()) .setSearchable(true) diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SentimentSkill.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SentimentSkill.java index e48a24904c0c..d8c1b9aa444f 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SentimentSkill.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SentimentSkill.java @@ -18,7 +18,7 @@ @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "@odata.type") @JsonTypeName("#Microsoft.Skills.Text.SentimentSkill") @Fluent -public final class SentimentSkill extends Skill { +public final class SentimentSkill extends SearchIndexerSkill { /* * A value indicating which language code to use. Default is en. 
Possible * values include: 'da', 'nl', 'en', 'fi', 'fr', 'de', 'el', 'it', 'no', diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/ShaperSkill.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/ShaperSkill.java index c5cec3e3fe15..191e074ed91e 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/ShaperSkill.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/ShaperSkill.java @@ -17,5 +17,5 @@ @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "@odata.type") @JsonTypeName("#Microsoft.Skills.Util.ShaperSkill") @Fluent -public final class ShaperSkill extends Skill { +public final class ShaperSkill extends SearchIndexerSkill { } diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SimpleField.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SimpleField.java index 3250fb4854de..212ff0b08fa1 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SimpleField.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SimpleField.java @@ -4,9 +4,9 @@ package com.azure.search.documents.models; /** - * A helper Field model to build a simple {@link Field}. + * A helper Field model to build a simple {@link SearchField}. */ -public class SimpleField extends FieldBase { +public class SimpleField extends SearchFieldBase { private boolean key; private boolean facetable; private boolean sortable; @@ -17,18 +17,18 @@ public class SimpleField extends FieldBase { * Initializes a new instance of the {@link SimpleField} class. * * @param name The name of the field, which must be unique within the index or parent field. - * @param dataType The {@link DataType} of the {@link Field}. 
+ * @param dataType The {@link SearchFieldDataType} of the {@link SearchField}. * @param collection boolean field to indicate whether the dataType is collection. * @throws NullPointerException when {@code name} is null. */ - public SimpleField(String name, DataType dataType, boolean collection) { - super(name, collection ? DataType.collection(dataType) : dataType); + public SimpleField(String name, SearchFieldDataType dataType, boolean collection) { + super(name, collection ? SearchFieldDataType.collection(dataType) : dataType); } /** * Gets whether the field is the key field. * - * @return An {@link Index} must have exactly one key field of type {@code DataType.EDM_STRING}. + * @return An {@link SearchIndex} must have exactly one key field of type {@code DataType.EDM_STRING}. */ public boolean isKey() { return key; @@ -135,12 +135,12 @@ public SimpleField setFacetable(boolean facetable) { } /** - * Convert SimpleField to {@link Field}. + * Convert SimpleField to {@link SearchField}. * - * @return The {@link Field} object. + * @return The {@link SearchField} object. */ - public Field build() { - return new Field().setName(super.getName()) + public SearchField build() { + return new SearchField().setName(super.getName()) .setType(super.getDataType()) .setKey(key) .setSearchable(false) diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SnowballTokenFilter.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SnowballTokenFilter.java index 4578025d134d..4cb02456e0f2 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SnowballTokenFilter.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SnowballTokenFilter.java @@ -20,21 +20,21 @@ @Fluent public final class SnowballTokenFilter extends TokenFilter { /* - * The language to use. 
Possible values include: 'armenian', 'basque', - * 'catalan', 'danish', 'dutch', 'english', 'finnish', 'french', 'german', - * 'german2', 'hungarian', 'italian', 'kp', 'lovins', 'norwegian', - * 'porter', 'portuguese', 'romanian', 'russian', 'spanish', 'swedish', - * 'turkish' + * The language to use. Possible values include: 'Armenian', 'Basque', + * 'Catalan', 'Danish', 'Dutch', 'English', 'Finnish', 'French', 'German', + * 'German2', 'Hungarian', 'Italian', 'Kp', 'Lovins', 'Norwegian', + * 'Porter', 'Portuguese', 'Romanian', 'Russian', 'Spanish', 'Swedish', + * 'Turkish' */ @JsonProperty(value = "language", required = true) private SnowballTokenFilterLanguage language; /** * Get the language property: The language to use. Possible values include: - * 'armenian', 'basque', 'catalan', 'danish', 'dutch', 'english', - * 'finnish', 'french', 'german', 'german2', 'hungarian', 'italian', 'kp', - * 'lovins', 'norwegian', 'porter', 'portuguese', 'romanian', 'russian', - * 'spanish', 'swedish', 'turkish'. + * 'Armenian', 'Basque', 'Catalan', 'Danish', 'Dutch', 'English', + * 'Finnish', 'French', 'German', 'German2', 'Hungarian', 'Italian', 'Kp', + * 'Lovins', 'Norwegian', 'Porter', 'Portuguese', 'Romanian', 'Russian', + * 'Spanish', 'Swedish', 'Turkish'. * * @return the language value. */ @@ -44,10 +44,10 @@ public SnowballTokenFilterLanguage getLanguage() { /** * Set the language property: The language to use. Possible values include: - * 'armenian', 'basque', 'catalan', 'danish', 'dutch', 'english', - * 'finnish', 'french', 'german', 'german2', 'hungarian', 'italian', 'kp', - * 'lovins', 'norwegian', 'porter', 'portuguese', 'romanian', 'russian', - * 'spanish', 'swedish', 'turkish'. + * 'Armenian', 'Basque', 'Catalan', 'Danish', 'Dutch', 'English', + * 'Finnish', 'French', 'German', 'German2', 'Hungarian', 'Italian', 'Kp', + * 'Lovins', 'Norwegian', 'Porter', 'Portuguese', 'Romanian', 'Russian', + * 'Spanish', 'Swedish', 'Turkish'. 
* * @param language the language value to set. * @return the SnowballTokenFilter object itself. diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SplitSkill.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SplitSkill.java index aff60f032ad5..b3ab43547245 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SplitSkill.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SplitSkill.java @@ -17,7 +17,7 @@ @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "@odata.type") @JsonTypeName("#Microsoft.Skills.Text.SplitSkill") @Fluent -public final class SplitSkill extends Skill { +public final class SplitSkill extends SearchIndexerSkill { /* * A value indicating which language code to use. Default is en. Possible * values include: 'da', 'de', 'en', 'es', 'fi', 'fr', 'it', 'ko', 'pt' @@ -27,7 +27,7 @@ public final class SplitSkill extends Skill { /* * A value indicating which split mode to perform. Possible values include: - * 'pages', 'sentences' + * 'Pages', 'Sentences' */ @JsonProperty(value = "textSplitMode") private TextSplitMode textSplitMode; @@ -64,7 +64,7 @@ public SplitSkill setDefaultLanguageCode(SplitSkillLanguage defaultLanguageCode) /** * Get the textSplitMode property: A value indicating which split mode to - * perform. Possible values include: 'pages', 'sentences'. + * perform. Possible values include: 'Pages', 'Sentences'. * * @return the textSplitMode value. */ @@ -74,7 +74,7 @@ public TextSplitMode getTextSplitMode() { /** * Set the textSplitMode property: A value indicating which split mode to - * perform. Possible values include: 'pages', 'sentences'. + * perform. Possible values include: 'Pages', 'Sentences'. * * @param textSplitMode the textSplitMode value to set. * @return the SplitSkill object itself. 
diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/StemmerTokenFilter.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/StemmerTokenFilter.java index d10ac8761ba6..985e41090c12 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/StemmerTokenFilter.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/StemmerTokenFilter.java @@ -20,35 +20,35 @@ @Fluent public final class StemmerTokenFilter extends TokenFilter { /* - * The language to use. Possible values include: 'arabic', 'armenian', - * 'basque', 'brazilian', 'bulgarian', 'catalan', 'czech', 'danish', - * 'dutch', 'dutchKp', 'english', 'lightEnglish', 'minimalEnglish', - * 'possessiveEnglish', 'porter2', 'lovins', 'finnish', 'lightFinnish', - * 'french', 'lightFrench', 'minimalFrench', 'galician', 'minimalGalician', - * 'german', 'german2', 'lightGerman', 'minimalGerman', 'greek', 'hindi', - * 'hungarian', 'lightHungarian', 'indonesian', 'irish', 'italian', - * 'lightItalian', 'sorani', 'latvian', 'norwegian', 'lightNorwegian', - * 'minimalNorwegian', 'lightNynorsk', 'minimalNynorsk', 'portuguese', - * 'lightPortuguese', 'minimalPortuguese', 'portugueseRslp', 'romanian', - * 'russian', 'lightRussian', 'spanish', 'lightSpanish', 'swedish', - * 'lightSwedish', 'turkish' + * The language to use. 
Possible values include: 'Arabic', 'Armenian', + * 'Basque', 'Brazilian', 'Bulgarian', 'Catalan', 'Czech', 'Danish', + * 'Dutch', 'DutchKp', 'English', 'LightEnglish', 'MinimalEnglish', + * 'PossessiveEnglish', 'Porter2', 'Lovins', 'Finnish', 'LightFinnish', + * 'French', 'LightFrench', 'MinimalFrench', 'Galician', 'MinimalGalician', + * 'German', 'German2', 'LightGerman', 'MinimalGerman', 'Greek', 'Hindi', + * 'Hungarian', 'LightHungarian', 'Indonesian', 'Irish', 'Italian', + * 'LightItalian', 'Sorani', 'Latvian', 'Norwegian', 'LightNorwegian', + * 'MinimalNorwegian', 'LightNynorsk', 'MinimalNynorsk', 'Portuguese', + * 'LightPortuguese', 'MinimalPortuguese', 'PortugueseRslp', 'Romanian', + * 'Russian', 'LightRussian', 'Spanish', 'LightSpanish', 'Swedish', + * 'LightSwedish', 'Turkish' */ @JsonProperty(value = "language", required = true) private StemmerTokenFilterLanguage language; /** * Get the language property: The language to use. Possible values include: - * 'arabic', 'armenian', 'basque', 'brazilian', 'bulgarian', 'catalan', - * 'czech', 'danish', 'dutch', 'dutchKp', 'english', 'lightEnglish', - * 'minimalEnglish', 'possessiveEnglish', 'porter2', 'lovins', 'finnish', - * 'lightFinnish', 'french', 'lightFrench', 'minimalFrench', 'galician', - * 'minimalGalician', 'german', 'german2', 'lightGerman', 'minimalGerman', - * 'greek', 'hindi', 'hungarian', 'lightHungarian', 'indonesian', 'irish', - * 'italian', 'lightItalian', 'sorani', 'latvian', 'norwegian', - * 'lightNorwegian', 'minimalNorwegian', 'lightNynorsk', 'minimalNynorsk', - * 'portuguese', 'lightPortuguese', 'minimalPortuguese', 'portugueseRslp', - * 'romanian', 'russian', 'lightRussian', 'spanish', 'lightSpanish', - * 'swedish', 'lightSwedish', 'turkish'. 
+ * 'Arabic', 'Armenian', 'Basque', 'Brazilian', 'Bulgarian', 'Catalan', + * 'Czech', 'Danish', 'Dutch', 'DutchKp', 'English', 'LightEnglish', + * 'MinimalEnglish', 'PossessiveEnglish', 'Porter2', 'Lovins', 'Finnish', + * 'LightFinnish', 'French', 'LightFrench', 'MinimalFrench', 'Galician', + * 'MinimalGalician', 'German', 'German2', 'LightGerman', 'MinimalGerman', + * 'Greek', 'Hindi', 'Hungarian', 'LightHungarian', 'Indonesian', 'Irish', + * 'Italian', 'LightItalian', 'Sorani', 'Latvian', 'Norwegian', + * 'LightNorwegian', 'MinimalNorwegian', 'LightNynorsk', 'MinimalNynorsk', + * 'Portuguese', 'LightPortuguese', 'MinimalPortuguese', 'PortugueseRslp', + * 'Romanian', 'Russian', 'LightRussian', 'Spanish', 'LightSpanish', + * 'Swedish', 'LightSwedish', 'Turkish'. * * @return the language value. */ @@ -58,17 +58,17 @@ public StemmerTokenFilterLanguage getLanguage() { /** * Set the language property: The language to use. Possible values include: - * 'arabic', 'armenian', 'basque', 'brazilian', 'bulgarian', 'catalan', - * 'czech', 'danish', 'dutch', 'dutchKp', 'english', 'lightEnglish', - * 'minimalEnglish', 'possessiveEnglish', 'porter2', 'lovins', 'finnish', - * 'lightFinnish', 'french', 'lightFrench', 'minimalFrench', 'galician', - * 'minimalGalician', 'german', 'german2', 'lightGerman', 'minimalGerman', - * 'greek', 'hindi', 'hungarian', 'lightHungarian', 'indonesian', 'irish', - * 'italian', 'lightItalian', 'sorani', 'latvian', 'norwegian', - * 'lightNorwegian', 'minimalNorwegian', 'lightNynorsk', 'minimalNynorsk', - * 'portuguese', 'lightPortuguese', 'minimalPortuguese', 'portugueseRslp', - * 'romanian', 'russian', 'lightRussian', 'spanish', 'lightSpanish', - * 'swedish', 'lightSwedish', 'turkish'. 
+ * 'Arabic', 'Armenian', 'Basque', 'Brazilian', 'Bulgarian', 'Catalan', + * 'Czech', 'Danish', 'Dutch', 'DutchKp', 'English', 'LightEnglish', + * 'MinimalEnglish', 'PossessiveEnglish', 'Porter2', 'Lovins', 'Finnish', + * 'LightFinnish', 'French', 'LightFrench', 'MinimalFrench', 'Galician', + * 'MinimalGalician', 'German', 'German2', 'LightGerman', 'MinimalGerman', + * 'Greek', 'Hindi', 'Hungarian', 'LightHungarian', 'Indonesian', 'Irish', + * 'Italian', 'LightItalian', 'Sorani', 'Latvian', 'Norwegian', + * 'LightNorwegian', 'MinimalNorwegian', 'LightNynorsk', 'MinimalNynorsk', + * 'Portuguese', 'LightPortuguese', 'MinimalPortuguese', 'PortugueseRslp', + * 'Romanian', 'Russian', 'LightRussian', 'Spanish', 'LightSpanish', + * 'Swedish', 'LightSwedish', 'Turkish'. * * @param language the language value to set. * @return the StemmerTokenFilter object itself. diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/StopAnalyzer.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/StopAnalyzer.java index 710acce1b993..d54976974ff2 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/StopAnalyzer.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/StopAnalyzer.java @@ -19,7 +19,7 @@ @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "@odata.type") @JsonTypeName("#Microsoft.Azure.Search.StopAnalyzer") @Fluent -public final class StopAnalyzer extends Analyzer { +public final class StopAnalyzer extends LexicalAnalyzer { /* * A list of stopwords. 
*/ diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/StopwordsTokenFilter.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/StopwordsTokenFilter.java index c95882539b4e..ecef04ebd1ed 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/StopwordsTokenFilter.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/StopwordsTokenFilter.java @@ -30,11 +30,11 @@ public final class StopwordsTokenFilter extends TokenFilter { /* * A predefined list of stopwords to use. This property and the stopwords * property cannot both be set. Default is English. Possible values - * include: 'arabic', 'armenian', 'basque', 'brazilian', 'bulgarian', - * 'catalan', 'czech', 'danish', 'dutch', 'english', 'finnish', 'french', - * 'galician', 'german', 'greek', 'hindi', 'hungarian', 'indonesian', - * 'irish', 'italian', 'latvian', 'norwegian', 'persian', 'portuguese', - * 'romanian', 'russian', 'sorani', 'spanish', 'swedish', 'thai', 'turkish' + * include: 'Arabic', 'Armenian', 'Basque', 'Brazilian', 'Bulgarian', + * 'Catalan', 'Czech', 'Danish', 'Dutch', 'English', 'Finnish', 'French', + * 'Galician', 'German', 'Greek', 'Hindi', 'Hungarian', 'Indonesian', + * 'Irish', 'Italian', 'Latvian', 'Norwegian', 'Persian', 'Portuguese', + * 'Romanian', 'Russian', 'Sorani', 'Spanish', 'Swedish', 'Thai', 'Turkish' */ @JsonProperty(value = "stopwordsList") private StopwordsList stopwordsList; @@ -78,12 +78,12 @@ public StopwordsTokenFilter setStopwords(List stopwords) { /** * Get the stopwordsList property: A predefined list of stopwords to use. * This property and the stopwords property cannot both be set. Default is - * English. 
Possible values include: 'arabic', 'armenian', 'basque', - * 'brazilian', 'bulgarian', 'catalan', 'czech', 'danish', 'dutch', - * 'english', 'finnish', 'french', 'galician', 'german', 'greek', 'hindi', - * 'hungarian', 'indonesian', 'irish', 'italian', 'latvian', 'norwegian', - * 'persian', 'portuguese', 'romanian', 'russian', 'sorani', 'spanish', - * 'swedish', 'thai', 'turkish'. + * English. Possible values include: 'Arabic', 'Armenian', 'Basque', + * 'Brazilian', 'Bulgarian', 'Catalan', 'Czech', 'Danish', 'Dutch', + * 'English', 'Finnish', 'French', 'Galician', 'German', 'Greek', 'Hindi', + * 'Hungarian', 'Indonesian', 'Irish', 'Italian', 'Latvian', 'Norwegian', + * 'Persian', 'Portuguese', 'Romanian', 'Russian', 'Sorani', 'Spanish', + * 'Swedish', 'Thai', 'Turkish'. * * @return the stopwordsList value. */ @@ -94,12 +94,12 @@ public StopwordsList getStopwordsList() { /** * Set the stopwordsList property: A predefined list of stopwords to use. * This property and the stopwords property cannot both be set. Default is - * English. Possible values include: 'arabic', 'armenian', 'basque', - * 'brazilian', 'bulgarian', 'catalan', 'czech', 'danish', 'dutch', - * 'english', 'finnish', 'french', 'galician', 'german', 'greek', 'hindi', - * 'hungarian', 'indonesian', 'irish', 'italian', 'latvian', 'norwegian', - * 'persian', 'portuguese', 'romanian', 'russian', 'sorani', 'spanish', - * 'swedish', 'thai', 'turkish'. + * English. Possible values include: 'Arabic', 'Armenian', 'Basque', + * 'Brazilian', 'Bulgarian', 'Catalan', 'Czech', 'Danish', 'Dutch', + * 'English', 'Finnish', 'French', 'Galician', 'German', 'Greek', 'Hindi', + * 'Hungarian', 'Indonesian', 'Irish', 'Italian', 'Latvian', 'Norwegian', + * 'Persian', 'Portuguese', 'Romanian', 'Russian', 'Sorani', 'Spanish', + * 'Swedish', 'Thai', 'Turkish'. * * @param stopwordsList the stopwordsList value to set. * @return the StopwordsTokenFilter object itself. 
diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SynonymMap.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SynonymMap.java index 3f5f82479ef7..673fb1a1fcba 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SynonymMap.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/SynonymMap.java @@ -47,7 +47,7 @@ public final class SynonymMap { * paid services created on or after January 1, 2019. */ @JsonProperty(value = "encryptionKey") - private EncryptionKey encryptionKey; + private SearchResourceEncryptionKey encryptionKey; /* * The ETag of the synonym map. @@ -119,7 +119,7 @@ public SynonymMap setSynonyms(String synonyms) { * * @return the encryptionKey value. */ - public EncryptionKey getEncryptionKey() { + public SearchResourceEncryptionKey getEncryptionKey() { return this.encryptionKey; } @@ -139,7 +139,7 @@ public EncryptionKey getEncryptionKey() { * @param encryptionKey the encryptionKey value to set. * @return the SynonymMap object itself. 
*/ - public SynonymMap setEncryptionKey(EncryptionKey encryptionKey) { + public SynonymMap setEncryptionKey(SearchResourceEncryptionKey encryptionKey) { this.encryptionKey = encryptionKey; return this; } diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/TextTranslationSkill.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/TextTranslationSkill.java index 6cb08b5de955..96ca4e5b8b28 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/TextTranslationSkill.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/TextTranslationSkill.java @@ -17,7 +17,7 @@ @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "@odata.type") @JsonTypeName("#Microsoft.Skills.Text.TranslationSkill") @Fluent -public final class TextTranslationSkill extends Skill { +public final class TextTranslationSkill extends SearchIndexerSkill { /* * The language code to translate documents into for documents that don't * specify the to language explicitly. Possible values include: 'af', 'ar', diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/TokenizerName.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/TokenizerName.java deleted file mode 100644 index d78739a8e065..000000000000 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/TokenizerName.java +++ /dev/null @@ -1,99 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. -// Changes may cause incorrect behavior and will be lost if the code is -// regenerated. 
- -package com.azure.search.documents.models; - -import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; -import java.util.Collection; - -/** - * Defines values for TokenizerName. - */ -public final class TokenizerName extends ExpandableStringEnum { - /** - * Static value classic for TokenizerName. - */ - public static final TokenizerName CLASSIC = fromString("classic"); - - /** - * Static value edgeNGram for TokenizerName. - */ - public static final TokenizerName EDGE_NGRAM = fromString("edgeNGram"); - - /** - * Static value keyword_v2 for TokenizerName. - */ - public static final TokenizerName KEYWORD = fromString("keyword_v2"); - - /** - * Static value letter for TokenizerName. - */ - public static final TokenizerName LETTER = fromString("letter"); - - /** - * Static value lowercase for TokenizerName. - */ - public static final TokenizerName LOWERCASE = fromString("lowercase"); - - /** - * Static value microsoft_language_tokenizer for TokenizerName. - */ - public static final TokenizerName MICROSOFT_LANGUAGE_TOKENIZER = fromString("microsoft_language_tokenizer"); - - /** - * Static value microsoft_language_stemming_tokenizer for TokenizerName. - */ - public static final TokenizerName MICROSOFT_LANGUAGE_STEMMING_TOKENIZER = fromString("microsoft_language_stemming_tokenizer"); - - /** - * Static value nGram for TokenizerName. - */ - public static final TokenizerName NGRAM = fromString("nGram"); - - /** - * Static value path_hierarchy_v2 for TokenizerName. - */ - public static final TokenizerName PATH_HIERARCHY = fromString("path_hierarchy_v2"); - - /** - * Static value pattern for TokenizerName. - */ - public static final TokenizerName PATTERN = fromString("pattern"); - - /** - * Static value standard_v2 for TokenizerName. - */ - public static final TokenizerName STANDARD = fromString("standard_v2"); - - /** - * Static value uax_url_email for TokenizerName. 
- */ - public static final TokenizerName UAX_URL_EMAIL = fromString("uax_url_email"); - - /** - * Static value whitespace for TokenizerName. - */ - public static final TokenizerName WHITESPACE = fromString("whitespace"); - - /** - * Creates or finds a TokenizerName from its string representation. - * - * @param name a name to look for. - * @return the corresponding TokenizerName. - */ - @JsonCreator - public static TokenizerName fromString(String name) { - return fromString(name, TokenizerName.class); - } - - /** - * @return known TokenizerName values. - */ - public static Collection values() { - return values(TokenizerName.class); - } -} diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/UaxUrlEmailTokenizer.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/UaxUrlEmailTokenizer.java index cfb52b64013e..7c12a902dadf 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/UaxUrlEmailTokenizer.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/UaxUrlEmailTokenizer.java @@ -18,7 +18,7 @@ @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "@odata.type") @JsonTypeName("#Microsoft.Azure.Search.UaxUrlEmailTokenizer") @Fluent -public final class UaxUrlEmailTokenizer extends Tokenizer { +public final class UaxUrlEmailTokenizer extends LexicalTokenizer { /* * The maximum token length. Default is 255. Tokens longer than the maximum * length are split. 
The maximum token length that can be used is 300 diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/WebApiSkill.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/WebApiSkill.java index a3b4af515bf2..f9494bc55e5a 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/WebApiSkill.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/models/WebApiSkill.java @@ -20,7 +20,7 @@ @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "@odata.type") @JsonTypeName("#Microsoft.Skills.Custom.WebApiSkill") @Fluent -public final class WebApiSkill extends Skill { +public final class WebApiSkill extends SearchIndexerSkill { /* * The url for the Web API. */ diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/util/SearchPagedFlux.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/util/SearchPagedFlux.java index 2b7ca1a790b5..0e12be8809dc 100644 --- a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/util/SearchPagedFlux.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/util/SearchPagedFlux.java @@ -5,7 +5,7 @@ import com.azure.core.http.rest.PagedFluxBase; import com.azure.core.util.paging.ContinuablePagedFlux; -import com.azure.search.documents.models.SearchRequest; +import com.azure.search.documents.implementation.models.SearchRequest; import com.azure.search.documents.models.SearchResult; import reactor.core.publisher.Mono; diff --git a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/util/SearchPagedIterable.java b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/util/SearchPagedIterable.java index cd94d49c0eb9..09e9104c022e 100644 --- 
a/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/util/SearchPagedIterable.java +++ b/sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/util/SearchPagedIterable.java @@ -5,7 +5,7 @@ import com.azure.core.http.rest.PagedIterableBase; import com.azure.core.util.paging.ContinuablePagedIterable; -import com.azure.search.documents.models.SearchRequest; +import com.azure.search.documents.implementation.models.SearchRequest; import com.azure.search.documents.models.SearchResult; /** diff --git a/sdk/search/azure-search-documents/src/samples/java/com/azure/search/documents/CreateIndexExample.java b/sdk/search/azure-search-documents/src/samples/java/com/azure/search/documents/CreateIndexExample.java index c5dce80dc46a..ce1643b5f371 100644 --- a/sdk/search/azure-search-documents/src/samples/java/com/azure/search/documents/CreateIndexExample.java +++ b/sdk/search/azure-search-documents/src/samples/java/com/azure/search/documents/CreateIndexExample.java @@ -5,9 +5,9 @@ import com.azure.core.credential.AzureKeyCredential; import com.azure.core.util.Configuration; -import com.azure.search.documents.models.DataType; -import com.azure.search.documents.models.Field; -import com.azure.search.documents.models.Index; +import com.azure.search.documents.models.SearchField; +import com.azure.search.documents.models.SearchFieldDataType; +import com.azure.search.documents.models.SearchIndex; import java.util.Arrays; @@ -28,16 +28,16 @@ public static void main(String[] args) { .credential(searchApiKeyCredential) .buildClient(); - Index newIndex = new Index() + SearchIndex newIndex = new SearchIndex() .setName(INDEX_NAME) .setFields( - Arrays.asList(new Field() + Arrays.asList(new SearchField() .setName("Name") - .setType(DataType.EDM_STRING) + .setType(SearchFieldDataType.STRING) .setKey(Boolean.TRUE), - new Field() + new SearchField() .setName("Cuisine") - .setType(DataType.EDM_STRING))); + .setType(SearchFieldDataType.STRING))); // 
Create index. client.createIndex(newIndex); diff --git a/sdk/search/azure-search-documents/src/samples/java/com/azure/search/documents/CreateIndexerExample.java b/sdk/search/azure-search-documents/src/samples/java/com/azure/search/documents/CreateIndexerExample.java index fd170f152447..cff9df7b85c1 100644 --- a/sdk/search/azure-search-documents/src/samples/java/com/azure/search/documents/CreateIndexerExample.java +++ b/sdk/search/azure-search-documents/src/samples/java/com/azure/search/documents/CreateIndexerExample.java @@ -7,10 +7,10 @@ import com.azure.core.http.rest.Response; import com.azure.core.util.Configuration; import com.azure.search.documents.models.FieldMapping; -import com.azure.search.documents.models.Indexer; import com.azure.search.documents.models.IndexingParameters; import com.azure.search.documents.models.IndexingSchedule; import com.azure.search.documents.models.RequestOptions; +import com.azure.search.documents.models.SearchIndexer; import java.time.Duration; import java.util.Collections; @@ -58,7 +58,7 @@ private static void createOrUpdateIndexer(SearchServiceAsyncClient searchService .setInterval(Duration.ofHours(12)); // Create the indexer - Indexer indexer = new Indexer() + SearchIndexer indexer = new SearchIndexer() .setName(INDEXER_NAME) .setTargetIndexName(INDEX_NAME) .setDataSourceName(DATA_SOURCE_NAME) @@ -67,14 +67,14 @@ private static void createOrUpdateIndexer(SearchServiceAsyncClient searchService .setSchedule(indexingSchedule); System.out.println(String.format("Creating Indexer: %s", indexer.getName())); - Response response = searchServiceClient.createOrUpdateIndexerWithResponse( + Response response = searchServiceClient.createOrUpdateIndexerWithResponse( indexer, false, new RequestOptions() ).block(); if (response != null) { System.out.println(String.format("Response code: %s", response.getStatusCode())); - Indexer createdIndexer = response.getValue(); + SearchIndexer createdIndexer = response.getValue(); 
System.out.println(String .format("Created indexer name: %s, ETag: %s", createdIndexer.getName(), createdIndexer.getETag())); } diff --git a/sdk/search/azure-search-documents/src/samples/java/com/azure/search/documents/CreateSkillsetExample.java b/sdk/search/azure-search-documents/src/samples/java/com/azure/search/documents/CreateSkillsetExample.java index f4bf41368f03..248093c15767 100644 --- a/sdk/search/azure-search-documents/src/samples/java/com/azure/search/documents/CreateSkillsetExample.java +++ b/sdk/search/azure-search-documents/src/samples/java/com/azure/search/documents/CreateSkillsetExample.java @@ -8,8 +8,8 @@ import com.azure.search.documents.models.InputFieldMappingEntry; import com.azure.search.documents.models.OcrSkill; import com.azure.search.documents.models.OutputFieldMappingEntry; -import com.azure.search.documents.models.Skill; -import com.azure.search.documents.models.Skillset; +import com.azure.search.documents.models.SearchIndexerSkill; +import com.azure.search.documents.models.SearchIndexerSkillset; import com.azure.search.documents.models.WebApiSkill; import java.util.Arrays; @@ -58,7 +58,7 @@ private static void createOcrSkillset(SearchServiceClient searchServiceClient) { .setTargetName("myLayoutText") ); - List skills = Collections.singletonList( + List skills = Collections.singletonList( new OcrSkill() .setShouldDetectOrientation(true) .setDefaultLanguageCode(null) @@ -69,14 +69,14 @@ private static void createOcrSkillset(SearchServiceClient searchServiceClient) { .setOutputs(outputs) ); - Skillset skillset = new Skillset() + SearchIndexerSkillset skillset = new SearchIndexerSkillset() .setName(OCR_SKILLSET_NAME) .setDescription("Extracts text (plain and structured) from image.") .setSkills(skills); System.out.println(String.format("Creating OCR skillset '%s'", skillset.getName())); - Skillset createdSkillset = searchServiceClient.createSkillset(skillset); + SearchIndexerSkillset createdSkillset = 
searchServiceClient.createSkillset(skillset); System.out.println("Created OCR skillset"); System.out.println(String.format("Name: %s", createdSkillset.getName())); @@ -101,7 +101,7 @@ private static void createCustomSkillset(SearchServiceClient searchServiceClient .setTargetName("myTextItems") ); - Skill webApiSkill = new WebApiSkill() + SearchIndexerSkill webApiSkill = new WebApiSkill() .setUri("https://example.com") .setHttpMethod("POST") // Supports only "POST" and "PUT" HTTP methods .setHttpHeaders(headers) @@ -110,14 +110,14 @@ private static void createCustomSkillset(SearchServiceClient searchServiceClient .setName("webapi-skill") .setDescription("A WebApiSkill that can be used to call a custom web api function"); - Skillset skillset = new Skillset() + SearchIndexerSkillset skillset = new SearchIndexerSkillset() .setName(CUSTOME_SKILLSET_NAME) .setDescription("Skillset for testing custom skillsets") .setSkills(Collections.singletonList(webApiSkill)); System.out.println(String.format("Creating custom skillset '%s'", skillset.getName())); - Skillset createdSkillset = searchServiceClient.createSkillset(skillset); + SearchIndexerSkillset createdSkillset = searchServiceClient.createSkillset(skillset); System.out.println("Created custom skillset"); System.out.println(String.format("Name: %s", createdSkillset.getName())); diff --git a/sdk/search/azure-search-documents/src/samples/java/com/azure/search/documents/DataSourceExample.java b/sdk/search/azure-search-documents/src/samples/java/com/azure/search/documents/DataSourceExample.java index b1692ded74f9..126b92a48bbc 100644 --- a/sdk/search/azure-search-documents/src/samples/java/com/azure/search/documents/DataSourceExample.java +++ b/sdk/search/azure-search-documents/src/samples/java/com/azure/search/documents/DataSourceExample.java @@ -7,11 +7,11 @@ import com.azure.core.http.rest.PagedIterable; import com.azure.core.util.Configuration; import com.azure.search.documents.models.DataChangeDetectionPolicy; -import 
com.azure.search.documents.models.DataContainer; -import com.azure.search.documents.models.DataSource; import com.azure.search.documents.models.DataSourceCredentials; -import com.azure.search.documents.models.DataSourceType; import com.azure.search.documents.models.HighWaterMarkChangeDetectionPolicy; +import com.azure.search.documents.models.SearchIndexerDataContainer; +import com.azure.search.documents.models.SearchIndexerDataSource; +import com.azure.search.documents.models.SearchIndexerDataSourceType; import java.util.Collection; import java.util.HashSet; @@ -54,10 +54,11 @@ public static void main(String[] args) { /* * Get all existing data sources; list should include the ones we just created. * */ - PagedIterable dataSources = client.listDataSources(); - for (DataSource dataSource : dataSources) { + PagedIterable dataSources = client.listDataSources(); + for (SearchIndexerDataSource dataSource : dataSources) { if (names.contains(dataSource.getName())) { - System.out.println(String.format("Found data source %s of type %s", dataSource.getName(), dataSource.getType().toString())); + System.out.println(String.format("Found data source %s of type %s", dataSource.getName(), + dataSource.getType().toString())); } } @@ -77,11 +78,10 @@ private static void deleteDataSource(SearchServiceClient client, String dataSour } } - private static DataSource createSampleDatasource(DataSourceType type, - String connectionString, - DataContainer container, - DataChangeDetectionPolicy dataChangeDetectionPolicy) { - return new DataSource() + private static SearchIndexerDataSource createSampleDatasource(SearchIndexerDataSourceType type, + String connectionString, SearchIndexerDataContainer container, + DataChangeDetectionPolicy dataChangeDetectionPolicy) { + return new SearchIndexerDataSource() .setName(generateDataSourceName()) .setType(type) .setCredentials(new DataSourceCredentials() @@ -92,12 +92,13 @@ private static DataSource createSampleDatasource(DataSourceType type, private 
static String createDataSource( SearchServiceClient client, - DataSourceType type, + SearchIndexerDataSourceType type, String connectionString, - DataContainer container, + SearchIndexerDataContainer container, DataChangeDetectionPolicy dataChangeDetectionPolicy) { - DataSource dataSource = createSampleDatasource(type, connectionString, container, dataChangeDetectionPolicy); + SearchIndexerDataSource dataSource = createSampleDatasource(type, connectionString, container, + dataChangeDetectionPolicy); try { client.createOrUpdateDataSource(dataSource); } catch (Exception ex) { @@ -109,9 +110,9 @@ private static String createDataSource( private static String createTableStorageDataSource(SearchServiceClient client) { return createDataSource( client, - DataSourceType.AZURE_TABLE, + SearchIndexerDataSourceType.AZURE_TABLE, TABLE_STORAGE_CONNECTION_STRING, - new DataContainer() + new SearchIndexerDataContainer() .setName("testtable") // Replace your table name here .setQuery("PartitionKey eq 'test'"), // Add your query here or remove this if you don't need one null @@ -121,9 +122,9 @@ private static String createTableStorageDataSource(SearchServiceClient client) { private static String createCosmosDataSource(SearchServiceClient client) { return createDataSource( client, - DataSourceType.COSMOS, + SearchIndexerDataSourceType.COSMOS_DB, COSMOS_CONNECTION_STRING, - new DataContainer() + new SearchIndexerDataContainer() .setName("testcollection") // Replace your collection name here .setQuery(null), // Add your query here or remove this if you don't need one new HighWaterMarkChangeDetectionPolicy().setHighWaterMarkColumnName("_ts") @@ -133,9 +134,9 @@ private static String createCosmosDataSource(SearchServiceClient client) { private static String createBlobDataSource(SearchServiceClient client) { return createDataSource( client, - DataSourceType.AZURE_BLOB, + SearchIndexerDataSourceType.AZURE_BLOB, BLOB_STORAGE_CONNECTION_STRING, - new DataContainer() + new 
SearchIndexerDataContainer() .setName("testcontainer") // Replace your container name here .setQuery("testfolder"), // Add your folder here or remove this if you want to index all folders within the container null @@ -145,9 +146,9 @@ private static String createBlobDataSource(SearchServiceClient client) { private static String createSqlDataSource(SearchServiceClient client) { return createDataSource( client, - DataSourceType.AZURE_SQL, + SearchIndexerDataSourceType.AZURE_SQL, SQL_CONNECTION_STRING, - new DataContainer() + new SearchIndexerDataContainer() .setName("testtable"), // Replace your table or view name here null); // Or new SqlIntegratedChangeTrackingPolicy() if your database has change tracking enabled } diff --git a/sdk/search/azure-search-documents/src/samples/java/com/azure/search/documents/IndexAndServiceStatisticsExample.java b/sdk/search/azure-search-documents/src/samples/java/com/azure/search/documents/IndexAndServiceStatisticsExample.java index ecf0ee138a47..0426810dab82 100644 --- a/sdk/search/azure-search-documents/src/samples/java/com/azure/search/documents/IndexAndServiceStatisticsExample.java +++ b/sdk/search/azure-search-documents/src/samples/java/com/azure/search/documents/IndexAndServiceStatisticsExample.java @@ -4,21 +4,21 @@ import com.azure.core.credential.AzureKeyCredential; import com.azure.core.util.Configuration; -import com.azure.search.documents.models.AnalyzerName; import com.azure.search.documents.models.CorsOptions; -import com.azure.search.documents.models.DataType; import com.azure.search.documents.models.DistanceScoringFunction; import com.azure.search.documents.models.DistanceScoringParameters; -import com.azure.search.documents.models.Field; import com.azure.search.documents.models.FreshnessScoringFunction; import com.azure.search.documents.models.FreshnessScoringParameters; import com.azure.search.documents.models.GetIndexStatisticsResult; -import com.azure.search.documents.models.Index; +import 
com.azure.search.documents.models.LexicalAnalyzerName; import com.azure.search.documents.models.MagnitudeScoringFunction; import com.azure.search.documents.models.MagnitudeScoringParameters; import com.azure.search.documents.models.ScoringFunctionAggregation; import com.azure.search.documents.models.ScoringFunctionInterpolation; import com.azure.search.documents.models.ScoringProfile; +import com.azure.search.documents.models.SearchField; +import com.azure.search.documents.models.SearchFieldDataType; +import com.azure.search.documents.models.SearchIndex; import com.azure.search.documents.models.ServiceStatistics; import com.azure.search.documents.models.Suggester; import com.azure.search.documents.models.TagScoringFunction; @@ -102,8 +102,8 @@ private static void getServiceStatistics(SearchServiceClient client) { } private static void getIndexStatistics(SearchServiceClient client) { - Index testIndex = createTestIndex(); - Index index = client.createOrUpdateIndex(testIndex); + SearchIndex testIndex = createTestIndex(); + SearchIndex index = client.createOrUpdateIndex(testIndex); GetIndexStatisticsResult result = client.getIndexStatistics(index.getName()); long documentCount = result.getDocumentCount(); long storageSize = result.getStorageSize(); @@ -121,139 +121,139 @@ private static void getIndexStatistics(SearchServiceClient client) { * * @return an Index */ - private static Index createTestIndex() { + private static SearchIndex createTestIndex() { Map weights = new HashMap<>(); weights.put("Description", 1.5); weights.put("Category", 2.0); - return new Index() + return new SearchIndex() .setName("hotels") .setFields(Arrays.asList( - new Field() + new SearchField() .setName("HotelId") - .setType(DataType.EDM_STRING) + .setType(SearchFieldDataType.STRING) .setKey(Boolean.TRUE) .setSearchable(Boolean.FALSE) .setFilterable(Boolean.TRUE) .setSortable(Boolean.TRUE) .setFacetable(Boolean.TRUE) .setHidden(Boolean.FALSE), - new Field() + new SearchField() 
.setName("HotelName") - .setType(DataType.EDM_STRING) + .setType(SearchFieldDataType.STRING) .setSearchable(Boolean.TRUE) .setFilterable(Boolean.TRUE) .setSortable(Boolean.TRUE) .setFacetable(Boolean.FALSE) .setHidden(Boolean.FALSE), - new Field() + new SearchField() .setName("Description") - .setType(DataType.EDM_STRING) + .setType(SearchFieldDataType.STRING) .setKey(Boolean.FALSE) .setSearchable(Boolean.TRUE) .setFilterable(Boolean.FALSE) .setSortable(Boolean.FALSE) .setFacetable(Boolean.FALSE) - .setAnalyzer(AnalyzerName.EN_LUCENE) + .setAnalyzer(LexicalAnalyzerName.EN_LUCENE) .setHidden(Boolean.FALSE), - new Field() + new SearchField() .setName("DescriptionFr") - .setType(DataType.EDM_STRING) + .setType(SearchFieldDataType.STRING) .setSearchable(Boolean.TRUE) .setFilterable(Boolean.FALSE) .setSortable(Boolean.FALSE) .setFacetable(Boolean.FALSE) - .setAnalyzer(AnalyzerName.FR_LUCENE) + .setAnalyzer(LexicalAnalyzerName.FR_LUCENE) .setHidden(Boolean.FALSE), - new Field() + new SearchField() .setName("Description_Custom") - .setType(DataType.EDM_STRING) + .setType(SearchFieldDataType.STRING) .setSearchable(Boolean.TRUE) .setFilterable(Boolean.FALSE) .setSortable(Boolean.FALSE) .setFacetable(Boolean.FALSE) - .setSearchAnalyzer(AnalyzerName.STOP) - .setIndexAnalyzer(AnalyzerName.STOP) + .setSearchAnalyzer(LexicalAnalyzerName.STOP) + .setIndexAnalyzer(LexicalAnalyzerName.STOP) .setHidden(Boolean.FALSE), - new Field() + new SearchField() .setName("Category") - .setType(DataType.EDM_STRING) + .setType(SearchFieldDataType.STRING) .setSearchable(Boolean.TRUE) .setFilterable(Boolean.TRUE) .setSortable(Boolean.TRUE) .setFacetable(Boolean.TRUE) .setHidden(Boolean.FALSE), - new Field() + new SearchField() .setName("Tags") - .setType(DataType.collection(DataType.EDM_STRING)) + .setType(SearchFieldDataType.collection(SearchFieldDataType.STRING)) .setSearchable(Boolean.TRUE) .setFilterable(Boolean.TRUE) .setSortable(Boolean.FALSE) .setFacetable(Boolean.TRUE) 
.setHidden(Boolean.FALSE), - new Field() + new SearchField() .setName("ParkingIncluded") - .setType(DataType.EDM_BOOLEAN) + .setType(SearchFieldDataType.BOOLEAN) .setFilterable(Boolean.TRUE) .setSortable(Boolean.TRUE) .setFacetable(Boolean.TRUE) .setHidden(Boolean.FALSE), - new Field() + new SearchField() .setName("SmokingAllowed") - .setType(DataType.EDM_BOOLEAN) + .setType(SearchFieldDataType.BOOLEAN) .setFilterable(Boolean.TRUE) .setSortable(Boolean.TRUE) .setFacetable(Boolean.TRUE) .setHidden(Boolean.FALSE), - new Field() + new SearchField() .setName("LastRenovationDate") - .setType(DataType.EDM_DATE_TIME_OFFSET) + .setType(SearchFieldDataType.DATE_TIME_OFFSET) .setFilterable(Boolean.TRUE) .setSortable(Boolean.TRUE) .setFacetable(Boolean.TRUE) .setHidden(Boolean.FALSE), - new Field() + new SearchField() .setName("Rating") - .setType(DataType.EDM_INT32) + .setType(SearchFieldDataType.INT32) .setFilterable(Boolean.TRUE) .setSortable(Boolean.TRUE) .setFacetable(Boolean.TRUE) .setHidden(Boolean.FALSE), - new Field() + new SearchField() .setName("Address") - .setType(DataType.EDM_COMPLEX_TYPE) + .setType(SearchFieldDataType.COMPLEX) .setFields(Arrays.asList( - new Field() + new SearchField() .setName("StreetAddress") - .setType(DataType.EDM_STRING) + .setType(SearchFieldDataType.STRING) .setSearchable(Boolean.TRUE) .setHidden(Boolean.FALSE), - new Field() + new SearchField() .setName("City") - .setType(DataType.EDM_STRING) + .setType(SearchFieldDataType.STRING) .setSearchable(Boolean.TRUE) .setFilterable(Boolean.TRUE) .setSortable(Boolean.TRUE) .setFacetable(Boolean.TRUE) .setHidden(Boolean.FALSE), - new Field() + new SearchField() .setName("StateProvince") - .setType(DataType.EDM_STRING) + .setType(SearchFieldDataType.STRING) .setSearchable(Boolean.TRUE) .setFilterable(Boolean.TRUE) .setSortable(Boolean.TRUE) .setFacetable(Boolean.TRUE) .setHidden(Boolean.FALSE), - new Field() + new SearchField() .setName("Country") - .setType(DataType.EDM_STRING) + 
.setType(SearchFieldDataType.STRING) .setSearchable(Boolean.TRUE) .setFilterable(Boolean.TRUE) .setSortable(Boolean.TRUE) .setFacetable(Boolean.TRUE) .setHidden(Boolean.FALSE), - new Field() + new SearchField() .setName("PostalCode") - .setType(DataType.EDM_STRING) + .setType(SearchFieldDataType.STRING) .setSearchable(Boolean.TRUE) .setFilterable(Boolean.TRUE) .setSortable(Boolean.TRUE) @@ -261,66 +261,66 @@ private static Index createTestIndex() { .setHidden(Boolean.FALSE) ) ), - new Field() + new SearchField() .setName("Location") - .setType(DataType.EDM_GEOGRAPHY_POINT) + .setType(SearchFieldDataType.GEOGRAPHY_POINT) .setFilterable(Boolean.TRUE) .setSortable(Boolean.TRUE) .setFacetable(Boolean.FALSE) .setHidden(Boolean.FALSE), - new Field() + new SearchField() .setName("Rooms") - .setType(DataType.collection(DataType.EDM_COMPLEX_TYPE)) + .setType(SearchFieldDataType.collection(SearchFieldDataType.COMPLEX)) .setFields(Arrays.asList( - new Field() + new SearchField() .setName("Description") - .setType(DataType.EDM_STRING) + .setType(SearchFieldDataType.STRING) .setSearchable(Boolean.TRUE) .setHidden(Boolean.FALSE) - .setAnalyzer(AnalyzerName.EN_LUCENE), - new Field() + .setAnalyzer(LexicalAnalyzerName.EN_LUCENE), + new SearchField() .setName("DescriptionFr") - .setType(DataType.EDM_STRING) + .setType(SearchFieldDataType.STRING) .setSearchable(Boolean.TRUE) .setHidden(Boolean.FALSE) - .setAnalyzer(AnalyzerName.FR_LUCENE), - new Field() + .setAnalyzer(LexicalAnalyzerName.FR_LUCENE), + new SearchField() .setName("Type") - .setType(DataType.EDM_STRING) + .setType(SearchFieldDataType.STRING) .setSearchable(Boolean.TRUE) .setFilterable(Boolean.TRUE) .setFacetable(Boolean.TRUE) .setHidden(Boolean.FALSE), - new Field() + new SearchField() .setName("BaseRate") - .setType(DataType.EDM_DOUBLE) + .setType(SearchFieldDataType.DOUBLE) .setKey(Boolean.FALSE) .setSearchable(Boolean.FALSE) .setFilterable(Boolean.TRUE) .setFacetable(Boolean.TRUE) .setHidden(Boolean.FALSE), - new 
Field() + new SearchField() .setName("BedOptions") - .setType(DataType.EDM_STRING) + .setType(SearchFieldDataType.STRING) .setSearchable(Boolean.TRUE) .setFilterable(Boolean.TRUE) .setFacetable(Boolean.TRUE) .setHidden(Boolean.FALSE), - new Field() + new SearchField() .setName("SleepsCount") - .setType(DataType.EDM_INT32) + .setType(SearchFieldDataType.INT32) .setFilterable(Boolean.TRUE) .setFacetable(Boolean.TRUE) .setHidden(Boolean.FALSE), - new Field() + new SearchField() .setName("SmokingAllowed") - .setType(DataType.EDM_BOOLEAN) + .setType(SearchFieldDataType.BOOLEAN) .setFilterable(Boolean.TRUE) .setFacetable(Boolean.TRUE) .setHidden(Boolean.FALSE), - new Field() + new SearchField() .setName("Tags") - .setType(DataType.collection(DataType.EDM_STRING)) + .setType(SearchFieldDataType.collection(SearchFieldDataType.STRING)) .setSearchable(Boolean.TRUE) .setFilterable(Boolean.TRUE) .setSortable(Boolean.FALSE) @@ -328,16 +328,16 @@ private static Index createTestIndex() { .setHidden(Boolean.FALSE) ) ), - new Field() + new SearchField() .setName("TotalGuests") - .setType(DataType.EDM_INT64) + .setType(SearchFieldDataType.INT64) .setFilterable(Boolean.TRUE) .setSortable(Boolean.TRUE) .setFacetable(Boolean.TRUE) .setHidden(Boolean.TRUE), - new Field() + new SearchField() .setName("ProfitMargin") - .setType(DataType.EDM_DOUBLE) + .setType(SearchFieldDataType.DOUBLE) ) ) .setScoringProfiles(Arrays.asList( diff --git a/sdk/search/azure-search-documents/src/samples/java/com/azure/search/documents/IndexContentManagementExample.java b/sdk/search/azure-search-documents/src/samples/java/com/azure/search/documents/IndexContentManagementExample.java index 0fcf63c66c15..823c6edf42c4 100644 --- a/sdk/search/azure-search-documents/src/samples/java/com/azure/search/documents/IndexContentManagementExample.java +++ b/sdk/search/azure-search-documents/src/samples/java/com/azure/search/documents/IndexContentManagementExample.java @@ -5,8 +5,8 @@ import 
com.azure.core.credential.AzureKeyCredential; import com.azure.core.util.Configuration; -import com.azure.search.documents.models.IndexDocumentsBatch; import com.azure.search.documents.models.Hotel; +import com.azure.search.documents.models.IndexDocumentsBatch; import com.azure.search.documents.models.IndexDocumentsResult; import java.util.ArrayList; diff --git a/sdk/search/azure-search-documents/src/samples/java/com/azure/search/documents/LifecycleSetupExample.java b/sdk/search/azure-search-documents/src/samples/java/com/azure/search/documents/LifecycleSetupExample.java index fbe96b1d7130..0f2d00f94602 100644 --- a/sdk/search/azure-search-documents/src/samples/java/com/azure/search/documents/LifecycleSetupExample.java +++ b/sdk/search/azure-search-documents/src/samples/java/com/azure/search/documents/LifecycleSetupExample.java @@ -5,22 +5,22 @@ import com.azure.core.credential.AzureKeyCredential; import com.azure.core.util.Configuration; -import com.azure.search.documents.models.AnalyzerName; -import com.azure.search.documents.models.DataContainer; -import com.azure.search.documents.models.DataSource; import com.azure.search.documents.models.DataSourceCredentials; -import com.azure.search.documents.models.DataSourceType; -import com.azure.search.documents.models.DataType; import com.azure.search.documents.models.EntityRecognitionSkill; -import com.azure.search.documents.models.Field; import com.azure.search.documents.models.HighWaterMarkChangeDetectionPolicy; -import com.azure.search.documents.models.Index; -import com.azure.search.documents.models.Indexer; import com.azure.search.documents.models.IndexingSchedule; import com.azure.search.documents.models.InputFieldMappingEntry; +import com.azure.search.documents.models.LexicalAnalyzerName; import com.azure.search.documents.models.OutputFieldMappingEntry; -import com.azure.search.documents.models.Skill; -import com.azure.search.documents.models.Skillset; +import com.azure.search.documents.models.SearchField; 
+import com.azure.search.documents.models.SearchFieldDataType; +import com.azure.search.documents.models.SearchIndex; +import com.azure.search.documents.models.SearchIndexer; +import com.azure.search.documents.models.SearchIndexerDataContainer; +import com.azure.search.documents.models.SearchIndexerDataSource; +import com.azure.search.documents.models.SearchIndexerDataSourceType; +import com.azure.search.documents.models.SearchIndexerSkill; +import com.azure.search.documents.models.SearchIndexerSkillset; import com.azure.search.documents.models.Suggester; import java.time.Duration; @@ -55,19 +55,19 @@ public class LifecycleSetupExample { public static void main(String[] args) { SearchServiceClient client = createServiceClient(); // Create a data source for a Cosmos DB database - DataSource dataSource = createCosmosDataSource(client); + SearchIndexerDataSource dataSource = createCosmosDataSource(client); System.out.println("Created DataSource " + dataSource.getName()); // Create an index - Index index = createIndex(client); + SearchIndex index = createIndex(client); System.out.println("Created Index " + index.getName()); // Create a skillset for Cognitive Services - Skillset skillset = createSkillset(client); + SearchIndexerSkillset skillset = createSkillset(client); System.out.println("Created Skillset " + skillset.getName()); // Create an indexer that uses the skillset and data source and loads the index - Indexer indexer = createIndexer(client, dataSource, skillset, index); + SearchIndexer indexer = createIndexer(client, dataSource, skillset, index); System.out.println("Created Indexer " + indexer.getName()); // Update indexer schedule @@ -86,7 +86,7 @@ private static SearchServiceClient createServiceClient() { .buildClient(); } - private static void updateIndexerSchedule(SearchServiceClient client, Indexer indexer) { + private static void updateIndexerSchedule(SearchServiceClient client, SearchIndexer indexer) { IndexingSchedule indexingSchedule = new 
IndexingSchedule() .setInterval(Duration.ofMinutes(10)); indexer.setSchedule(indexingSchedule); @@ -94,8 +94,9 @@ private static void updateIndexerSchedule(SearchServiceClient client, Indexer in client.createOrUpdateIndexer(indexer); } - private static Indexer createIndexer(SearchServiceClient client, DataSource dataSource, Skillset skillset, Index index) { - Indexer indexer = new Indexer() + private static SearchIndexer createIndexer(SearchServiceClient client, SearchIndexerDataSource dataSource, + SearchIndexerSkillset skillset, SearchIndex index) { + SearchIndexer indexer = new SearchIndexer() .setName(INDEXER_NAME) .setDataSourceName(dataSource.getName()) .setSkillsetName(skillset.getName()) @@ -104,7 +105,7 @@ private static Indexer createIndexer(SearchServiceClient client, DataSource data return client.createOrUpdateIndexer(indexer); } - private static Skillset createSkillset(SearchServiceClient client) { + private static SearchIndexerSkillset createSkillset(SearchServiceClient client) { List inputs = Collections.singletonList( new InputFieldMappingEntry() .setName("text") @@ -118,14 +119,14 @@ private static Skillset createSkillset(SearchServiceClient client) { ); - Skill skill = new EntityRecognitionSkill() + SearchIndexerSkill skill = new EntityRecognitionSkill() .setName("#1") .setDescription("Entity Recognition Skill") .setContext("/document/Description") .setInputs(inputs) .setOutputs(outputs); - Skillset skillset = new Skillset() + SearchIndexerSkillset skillset = new SearchIndexerSkillset() .setName(SKILLSET_NAME) .setDescription("Skillset for testing default configuration") .setSkills(Collections.singletonList(skill)); @@ -134,48 +135,48 @@ private static Skillset createSkillset(SearchServiceClient client) { return client.createOrUpdateSkillset(skillset); } - private static Index createIndex(SearchServiceClient client) { + private static SearchIndex createIndex(SearchServiceClient client) { // Index definition - Index index = new Index() + 
SearchIndex index = new SearchIndex() .setName(INDEX_NAME) .setFields( - Arrays.asList(new Field() + Arrays.asList(new SearchField() .setName("HotelId") - .setType(DataType.EDM_STRING) + .setType(SearchFieldDataType.STRING) .setKey(Boolean.TRUE) .setFacetable(Boolean.TRUE) .setFilterable(Boolean.TRUE) .setHidden(Boolean.FALSE) .setSearchable(Boolean.FALSE) .setSortable(Boolean.FALSE), - new Field() + new SearchField() .setName("HotelName") - .setType(DataType.EDM_STRING) + .setType(SearchFieldDataType.STRING) .setFacetable(Boolean.FALSE) .setFilterable(Boolean.FALSE) .setHidden(Boolean.FALSE) .setKey(Boolean.FALSE) .setSearchable(Boolean.TRUE) .setSortable(Boolean.FALSE) - .setAnalyzer(AnalyzerName.EN_MICROSOFT), - new Field() + .setAnalyzer(LexicalAnalyzerName.EN_MICROSOFT), + new SearchField() .setName("Description") - .setType(DataType.EDM_STRING) + .setType(SearchFieldDataType.STRING) .setSearchable(Boolean.TRUE) .setFilterable(Boolean.FALSE) .setHidden(Boolean.FALSE) .setSortable(Boolean.FALSE) .setFacetable(Boolean.FALSE) - .setAnalyzer(AnalyzerName.EN_MICROSOFT), - new Field() + .setAnalyzer(LexicalAnalyzerName.EN_MICROSOFT), + new SearchField() .setName("Tags") - .setType(DataType.collection(DataType.EDM_STRING)) + .setType(SearchFieldDataType.collection(SearchFieldDataType.STRING)) .setFacetable(Boolean.TRUE) .setFilterable(Boolean.TRUE) .setHidden(Boolean.FALSE) .setSearchable(Boolean.TRUE) - .setAnalyzer(AnalyzerName.EN_MICROSOFT))); + .setAnalyzer(LexicalAnalyzerName.EN_MICROSOFT))); // Set Suggester index.setSuggesters(Collections.singletonList(new Suggester() @@ -185,14 +186,15 @@ private static Index createIndex(SearchServiceClient client) { return client.createOrUpdateIndex(index); } - private static DataSource createCosmosDataSource(SearchServiceClient client) { + private static SearchIndexerDataSource createCosmosDataSource(SearchServiceClient client) { - DataContainer dataContainer = new DataContainer().setName(COSMOS_COLLECTION_NAME); - 
HighWaterMarkChangeDetectionPolicy highWaterMarkChangeDetectionPolicy = new HighWaterMarkChangeDetectionPolicy().setHighWaterMarkColumnName("_ts"); + SearchIndexerDataContainer dataContainer = new SearchIndexerDataContainer().setName(COSMOS_COLLECTION_NAME); + HighWaterMarkChangeDetectionPolicy highWaterMarkChangeDetectionPolicy = + new HighWaterMarkChangeDetectionPolicy().setHighWaterMarkColumnName("_ts"); - DataSource dataSource = new DataSource() + SearchIndexerDataSource dataSource = new SearchIndexerDataSource() .setName(DATASOURCE_NAME) - .setType(DataSourceType.COSMOS) + .setType(SearchIndexerDataSourceType.COSMOS_DB) .setCredentials(new DataSourceCredentials() .setConnectionString(COSMOS_CONNECTION_STRING)) .setContainer(dataContainer) diff --git a/sdk/search/azure-search-documents/src/samples/java/com/azure/search/documents/ListIndexersExample.java b/sdk/search/azure-search-documents/src/samples/java/com/azure/search/documents/ListIndexersExample.java index 8c4cf4daab24..65baeeb467af 100644 --- a/sdk/search/azure-search-documents/src/samples/java/com/azure/search/documents/ListIndexersExample.java +++ b/sdk/search/azure-search-documents/src/samples/java/com/azure/search/documents/ListIndexersExample.java @@ -6,8 +6,8 @@ import com.azure.core.credential.AzureKeyCredential; import com.azure.core.http.rest.PagedResponse; import com.azure.core.util.Configuration; -import com.azure.search.documents.models.Indexer; import com.azure.search.documents.models.RequestOptions; +import com.azure.search.documents.models.SearchIndexer; import java.util.List; @@ -35,15 +35,15 @@ public static void main(String[] args) { } private static void listIndexers(SearchServiceAsyncClient searchServiceClient) { - PagedResponse response = searchServiceClient.listIndexers("*", + PagedResponse response = searchServiceClient.listIndexers("*", new RequestOptions()).byPage().blockFirst(); if (response != null) { System.out.println(String.format("Response code: %s", 
response.getStatusCode())); - List indexers = response.getValue(); + List indexers = response.getValue(); System.out.println("Found the following indexers:"); - for (Indexer indexer : indexers) { + for (SearchIndexer indexer : indexers) { System.out.println(String.format("Indexer name: %s, ETag: %s", indexer.getName(), indexer.getETag())); } } diff --git a/sdk/search/azure-search-documents/src/samples/java/com/azure/search/documents/ReadmeSamples.java b/sdk/search/azure-search-documents/src/samples/java/com/azure/search/documents/ReadmeSamples.java index 4ecd3e795e08..981089725cd1 100644 --- a/sdk/search/azure-search-documents/src/samples/java/com/azure/search/documents/ReadmeSamples.java +++ b/sdk/search/azure-search-documents/src/samples/java/com/azure/search/documents/ReadmeSamples.java @@ -8,11 +8,11 @@ import com.azure.core.http.HttpResponse; import com.azure.core.http.policy.AddHeadersFromContextPolicy; import com.azure.core.util.Context; -import com.azure.search.documents.models.DataType; -import com.azure.search.documents.models.Field; import com.azure.search.documents.models.Hotel; -import com.azure.search.documents.models.Index; import com.azure.search.documents.models.RequestOptions; +import com.azure.search.documents.models.SearchField; +import com.azure.search.documents.models.SearchFieldDataType; +import com.azure.search.documents.models.SearchIndex; import com.azure.search.documents.models.SearchOptions; import com.azure.search.documents.models.SearchResult; @@ -72,7 +72,7 @@ public void customHeaders() { headers.put("my-header2", "my-header2-value"); headers.put("my-header3", "my-header3-value"); // Call API by passing headers in Context. 
- Index index = new Index().setName(indexName); + SearchIndex index = new SearchIndex().setName(indexName); searchServiceClient.createIndexWithResponse( index, new RequestOptions(), @@ -93,16 +93,16 @@ public void handleErrorsWithSyncClient() { } public void createIndexWithSyncClient() { - Index newIndex = new Index() + SearchIndex newIndex = new SearchIndex() .setName("index_name") .setFields( - Arrays.asList(new Field() + Arrays.asList(new SearchField() .setName("Name") - .setType(DataType.EDM_STRING) + .setType(SearchFieldDataType.STRING) .setKey(Boolean.TRUE), - new Field() + new SearchField() .setName("Cuisine") - .setType(DataType.EDM_STRING))); + .setType(SearchFieldDataType.STRING))); // Create index. searchServiceClient.createIndex(newIndex); } diff --git a/sdk/search/azure-search-documents/src/samples/java/com/azure/search/documents/RefineSearchCapabilitiesExample.java b/sdk/search/azure-search-documents/src/samples/java/com/azure/search/documents/RefineSearchCapabilitiesExample.java index 99d7fc6d0dac..f557f6c81357 100644 --- a/sdk/search/azure-search-documents/src/samples/java/com/azure/search/documents/RefineSearchCapabilitiesExample.java +++ b/sdk/search/azure-search-documents/src/samples/java/com/azure/search/documents/RefineSearchCapabilitiesExample.java @@ -5,18 +5,18 @@ import com.azure.core.credential.AzureKeyCredential; import com.azure.core.util.Configuration; -import com.azure.search.documents.models.Field; import com.azure.search.documents.models.Hotel; -import com.azure.search.documents.models.Index; import com.azure.search.documents.models.IndexDocumentsResult; -import com.azure.search.documents.models.Indexer; import com.azure.search.documents.models.InputFieldMappingEntry; import com.azure.search.documents.models.OutputFieldMappingEntry; +import com.azure.search.documents.models.SearchField; +import com.azure.search.documents.models.SearchIndex; +import com.azure.search.documents.models.SearchIndexer; +import 
com.azure.search.documents.models.SearchIndexerSkill; +import com.azure.search.documents.models.SearchIndexerSkillset; import com.azure.search.documents.models.ServiceCounters; import com.azure.search.documents.models.ServiceLimits; import com.azure.search.documents.models.ServiceStatistics; -import com.azure.search.documents.models.Skill; -import com.azure.search.documents.models.Skillset; import com.azure.search.documents.models.SynonymMap; import com.azure.search.documents.models.WebApiSkill; @@ -76,7 +76,7 @@ private static void addCustomWebSkillset(SearchServiceClient client) { HashMap headers = new HashMap<>(); headers.put("Ocp-Apim-Subscription-Key", "Bing entity search API key"); - Skill webApiSkill = new WebApiSkill() + SearchIndexerSkill webApiSkill = new WebApiSkill() .setUri("https://api.cognitive.microsoft.com/bing/v7.0/entities/") .setHttpMethod("POST") // Supports only "POST" and "PUT" HTTP methods .setHttpHeaders(headers) @@ -85,7 +85,7 @@ private static void addCustomWebSkillset(SearchServiceClient client) { .setName("webapi-skill") .setDescription("A WebApi skill that can be used as a custom skillset"); - Skillset skillset = new Skillset() + SearchIndexerSkillset skillset = new SearchIndexerSkillset() .setName(skillsetName) .setDescription("Skillset for testing custom skillsets") .setSkills(Collections.singletonList(webApiSkill)); @@ -93,7 +93,7 @@ private static void addCustomWebSkillset(SearchServiceClient client) { client.createOrUpdateSkillset(skillset); System.out.printf("Created Skillset %s%n", skillsetName); - Indexer indexer = client.getIndexer(INDEXER_NAME).setSkillsetName(skillsetName); + SearchIndexer indexer = client.getIndexer(INDEXER_NAME).setSkillsetName(skillsetName); client.createOrUpdateIndexer(indexer); System.out.printf("Updated Indexer %s with Skillset %s%n", INDEXER_NAME, skillsetName); } @@ -129,8 +129,8 @@ private static void addSynonymMapToIndex(SearchServiceClient client) { client.createOrUpdateSynonymMap(synonymMap); - 
Index index = client.getIndex(INDEX_NAME); - List fields = index.getFields(); + SearchIndex index = client.getIndex(INDEX_NAME); + List fields = index.getFields(); fields.get(1).setSynonymMaps(Collections.singletonList(synonymMapName)); index.setFields(fields); diff --git a/sdk/search/azure-search-documents/src/samples/java/com/azure/search/documents/RunningSearchSolutionExample.java b/sdk/search/azure-search-documents/src/samples/java/com/azure/search/documents/RunningSearchSolutionExample.java index 8e62aa407c5a..9b20e24ae27e 100644 --- a/sdk/search/azure-search-documents/src/samples/java/com/azure/search/documents/RunningSearchSolutionExample.java +++ b/sdk/search/azure-search-documents/src/samples/java/com/azure/search/documents/RunningSearchSolutionExample.java @@ -11,8 +11,8 @@ import com.azure.search.documents.models.AutocompleteMode; import com.azure.search.documents.models.AutocompleteOptions; import com.azure.search.documents.models.GetIndexStatisticsResult; -import com.azure.search.documents.models.IndexerExecutionInfo; import com.azure.search.documents.models.RequestOptions; +import com.azure.search.documents.models.SearchIndexerStatus; import com.azure.search.documents.models.SearchOptions; import com.azure.search.documents.models.SuggestOptions; import com.azure.search.documents.models.SuggestResult; @@ -52,7 +52,7 @@ public static void main(String[] args) { serviceClient.runIndexer(INDEXER_NAME); // get indexer status - IndexerExecutionInfo indexerStatus = serviceClient.getIndexerStatus(INDEXER_NAME); + SearchIndexerStatus indexerStatus = serviceClient.getIndexerStatus(INDEXER_NAME); System.out.printf("Indexer %s status = %s%n", INDEXER_NAME, indexerStatus.getStatus()); // run a search query diff --git a/sdk/search/azure-search-documents/src/samples/java/com/azure/search/documents/SynonymMapsCreateExample.java b/sdk/search/azure-search-documents/src/samples/java/com/azure/search/documents/SynonymMapsCreateExample.java index 75e2568ed885..91d5b11d8668 
100644 --- a/sdk/search/azure-search-documents/src/samples/java/com/azure/search/documents/SynonymMapsCreateExample.java +++ b/sdk/search/azure-search-documents/src/samples/java/com/azure/search/documents/SynonymMapsCreateExample.java @@ -5,9 +5,9 @@ import com.azure.core.credential.AzureKeyCredential; import com.azure.core.util.Configuration; -import com.azure.search.documents.models.DataType; -import com.azure.search.documents.models.Field; -import com.azure.search.documents.models.Index; +import com.azure.search.documents.models.SearchField; +import com.azure.search.documents.models.SearchFieldDataType; +import com.azure.search.documents.models.SearchIndex; import com.azure.search.documents.models.SynonymMap; import java.util.Arrays; @@ -54,16 +54,16 @@ private static void createSynonymMap(SearchServiceClient serviceClient, String s } private static void assignSynonymMapToIndex(String synonymMapName) { - Index index = new Index() + SearchIndex index = new SearchIndex() .setName("hotels") .setFields(Arrays.asList( - new Field() + new SearchField() .setName("HotelId") - .setType(DataType.EDM_STRING) + .setType(SearchFieldDataType.STRING) .setKey(true), - new Field() + new SearchField() .setName("HotelName") - .setType(DataType.EDM_STRING) + .setType(SearchFieldDataType.STRING) .setSynonymMaps(Collections.singletonList(synonymMapName)) )); } diff --git a/sdk/search/azure-search-documents/src/test/java/com/azure/search/documents/CustomAnalyzerSyncTests.java b/sdk/search/azure-search-documents/src/test/java/com/azure/search/documents/CustomAnalyzerSyncTests.java index b238aa58a82d..317ecdcec8bf 100644 --- a/sdk/search/azure-search-documents/src/test/java/com/azure/search/documents/CustomAnalyzerSyncTests.java +++ b/sdk/search/azure-search-documents/src/test/java/com/azure/search/documents/CustomAnalyzerSyncTests.java @@ -5,8 +5,7 @@ import com.azure.core.http.rest.PagedIterable; import com.azure.core.util.Context; import 
com.azure.search.documents.models.AnalyzeRequest; -import com.azure.search.documents.models.Analyzer; -import com.azure.search.documents.models.AnalyzerName; +import com.azure.search.documents.models.AnalyzedTokenInfo; import com.azure.search.documents.models.AsciiFoldingTokenFilter; import com.azure.search.documents.models.CharFilter; import com.azure.search.documents.models.CharFilterName; @@ -15,19 +14,22 @@ import com.azure.search.documents.models.ClassicTokenizer; import com.azure.search.documents.models.CommonGramTokenFilter; import com.azure.search.documents.models.CustomAnalyzer; -import com.azure.search.documents.models.DataType; import com.azure.search.documents.models.DictionaryDecompounderTokenFilter; import com.azure.search.documents.models.EdgeNGramTokenFilterSide; import com.azure.search.documents.models.EdgeNGramTokenFilterV2; import com.azure.search.documents.models.EdgeNGramTokenizer; import com.azure.search.documents.models.ElisionTokenFilter; -import com.azure.search.documents.models.Field; -import com.azure.search.documents.models.Index; import com.azure.search.documents.models.KeepTokenFilter; import com.azure.search.documents.models.KeywordMarkerTokenFilter; import com.azure.search.documents.models.KeywordTokenizerV2; import com.azure.search.documents.models.LengthTokenFilter; +import com.azure.search.documents.models.LexicalAnalyzer; +import com.azure.search.documents.models.LexicalAnalyzerName; +import com.azure.search.documents.models.LexicalTokenizer; +import com.azure.search.documents.models.LexicalTokenizerName; import com.azure.search.documents.models.LimitTokenFilter; +import com.azure.search.documents.models.LuceneStandardAnalyzer; +import com.azure.search.documents.models.LuceneStandardTokenizerV2; import com.azure.search.documents.models.MappingCharFilter; import com.azure.search.documents.models.MicrosoftLanguageStemmingTokenizer; import com.azure.search.documents.models.MicrosoftLanguageTokenizer; @@ -44,13 +46,14 @@ import 
com.azure.search.documents.models.PhoneticEncoder; import com.azure.search.documents.models.PhoneticTokenFilter; import com.azure.search.documents.models.RegexFlags; +import com.azure.search.documents.models.SearchField; +import com.azure.search.documents.models.SearchFieldDataType; +import com.azure.search.documents.models.SearchIndex; import com.azure.search.documents.models.SearchOptions; import com.azure.search.documents.models.SearchResult; import com.azure.search.documents.models.ShingleTokenFilter; import com.azure.search.documents.models.SnowballTokenFilter; import com.azure.search.documents.models.SnowballTokenFilterLanguage; -import com.azure.search.documents.models.StandardAnalyzer; -import com.azure.search.documents.models.StandardTokenizerV2; import com.azure.search.documents.models.StemmerOverrideTokenFilter; import com.azure.search.documents.models.StemmerTokenFilter; import com.azure.search.documents.models.StemmerTokenFilterLanguage; @@ -61,9 +64,6 @@ import com.azure.search.documents.models.TokenCharacterKind; import com.azure.search.documents.models.TokenFilter; import com.azure.search.documents.models.TokenFilterName; -import com.azure.search.documents.models.TokenInfo; -import com.azure.search.documents.models.Tokenizer; -import com.azure.search.documents.models.TokenizerName; import com.azure.search.documents.models.TruncateTokenFilter; import com.azure.search.documents.models.UaxUrlEmailTokenizer; import com.azure.search.documents.models.UniqueTokenFilter; @@ -113,27 +113,27 @@ protected void afterTest() { @Test public void canSearchWithCustomAnalyzer() { - final AnalyzerName customAnalyzerName = AnalyzerName.fromString("my_email_analyzer"); + final LexicalAnalyzerName customLexicalAnalyzerName = LexicalAnalyzerName.fromString("my_email_analyzer"); final CharFilterName customCharFilterName = CharFilterName.fromString("my_email_filter"); - Index index = new Index() + SearchIndex index = new SearchIndex() .setName(randomIndexName("testindex")) 
.setFields(Arrays.asList( - new Field() + new SearchField() .setName("id") - .setType(DataType.EDM_STRING) + .setType(SearchFieldDataType.STRING) .setKey(true), - new Field() + new SearchField() .setName("message") - .setType(DataType.EDM_STRING) - .setAnalyzer(customAnalyzerName) + .setType(SearchFieldDataType.STRING) + .setAnalyzer(customLexicalAnalyzerName) .setSearchable(true) )) .setAnalyzers(Collections.singletonList( new CustomAnalyzer() - .setTokenizer(TokenizerName.STANDARD) + .setTokenizer(LexicalTokenizerName.STANDARD) .setCharFilters(Collections.singletonList(customCharFilterName)) - .setName(customAnalyzerName.toString()) + .setName(customLexicalAnalyzerName.toString()) )) .setCharFilters(Collections.singletonList( new PatternReplaceCharFilter() @@ -168,27 +168,27 @@ public void canSearchWithCustomAnalyzer() { @Test public void canUseAllAnalyzerNamesInIndexDefinition() { - Index index = prepareIndexWithAllAnalyzerNames(); + SearchIndex index = prepareIndexWithAllLexicalAnalyzerNames(); indexesToCleanup.add(index.getName()); - Index res = searchServiceClient.createIndex(index); + SearchIndex res = searchServiceClient.createIndex(index); assertObjectEquals(index, res, true, "etag"); // Add language analyzers to searchAnalyzer and indexAnalyzer properties and expect failure try { - new Field() + new SearchField() .setName("field") - .setType(DataType.EDM_STRING) - .setSearchAnalyzer(AnalyzerName.EN_LUCENE); + .setType(SearchFieldDataType.STRING) + .setSearchAnalyzer(LexicalAnalyzerName.EN_LUCENE); } catch (Exception ex) { assertEquals(IllegalArgumentException.class, ex.getClass()); assertEquals("Only non-language analyzer can be used as search analyzer.", ex.getMessage()); } try { - new Field() + new SearchField() .setName("field") - .setType(DataType.EDM_STRING) - .setIndexAnalyzer(AnalyzerName.AR_MICROSOFT); + .setType(SearchFieldDataType.STRING) + .setIndexAnalyzer(LexicalAnalyzerName.AR_MICROSOFT); } catch (Exception ex) { 
assertEquals(IllegalArgumentException.class, ex.getClass()); assertEquals("Only non-language analyzer can be used as index analyzer.", ex.getMessage()); @@ -197,22 +197,22 @@ public void canUseAllAnalyzerNamesInIndexDefinition() { @Test public void canAnalyze() { - Index index = createTestIndex(); + SearchIndex index = createTestIndex(); searchServiceClient.createIndex(index); indexesToCleanup.add(index.getName()); AnalyzeRequest request = new AnalyzeRequest() .setText("One two") - .setAnalyzer(AnalyzerName.WHITESPACE); - PagedIterable results = searchServiceClient.analyzeText(index.getName(), request); - Iterator iterator = results.iterator(); + .setAnalyzer(LexicalAnalyzerName.WHITESPACE); + PagedIterable results = searchServiceClient.analyzeText(index.getName(), request); + Iterator iterator = results.iterator(); assertTokenInfoEqual("One", 0, 3, 0, iterator.next()); assertTokenInfoEqual("two", 4, 7, 1, iterator.next()); assertFalse(iterator.hasNext()); request = new AnalyzeRequest() .setText("One's ") - .setTokenizer(TokenizerName.WHITESPACE) + .setTokenizer(LexicalTokenizerName.WHITESPACE) .setTokenFilters(Collections.singletonList(TokenFilterName.APOSTROPHE)) .setCharFilters(Collections.singletonList(CharFilterName.HTML_STRIP)); results = searchServiceClient.analyzeText(index.getName(), request); @@ -230,18 +230,18 @@ public void canAnalyze() { @Test public void canAnalyzeWithAllPossibleNames() { - Index index = createTestIndex(); + SearchIndex index = createTestIndex(); searchServiceClient.createIndex(index); indexesToCleanup.add(index.getName()); - AnalyzerName.values() + LexicalAnalyzerName.values() .stream() .map(an -> new AnalyzeRequest() .setText("One two") .setAnalyzer(an)) .forEach(r -> searchServiceClient.analyzeText(index.getName(), r)); - TokenizerName.values() + LexicalTokenizerName.values() .stream() .map(tn -> new AnalyzeRequest() .setText("One two") @@ -250,7 +250,7 @@ public void canAnalyzeWithAllPossibleNames() { AnalyzeRequest request = new 
AnalyzeRequest() .setText("One two") - .setTokenizer(TokenizerName.WHITESPACE) + .setTokenizer(LexicalTokenizerName.WHITESPACE) .setTokenFilters(new ArrayList<>(TokenFilterName.values())) .setCharFilters(new ArrayList<>(CharFilterName.values())); searchServiceClient.analyzeText(index.getName(), request); @@ -258,7 +258,7 @@ public void canAnalyzeWithAllPossibleNames() { @Test public void addingCustomAnalyzerThrowsHttpExceptionByDefault() { - Index index = createTestIndex() + SearchIndex index = createTestIndex() .setAnalyzers(Collections.singletonList(new StopAnalyzer().setName("a1"))); searchServiceClient.createIndex(index); indexesToCleanup.add(index.getName()); @@ -274,13 +274,13 @@ public void addingCustomAnalyzerThrowsHttpExceptionByDefault() { @Test public void canAddCustomAnalyzerWithIndexDowntime() { - Index index = createTestIndex() + SearchIndex index = createTestIndex() .setAnalyzers(Collections.singletonList(new StopAnalyzer().setName("a1"))); searchServiceClient.createIndex(index); indexesToCleanup.add(index.getName()); addAnalyzerToIndex(index, new StopAnalyzer().setName("a2")); - Index updatedIndex = searchServiceClient.createOrUpdateIndexWithResponse(index, + SearchIndex updatedIndex = searchServiceClient.createOrUpdateIndexWithResponse(index, true, false, generateRequestOptions(), Context.NONE).getValue(); assertAnalysisComponentsEqual(index, updatedIndex); @@ -288,9 +288,9 @@ public void canAddCustomAnalyzerWithIndexDowntime() { @Test public void canCreateAllAnalysisComponents() { - Index index = prepareIndexWithAllAnalysisComponentTypes(); + SearchIndex index = prepareIndexWithAllAnalysisComponentTypes(); - Index createdIndex = searchServiceClient.createIndex(index); + SearchIndex createdIndex = searchServiceClient.createIndex(index); indexesToCleanup.add(index.getName()); assertAnalysisComponentsEqual(index, createdIndex); searchServiceClient.deleteIndex(index.getName()); @@ -299,14 +299,14 @@ public void canCreateAllAnalysisComponents() { // 
have defaults that are zero or null, and another where we need to specify the default values we // expect to get back from the REST API. - Index indexWithSpecialDefaults = createIndexWithSpecialDefaults(); - Index expectedIndexWithSpecialDefaults = createExpectedIndexWithSpecialDefaults(indexWithSpecialDefaults); + SearchIndex indexWithSpecialDefaults = createIndexWithSpecialDefaults(); + SearchIndex expectedIndexWithSpecialDefaults = createExpectedIndexWithSpecialDefaults(indexWithSpecialDefaults); - List splittedIndexWithSpecialDefaults = splitIndex(indexWithSpecialDefaults); - List splittedExpectedIndexWithSpecialDefaults = splitIndex(expectedIndexWithSpecialDefaults); + List splittedIndexWithSpecialDefaults = splitIndex(indexWithSpecialDefaults); + List splittedExpectedIndexWithSpecialDefaults = splitIndex(expectedIndexWithSpecialDefaults); for (int j = 0; j < splittedIndexWithSpecialDefaults.size(); j++) { - Index expected = splittedExpectedIndexWithSpecialDefaults.get(j); - Index actual = searchServiceClient.createIndex(expected); + SearchIndex expected = splittedExpectedIndexWithSpecialDefaults.get(j); + SearchIndex actual = searchServiceClient.createIndex(expected); assertAnalysisComponentsEqual(expected, actual); searchServiceClient.deleteIndex(actual.getName()); } @@ -314,16 +314,16 @@ public void canCreateAllAnalysisComponents() { @Test public void canUseAllAnalysisComponentNames() { - Index index = prepareIndexWithAllAnalysisComponentNames(); + SearchIndex index = prepareIndexWithAllAnalysisComponentNames(); - Index createdIndex = searchServiceClient.createIndex(index); + SearchIndex createdIndex = searchServiceClient.createIndex(index); indexesToCleanup.add(index.getName()); assertCustomAnalysisComponentsEqual(index, createdIndex); } @Test public void canUseAllRegexFlagsAnalyzer() { - Index index = createTestIndex() + SearchIndex index = createTestIndex() .setAnalyzers(Collections.singletonList(new PatternAnalyzer() .setStopwords(Arrays.asList("stop1", 
"stop2")) .setLowerCaseTerms(true) @@ -331,7 +331,7 @@ public void canUseAllRegexFlagsAnalyzer() { .setFlags(new ArrayList<>(RegexFlags.values())) .setName(generateName()))); - Index createdIndex = searchServiceClient.createIndex(index); + SearchIndex createdIndex = searchServiceClient.createIndex(index); indexesToCleanup.add(index.getName()); assertAnalysisComponentsEqual(index, createdIndex); @@ -339,10 +339,10 @@ public void canUseAllRegexFlagsAnalyzer() { @Test public void canUseAllRegexFlagsNullAnalyzer() { - Index index = createTestIndex() + SearchIndex index = createTestIndex() .setAnalyzers(null); - Index createdIndex = searchServiceClient.createIndex(index); + SearchIndex createdIndex = searchServiceClient.createIndex(index); indexesToCleanup.add(index.getName()); assertAnalysisComponentsEqual(index, createdIndex); @@ -350,10 +350,10 @@ public void canUseAllRegexFlagsNullAnalyzer() { @Test public void canUseAllRegexFlagsEmptyAnalyzer() { - Index index = createTestIndex() + SearchIndex index = createTestIndex() .setAnalyzers(new ArrayList<>()); - Index createdIndex = searchServiceClient.createIndex(index); + SearchIndex createdIndex = searchServiceClient.createIndex(index); indexesToCleanup.add(index.getName()); assertAnalysisComponentsEqual(index, createdIndex); @@ -361,7 +361,7 @@ public void canUseAllRegexFlagsEmptyAnalyzer() { @Test public void canUseAllRegexFlagsNullNameAnalyzer() { - Index index = createTestIndex() + SearchIndex index = createTestIndex() .setAnalyzers(Collections.singletonList( new PatternAnalyzer().setName(null) )); @@ -375,7 +375,7 @@ public void canUseAllRegexFlagsNullNameAnalyzer() { @Test public void canUseAllRegexFlagsEmptyNameAnalyzer() { - Index index = createTestIndex() + SearchIndex index = createTestIndex() .setAnalyzers(Collections.singletonList( new PatternAnalyzer().setName("") )); @@ -389,12 +389,12 @@ public void canUseAllRegexFlagsEmptyNameAnalyzer() { @Test public void canUseAllRegexFlagsNullLowerCaseAnalyzer() { - 
Index index = createTestIndex() + SearchIndex index = createTestIndex() .setAnalyzers(Collections.singletonList( new PatternAnalyzer().setLowerCaseTerms(null).setName(generateName()) )); - Index createdIndex = searchServiceClient.createIndex(index); + SearchIndex createdIndex = searchServiceClient.createIndex(index); indexesToCleanup.add(index.getName()); assertAnalysisComponentsEqual(index, createdIndex); @@ -402,12 +402,12 @@ public void canUseAllRegexFlagsNullLowerCaseAnalyzer() { @Test public void canUseAllRegexFlagsNullPatternAnalyzer() { - Index index = createTestIndex() + SearchIndex index = createTestIndex() .setAnalyzers(Collections.singletonList( new PatternAnalyzer().setPattern(null).setName(generateName()) )); - Index createdIndex = searchServiceClient.createIndex(index); + SearchIndex createdIndex = searchServiceClient.createIndex(index); indexesToCleanup.add(index.getName()); assertAnalysisComponentsEqual(index, createdIndex); @@ -415,12 +415,12 @@ public void canUseAllRegexFlagsNullPatternAnalyzer() { @Test public void canUseAllRegexFlagsEmptyPatternAnalyzer() { - Index index = createTestIndex() + SearchIndex index = createTestIndex() .setAnalyzers(Collections.singletonList( new PatternAnalyzer().setPattern("").setName(generateName()) )); - Index createdIndex = searchServiceClient.createIndex(index); + SearchIndex createdIndex = searchServiceClient.createIndex(index); indexesToCleanup.add(index.getName()); assertAnalysisComponentsEqual(index, createdIndex); @@ -428,12 +428,12 @@ public void canUseAllRegexFlagsEmptyPatternAnalyzer() { @Test public void canUseAllRegexFlagsNullFlagsAnalyzer() { - Index index = createTestIndex() + SearchIndex index = createTestIndex() .setAnalyzers(Collections.singletonList( new PatternAnalyzer().setFlags(null).setName(generateName()) )); - Index createdIndex = searchServiceClient.createIndex(index); + SearchIndex createdIndex = searchServiceClient.createIndex(index); indexesToCleanup.add(index.getName()); 
assertAnalysisComponentsEqual(index, createdIndex); @@ -441,7 +441,7 @@ public void canUseAllRegexFlagsNullFlagsAnalyzer() { @Test public void canUseAllRegexFlagsEmptyFlagsAnalyzer() { - Index index = createTestIndex() + SearchIndex index = createTestIndex() .setAnalyzers(Collections.singletonList( new PatternAnalyzer().setFlags(new ArrayList<>()).setName(generateName()) )); @@ -455,12 +455,12 @@ public void canUseAllRegexFlagsEmptyFlagsAnalyzer() { @Test public void canUseAllRegexFlagsNullStopwordsAnalyzer() { - Index index = createTestIndex() + SearchIndex index = createTestIndex() .setAnalyzers(Collections.singletonList(new PatternAnalyzer() .setStopwords(null) .setName(generateName()))); - Index createdIndex = searchServiceClient.createIndex(index); + SearchIndex createdIndex = searchServiceClient.createIndex(index); indexesToCleanup.add(index.getName()); assertAnalysisComponentsEqual(index, createdIndex); @@ -468,12 +468,12 @@ public void canUseAllRegexFlagsNullStopwordsAnalyzer() { @Test public void canUseAllRegexFlagsEmptyStopwordsAnalyzer() { - Index index = createTestIndex() + SearchIndex index = createTestIndex() .setAnalyzers(Collections.singletonList(new PatternAnalyzer() .setStopwords(new ArrayList<>()) .setName(generateName()))); - Index createdIndex = searchServiceClient.createIndex(index); + SearchIndex createdIndex = searchServiceClient.createIndex(index); indexesToCleanup.add(index.getName()); assertAnalysisComponentsEqual(index, createdIndex); @@ -481,14 +481,14 @@ public void canUseAllRegexFlagsEmptyStopwordsAnalyzer() { @Test public void canUseAllRegexFlagsTokenizer() { - Index index = createTestIndex() + SearchIndex index = createTestIndex() .setTokenizers(Collections.singletonList(new PatternTokenizer() .setPattern(".*") .setFlags(new ArrayList<>(RegexFlags.values())) .setGroup(0) .setName(generateName()))); - Index createdIndex = searchServiceClient.createIndex(index); + SearchIndex createdIndex = searchServiceClient.createIndex(index); 
indexesToCleanup.add(index.getName()); assertAnalysisComponentsEqual(index, createdIndex); @@ -496,10 +496,10 @@ public void canUseAllRegexFlagsTokenizer() { @Test public void canUseAllRegexFlagsNullTokenizer() { - Index index = createTestIndex() + SearchIndex index = createTestIndex() .setTokenizers(null); - Index createdIndex = searchServiceClient.createIndex(index); + SearchIndex createdIndex = searchServiceClient.createIndex(index); indexesToCleanup.add(index.getName()); assertAnalysisComponentsEqual(index, createdIndex); @@ -507,10 +507,10 @@ public void canUseAllRegexFlagsNullTokenizer() { @Test public void canUseAllRegexFlagsEmptyTokenizer() { - Index index = createTestIndex() + SearchIndex index = createTestIndex() .setTokenizers(new ArrayList<>()); - Index createdIndex = searchServiceClient.createIndex(index); + SearchIndex createdIndex = searchServiceClient.createIndex(index); indexesToCleanup.add(index.getName()); assertAnalysisComponentsEqual(index, createdIndex); @@ -518,7 +518,7 @@ public void canUseAllRegexFlagsEmptyTokenizer() { @Test public void canUseAllRegexFlagsNullNameTokenizer() { - Index index = createTestIndex() + SearchIndex index = createTestIndex() .setTokenizers(Collections.singletonList(new PatternTokenizer() .setName(null) )); @@ -532,7 +532,7 @@ public void canUseAllRegexFlagsNullNameTokenizer() { @Test public void canUseAllRegexFlagsEmptyNameTokenizer() { - Index index = createTestIndex() + SearchIndex index = createTestIndex() .setTokenizers(Collections.singletonList(new PatternTokenizer() .setName("") )); @@ -546,12 +546,12 @@ public void canUseAllRegexFlagsEmptyNameTokenizer() { @Test public void canUseAllRegexFlagsNullPatternTokenizer() { - Index index = createTestIndex() + SearchIndex index = createTestIndex() .setTokenizers(Collections.singletonList(new PatternTokenizer() .setPattern(null).setName(generateName()) )); - Index createdIndex = searchServiceClient.createIndex(index); + SearchIndex createdIndex = 
searchServiceClient.createIndex(index); indexesToCleanup.add(index.getName()); assertAnalysisComponentsEqual(index, createdIndex); @@ -559,12 +559,12 @@ public void canUseAllRegexFlagsNullPatternTokenizer() { @Test public void canUseAllRegexFlagsEmptyPatternTokenizer() { - Index index = createTestIndex() + SearchIndex index = createTestIndex() .setTokenizers(Collections.singletonList(new PatternTokenizer() .setPattern("").setName(generateName()) )); - Index createdIndex = searchServiceClient.createIndex(index); + SearchIndex createdIndex = searchServiceClient.createIndex(index); indexesToCleanup.add(index.getName()); assertAnalysisComponentsEqual(index, createdIndex); @@ -572,12 +572,12 @@ public void canUseAllRegexFlagsEmptyPatternTokenizer() { @Test public void canUseAllRegexFlagsNullFlagsTokenizer() { - Index index = createTestIndex() + SearchIndex index = createTestIndex() .setTokenizers(Collections.singletonList(new PatternTokenizer() .setFlags(null).setName(generateName()) )); - Index createdIndex = searchServiceClient.createIndex(index); + SearchIndex createdIndex = searchServiceClient.createIndex(index); indexesToCleanup.add(index.getName()); assertAnalysisComponentsEqual(index, createdIndex); @@ -586,7 +586,7 @@ public void canUseAllRegexFlagsNullFlagsTokenizer() { @Test public void canUseAllRegexFlagsEmptyFlagsTokenizer() { - Index index = createTestIndex() + SearchIndex index = createTestIndex() .setTokenizers(Collections.singletonList(new PatternTokenizer() .setFlags(new ArrayList<>()).setName(generateName()) )); @@ -600,12 +600,12 @@ public void canUseAllRegexFlagsEmptyFlagsTokenizer() { @Test public void canUseAllRegexFlagsNullGroupTokenizer() { - Index index = createTestIndex() + SearchIndex index = createTestIndex() .setTokenizers(Collections.singletonList(new PatternTokenizer() .setGroup(null) .setName(generateName()))); - Index createdIndex = searchServiceClient.createIndex(index); + SearchIndex createdIndex = 
searchServiceClient.createIndex(index); indexesToCleanup.add(index.getName()); assertAnalysisComponentsEqual(index, createdIndex); @@ -613,44 +613,44 @@ public void canUseAllRegexFlagsNullGroupTokenizer() { @Test public void canUseAllAnalysisComponentOptions() { - List indexes = prepareIndexesWithAllAnalysisComponentOptions(); + List indexes = prepareIndexesWithAllAnalysisComponentOptions(); indexes.forEach(expectedIndex -> { - Index createdIndex = searchServiceClient.createIndex(expectedIndex); + SearchIndex createdIndex = searchServiceClient.createIndex(expectedIndex); indexesToCleanup.add(expectedIndex.getName()); assertAnalysisComponentsEqual(expectedIndex, createdIndex); searchServiceClient.deleteIndex(createdIndex.getName()); }); } - void addAnalyzerToIndex(Index index, Analyzer analyzer) { - List analyzers = new ArrayList<>(index.getAnalyzers()); + void addAnalyzerToIndex(SearchIndex index, LexicalAnalyzer analyzer) { + List analyzers = new ArrayList<>(index.getAnalyzers()); analyzers.add(analyzer); index.setAnalyzers(analyzers); } - void assertAnalysisComponentsEqual(Index expected, Index actual) { + void assertAnalysisComponentsEqual(SearchIndex expected, SearchIndex actual) { // Compare analysis components directly so that test failures show better comparisons. // Analyzers assertAnalyzersEqual(expected.getAnalyzers(), actual.getAnalyzers()); - // Tokenizer - assertTokenizersEqual(expected.getTokenizers(), actual.getTokenizers()); + // LexicalTokenizer + assertLexicalTokenizersEqual(expected.getTokenizers(), actual.getTokenizers()); // Char filter assertCharFiltersEqual(expected.getCharFilters(), actual.getCharFilters()); } - void assertCustomAnalysisComponentsEqual(Index expected, Index actual) { + void assertCustomAnalysisComponentsEqual(SearchIndex expected, SearchIndex actual) { // Compare analysis components directly so that test failures show better comparisons. 
- // Analyzers - Sort according to their Tokenizers before comparing: - List expectedAnalyzers = expected.getAnalyzers(); - List actualAnalyzers = actual.getAnalyzers(); + // Analyzers - Sort according to their LexicalTokenizers before comparing: + List expectedAnalyzers = expected.getAnalyzers(); + List actualAnalyzers = actual.getAnalyzers(); if (expectedAnalyzers != null && actualAnalyzers != null) { - Comparator customAnalyzerComparator = Comparator - .comparing((Analyzer a) -> ((CustomAnalyzer) a).getTokenizer().toString()); + Comparator customAnalyzerComparator = Comparator + .comparing((LexicalAnalyzer a) -> ((CustomAnalyzer) a).getTokenizer().toString()); expectedAnalyzers.sort(customAnalyzerComparator); actualAnalyzers.sort(customAnalyzerComparator); @@ -658,14 +658,14 @@ void assertCustomAnalysisComponentsEqual(Index expected, Index actual) { assertAnalyzersEqual(expectedAnalyzers, actualAnalyzers); } - // Tokenizer - assertTokenizersEqual(expected.getTokenizers(), actual.getTokenizers()); + // LexicalTokenizer + assertLexicalTokenizersEqual(expected.getTokenizers(), actual.getTokenizers()); // Char filter assertCharFiltersEqual(expected.getCharFilters(), actual.getCharFilters()); } - private void assertAnalyzersEqual(List expected, List actual) { + private void assertAnalyzersEqual(List expected, List actual) { if (expected != null && actual != null) { assertEquals(expected.size(), actual.size()); for (int i = 0; i < expected.size(); i++) { @@ -674,7 +674,7 @@ private void assertAnalyzersEqual(List expected, List actual } } - private void assertTokenizersEqual(List expected, List actual) { + private void assertLexicalTokenizersEqual(List expected, List actual) { if (expected != null && actual != null) { assertEquals(expected.size(), actual.size()); for (int i = 0; i < expected.size(); i++) { @@ -696,11 +696,11 @@ String generateName() { return testResourceNamer.randomName(NAME_PREFIX, 24); } - List prepareIndexesWithAllAnalysisComponentOptions() { - 
Index index = createTestIndex(); + List prepareIndexesWithAllAnalysisComponentOptions() { + SearchIndex index = createTestIndex(); // Set tokenizers - List tokenizers = new ArrayList<>(); + List tokenizers = new ArrayList<>(); tokenizers.add( new EdgeNGramTokenizer() .setMinGram(1) @@ -776,27 +776,29 @@ List prepareIndexesWithAllAnalysisComponentOptions() { return splitIndex(index); } - Index prepareIndexWithAllAnalyzerNames() { - List allAnalyzerNames = new ArrayList<>(AnalyzerName.values()); - allAnalyzerNames.sort(Comparator.comparing(AnalyzerName::toString)); + SearchIndex prepareIndexWithAllLexicalAnalyzerNames() { + List allLexicalAnalyzerNames = new ArrayList<>(LexicalAnalyzerName.values()); + allLexicalAnalyzerNames.sort(Comparator.comparing(LexicalAnalyzerName::toString)); - List fields = new ArrayList<>(); + List fields = new ArrayList<>(); int fieldNumber = 0; // All analyzer names can be set on the analyzer property. - for (int i = 0; i < allAnalyzerNames.size(); i++) { - DataType fieldType = (i % 2 == 0) ? DataType.EDM_STRING : DataType.collection(DataType.EDM_STRING); - fields.add(new Field() + for (int i = 0; i < allLexicalAnalyzerNames.size(); i++) { + SearchFieldDataType fieldType = (i % 2 == 0) ? SearchFieldDataType.STRING + : SearchFieldDataType.collection(SearchFieldDataType.STRING); + fields.add(new SearchField() .setName("field" + (fieldNumber++)) .setType(fieldType) - .setAnalyzer(allAnalyzerNames.get(i))); + .setAnalyzer(allLexicalAnalyzerNames.get(i))); } - List searchAnalyzersAndIndexAnalyzers = getAnalyzersAllowedForSearchAnalyzerAndIndexAnalyzer(); + List searchAnalyzersAndIndexAnalyzers = getAnalyzersAllowedForSearchAnalyzerAndIndexAnalyzer(); for (int i = 0; i < searchAnalyzersAndIndexAnalyzers.size(); i++) { - DataType fieldType = (i % 2 == 0) ? DataType.EDM_STRING : DataType.collection(DataType.EDM_STRING); - fields.add(new Field() + SearchFieldDataType fieldType = (i % 2 == 0) ? 
SearchFieldDataType.STRING + : SearchFieldDataType.collection(SearchFieldDataType.STRING); + fields.add(new SearchField() .setName("field" + (fieldNumber++)) .setType(fieldType) .setSearchable(true) @@ -804,20 +806,20 @@ Index prepareIndexWithAllAnalyzerNames() { .setIndexAnalyzer(searchAnalyzersAndIndexAnalyzers.get(i))); } - fields.add(new Field() + fields.add(new SearchField() .setName("id") - .setType(DataType.EDM_STRING) + .setType(SearchFieldDataType.STRING) .setKey(true)); - return new Index() + return new SearchIndex() .setName(randomIndexName("hotel")) .setFields(fields); } - Index prepareIndexWithAllAnalysisComponentNames() { - Analyzer analyzerWithAllTokenFilterAndCharFilters = + SearchIndex prepareIndexWithAllAnalysisComponentNames() { + LexicalAnalyzer analyzerWithAllTokenFilterAndCharFilters = new CustomAnalyzer() - .setTokenizer(TokenizerName.LOWERCASE) + .setTokenizer(LexicalTokenizerName.LOWERCASE) .setTokenFilters(TokenFilterName.values() .stream() .sorted(Comparator.comparing(TokenFilterName::toString)) @@ -828,18 +830,18 @@ Index prepareIndexWithAllAnalysisComponentNames() { .collect(Collectors.toList())) .setName("abc"); - Index index = createTestIndex(); - List analyzers = new ArrayList<>(); + SearchIndex index = createTestIndex(); + List analyzers = new ArrayList<>(); analyzers.add(analyzerWithAllTokenFilterAndCharFilters); - analyzers.addAll(TokenizerName.values() + analyzers.addAll(LexicalTokenizerName.values() .stream() - .sorted(Comparator.comparing(TokenizerName::toString)) + .sorted(Comparator.comparing(LexicalTokenizerName::toString)) .map(tn -> new CustomAnalyzer() .setTokenizer(tn) .setName(generateName())) .collect(Collectors.toList())); - analyzers.sort(Comparator.comparing(Analyzer::getName)); + analyzers.sort(Comparator.comparing(LexicalAnalyzer::getName)); index.setAnalyzers(analyzers); return index; @@ -849,13 +851,13 @@ Index prepareIndexWithAllAnalysisComponentNames() { * Custom analysis components 
(analyzer/tokenzier/tokenFilter/charFilter) count in index must be between 0 and 50. * Split an Index into indexes, each of which has a total analysis components count within the limit. */ - List splitIndex(Index index) { - Collection> analyzersLists = splitAnalysisComponents(index.getAnalyzers()); - List indexes = analyzersLists + List splitIndex(SearchIndex index) { + Collection> analyzersLists = splitAnalysisComponents(index.getAnalyzers()); + List indexes = analyzersLists .stream() .map(a -> createTestIndex().setAnalyzers(a)).collect(Collectors.toList()); - Collection> tokenizersLists = splitAnalysisComponents(index.getTokenizers()); + Collection> tokenizersLists = splitAnalysisComponents(index.getTokenizers()); indexes.addAll(tokenizersLists .stream() .map(t -> createTestIndex().setTokenizers(t)) @@ -898,8 +900,8 @@ private Collection> splitAnalysisComponents(List list) { return lists; } - Index prepareIndexWithAllAnalysisComponentTypes() { - final TokenizerName customTokenizerName = TokenizerName.fromString("my_tokenizer"); + SearchIndex prepareIndexWithAllAnalysisComponentTypes() { + final LexicalTokenizerName customTokenizerName = LexicalTokenizerName.fromString("my_tokenizer"); final TokenFilterName customTokenFilterName = TokenFilterName.fromString("my_tokenfilter"); final CharFilterName customCharFilterName = CharFilterName.fromString("my_charfilter"); @@ -911,15 +913,15 @@ Index prepareIndexWithAllAnalysisComponentTypes() { .setCharFilters(Collections.singletonList(customCharFilterName)) .setName(generateName()), new CustomAnalyzer() - .setTokenizer(TokenizerName.EDGE_NGRAM) + .setTokenizer(LexicalTokenizerName.EDGE_NGRAM) .setName(generateName()), new PatternAnalyzer() .setLowerCaseTerms(false) .setPattern("abc") - .setFlags(Collections.singletonList(RegexFlags.DOTALL)) + .setFlags(Collections.singletonList(RegexFlags.DOT_ALL)) .setStopwords(Collections.singletonList("the")) .setName(generateName()), - new StandardAnalyzer() + new 
LuceneStandardAnalyzer() .setMaxTokenLength(100) .setStopwords(Collections.singletonList("the")) .setName(generateName()), @@ -972,7 +974,7 @@ Index prepareIndexWithAllAnalysisComponentTypes() { .setFlags(Collections.singletonList(RegexFlags.MULTILINE)) .setGroup(0) .setName(generateName()), - new StandardTokenizerV2() + new LuceneStandardTokenizerV2() .setMaxTokenLength(100) .setName(generateName()), new UaxUrlEmailTokenizer() @@ -1031,8 +1033,8 @@ Index prepareIndexWithAllAnalysisComponentTypes() { .setKeywords(Collections.singletonList("essential")) .setName(generateName()), new LengthTokenFilter() - .setMin(5) - .setMax(10) + .setMinLength(5) + .setMaxLength(10) .setName(generateName()), new LimitTokenFilter() .setMaxTokenCount(10) @@ -1121,14 +1123,14 @@ Index prepareIndexWithAllAnalysisComponentTypes() { )); } - Index createIndexWithSpecialDefaults() { + SearchIndex createIndexWithSpecialDefaults() { int i = 0; return createTestIndex() .setAnalyzers(Arrays.asList( new PatternAnalyzer() .setName(generateSimpleName(i++)), - new StandardAnalyzer() + new LuceneStandardAnalyzer() .setName(generateSimpleName(i++)) )) .setTokenizers(Arrays.asList( @@ -1148,7 +1150,7 @@ Index createIndexWithSpecialDefaults() { .setName(generateSimpleName(i++)), new PatternTokenizer() .setName(generateSimpleName(i++)), - new StandardTokenizerV2() + new LuceneStandardTokenizerV2() .setName(generateSimpleName(i++)), new UaxUrlEmailTokenizer() .setName(generateSimpleName(i++)) @@ -1184,7 +1186,7 @@ Index createIndexWithSpecialDefaults() { )); } - Index createExpectedIndexWithSpecialDefaults(Index index) { + SearchIndex createExpectedIndexWithSpecialDefaults(SearchIndex index) { int i = 0; return createTestIndex() @@ -1194,7 +1196,7 @@ Index createExpectedIndexWithSpecialDefaults(Index index) { .setLowerCaseTerms(true) .setPattern("\\W+") .setName(generateSimpleName(i++)), - new StandardAnalyzer() + new LuceneStandardAnalyzer() .setMaxTokenLength(255) .setName(generateSimpleName(i++)) )) 
@@ -1232,7 +1234,7 @@ Index createExpectedIndexWithSpecialDefaults(Index index) { .setPattern("\\W+") .setGroup(-1) .setName(generateSimpleName(i++)), - new StandardTokenizerV2() + new LuceneStandardTokenizerV2() .setMaxTokenLength(255) .setName(generateSimpleName(i++)), new UaxUrlEmailTokenizer() @@ -1252,7 +1254,7 @@ Index createExpectedIndexWithSpecialDefaults(Index index) { .setSide(EdgeNGramTokenFilterSide.FRONT) .setName(generateSimpleName(i++)), new LengthTokenFilter() - .setMax(300) + .setMaxLength(300) .setName(generateSimpleName(i++)), new LimitTokenFilter() .setMaxTokenCount(1) @@ -1298,7 +1300,7 @@ Index createExpectedIndexWithSpecialDefaults(Index index) { } void assertTokenInfoEqual(String expectedToken, Integer expectedStartOffset, Integer expectedEndOffset, - Integer expectedPosition, TokenInfo actual) { + Integer expectedPosition, AnalyzedTokenInfo actual) { assertEquals(expectedToken, actual.getToken()); assertEquals(expectedStartOffset, actual.getStartOffset()); assertEquals(expectedEndOffset, actual.getEndOffset()); @@ -1309,10 +1311,10 @@ private String generateSimpleName(int n) { return String.format("a%d", n); } - private List getAnalyzersAllowedForSearchAnalyzerAndIndexAnalyzer() { + private List getAnalyzersAllowedForSearchAnalyzerAndIndexAnalyzer() { // Only non-language analyzer names can be set on the searchAnalyzer and indexAnalyzer properties. // ASSUMPTION: Only language analyzers end in .lucene or .microsoft. 
- return AnalyzerName.values() + return LexicalAnalyzerName.values() .stream() .filter(an -> !an.toString().endsWith(".lucene") && !an.toString().endsWith(".microsoft")) .collect(Collectors.toList()); diff --git a/sdk/search/azure-search-documents/src/test/java/com/azure/search/documents/DataSourceSyncTests.java b/sdk/search/azure-search-documents/src/test/java/com/azure/search/documents/DataSourceSyncTests.java index ca9f753b90f8..e21923bf4735 100644 --- a/sdk/search/azure-search-documents/src/test/java/com/azure/search/documents/DataSourceSyncTests.java +++ b/sdk/search/azure-search-documents/src/test/java/com/azure/search/documents/DataSourceSyncTests.java @@ -7,14 +7,14 @@ import com.azure.core.http.rest.Response; import com.azure.core.util.Context; import com.azure.core.util.CoreUtils; -import com.azure.search.documents.models.DataContainer; import com.azure.search.documents.models.DataDeletionDetectionPolicy; -import com.azure.search.documents.models.DataSource; import com.azure.search.documents.models.DataSourceCredentials; -import com.azure.search.documents.models.DataSourceType; import com.azure.search.documents.models.HighWaterMarkChangeDetectionPolicy; import com.azure.search.documents.models.RequestOptions; import com.azure.search.documents.models.SearchErrorException; +import com.azure.search.documents.models.SearchIndexerDataContainer; +import com.azure.search.documents.models.SearchIndexerDataSource; +import com.azure.search.documents.models.SearchIndexerDataSourceType; import com.azure.search.documents.models.SoftDeleteColumnDeletionDetectionPolicy; import com.azure.search.documents.models.SqlIntegratedChangeTrackingPolicy; import org.junit.jupiter.api.Test; @@ -61,15 +61,15 @@ protected void afterTest() { @Test public void canCreateAndListDataSources() { - DataSource dataSource1 = createTestBlobDataSource(null); - DataSource dataSource2 = createTestSqlDataSourceObject(); + SearchIndexerDataSource dataSource1 = createTestBlobDataSource(null); + 
SearchIndexerDataSource dataSource2 = createTestSqlDataSourceObject(); client.createOrUpdateDataSource(dataSource1); dataSourcesToDelete.add(dataSource1.getName()); client.createOrUpdateDataSource(dataSource2); dataSourcesToDelete.add(dataSource2.getName()); - Iterator results = client.listDataSources().iterator(); + Iterator results = client.listDataSources().iterator(); assertEquals(dataSource1.getName(), results.next().getName()); assertEquals(dataSource2.getName(), results.next().getName()); @@ -78,15 +78,15 @@ public void canCreateAndListDataSources() { @Test public void canCreateAndListDataSourcesWithResponse() { - DataSource dataSource1 = createTestBlobDataSource(null); - DataSource dataSource2 = createTestSqlDataSourceObject(); + SearchIndexerDataSource dataSource1 = createTestBlobDataSource(null); + SearchIndexerDataSource dataSource2 = createTestSqlDataSourceObject(); client.createOrUpdateDataSourceWithResponse(dataSource1, false, new RequestOptions(), Context.NONE); dataSourcesToDelete.add(dataSource1.getName()); client.createOrUpdateDataSourceWithResponse(dataSource2, false, new RequestOptions(), Context.NONE); dataSourcesToDelete.add(dataSource2.getName()); - Iterator results = client.listDataSources("name", new RequestOptions(), Context.NONE).iterator(); + Iterator results = client.listDataSources("name", new RequestOptions(), Context.NONE).iterator(); assertEquals(dataSource1.getName(), results.next().getName()); assertEquals(dataSource2.getName(), results.next().getName()); @@ -95,7 +95,7 @@ public void canCreateAndListDataSourcesWithResponse() { @Test public void canCreateAndDeleteDatasource() { - DataSource dataSource = createTestBlobDataSource(null); + SearchIndexerDataSource dataSource = createTestBlobDataSource(null); client.deleteDataSource(dataSource.getName()); assertThrows(HttpResponseException.class, () -> client.getDataSource(dataSource.getName())); @@ -103,7 +103,7 @@ public void canCreateAndDeleteDatasource() { @Test public void 
deleteDataSourceIsIdempotent() { - DataSource dataSource = createTestBlobDataSource(null); + SearchIndexerDataSource dataSource = createTestBlobDataSource(null); // Try to delete before the data source exists, expect a NOT FOUND return status code Response result = client.deleteDataSourceWithResponse(dataSource, false, generateRequestOptions(), @@ -123,8 +123,8 @@ public void deleteDataSourceIsIdempotent() { @Test public void createDataSourceFailsWithUsefulMessageOnUserError() { - DataSource dataSource = createTestSqlDataSourceObject(); - dataSource.setType(DataSourceType.fromString("thistypedoesnotexist")); + SearchIndexerDataSource dataSource = createTestSqlDataSourceObject(); + dataSource.setType(SearchIndexerDataSourceType.fromString("thistypedoesnotexist")); assertHttpResponseException( () -> client.createOrUpdateDataSource(dataSource), @@ -135,22 +135,22 @@ public void createDataSourceFailsWithUsefulMessageOnUserError() { @Test public void canUpdateDataSource() { - DataSource initial = createTestSqlDataSourceObject(); + SearchIndexerDataSource initial = createTestSqlDataSourceObject(); // Create the data source client.createOrUpdateDataSource(initial); dataSourcesToDelete.add(initial.getName()); - DataSource updatedExpected = createTestSqlDataSourceObject() + SearchIndexerDataSource updatedExpected = createTestSqlDataSourceObject() .setName(initial.getName()) - .setContainer(new DataContainer().setName("somethingdifferent")) + .setContainer(new SearchIndexerDataContainer().setName("somethingdifferent")) .setDescription("somethingdifferent") .setDataChangeDetectionPolicy(new HighWaterMarkChangeDetectionPolicy() .setHighWaterMarkColumnName("rowversion")) .setDataDeletionDetectionPolicy(new SoftDeleteColumnDeletionDetectionPolicy() .setSoftDeleteColumnName("isDeleted")); - DataSource updatedActual = client.createOrUpdateDataSource(updatedExpected); + SearchIndexerDataSource updatedActual = client.createOrUpdateDataSource(updatedExpected); 
updatedExpected.getCredentials().setConnectionString(null); // Create doesn't return connection strings. TestHelpers.assertObjectEquals(updatedExpected, updatedActual, false, "etag", "@odata.etag"); @@ -158,10 +158,10 @@ public void canUpdateDataSource() { @Test public void createOrUpdateDatasourceIfNotExistsSucceedsOnNoResource() { - DataSource dataSource = createTestBlobDataSource(null); + SearchIndexerDataSource dataSource = createTestBlobDataSource(null); dataSourcesToDelete.add(dataSource.getName()); - DataSource response = client.createOrUpdateDataSourceWithResponse(dataSource, true, null, Context.NONE) + SearchIndexerDataSource response = client.createOrUpdateDataSourceWithResponse(dataSource, true, null, Context.NONE) .getValue(); assertFalse(CoreUtils.isNullOrEmpty(response.getETag())); @@ -169,10 +169,10 @@ public void createOrUpdateDatasourceIfNotExistsSucceedsOnNoResource() { @Test public void deleteDataSourceIfExistsWorksOnlyWhenResourceExists() { - DataSource dataSource = createTestBlobDataSource(null); + SearchIndexerDataSource dataSource = createTestBlobDataSource(null); dataSourcesToDelete.add(dataSource.getName()); - DataSource response = client.createOrUpdateDataSourceWithResponse(dataSource, false, null, Context.NONE) + SearchIndexerDataSource response = client.createOrUpdateDataSourceWithResponse(dataSource, false, null, Context.NONE) .getValue(); client.deleteDataSourceWithResponse(response, true, null, Context.NONE); @@ -187,12 +187,12 @@ public void deleteDataSourceIfExistsWorksOnlyWhenResourceExists() { @Test public void deleteDataSourceIfNotChangedWorksOnlyOnCurrentResource() { - DataSource dataSource = createTestBlobDataSource(null); + SearchIndexerDataSource dataSource = createTestBlobDataSource(null); - DataSource stale = client.createOrUpdateDataSourceWithResponse(dataSource, false, null, Context.NONE) + SearchIndexerDataSource stale = client.createOrUpdateDataSourceWithResponse(dataSource, false, null, Context.NONE) .getValue(); - 
DataSource current = client.createOrUpdateDataSourceWithResponse(stale, false, null, Context.NONE) + SearchIndexerDataSource current = client.createOrUpdateDataSourceWithResponse(stale, false, null, Context.NONE) .getValue(); try { @@ -207,14 +207,14 @@ public void deleteDataSourceIfNotChangedWorksOnlyOnCurrentResource() { @Test public void updateDataSourceIfExistsSucceedsOnExistingResource() { - DataSource dataSource = createTestBlobDataSource(null); + SearchIndexerDataSource dataSource = createTestBlobDataSource(null); dataSourcesToDelete.add(dataSource.getName()); - DataSource original = client.createOrUpdateDataSourceWithResponse(dataSource, false, null, Context.NONE) + SearchIndexerDataSource original = client.createOrUpdateDataSourceWithResponse(dataSource, false, null, Context.NONE) .getValue(); String originalETag = original.getETag(); - DataSource updated = client.createOrUpdateDataSourceWithResponse(original.setDescription("an update"), false, + SearchIndexerDataSource updated = client.createOrUpdateDataSourceWithResponse(original.setDescription("an update"), false, null, Context.NONE) .getValue(); String updatedETag = updated.getETag(); @@ -225,14 +225,14 @@ public void updateDataSourceIfExistsSucceedsOnExistingResource() { @Test public void updateDataSourceIfNotChangedFailsWhenResourceChanged() { - DataSource dataSource = createTestBlobDataSource(null); + SearchIndexerDataSource dataSource = createTestBlobDataSource(null); dataSourcesToDelete.add(dataSource.getName()); - DataSource original = client.createOrUpdateDataSourceWithResponse(dataSource, false, null, Context.NONE) + SearchIndexerDataSource original = client.createOrUpdateDataSourceWithResponse(dataSource, false, null, Context.NONE) .getValue(); String originalETag = original.getETag(); - DataSource updated = client.createOrUpdateDataSourceWithResponse(original.setDescription("an update"), false, + SearchIndexerDataSource updated = 
client.createOrUpdateDataSourceWithResponse(original.setDescription("an update"), false, null, Context.NONE) .getValue(); String updatedETag = updated.getETag(); @@ -251,14 +251,14 @@ public void updateDataSourceIfNotChangedFailsWhenResourceChanged() { @Test public void updateDataSourceIfNotChangedSucceedsWhenResourceUnchanged() { - DataSource dataSource = createTestBlobDataSource(null); + SearchIndexerDataSource dataSource = createTestBlobDataSource(null); dataSourcesToDelete.add(dataSource.getName()); - DataSource original = client.createOrUpdateDataSourceWithResponse(dataSource, false, null, Context.NONE) + SearchIndexerDataSource original = client.createOrUpdateDataSourceWithResponse(dataSource, false, null, Context.NONE) .getValue(); String originalETag = original.getETag(); - DataSource updated = client.createOrUpdateDataSourceWithResponse(original.setDescription("an update"), false, + SearchIndexerDataSource updated = client.createOrUpdateDataSourceWithResponse(original.setDescription("an update"), false, null, Context.NONE) .getValue(); String updatedETag = updated.getETag(); @@ -283,8 +283,7 @@ public void createDataSourceReturnsCorrectDefinition() { // AzureSql createAndValidateDataSource(createTestSqlDataSourceObject(null, null)); createAndValidateDataSource(createTestSqlDataSourceObject(deletionDetectionPolicy, null)); - createAndValidateDataSource(createTestSqlDataSourceObject(null, new - SqlIntegratedChangeTrackingPolicy())); + createAndValidateDataSource(createTestSqlDataSourceObject(null, new SqlIntegratedChangeTrackingPolicy())); createAndValidateDataSource(createTestSqlDataSourceObject(deletionDetectionPolicy, changeDetectionPolicy)); @@ -303,8 +302,8 @@ public void createDataSourceReturnsCorrectDefinition() { createAndValidateDataSource(createTestBlobDataSource(deletionDetectionPolicy)); } - private void createAndValidateDataSource(DataSource expectedDataSource) { - DataSource actualDataSource = 
client.createOrUpdateDataSource(expectedDataSource); + private void createAndValidateDataSource(SearchIndexerDataSource expectedDataSource) { + SearchIndexerDataSource actualDataSource = client.createOrUpdateDataSource(expectedDataSource); expectedDataSource.setCredentials(new DataSourceCredentials().setConnectionString(null)); TestHelpers.assertObjectEquals(expectedDataSource, actualDataSource, false, "etag", "@odata.etag"); @@ -321,14 +320,14 @@ public void getDataSourceReturnsCorrectDefinition() { createGetAndValidateDataSource(createTestCosmosDataSource(null, false)); } - private void createGetAndValidateDataSource(DataSource expectedDataSource) { + private void createGetAndValidateDataSource(SearchIndexerDataSource expectedDataSource) { client.createOrUpdateDataSource(expectedDataSource); String dataSourceName = expectedDataSource.getName(); // Get doesn't return connection strings. expectedDataSource.setCredentials(new DataSourceCredentials().setConnectionString(null)); - DataSource actualDataSource = client.getDataSource(dataSourceName); + SearchIndexerDataSource actualDataSource = client.getDataSource(dataSourceName); TestHelpers.assertObjectEquals(expectedDataSource, actualDataSource, false, "etag", "@odata.etag"); actualDataSource = client.getDataSourceWithResponse(dataSourceName, generateRequestOptions(), Context.NONE) @@ -349,22 +348,22 @@ public void getDataSourceThrowsOnNotFound() { @Test public void canCreateDataSource() { - DataSource expectedDataSource = createTestBlobDataSource(null); + SearchIndexerDataSource expectedDataSource = createTestBlobDataSource(null); dataSourcesToDelete.add(expectedDataSource.getName()); - DataSource actualDataSource = client.createDataSource(expectedDataSource); + SearchIndexerDataSource actualDataSource = client.createDataSource(expectedDataSource); assertNotNull(actualDataSource); assertEquals(expectedDataSource.getName(), actualDataSource.getName()); - Iterator dataSources = client.listDataSources().iterator(); + 
Iterator dataSources = client.listDataSources().iterator(); assertEquals(expectedDataSource.getName(), dataSources.next().getName()); assertFalse(dataSources.hasNext()); } @Test public void canCreateDataSourceWithResponse() { - DataSource expectedDataSource = createTestBlobDataSource(null); + SearchIndexerDataSource expectedDataSource = createTestBlobDataSource(null); dataSourcesToDelete.add(expectedDataSource.getName()); - Response response = client + Response response = client .createDataSourceWithResponse(expectedDataSource, new RequestOptions(), null); assertNotNull(response); assertNotNull(response.getValue()); @@ -379,7 +378,7 @@ public void canUpdateConnectionData() { // Hence, we only validate that the properties on the local items can change. // Create an initial dataSource - DataSource initial = createTestBlobDataSource(null); + SearchIndexerDataSource initial = createTestBlobDataSource(null); assertEquals(initial.getCredentials().getConnectionString(), FAKE_STORAGE_CONNECTION_STRING); // tweak the connection string and verify it was changed @@ -390,20 +389,20 @@ public void canUpdateConnectionData() { assertEquals(initial.getCredentials().getConnectionString(), newConnString); } - DataSource createTestBlobDataSource(DataDeletionDetectionPolicy deletionDetectionPolicy) { - return DataSources.createFromAzureBlobStorage(testResourceNamer.randomName(BLOB_DATASOURCE_TEST_NAME, 32), + SearchIndexerDataSource createTestBlobDataSource(DataDeletionDetectionPolicy deletionDetectionPolicy) { + return SearchIndexerDataSources.createFromAzureBlobStorage(testResourceNamer.randomName(BLOB_DATASOURCE_TEST_NAME, 32), FAKE_STORAGE_CONNECTION_STRING, "fakecontainer", "/fakefolder/", FAKE_DESCRIPTION, deletionDetectionPolicy); } - DataSource createTestTableStorageDataSource() { - return DataSources.createFromAzureTableStorage("azs-java-test-tablestorage", FAKE_STORAGE_CONNECTION_STRING, + SearchIndexerDataSource createTestTableStorageDataSource() { + return 
SearchIndexerDataSources.createFromAzureTableStorage("azs-java-test-tablestorage", FAKE_STORAGE_CONNECTION_STRING, "faketable", "fake query", FAKE_DESCRIPTION, null); } - DataSource createTestCosmosDataSource(DataDeletionDetectionPolicy deletionDetectionPolicy, + SearchIndexerDataSource createTestCosmosDataSource(DataDeletionDetectionPolicy deletionDetectionPolicy, boolean useChangeDetection) { - return DataSources.createFromCosmos("azs-java-test-cosmos", FAKE_COSMOS_CONNECTION_STRING, "faketable", + return SearchIndexerDataSources.createFromCosmos("azs-java-test-cosmos", FAKE_COSMOS_CONNECTION_STRING, "faketable", "SELECT ... FROM x where x._ts > @HighWaterMark", useChangeDetection, FAKE_DESCRIPTION, deletionDetectionPolicy); } diff --git a/sdk/search/azure-search-documents/src/test/java/com/azure/search/documents/DataSourcesTest.java b/sdk/search/azure-search-documents/src/test/java/com/azure/search/documents/DataSourcesTest.java index c8bff914ff34..44d2c71aa8be 100644 --- a/sdk/search/azure-search-documents/src/test/java/com/azure/search/documents/DataSourcesTest.java +++ b/sdk/search/azure-search-documents/src/test/java/com/azure/search/documents/DataSourcesTest.java @@ -3,11 +3,11 @@ package com.azure.search.documents; -import com.azure.search.documents.models.DataContainer; -import com.azure.search.documents.models.DataSource; import com.azure.search.documents.models.DataSourceCredentials; -import com.azure.search.documents.models.DataSourceType; import com.azure.search.documents.models.HighWaterMarkChangeDetectionPolicy; +import com.azure.search.documents.models.SearchIndexerDataContainer; +import com.azure.search.documents.models.SearchIndexerDataSource; +import com.azure.search.documents.models.SearchIndexerDataSourceType; import org.junit.jupiter.api.Test; /** @@ -18,13 +18,13 @@ public class DataSourcesTest { @Test public void canCreateSqlDataSource() { // check utility method with minimal overloads - DataSource expected = new DataSource() + 
SearchIndexerDataSource expected = new SearchIndexerDataSource() .setName("sql") - .setType(DataSourceType.AZURE_SQL) + .setType(SearchIndexerDataSourceType.AZURE_SQL) .setCredentials(new DataSourceCredentials() .setConnectionString("connectionString")) - .setContainer(new DataContainer().setName("table")); - DataSource actual = DataSources.createFromAzureSql( + .setContainer(new SearchIndexerDataContainer().setName("table")); + SearchIndexerDataSource actual = SearchIndexerDataSources.createFromAzureSql( "sql", "connectionString", "table"); TestHelpers.assertObjectEquals(expected, actual, false, "etag"); @@ -33,14 +33,14 @@ public void canCreateSqlDataSource() { @Test public void canCreateStorageBlobDataSource() { // check utility method with minimal overloads - DataSource expected = new DataSource() + SearchIndexerDataSource expected = new SearchIndexerDataSource() .setName("storageBlob") - .setType(DataSourceType.AZURE_BLOB) + .setType(SearchIndexerDataSourceType.AZURE_BLOB) .setCredentials(new DataSourceCredentials() .setConnectionString("connectionString")) - .setContainer(new DataContainer() + .setContainer(new SearchIndexerDataContainer() .setName("container")); - DataSource actual = DataSources.createFromAzureBlobStorage( + SearchIndexerDataSource actual = SearchIndexerDataSources.createFromAzureBlobStorage( "storageBlob", "connectionString", "container"); TestHelpers.assertObjectEquals(expected, actual, false, "etag"); @@ -49,14 +49,14 @@ public void canCreateStorageBlobDataSource() { @Test public void canCreateStorageTableDataSource() { // check utility method with minimal overloads - DataSource expected = new DataSource() + SearchIndexerDataSource expected = new SearchIndexerDataSource() .setName("storageTable") - .setType(DataSourceType.AZURE_TABLE) + .setType(SearchIndexerDataSourceType.AZURE_TABLE) .setCredentials(new DataSourceCredentials() .setConnectionString("connectionString")) - .setContainer(new DataContainer() + .setContainer(new 
SearchIndexerDataContainer() .setName("table")); - DataSource actual = DataSources.createFromAzureTableStorage( + SearchIndexerDataSource actual = SearchIndexerDataSources.createFromAzureTableStorage( "storageTable", "connectionString", "table"); TestHelpers.assertObjectEquals(expected, actual, false, "etag"); @@ -65,15 +65,15 @@ public void canCreateStorageTableDataSource() { @Test public void canCreateCosmosDataSource() { // check utility method overloads - DataSource expected = new DataSource() + SearchIndexerDataSource expected = new SearchIndexerDataSource() .setName("cosmos") - .setType(DataSourceType.COSMOS) + .setType(SearchIndexerDataSourceType.COSMOS_DB) .setCredentials(new DataSourceCredentials() .setConnectionString("connectionString")) - .setContainer(new DataContainer() + .setContainer(new SearchIndexerDataContainer() .setName("collection")); - DataSource actual = DataSources.createFromCosmos("cosmos", "connectionString", "collection", false); + SearchIndexerDataSource actual = SearchIndexerDataSources.createFromCosmos("cosmos", "connectionString", "collection", false); TestHelpers.assertObjectEquals(expected, actual, false, "etag"); } @@ -81,16 +81,17 @@ public void canCreateCosmosDataSource() { @Test public void canCreateCosmosDataSourceWithMinimalOverload() { // check utility method with minimal overloads - DataSource expected = new DataSource() + SearchIndexerDataSource expected = new SearchIndexerDataSource() .setName("cosmos") - .setType(DataSourceType.COSMOS) + .setType(SearchIndexerDataSourceType.COSMOS_DB) .setCredentials(new DataSourceCredentials() .setConnectionString("connectionString")) - .setContainer(new DataContainer() + .setContainer(new SearchIndexerDataContainer() .setName("collection")) .setDataChangeDetectionPolicy(new HighWaterMarkChangeDetectionPolicy().setHighWaterMarkColumnName("_ts")); - DataSource actual = DataSources.createFromCosmos("cosmos", "connectionString", "collection"); + SearchIndexerDataSource actual = 
SearchIndexerDataSources.createFromCosmos("cosmos", + "connectionString", "collection"); TestHelpers.assertObjectEquals(expected, actual, false, "etag"); } diff --git a/sdk/search/azure-search-documents/src/test/java/com/azure/search/documents/FieldBuilderTest.java b/sdk/search/azure-search-documents/src/test/java/com/azure/search/documents/FieldBuilderTest.java index 93b2f465056c..b95409cc3ec4 100644 --- a/sdk/search/azure-search-documents/src/test/java/com/azure/search/documents/FieldBuilderTest.java +++ b/sdk/search/azure-search-documents/src/test/java/com/azure/search/documents/FieldBuilderTest.java @@ -3,10 +3,10 @@ package com.azure.search.documents; -import com.azure.search.documents.models.AnalyzerName; -import com.azure.search.documents.models.ComplexField; -import com.azure.search.documents.models.DataType; -import com.azure.search.documents.models.Field; +import com.azure.search.documents.models.ComplexSearchField; +import com.azure.search.documents.models.LexicalAnalyzerName; +import com.azure.search.documents.models.SearchField; +import com.azure.search.documents.models.SearchFieldDataType; import com.azure.search.documents.models.SearchableField; import com.azure.search.documents.models.SimpleField; import com.azure.search.documents.test.environment.models.Hotel; @@ -30,8 +30,8 @@ public class FieldBuilderTest { @Test public void hotelComparison() { - List actualFields = sortByFieldName(FieldBuilder.build(Hotel.class)); - List expectedFields = sortByFieldName(buildHotelFields()); + List actualFields = sortByFieldName(FieldBuilder.build(Hotel.class)); + List expectedFields = sortByFieldName(buildHotelFields()); assertListFieldEquals(expectedFields, actualFields); } @@ -39,28 +39,28 @@ public void hotelComparison() { public void hotelSearchableThrowException() { Exception exception = assertThrows(RuntimeException.class, () -> FieldBuilder.build(HotelSearchException.class)); - assertExceptionMassageAndDataType(exception, DataType.EDM_INT32, "hotelId"); + 
assertExceptionMassageAndDataType(exception, SearchFieldDataType.INT32, "hotelId"); } @Test public void hotelListFieldSearchableThrowException() { Exception exception = assertThrows(RuntimeException.class, () -> FieldBuilder.build(HotelSearchableExceptionOnList.class)); - assertExceptionMassageAndDataType(exception, DataType.collection(DataType.EDM_INT32), "passcode"); + assertExceptionMassageAndDataType(exception, SearchFieldDataType.collection(SearchFieldDataType.INT32), "passcode"); } @Test public void hotelCircularDependencies() { - List actualFields = sortByFieldName(FieldBuilder.build(HotelCircularDependencies.class)); - List expectedFields = sortByFieldName(buildHotelCircularDependenciesModel()); + List actualFields = sortByFieldName(FieldBuilder.build(HotelCircularDependencies.class)); + List expectedFields = sortByFieldName(buildHotelCircularDependenciesModel()); assertListFieldEquals(expectedFields, actualFields); } @Test public void hotelWithEmptySynonymMaps() { // We cannot put null in the annotation. So no need to test null case. 
- List actualFields = FieldBuilder.build(HotelWithEmptyInSynonymMaps.class); - List expectedFields = Collections.singletonList(new SearchableField("tags", true) + List actualFields = FieldBuilder.build(HotelWithEmptyInSynonymMaps.class); + List expectedFields = Collections.singletonList(new SearchableField("tags", true) .setSynonymMapNames(Arrays.asList("asynonymMaps", "maps")).build()); assertListFieldEquals(expectedFields, actualFields); } @@ -79,77 +79,77 @@ public void hotelAnalyzerException() { "either analyzer or both searchAnalyzer and indexAnalyzer"); } - private void assertListFieldEquals(List expected, List actual) { + private void assertListFieldEquals(List expected, List actual) { assertEquals(expected.size(), actual.size()); for (int i = 0; i < expected.size(); i++) { TestHelpers.assertObjectEquals(expected.get(i), actual.get(i)); } } - private void assertExceptionMassageAndDataType(Exception exception, DataType dataType, String msg) { + private void assertExceptionMassageAndDataType(Exception exception, SearchFieldDataType dataType, String msg) { assertTrue(exception.getMessage().contains(msg)); if (dataType != null) { assertTrue(exception.getMessage().contains(dataType.toString())); } } - private List buildHotelCircularDependenciesModel() { - Field homeAddress = new ComplexField("homeAddress", false).setFields(buildHotelInAddress()).build(); - Field billingAddress = new ComplexField("billingAddress", false).setFields(buildHotelInAddress()).build(); + private List buildHotelCircularDependenciesModel() { + SearchField homeAddress = new ComplexSearchField("homeAddress", false).setFields(buildHotelInAddress()).build(); + SearchField billingAddress = new ComplexSearchField("billingAddress", false).setFields(buildHotelInAddress()).build(); return Arrays.asList(homeAddress, billingAddress); } - private List buildHotelInAddress() { - Field hotel = new ComplexField("hotel", false).build(); + private List buildHotelInAddress() { + SearchField hotel = new 
ComplexSearchField("hotel", false).build(); return Collections.singletonList(hotel); } - private List buildHotelFields() { - Field hotelId = new SimpleField("hotelId", DataType.EDM_STRING, false).setSortable(true) + private List buildHotelFields() { + SearchField hotelId = new SimpleField("hotelId", SearchFieldDataType.STRING, false).setSortable(true) .setKey(true).build(); - Field hotelName = new SearchableField("hotelName", false).setAnalyzer(AnalyzerName.fromString("en.lucene")) + SearchField hotelName = new SearchableField("hotelName", false).setAnalyzer(LexicalAnalyzerName.fromString("en.lucene")) .setSortable(true).build(); - Field description = new SimpleField("description", DataType.EDM_STRING, false).build(); - Field category = new SimpleField("category", DataType.EDM_STRING, false).build(); - Field tags = new SearchableField("tags", true).build(); - Field parkingIncluded = new SimpleField("parkingIncluded", DataType.EDM_BOOLEAN, false).build(); - Field smokingAllowed = new SimpleField("smokingAllowed", DataType.EDM_BOOLEAN, false).build(); - Field lastRenovationDate = new SimpleField("lastRenovationDate", DataType.EDM_DATE_TIME_OFFSET, false).build(); - Field rating = new SimpleField("rating", DataType.EDM_INT32, false).build(); - Field location = new SimpleField("location", DataType.EDM_GEOGRAPHY_POINT, false).build(); - Field address = new ComplexField("address", false) + SearchField description = new SimpleField("description", SearchFieldDataType.STRING, false).build(); + SearchField category = new SimpleField("category", SearchFieldDataType.STRING, false).build(); + SearchField tags = new SearchableField("tags", true).build(); + SearchField parkingIncluded = new SimpleField("parkingIncluded", SearchFieldDataType.BOOLEAN, false).build(); + SearchField smokingAllowed = new SimpleField("smokingAllowed", SearchFieldDataType.BOOLEAN, false).build(); + SearchField lastRenovationDate = new SimpleField("lastRenovationDate", 
SearchFieldDataType.DATE_TIME_OFFSET, false).build(); + SearchField rating = new SimpleField("rating", SearchFieldDataType.INT32, false).build(); + SearchField location = new SimpleField("location", SearchFieldDataType.GEOGRAPHY_POINT, false).build(); + SearchField address = new ComplexSearchField("address", false) .setFields(buildHotelAddressField()).build(); - Field rooms = new ComplexField("rooms", true).setFields(buildHotelRoomField()).build(); + SearchField rooms = new ComplexSearchField("rooms", true).setFields(buildHotelRoomField()).build(); return Arrays.asList(hotelId, hotelName, description, category, tags, parkingIncluded, smokingAllowed, lastRenovationDate, rating, location, address, rooms); } - private List buildHotelAddressField() { - Field streetAddress = new SimpleField("streetAddress", DataType.EDM_STRING, false).setFacetable(true) + private List buildHotelAddressField() { + SearchField streetAddress = new SimpleField("streetAddress", SearchFieldDataType.STRING, false).setFacetable(true) .setKey(true).build(); - Field city = new SearchableField("city", false).setFilterable(true).build(); - Field stateProvince = new SearchableField("stateProvince", false).build(); - Field country = new SearchableField("country", false) + SearchField city = new SearchableField("city", false).setFilterable(true).build(); + SearchField stateProvince = new SearchableField("stateProvince", false).build(); + SearchField country = new SearchableField("country", false) .setSynonymMapNames(Arrays.asList("America -> USA", "USA -> US")).build(); - Field postalCode = new SimpleField("postalCode", DataType.EDM_STRING, false).build(); + SearchField postalCode = new SimpleField("postalCode", SearchFieldDataType.STRING, false).build(); return Arrays.asList(streetAddress, city, stateProvince, country, postalCode); } - private List buildHotelRoomField() { - Field description = new SimpleField("description", DataType.EDM_STRING, false).build(); - Field descriptionFr = new 
SimpleField("descriptionFr", DataType.EDM_STRING, false).build(); - Field type = new SimpleField("type", DataType.EDM_STRING, false).build(); - Field baseRate = new SimpleField("baseRate", DataType.EDM_DOUBLE, false).build(); - Field bedOptions = new SimpleField("bedOptions", DataType.EDM_STRING, false).build(); - Field sleepsCount = new SimpleField("sleepsCount", DataType.EDM_INT32, false).build(); - Field smokingAllowed = new SimpleField("smokingAllowed", DataType.EDM_BOOLEAN, false).build(); - Field tags = new SimpleField("tags", DataType.EDM_STRING, true).build(); + private List buildHotelRoomField() { + SearchField description = new SimpleField("description", SearchFieldDataType.STRING, false).build(); + SearchField descriptionFr = new SimpleField("descriptionFr", SearchFieldDataType.STRING, false).build(); + SearchField type = new SimpleField("type", SearchFieldDataType.STRING, false).build(); + SearchField baseRate = new SimpleField("baseRate", SearchFieldDataType.DOUBLE, false).build(); + SearchField bedOptions = new SimpleField("bedOptions", SearchFieldDataType.STRING, false).build(); + SearchField sleepsCount = new SimpleField("sleepsCount", SearchFieldDataType.INT32, false).build(); + SearchField smokingAllowed = new SimpleField("smokingAllowed", SearchFieldDataType.BOOLEAN, false).build(); + SearchField tags = new SimpleField("tags", SearchFieldDataType.STRING, true).build(); return Arrays.asList(description, descriptionFr, type, baseRate, bedOptions, sleepsCount, smokingAllowed, tags); } - private List sortByFieldName(List fields) { - fields.sort(Comparator.comparing(Field::getName)); + private List sortByFieldName(List fields) { + fields.sort(Comparator.comparing(SearchField::getName)); return fields; } } diff --git a/sdk/search/azure-search-documents/src/test/java/com/azure/search/documents/IndexManagementSyncTests.java b/sdk/search/azure-search-documents/src/test/java/com/azure/search/documents/IndexManagementSyncTests.java index 
058e78242aed..cad31c09dfd7 100644 --- a/sdk/search/azure-search-documents/src/test/java/com/azure/search/documents/IndexManagementSyncTests.java +++ b/sdk/search/azure-search-documents/src/test/java/com/azure/search/documents/IndexManagementSyncTests.java @@ -6,18 +6,18 @@ import com.azure.core.http.rest.Response; import com.azure.core.util.Context; import com.azure.core.util.CoreUtils; -import com.azure.search.documents.models.AnalyzerName; import com.azure.search.documents.models.CorsOptions; -import com.azure.search.documents.models.DataType; -import com.azure.search.documents.models.Field; import com.azure.search.documents.models.GetIndexStatisticsResult; -import com.azure.search.documents.models.Index; +import com.azure.search.documents.models.LexicalAnalyzerName; import com.azure.search.documents.models.MagnitudeScoringFunction; import com.azure.search.documents.models.MagnitudeScoringParameters; import com.azure.search.documents.models.ScoringFunctionAggregation; import com.azure.search.documents.models.ScoringFunctionInterpolation; import com.azure.search.documents.models.ScoringProfile; import com.azure.search.documents.models.SearchErrorException; +import com.azure.search.documents.models.SearchField; +import com.azure.search.documents.models.SearchFieldDataType; +import com.azure.search.documents.models.SearchIndex; import com.azure.search.documents.models.Suggester; import com.azure.search.documents.models.SynonymMap; import org.junit.jupiter.api.Test; @@ -75,8 +75,8 @@ protected void afterTest() { @Test public void createIndexReturnsCorrectDefinition() { - Index index = createTestIndex(); - Index createdIndex = client.createIndex(index); + SearchIndex index = createTestIndex(); + SearchIndex createdIndex = client.createIndex(index); indexesToDelete.add(createdIndex.getName()); assertObjectEquals(index, createdIndex, true, "etag"); @@ -84,8 +84,8 @@ public void createIndexReturnsCorrectDefinition() { @Test public void 
createIndexReturnsCorrectDefinitionWithResponse() { - Index index = createTestIndex(); - Response createIndexResponse = client.createIndexWithResponse(index.setName("hotel2"), + SearchIndex index = createTestIndex(); + Response createIndexResponse = client.createIndexWithResponse(index.setName("hotel2"), generateRequestOptions(), Context.NONE); indexesToDelete.add(createIndexResponse.getValue().getName()); @@ -94,7 +94,7 @@ public void createIndexReturnsCorrectDefinitionWithResponse() { @Test public void createIndexReturnsCorrectDefaultValues() { - Index index = createTestIndex() + SearchIndex index = createTestIndex() .setCorsOptions(new CorsOptions().setAllowedOrigins("*")) .setScoringProfiles(Collections.singletonList(new ScoringProfile() .setName("MyProfile") @@ -106,7 +106,7 @@ public void createIndexReturnsCorrectDefaultValues() { .setBoost(2.0)) ) )); - Index indexResponse = client.createIndex(index); + SearchIndex indexResponse = client.createIndex(index); indexesToDelete.add(indexResponse.getName()); ScoringProfile scoringProfile = indexResponse.getScoringProfiles().get(0); @@ -119,12 +119,12 @@ public void createIndexReturnsCorrectDefaultValues() { @Test public void createIndexFailsWithUsefulMessageOnUserError() { String indexName = HOTEL_INDEX_NAME; - Index index = new Index() + SearchIndex index = new SearchIndex() .setName(indexName) .setFields(Collections.singletonList( - new Field() + new SearchField() .setName("HotelId") - .setType(DataType.EDM_STRING) + .setType(SearchFieldDataType.STRING) .setKey(false) )); String expectedMessage = String.format("The request is invalid. Details: index : Found 0 key fields in index '%s'. 
" @@ -142,21 +142,21 @@ public void createIndexFailsWithUsefulMessageOnUserError() { @Test public void getIndexReturnsCorrectDefinition() { - Index index = createTestIndex(); + SearchIndex index = createTestIndex(); client.createIndex(index); indexesToDelete.add(index.getName()); - Index createdIndex = client.getIndex(index.getName()); + SearchIndex createdIndex = client.getIndex(index.getName()); assertObjectEquals(index, createdIndex, true, "etag"); } @Test public void getIndexReturnsCorrectDefinitionWithResponse() { - Index index = createTestIndex(); + SearchIndex index = createTestIndex(); client.createIndex(index); indexesToDelete.add(index.getName()); - Response getIndexResponse = client.getIndexWithResponse(index.getName(), generateRequestOptions(), + Response getIndexResponse = client.getIndexWithResponse(index.getName(), generateRequestOptions(), Context.NONE); assertObjectEquals(index, getIndexResponse.getValue(), true, "etag"); } @@ -172,14 +172,14 @@ public void getIndexThrowsOnNotFound() { @Test public void deleteIndexIfNotChangedWorksOnlyOnCurrentResource() { - Index indexToCreate = createTestIndex(); + SearchIndex indexToCreate = createTestIndex(); // Create the resource in the search service - Index originalIndex = client.createOrUpdateIndexWithResponse(indexToCreate, false, false, null, Context.NONE) + SearchIndex originalIndex = client.createOrUpdateIndexWithResponse(indexToCreate, false, false, null, Context.NONE) .getValue(); // Update the resource, the eTag will be changed - Index updatedIndex = client.createOrUpdateIndexWithResponse(originalIndex + SearchIndex updatedIndex = client.createOrUpdateIndexWithResponse(originalIndex .setCorsOptions(new CorsOptions().setAllowedOrigins("https://test.com/")), false, false, null, Context.NONE) .getValue(); @@ -195,7 +195,7 @@ public void deleteIndexIfNotChangedWorksOnlyOnCurrentResource() { @Test public void deleteIndexIfExistsWorksOnlyWhenResourceExists() { - Index index = 
client.createOrUpdateIndexWithResponse(createTestIndex(), false, false, null, Context.NONE) + SearchIndex index = client.createOrUpdateIndexWithResponse(createTestIndex(), false, false, null, Context.NONE) .getValue(); client.deleteIndexWithResponse(index, true, null, Context.NONE); @@ -211,18 +211,18 @@ public void deleteIndexIfExistsWorksOnlyWhenResourceExists() { @Test public void deleteIndexIsIdempotent() { - Index index = new Index() + SearchIndex index = new SearchIndex() .setName(HOTEL_INDEX_NAME) .setFields(Collections.singletonList( - new Field() + new SearchField() .setName("HotelId") - .setType(DataType.EDM_STRING) + .setType(SearchFieldDataType.STRING) .setKey(true) )); Response deleteResponse = client.deleteIndexWithResponse(index, false, generateRequestOptions(), Context.NONE); assertEquals(HttpURLConnection.HTTP_NOT_FOUND, deleteResponse.getStatusCode()); - Response createResponse = client.createIndexWithResponse(index, generateRequestOptions(), Context.NONE); + Response createResponse = client.createIndexWithResponse(index, generateRequestOptions(), Context.NONE); assertEquals(HttpURLConnection.HTTP_CREATED, createResponse.getStatusCode()); // Delete the same index twice @@ -235,7 +235,7 @@ public void deleteIndexIsIdempotent() { @Test public void canCreateAndDeleteIndex() { - Index index = createTestIndex(); + SearchIndex index = createTestIndex(); client.createIndex(index); client.deleteIndex(index.getName()); @@ -244,9 +244,9 @@ public void canCreateAndDeleteIndex() { @Test public void canCreateAndListIndexes() { - Index index1 = createTestIndex(); + SearchIndex index1 = createTestIndex(); index1.setName("a" + index1.getName()); - Index index2 = createTestIndex(); + SearchIndex index2 = createTestIndex(); index2.setName("b" + index1.getName()); client.createIndex(index1); @@ -254,8 +254,8 @@ public void canCreateAndListIndexes() { client.createIndex(index2); indexesToDelete.add(index2.getName()); - PagedIterable actual = client.listIndexes(); - 
List result = actual.stream().collect(Collectors.toList()); + PagedIterable actual = client.listIndexes(); + List result = actual.stream().collect(Collectors.toList()); assertEquals(2, result.size()); assertEquals(index1.getName(), result.get(0).getName()); @@ -264,9 +264,9 @@ public void canCreateAndListIndexes() { @Test public void canListIndexesWithSelectedField() { - Index index1 = createTestIndex(); + SearchIndex index1 = createTestIndex(); index1.setName("a" + index1.getName()); - Index index2 = createTestIndex(); + SearchIndex index2 = createTestIndex(); index2.setName("b" + index1.getName()); client.createIndex(index1); @@ -274,9 +274,9 @@ public void canListIndexesWithSelectedField() { client.createIndex(index2); indexesToDelete.add(index2.getName()); - PagedIterable selectedFieldListResponse = client.listIndexes("name", + PagedIterable selectedFieldListResponse = client.listIndexes("name", generateRequestOptions(), Context.NONE); - List result = selectedFieldListResponse.stream().collect(Collectors.toList()); + List result = selectedFieldListResponse.stream().collect(Collectors.toList()); result.forEach(res -> { assertNotNull(res.getName()); @@ -303,20 +303,20 @@ public void canAddSynonymFieldProperty() { client.createSynonymMap(synonymMap); synonymMapsToDelete.add(synonymMap.getName()); - Index index = new Index() + SearchIndex index = new SearchIndex() .setName(HOTEL_INDEX_NAME) .setFields(Arrays.asList( - new Field() + new SearchField() .setName("HotelId") - .setType(DataType.EDM_STRING) + .setType(SearchFieldDataType.STRING) .setKey(true), - new Field() + new SearchField() .setName("HotelName") - .setType(DataType.EDM_STRING) + .setType(SearchFieldDataType.STRING) .setSynonymMaps(Collections.singletonList(synonymMapName)) )); - Index createdIndex = client.createIndex(index); + SearchIndex createdIndex = client.createIndex(index); indexesToDelete.add(createdIndex.getName()); List actualSynonym = index.getFields().get(1).getSynonymMaps(); @@ -335,34 
+335,34 @@ public void canUpdateSynonymFieldProperty() { synonymMapsToDelete.add(synonymMap.getName()); // Create an index - Index index = createTestIndex(); - Field hotelNameField = getFieldByName(index, "HotelName"); + SearchIndex index = createTestIndex(); + SearchField hotelNameField = getFieldByName(index, "HotelName"); hotelNameField.setSynonymMaps(Collections.singletonList(synonymMapName)); client.createIndex(index); indexesToDelete.add(index.getName()); // Update an existing index - Index existingIndex = client.getIndex(index.getName()); + SearchIndex existingIndex = client.getIndex(index.getName()); hotelNameField = getFieldByName(existingIndex, "HotelName"); hotelNameField.setSynonymMaps(Collections.emptyList()); - Index updatedIndex = client.createOrUpdateIndexWithResponse(existingIndex, + SearchIndex updatedIndex = client.createOrUpdateIndexWithResponse(existingIndex, true, false, generateRequestOptions(), Context.NONE).getValue(); assertObjectEquals(existingIndex, updatedIndex, true, "etag", "@odata.etag"); } @Test public void canUpdateIndexDefinition() { - Index fullFeaturedIndex = createTestIndex(); + SearchIndex fullFeaturedIndex = createTestIndex(); // Start out with no scoring profiles and different CORS options. - Index initialIndex = createTestIndex(); + SearchIndex initialIndex = createTestIndex(); initialIndex.setName(fullFeaturedIndex.getName()) .setScoringProfiles(new ArrayList<>()) .setDefaultScoringProfile(null) .setCorsOptions(initialIndex.getCorsOptions().setAllowedOrigins("*")); - Index index = client.createIndex(initialIndex); + SearchIndex index = client.createIndex(initialIndex); indexesToDelete.add(index.getName()); // Now update the index. 
@@ -373,12 +373,12 @@ public void canUpdateIndexDefinition() { .setDefaultScoringProfile(fullFeaturedIndex.getDefaultScoringProfile()) .setCorsOptions(index.getCorsOptions().setAllowedOrigins(allowedOrigins)); - Index updatedIndex = client.createOrUpdateIndex(index); + SearchIndex updatedIndex = client.createOrUpdateIndex(index); assertObjectEquals(fullFeaturedIndex, updatedIndex, true, "etag", "@odata.etag"); // Modify the fields on an existing index - Index existingIndex = client.getIndex(fullFeaturedIndex.getName()); + SearchIndex existingIndex = client.getIndex(fullFeaturedIndex.getName()); SynonymMap synonymMap = client.createSynonymMap(new SynonymMap() .setName(testResourceNamer.randomName("names", 32)) @@ -386,19 +386,19 @@ public void canUpdateIndexDefinition() { ); synonymMapsToDelete.add(synonymMap.getName()); - Field tagsField = getFieldByName(existingIndex, "Description_Custom"); + SearchField tagsField = getFieldByName(existingIndex, "Description_Custom"); tagsField.setHidden(true) - .setSearchAnalyzer(AnalyzerName.WHITESPACE) + .setSearchAnalyzer(LexicalAnalyzerName.WHITESPACE) .setSynonymMaps(Collections.singletonList(synonymMap.getName())); - Field hotelWebSiteField = new Field() + SearchField hotelWebSiteField = new SearchField() .setName("HotelWebsite") - .setType(DataType.EDM_STRING) + .setType(SearchFieldDataType.STRING) .setSearchable(Boolean.TRUE) .setFilterable(Boolean.TRUE); existingIndex.getFields().add(hotelWebSiteField); - Field hotelNameField = getFieldByName(existingIndex, "HotelName"); + SearchField hotelNameField = getFieldByName(existingIndex, "HotelName"); hotelNameField.setHidden(true); updatedIndex = client.createOrUpdateIndexWithResponse(existingIndex, @@ -409,36 +409,36 @@ public void canUpdateIndexDefinition() { @Test public void canUpdateSuggesterWithNewIndexFields() { - Index index = createTestIndex(); + SearchIndex index = createTestIndex(); client.createIndex(index); indexesToDelete.add(index.getName()); - Index 
existingIndex = client.getIndex(index.getName()); + SearchIndex existingIndex = client.getIndex(index.getName()); existingIndex.getFields().addAll(Arrays.asList( - new Field() + new SearchField() .setName("HotelAmenities") - .setType(DataType.EDM_STRING), - new Field() + .setType(SearchFieldDataType.STRING), + new SearchField() .setName("HotelRewards") - .setType(DataType.EDM_STRING))); + .setType(SearchFieldDataType.STRING))); existingIndex.setSuggesters(Collections.singletonList(new Suggester() .setName("Suggestion") .setSourceFields(Arrays.asList("HotelAmenities", "HotelRewards")) )); - Index updatedIndex = client.createOrUpdateIndexWithResponse(existingIndex, + SearchIndex updatedIndex = client.createOrUpdateIndexWithResponse(existingIndex, true, false, generateRequestOptions(), Context.NONE).getValue(); assertObjectEquals(existingIndex, updatedIndex, true, "etag", "@odata.etag"); } @Test public void createOrUpdateIndexThrowsWhenUpdatingSuggesterWithExistingIndexFields() { - Index index = createTestIndex(); + SearchIndex index = createTestIndex(); client.createIndex(index); indexesToDelete.add(index.getName()); - Index existingIndex = client.getIndex(index.getName()); + SearchIndex existingIndex = client.getIndex(index.getName()); String existingFieldName = "Category"; existingIndex.setSuggesters(Collections.singletonList(new Suggester() .setName("Suggestion") @@ -456,9 +456,9 @@ public void createOrUpdateIndexThrowsWhenUpdatingSuggesterWithExistingIndexField @Test public void createOrUpdateIndexCreatesWhenIndexDoesNotExist() { - Index expected = createTestIndex(); + SearchIndex expected = createTestIndex(); - Index actual = client.createOrUpdateIndex(expected); + SearchIndex actual = client.createOrUpdateIndex(expected); indexesToDelete.add(actual.getName()); assertObjectEquals(expected, actual, true, "etag"); @@ -466,16 +466,16 @@ public void createOrUpdateIndexCreatesWhenIndexDoesNotExist() { indexesToDelete.add(actual.getName()); 
assertObjectEquals(expected, actual, true, "etag"); - Index res = client.createOrUpdateIndex(expected.setName("hotel2")); + SearchIndex res = client.createOrUpdateIndex(expected.setName("hotel2")); indexesToDelete.add(res.getName()); assertEquals(expected.getName(), res.getName()); } @Test public void createOrUpdateIndexCreatesWhenIndexDoesNotExistWithResponse() { - Index expected = createTestIndex(); + SearchIndex expected = createTestIndex(); - Index actual = client.createOrUpdateIndexWithResponse(expected, false, false, + SearchIndex actual = client.createOrUpdateIndexWithResponse(expected, false, false, generateRequestOptions(), Context.NONE).getValue(); indexesToDelete.add(actual.getName()); assertObjectEquals(expected, actual, true, "etag"); @@ -485,7 +485,7 @@ public void createOrUpdateIndexCreatesWhenIndexDoesNotExistWithResponse() { indexesToDelete.add(actual.getName()); assertObjectEquals(expected, actual, true, "etag"); - Response createOrUpdateResponse = client.createOrUpdateIndexWithResponse(expected.setName("hotel2"), + Response createOrUpdateResponse = client.createOrUpdateIndexWithResponse(expected.setName("hotel2"), false, false, generateRequestOptions(), Context.NONE); indexesToDelete.add(createOrUpdateResponse.getValue().getName()); assertEquals(HttpURLConnection.HTTP_CREATED, createOrUpdateResponse.getStatusCode()); @@ -493,7 +493,7 @@ public void createOrUpdateIndexCreatesWhenIndexDoesNotExistWithResponse() { @Test public void createOrUpdateIndexIfNotExistsSucceedsOnNoResource() { - Index index = client.createOrUpdateIndexWithResponse(createTestIndex(), false, true, null, Context.NONE) + SearchIndex index = client.createOrUpdateIndexWithResponse(createTestIndex(), false, true, null, Context.NONE) .getValue(); indexesToDelete.add(index.getName()); @@ -503,12 +503,12 @@ public void createOrUpdateIndexIfNotExistsSucceedsOnNoResource() { @Test public void createOrUpdateIndexIfExistsSucceedsOnExistingResource() { - Index original = 
client.createOrUpdateIndexWithResponse(createTestIndex(), false, false, null, Context.NONE) + SearchIndex original = client.createOrUpdateIndexWithResponse(createTestIndex(), false, false, null, Context.NONE) .getValue(); String originalETag = original.getETag(); indexesToDelete.add(original.getName()); - Index updated = client.createOrUpdateIndexWithResponse(mutateCorsOptionsInIndex(original), false, false, null, + SearchIndex updated = client.createOrUpdateIndexWithResponse(mutateCorsOptionsInIndex(original), false, false, null, Context.NONE).getValue(); String updatedETag = updated.getETag(); @@ -518,12 +518,12 @@ public void createOrUpdateIndexIfExistsSucceedsOnExistingResource() { @Test public void createOrUpdateIndexIfNotChangedSucceedsWhenResourceUnchanged() { - Index original = client.createOrUpdateIndexWithResponse(createTestIndex(), false, false, null, Context.NONE) + SearchIndex original = client.createOrUpdateIndexWithResponse(createTestIndex(), false, false, null, Context.NONE) .getValue(); String originalETag = original.getETag(); indexesToDelete.add(original.getName()); - Index updated = client.createOrUpdateIndexWithResponse(mutateCorsOptionsInIndex(original), false, true, null, + SearchIndex updated = client.createOrUpdateIndexWithResponse(mutateCorsOptionsInIndex(original), false, true, null, Context.NONE).getValue(); String updatedETag = updated.getETag(); @@ -534,12 +534,12 @@ public void createOrUpdateIndexIfNotChangedSucceedsWhenResourceUnchanged() { @Test public void createOrUpdateIndexIfNotChangedFailsWhenResourceChanged() { - Index original = client.createOrUpdateIndexWithResponse(createTestIndex(), false, false, null, Context.NONE) + SearchIndex original = client.createOrUpdateIndexWithResponse(createTestIndex(), false, false, null, Context.NONE) .getValue(); String originalETag = original.getETag(); indexesToDelete.add(original.getName()); - Index updated = client.createOrUpdateIndexWithResponse(mutateCorsOptionsInIndex(original), false, 
true, null, + SearchIndex updated = client.createOrUpdateIndexWithResponse(mutateCorsOptionsInIndex(original), false, true, null, Context.NONE).getValue(); String updatedETag = updated.getETag(); @@ -557,7 +557,7 @@ public void createOrUpdateIndexIfNotChangedFailsWhenResourceChanged() { @Test public void canCreateAndGetIndexStats() { - Index index = createTestIndex(); + SearchIndex index = createTestIndex(); client.createOrUpdateIndex(index); indexesToDelete.add(index.getName()); @@ -568,7 +568,7 @@ public void canCreateAndGetIndexStats() { @Test public void canCreateAndGetIndexStatsWithResponse() { - Index index = createTestIndex(); + SearchIndex index = createTestIndex(); client.createOrUpdateIndex(index); indexesToDelete.add(index.getName()); @@ -578,12 +578,12 @@ public void canCreateAndGetIndexStatsWithResponse() { assertEquals(0, indexStatisticsResponse.getValue().getStorageSize()); } - Index mutateCorsOptionsInIndex(Index index) { + SearchIndex mutateCorsOptionsInIndex(SearchIndex index) { index.getCorsOptions().setAllowedOrigins("*"); return index; } - Field getFieldByName(Index index, String name) { + SearchField getFieldByName(SearchIndex index, String name) { return index.getFields() .stream() .filter(f -> f.getName().equals(name)) diff --git a/sdk/search/azure-search-documents/src/test/java/com/azure/search/documents/IndexersManagementSyncTests.java b/sdk/search/azure-search-documents/src/test/java/com/azure/search/documents/IndexersManagementSyncTests.java index df98cfdf507e..1ae45bc1d2c0 100644 --- a/sdk/search/azure-search-documents/src/test/java/com/azure/search/documents/IndexersManagementSyncTests.java +++ b/sdk/search/azure-search-documents/src/test/java/com/azure/search/documents/IndexersManagementSyncTests.java @@ -7,16 +7,9 @@ import com.azure.core.http.rest.Response; import com.azure.core.util.Context; import com.azure.core.util.CoreUtils; -import com.azure.search.documents.models.DataSource; -import 
com.azure.search.documents.models.DataType; -import com.azure.search.documents.models.Field; import com.azure.search.documents.models.FieldMapping; -import com.azure.search.documents.models.Index; -import com.azure.search.documents.models.Indexer; -import com.azure.search.documents.models.IndexerExecutionInfo; import com.azure.search.documents.models.IndexerExecutionResult; import com.azure.search.documents.models.IndexerExecutionStatus; -import com.azure.search.documents.models.IndexerLimits; import com.azure.search.documents.models.IndexerStatus; import com.azure.search.documents.models.IndexingParameters; import com.azure.search.documents.models.IndexingSchedule; @@ -25,8 +18,15 @@ import com.azure.search.documents.models.OutputFieldMappingEntry; import com.azure.search.documents.models.RequestOptions; import com.azure.search.documents.models.SearchErrorException; -import com.azure.search.documents.models.Skill; -import com.azure.search.documents.models.Skillset; +import com.azure.search.documents.models.SearchField; +import com.azure.search.documents.models.SearchFieldDataType; +import com.azure.search.documents.models.SearchIndex; +import com.azure.search.documents.models.SearchIndexer; +import com.azure.search.documents.models.SearchIndexerDataSource; +import com.azure.search.documents.models.SearchIndexerLimits; +import com.azure.search.documents.models.SearchIndexerSkill; +import com.azure.search.documents.models.SearchIndexerSkillset; +import com.azure.search.documents.models.SearchIndexerStatus; import com.azure.search.documents.test.CustomQueryPipelinePolicy; import io.netty.handler.codec.http.HttpResponseStatus; import org.junit.jupiter.api.Test; @@ -65,7 +65,7 @@ public class IndexersManagementSyncTests extends SearchTestBase { private SearchServiceClient client; private String createDataSource() { - DataSource dataSource = createTestSqlDataSourceObject(); + SearchIndexerDataSource dataSource = createTestSqlDataSourceObject(); 
client.createOrUpdateDataSource(dataSource); dataSourcesToDelete.add(dataSource.getName()); @@ -73,16 +73,16 @@ private String createDataSource() { } private String createIndex() { - Index index = createTestIndexForLiveDatasource(); + SearchIndex index = createTestIndexForLiveDatasource(); client.createIndex(index); indexesToDelete.add(index.getName()); return index.getName(); } - private Indexer createTestDataSourceAndIndexer() { + private SearchIndexer createTestDataSourceAndIndexer() { // Create the indexer object - Indexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource()); + SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource()); client.createIndexer(indexer); indexersToDelete.add(indexer.getName()); @@ -94,9 +94,9 @@ private Indexer createTestDataSourceAndIndexer() { * * @param indexer the indexer to be created */ - private void createAndValidateIndexer(Indexer indexer) { + private void createAndValidateIndexer(SearchIndexer indexer) { // create this indexer in the service - Indexer indexerResponse = client.createIndexer(indexer); + SearchIndexer indexerResponse = client.createIndexer(indexer); indexersToDelete.add(indexerResponse.getName()); // verify the returned updated indexer is as expected @@ -133,14 +133,14 @@ protected void afterTest() { @Test public void createIndexerReturnsCorrectDefinition() { - Indexer expectedIndexer = createBaseTestIndexerObject(createIndex(), createDataSource()) + SearchIndexer expectedIndexer = createBaseTestIndexerObject(createIndex(), createDataSource()) .setIsDisabled(true) .setParameters(new IndexingParameters() .setBatchSize(50) .setMaxFailedItems(10) .setMaxFailedItemsPerBatch(10)); - Indexer actualIndexer = client.createIndexer(expectedIndexer); + SearchIndexer actualIndexer = client.createIndexer(expectedIndexer); indexersToDelete.add(actualIndexer.getName()); expectedIndexer.setParameters(new IndexingParameters() @@ -156,18 +156,18 @@ public void 
canCreateAndListIndexers() { String dataSourceName = createDataSource(); // Create two indexers - Indexer indexer1 = createBaseTestIndexerObject(indexName, dataSourceName); + SearchIndexer indexer1 = createBaseTestIndexerObject(indexName, dataSourceName); indexer1.setName("a" + indexer1.getName()); - Indexer indexer2 = createBaseTestIndexerObject(indexName, dataSourceName); + SearchIndexer indexer2 = createBaseTestIndexerObject(indexName, dataSourceName); indexer2.setName("b" + indexer2.getName()); client.createIndexer(indexer1); indexersToDelete.add(indexer1.getName()); client.createIndexer(indexer2); indexersToDelete.add(indexer2.getName()); - Iterator indexers = client.listIndexers().iterator(); + Iterator indexers = client.listIndexers().iterator(); - Indexer returnedIndexer = indexers.next(); + SearchIndexer returnedIndexer = indexers.next(); assertObjectEquals(indexer1, returnedIndexer, true, "etag"); returnedIndexer = indexers.next(); assertObjectEquals(indexer2, returnedIndexer, true, "etag"); @@ -179,18 +179,18 @@ public void canCreateAndListIndexerNames() { String indexName = createIndex(); String dataSourceName = createDataSource(); - Indexer indexer1 = createBaseTestIndexerObject(indexName, dataSourceName); + SearchIndexer indexer1 = createBaseTestIndexerObject(indexName, dataSourceName); indexer1.setName("a" + indexer1.getName()); - Indexer indexer2 = createBaseTestIndexerObject(indexName, dataSourceName); + SearchIndexer indexer2 = createBaseTestIndexerObject(indexName, dataSourceName); indexer2.setName("b" + indexer2.getName()); client.createIndexer(indexer1); indexersToDelete.add(indexer1.getName()); client.createIndexer(indexer2); indexersToDelete.add(indexer2.getName()); - Iterator indexersRes = client.listIndexers("name", generateRequestOptions(), Context.NONE).iterator(); + Iterator indexersRes = client.listIndexers("name", generateRequestOptions(), Context.NONE).iterator(); - Indexer actualIndexer = indexersRes.next(); + SearchIndexer 
actualIndexer = indexersRes.next(); assertEquals(indexer1.getName(), actualIndexer.getName()); assertAllIndexerFieldsNullExceptName(actualIndexer); @@ -203,7 +203,7 @@ public void canCreateAndListIndexerNames() { @Test public void createIndexerFailsWithUsefulMessageOnUserError() { - Indexer indexer = createBaseTestIndexerObject(createIndex(), "thisdatasourcedoesnotexist"); + SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), "thisdatasourcedoesnotexist"); assertHttpResponseException( () -> client.createIndexer(indexer), @@ -213,20 +213,20 @@ public void createIndexerFailsWithUsefulMessageOnUserError() { @Test public void canResetIndexerAndGetIndexerStatus() { - Indexer indexer = createTestDataSourceAndIndexer(); + SearchIndexer indexer = createTestDataSourceAndIndexer(); client.resetIndexer(indexer.getName()); - IndexerExecutionInfo indexerStatus = client.getIndexerStatus(indexer.getName()); + SearchIndexerStatus indexerStatus = client.getIndexerStatus(indexer.getName()); assertEquals(IndexerStatus.RUNNING, indexerStatus.getStatus()); assertEquals(IndexerExecutionStatus.RESET, indexerStatus.getLastResult().getStatus()); } @Test public void canResetIndexerAndGetIndexerStatusWithResponse() { - Indexer indexer = createTestDataSourceAndIndexer(); + SearchIndexer indexer = createTestDataSourceAndIndexer(); client.resetIndexerWithResponse(indexer.getName(), generateRequestOptions(), Context.NONE); - IndexerExecutionInfo indexerStatusResponse = client.getIndexerStatusWithResponse(indexer.getName(), + SearchIndexerStatus indexerStatusResponse = client.getIndexerStatusWithResponse(indexer.getName(), generateRequestOptions(), Context.NONE).getValue(); assertEquals(IndexerStatus.RUNNING, indexerStatusResponse.getStatus()); assertEquals(IndexerExecutionStatus.RESET, indexerStatusResponse.getLastResult().getStatus()); @@ -234,18 +234,18 @@ public void canResetIndexerAndGetIndexerStatusWithResponse() { @Test public void canRunIndexer() { - Indexer indexer = 
createTestDataSourceAndIndexer(); + SearchIndexer indexer = createTestDataSourceAndIndexer(); client.runIndexer(indexer.getName()); - IndexerExecutionInfo indexerExecutionInfo = client.getIndexerStatus(indexer.getName()); + SearchIndexerStatus indexerExecutionInfo = client.getIndexerStatus(indexer.getName()); assertEquals(IndexerStatus.RUNNING, indexerExecutionInfo.getStatus()); } @Test public void canRunIndexerWithResponse() { - Indexer indexer = createTestDataSourceAndIndexer(); + SearchIndexer indexer = createTestDataSourceAndIndexer(); Response response = client.runIndexerWithResponse(indexer.getName(), generateRequestOptions(), Context.NONE); - IndexerExecutionInfo indexerExecutionInfo = client.getIndexerStatus(indexer.getName()); + SearchIndexerStatus indexerExecutionInfo = client.getIndexerStatus(indexer.getName()); assertEquals(HttpURLConnection.HTTP_ACCEPTED, response.getStatusCode()); assertEquals(IndexerStatus.RUNNING, indexerExecutionInfo.getStatus()); @@ -258,12 +258,12 @@ public void canRunIndexerAndGetIndexerStatus() { // returning a well-known mock response client = getSearchServiceClientBuilder(MOCK_STATUS_PIPELINE_POLICY).buildClient(); - Indexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource()); + SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource()); client.createIndexer(indexer); indexersToDelete.add(indexer.getName()); - IndexerExecutionInfo indexerExecutionInfo = client.getIndexerStatus(indexer.getName()); + SearchIndexerStatus indexerExecutionInfo = client.getIndexerStatus(indexer.getName()); assertEquals(IndexerStatus.RUNNING, indexerExecutionInfo.getStatus()); Response indexerRunResponse = client.runIndexerWithResponse(indexer.getName(), new RequestOptions(), @@ -272,7 +272,7 @@ public void canRunIndexerAndGetIndexerStatus() { indexerExecutionInfo = client.getIndexerStatus(indexer.getName()); - assertValidIndexerExecutionInfo(indexerExecutionInfo); + 
assertValidSearchIndexerStatus(indexerExecutionInfo); } @Test @@ -280,13 +280,13 @@ public void canUpdateIndexer() { String indexName = createIndex(); String dataSourceName = createDataSource(); - Indexer initial = createBaseTestIndexerObject(indexName, dataSourceName).setIsDisabled(true); + SearchIndexer initial = createBaseTestIndexerObject(indexName, dataSourceName).setIsDisabled(true); client.createIndexer(initial); indexersToDelete.add(initial.getName()); - Indexer updated = createIndexerWithDifferentDescription(indexName, dataSourceName) + SearchIndexer updated = createIndexerWithDifferentDescription(indexName, dataSourceName) .setName(initial.getName()); - Indexer indexerResponse = client.createOrUpdateIndexer(updated); + SearchIndexer indexerResponse = client.createOrUpdateIndexer(updated); // verify the returned updated indexer is as expected setSameStartTime(updated, indexerResponse); @@ -298,13 +298,13 @@ public void canUpdateIndexerFieldMapping() { String indexName = createIndex(); String dataSourceName = createDataSource(); - Indexer initial = createBaseTestIndexerObject(indexName, dataSourceName).setIsDisabled(true); + SearchIndexer initial = createBaseTestIndexerObject(indexName, dataSourceName).setIsDisabled(true); client.createIndexer(initial); indexersToDelete.add(initial.getName()); - Indexer updated = createIndexerWithDifferentFieldMapping(indexName, dataSourceName) + SearchIndexer updated = createIndexerWithDifferentFieldMapping(indexName, dataSourceName) .setName(initial.getName()); - Indexer indexerResponse = client.createOrUpdateIndexer(updated); + SearchIndexer indexerResponse = client.createOrUpdateIndexer(updated); // verify the returned updated indexer is as expected setSameStartTime(updated, indexerResponse); @@ -313,7 +313,7 @@ public void canUpdateIndexerFieldMapping() { @Test public void canCreateIndexerWithFieldMapping() { - Indexer indexer = createIndexerWithDifferentFieldMapping(createIndex(), createDataSource()); + SearchIndexer 
indexer = createIndexerWithDifferentFieldMapping(createIndex(), createDataSource()); createAndValidateIndexer(indexer); } @@ -322,13 +322,13 @@ public void canUpdateIndexerDisabled() { String indexName = createIndex(); String dataSourceName = createDataSource(); - Indexer initial = createBaseTestIndexerObject(indexName, dataSourceName).setIsDisabled(true); + SearchIndexer initial = createBaseTestIndexerObject(indexName, dataSourceName).setIsDisabled(true); client.createIndexer(initial); indexersToDelete.add(initial.getName()); - Indexer updated = createDisabledIndexer(indexName, dataSourceName) + SearchIndexer updated = createDisabledIndexer(indexName, dataSourceName) .setName(initial.getName()); - Indexer indexerResponse = client.createOrUpdateIndexer(updated); + SearchIndexer indexerResponse = client.createOrUpdateIndexer(updated); setSameStartTime(updated, indexerResponse); assertObjectEquals(updated, indexerResponse, true, "etag"); @@ -339,13 +339,13 @@ public void canUpdateIndexerSchedule() { String indexName = createIndex(); String dataSourceName = createDataSource(); - Indexer initial = createBaseTestIndexerObject(indexName, dataSourceName).setIsDisabled(true); + SearchIndexer initial = createBaseTestIndexerObject(indexName, dataSourceName).setIsDisabled(true); client.createIndexer(initial); indexersToDelete.add(initial.getName()); - Indexer updated = createIndexerWithDifferentSchedule(indexName, dataSourceName) + SearchIndexer updated = createIndexerWithDifferentSchedule(indexName, dataSourceName) .setName(initial.getName()); - Indexer indexerResponse = client.createOrUpdateIndexer(updated); + SearchIndexer indexerResponse = client.createOrUpdateIndexer(updated); setSameStartTime(updated, indexerResponse); assertObjectEquals(updated, indexerResponse, true, "etag"); @@ -353,7 +353,7 @@ public void canUpdateIndexerSchedule() { @Test public void canCreateIndexerWithSchedule() { - Indexer indexer = createIndexerWithDifferentSchedule(createIndex(), 
createDataSource()); + SearchIndexer indexer = createIndexerWithDifferentSchedule(createIndex(), createDataSource()); createAndValidateIndexer(indexer); } @@ -362,12 +362,12 @@ public void canUpdateIndexerBatchSizeMaxFailedItems() { String indexName = createIndex(); String dataSourceName = createDataSource(); - Indexer initial = createBaseTestIndexerObject(indexName, dataSourceName).setIsDisabled(true); + SearchIndexer initial = createBaseTestIndexerObject(indexName, dataSourceName).setIsDisabled(true); client.createIndexer(initial); indexersToDelete.add(initial.getName()); - Indexer updated = createIndexerWithDifferentIndexingParameters(initial); - Indexer indexerResponse = client.createOrUpdateIndexer(updated); + SearchIndexer updated = createIndexerWithDifferentIndexingParameters(initial); + SearchIndexer indexerResponse = client.createOrUpdateIndexer(updated); setSameStartTime(updated, indexerResponse); assertObjectEquals(updated, indexerResponse, true, "etag"); @@ -375,8 +375,8 @@ public void canUpdateIndexerBatchSizeMaxFailedItems() { @Test public void canCreateIndexerWithBatchSizeMaxFailedItems() { - Indexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource()); - Indexer updatedExpected = createIndexerWithDifferentIndexingParameters(indexer); + SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource()); + SearchIndexer updatedExpected = createIndexerWithDifferentIndexingParameters(indexer); createAndValidateIndexer(updatedExpected); } @@ -389,13 +389,13 @@ public void canUpdateIndexerBlobParams() { String dataSourceName = client.createDataSource(createBlobDataSource()).getName(); dataSourcesToDelete.add(dataSourceName); - Indexer initial = createBaseTestIndexerObject(indexName, dataSourceName).setIsDisabled(true); + SearchIndexer initial = createBaseTestIndexerObject(indexName, dataSourceName).setIsDisabled(true); client.createIndexer(initial); indexersToDelete.add(initial.getName()); - Indexer updated = 
createIndexerWithStorageConfig(indexName, dataSourceName) + SearchIndexer updated = createIndexerWithStorageConfig(indexName, dataSourceName) .setName(initial.getName()); - Indexer indexerResponse = client.createOrUpdateIndexer(updated); + SearchIndexer indexerResponse = client.createOrUpdateIndexer(updated); setSameStartTime(updated, indexerResponse); assertObjectEquals(updated, indexerResponse, true, "etag"); @@ -406,21 +406,21 @@ public void canUpdateIndexerBlobParams() { @Test public void canCreateIndexerWithBlobParams() { // Create the needed Azure blob resources and data source object - DataSource blobDataSource = createBlobDataSource(); + SearchIndexerDataSource blobDataSource = createBlobDataSource(); // Create the data source within the search service - DataSource dataSource = client.createOrUpdateDataSource(blobDataSource); + SearchIndexerDataSource dataSource = client.createOrUpdateDataSource(blobDataSource); dataSourcesToDelete.add(dataSource.getName()); // modify the indexer's blob params - Indexer indexer = createIndexerWithStorageConfig(createIndex(), dataSource.getName()); + SearchIndexer indexer = createIndexerWithStorageConfig(createIndex(), dataSource.getName()); createAndValidateIndexer(indexer); } @Test public void canCreateAndDeleteIndexer() { - Indexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource()); + SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource()); client.createIndexer(indexer); client.deleteIndexer(indexer.getName()); @@ -429,7 +429,7 @@ public void canCreateAndDeleteIndexer() { @Test public void canCreateAndDeleteIndexerWithResponse() { - Indexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource()); + SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource()); client.createIndexerWithResponse(indexer, new RequestOptions(), Context.NONE); client.deleteIndexerWithResponse(indexer, false, new RequestOptions(), Context.NONE); @@ 
-439,7 +439,7 @@ public void canCreateAndDeleteIndexerWithResponse() { @Test public void deleteIndexerIsIdempotent() { // Create the indexer object - Indexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource()); + SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource()); // Try delete before the indexer even exists. Response result = client.deleteIndexerWithResponse(indexer, false, generateRequestOptions(), @@ -459,11 +459,11 @@ public void deleteIndexerIsIdempotent() { @Test public void canCreateAndGetIndexer() { - Indexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource()); + SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource()); client.createIndexer(indexer); indexersToDelete.add(indexer.getName()); - Indexer indexerResult = client.getIndexer(indexer.getName()); + SearchIndexer indexerResult = client.getIndexer(indexer.getName()); assertObjectEquals(indexer, indexerResult, true, "etag"); indexerResult = client.getIndexerWithResponse(indexer.getName(), generateRequestOptions(), Context.NONE) @@ -481,8 +481,8 @@ public void getIndexerThrowsOnNotFound() { @Test public void createOrUpdateIndexerIfNotExistsSucceedsOnNoResource() { - Indexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource()); - Indexer created = client.createOrUpdateIndexerWithResponse(indexer, true, null, Context.NONE) + SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource()); + SearchIndexer created = client.createOrUpdateIndexerWithResponse(indexer, true, null, Context.NONE) .getValue(); indexersToDelete.add(created.getName()); @@ -491,8 +491,8 @@ public void createOrUpdateIndexerIfNotExistsSucceedsOnNoResource() { @Test public void deleteIndexerIfExistsWorksOnlyWhenResourceExists() { - Indexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource()); - Indexer created = client.createOrUpdateIndexerWithResponse(indexer, false, 
null, Context.NONE) + SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource()); + SearchIndexer created = client.createOrUpdateIndexerWithResponse(indexer, false, null, Context.NONE) .getValue(); client.deleteIndexerWithResponse(created, true, null, Context.NONE); @@ -508,11 +508,11 @@ public void deleteIndexerIfExistsWorksOnlyWhenResourceExists() { @Test public void deleteIndexerIfNotChangedWorksOnlyOnCurrentResource() { - Indexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource()); - Indexer stale = client.createOrUpdateIndexerWithResponse(indexer, true, null, Context.NONE) + SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource()); + SearchIndexer stale = client.createOrUpdateIndexerWithResponse(indexer, true, null, Context.NONE) .getValue(); - Indexer updated = client.createOrUpdateIndexerWithResponse(stale, false, null, Context.NONE) + SearchIndexer updated = client.createOrUpdateIndexerWithResponse(stale, false, null, Context.NONE) .getValue(); try { @@ -527,13 +527,13 @@ public void deleteIndexerIfNotChangedWorksOnlyOnCurrentResource() { @Test public void updateIndexerIfExistsSucceedsOnExistingResource() { - Indexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource()); - Indexer original = client.createOrUpdateIndexerWithResponse(indexer, false, null, Context.NONE) + SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource()); + SearchIndexer original = client.createOrUpdateIndexerWithResponse(indexer, false, null, Context.NONE) .getValue(); String originalETag = original.getETag(); indexersToDelete.add(original.getName()); - Indexer updated = client.createOrUpdateIndexerWithResponse(original.setDescription("ABrandNewDescription"), + SearchIndexer updated = client.createOrUpdateIndexerWithResponse(original.setDescription("ABrandNewDescription"), false, null, Context.NONE) .getValue(); String updatedETag = updated.getETag(); @@ 
-545,13 +545,13 @@ public void updateIndexerIfExistsSucceedsOnExistingResource() { @Test public void updateIndexerIfNotChangedFailsWhenResourceChanged() { - Indexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource()); - Indexer original = client.createOrUpdateIndexerWithResponse(indexer, false, null, Context.NONE) + SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource()); + SearchIndexer original = client.createOrUpdateIndexerWithResponse(indexer, false, null, Context.NONE) .getValue(); String originalETag = original.getETag(); indexersToDelete.add(original.getName()); - Indexer updated = client.createOrUpdateIndexerWithResponse(original.setDescription("ABrandNewDescription"), + SearchIndexer updated = client.createOrUpdateIndexerWithResponse(original.setDescription("ABrandNewDescription"), true, null, Context.NONE) .getValue(); String updatedETag = updated.getETag(); @@ -572,13 +572,13 @@ public void updateIndexerIfNotChangedFailsWhenResourceChanged() { @Test public void updateIndexerIfNotChangedSucceedsWhenResourceUnchanged() { - Indexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource()); - Indexer original = client.createOrUpdateIndexerWithResponse(indexer, false, null, Context.NONE) + SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource()); + SearchIndexer original = client.createOrUpdateIndexerWithResponse(indexer, false, null, Context.NONE) .getValue(); String originalETag = original.getETag(); indexersToDelete.add(original.getName()); - Indexer updated = client.createOrUpdateIndexerWithResponse(original.setDescription("ABrandNewDescription"), + SearchIndexer updated = client.createOrUpdateIndexerWithResponse(original.setDescription("ABrandNewDescription"), true, null, Context.NONE) .getValue(); String updatedETag = updated.getETag(); @@ -594,16 +594,16 @@ public void canUpdateIndexerSkillset() { String indexName = createIndex(); String dataSourceName = 
createDataSource(); - Indexer initial = createBaseTestIndexerObject(indexName, dataSourceName).setIsDisabled(true); + SearchIndexer initial = createBaseTestIndexerObject(indexName, dataSourceName).setIsDisabled(true); client.createIndexer(initial); indexersToDelete.add(initial.getName()); - Skillset skillset = createSkillsetObject(); + SearchIndexerSkillset skillset = createSkillsetObject(); client.createSkillset(skillset); skillsetsToDelete.add(skillset.getName()); - Indexer updated = createIndexerWithDifferentSkillset(indexName, dataSourceName, skillset.getName()) + SearchIndexer updated = createIndexerWithDifferentSkillset(indexName, dataSourceName, skillset.getName()) .setName(initial.getName()); - Indexer indexerResponse = client.createOrUpdateIndexer(updated); + SearchIndexer indexerResponse = client.createOrUpdateIndexer(updated); setSameStartTime(updated, indexerResponse); assertObjectEquals(updated, indexerResponse, true, "etag"); @@ -611,10 +611,10 @@ public void canUpdateIndexerSkillset() { @Test public void canCreateIndexerWithSkillset() { - Skillset skillset = client.createSkillset(createSkillsetObject()); + SearchIndexerSkillset skillset = client.createSkillset(createSkillsetObject()); skillsetsToDelete.add(skillset.getName()); - Indexer indexer = createIndexerWithDifferentSkillset(createIndex(), createDataSource(), skillset.getName()); + SearchIndexer indexer = createIndexerWithDifferentSkillset(createIndex(), createDataSource(), skillset.getName()); createAndValidateIndexer(indexer); } @@ -624,7 +624,7 @@ public void canCreateIndexerWithSkillset() { * * @return the newly created skillset object */ - Skillset createSkillsetObject() { + SearchIndexerSkillset createSkillsetObject() { List inputs = Arrays.asList( new InputFieldMappingEntry() .setName("url") @@ -640,7 +640,7 @@ Skillset createSkillsetObject() { .setTargetName("mytext") ); - List skills = Collections.singletonList( + List skills = Collections.singletonList( new OcrSkill() 
.setShouldDetectOrientation(true) .setName("myocr") @@ -649,14 +649,14 @@ Skillset createSkillsetObject() { .setInputs(inputs) .setOutputs(outputs) ); - return new Skillset() + return new SearchIndexerSkillset() .setName(testResourceNamer.randomName("ocr-skillset", 32)) .setDescription("Skillset for testing default configuration") .setSkills(skills); } - Indexer createBaseTestIndexerObject(String targetIndexName, String dataSourceName) { - return new Indexer() + SearchIndexer createBaseTestIndexerObject(String targetIndexName, String dataSourceName) { + return new SearchIndexer() .setName(testResourceNamer.randomName("indexer", 32)) .setTargetIndexName(targetIndexName) .setDataSourceName(dataSourceName) @@ -668,23 +668,23 @@ Indexer createBaseTestIndexerObject(String targetIndexName, String dataSourceNam * * @return the newly created Index object */ - Index createTestIndexForLiveDatasource() { - return new Index() + SearchIndex createTestIndexForLiveDatasource() { + return new SearchIndex() .setName(testResourceNamer.randomName(IndexersManagementSyncTests.TARGET_INDEX_NAME, 32)) .setFields(Arrays.asList( - new Field() + new SearchField() .setName("county_name") - .setType(DataType.EDM_STRING) + .setType(SearchFieldDataType.STRING) .setSearchable(Boolean.FALSE) .setFilterable(Boolean.TRUE), - new Field() + new SearchField() .setName("state") - .setType(DataType.EDM_STRING) + .setType(SearchFieldDataType.STRING) .setSearchable(Boolean.TRUE) .setFilterable(Boolean.TRUE), - new Field() + new SearchField() .setName("feature_id") - .setType(DataType.EDM_STRING) + .setType(SearchFieldDataType.STRING) .setKey(Boolean.TRUE) .setSearchable(Boolean.TRUE) .setFilterable(Boolean.FALSE))); @@ -696,7 +696,7 @@ Index createTestIndexForLiveDatasource() { * * @return the created indexer */ - Indexer createIndexerWithDifferentDescription(String targetIndexName, String dataSourceName) { + SearchIndexer createIndexerWithDifferentDescription(String targetIndexName, String 
dataSourceName) { // create a new indexer object with a modified description return createBaseTestIndexerObject(targetIndexName, dataSourceName) .setDescription("somethingdifferent"); @@ -707,9 +707,9 @@ Indexer createIndexerWithDifferentDescription(String targetIndexName, String dat * * @return the created indexer */ - Indexer createIndexerWithDifferentFieldMapping(String targetIndexName, String dataSourceName) { + SearchIndexer createIndexerWithDifferentFieldMapping(String targetIndexName, String dataSourceName) { // create a new indexer object - Indexer indexer = createBaseTestIndexerObject(targetIndexName, dataSourceName); + SearchIndexer indexer = createBaseTestIndexerObject(targetIndexName, dataSourceName); // Create field mappings List fieldMappings = Collections.singletonList(new FieldMapping() @@ -727,9 +727,9 @@ Indexer createIndexerWithDifferentFieldMapping(String targetIndexName, String da * * @return the created indexer */ - Indexer createDisabledIndexer(String targetIndexName, String dataSourceName) { + SearchIndexer createDisabledIndexer(String targetIndexName, String dataSourceName) { // create a new indexer object - Indexer indexer = createBaseTestIndexerObject(targetIndexName, dataSourceName); + SearchIndexer indexer = createBaseTestIndexerObject(targetIndexName, dataSourceName); // modify it indexer.setIsDisabled(false); @@ -742,9 +742,9 @@ Indexer createDisabledIndexer(String targetIndexName, String dataSourceName) { * * @return the created indexer */ - Indexer createIndexerWithDifferentSchedule(String targetIndexName, String dataSourceName) { + SearchIndexer createIndexerWithDifferentSchedule(String targetIndexName, String dataSourceName) { // create a new indexer object - Indexer indexer = createBaseTestIndexerObject(targetIndexName, dataSourceName); + SearchIndexer indexer = createBaseTestIndexerObject(targetIndexName, dataSourceName); IndexingSchedule is = new IndexingSchedule() .setInterval(Duration.ofMinutes(10)); @@ -760,7 +760,7 @@ 
Indexer createIndexerWithDifferentSchedule(String targetIndexName, String dataSo * * @return the created indexer */ - Indexer createIndexerWithDifferentSkillset(String targetIndexName, String dataSourceName, String skillsetName) { + SearchIndexer createIndexerWithDifferentSkillset(String targetIndexName, String dataSourceName, String skillsetName) { // create a new indexer object return createBaseTestIndexerObject(targetIndexName, dataSourceName) .setSkillsetName(skillsetName); @@ -771,7 +771,7 @@ Indexer createIndexerWithDifferentSkillset(String targetIndexName, String dataSo * * @return the created indexer */ - Indexer createIndexerWithDifferentIndexingParameters(Indexer indexer) { + SearchIndexer createIndexerWithDifferentIndexingParameters(SearchIndexer indexer) { // create a new indexer object IndexingParameters ip = new IndexingParameters() .setMaxFailedItems(121) @@ -784,9 +784,9 @@ Indexer createIndexerWithDifferentIndexingParameters(Indexer indexer) { return indexer; } - Indexer createIndexerWithStorageConfig(String targetIndexName, String dataSourceName) { + SearchIndexer createIndexerWithStorageConfig(String targetIndexName, String dataSourceName) { // create an indexer object - Indexer updatedExpected = createBaseTestIndexerObject(targetIndexName, dataSourceName); + SearchIndexer updatedExpected = createBaseTestIndexerObject(targetIndexName, dataSourceName); // just adding some(valid) config values for blobs HashMap config = new HashMap<>(); @@ -804,13 +804,13 @@ Indexer createIndexerWithStorageConfig(String targetIndexName, String dataSource return updatedExpected; } - void setSameStartTime(Indexer expected, Indexer actual) { + void setSameStartTime(SearchIndexer expected, SearchIndexer actual) { // There ought to be a start time in the response; We just can't know what it is because it would // make the test timing-dependent. 
expected.getSchedule().setStartTime(actual.getSchedule().getStartTime()); } - void assertAllIndexerFieldsNullExceptName(Indexer indexer) { + void assertAllIndexerFieldsNullExceptName(SearchIndexer indexer) { assertNull(indexer.getParameters()); assertNull(indexer.getDataSourceName()); assertNull(indexer.getDescription()); @@ -829,11 +829,11 @@ void assertStartAndEndTimeValid(IndexerExecutionResult result) { assertNotEquals(OffsetDateTime.now(), result.getEndTime()); } - void assertValidIndexerExecutionInfo(IndexerExecutionInfo indexerExecutionInfo) { + void assertValidSearchIndexerStatus(SearchIndexerStatus indexerExecutionInfo) { assertEquals(IndexerExecutionStatus.IN_PROGRESS, indexerExecutionInfo.getLastResult().getStatus()); assertEquals(3, indexerExecutionInfo.getExecutionHistory().size()); - IndexerLimits limits = indexerExecutionInfo.getLimits(); + SearchIndexerLimits limits = indexerExecutionInfo.getLimits(); assertNotNull(limits); assertEquals(100000, limits.getMaxDocumentContentCharactersToExtract(), 0); assertEquals(1000, limits.getMaxDocumentExtractionSize(), 0); diff --git a/sdk/search/azure-search-documents/src/test/java/com/azure/search/documents/IndexingSyncTests.java b/sdk/search/azure-search-documents/src/test/java/com/azure/search/documents/IndexingSyncTests.java index b5d5f1af6ded..b500afb4d6f7 100644 --- a/sdk/search/azure-search-documents/src/test/java/com/azure/search/documents/IndexingSyncTests.java +++ b/sdk/search/azure-search-documents/src/test/java/com/azure/search/documents/IndexingSyncTests.java @@ -4,14 +4,14 @@ import com.azure.core.http.rest.Response; import com.azure.core.util.Context; -import com.azure.search.documents.models.DataType; -import com.azure.search.documents.models.Field; import com.azure.search.documents.models.GeoPoint; -import com.azure.search.documents.models.Index; import com.azure.search.documents.models.IndexBatchException; import com.azure.search.documents.models.IndexDocumentsBatch; import 
com.azure.search.documents.models.IndexDocumentsResult; import com.azure.search.documents.models.IndexingResult; +import com.azure.search.documents.models.SearchField; +import com.azure.search.documents.models.SearchFieldDataType; +import com.azure.search.documents.models.SearchIndex; import com.azure.search.documents.test.environment.models.Author; import com.azure.search.documents.test.environment.models.Book; import com.azure.search.documents.test.environment.models.Hotel; @@ -288,11 +288,11 @@ public void indexWithInvalidDocumentThrowsException() { @Test public void canUseIndexWithReservedName() { String indexName = "prototype"; - Index indexWithReservedName = new Index() + SearchIndex indexWithReservedName = new SearchIndex() .setName(indexName) - .setFields(Collections.singletonList(new Field() + .setFields(Collections.singletonList(new SearchField() .setName("ID") - .setType(DataType.EDM_STRING) + .setType(SearchFieldDataType.STRING) .setKey(Boolean.TRUE) )); diff --git a/sdk/search/azure-search-documents/src/test/java/com/azure/search/documents/LookupSyncTests.java b/sdk/search/azure-search-documents/src/test/java/com/azure/search/documents/LookupSyncTests.java index e3cc37251c7f..104ad736b453 100644 --- a/sdk/search/azure-search-documents/src/test/java/com/azure/search/documents/LookupSyncTests.java +++ b/sdk/search/azure-search-documents/src/test/java/com/azure/search/documents/LookupSyncTests.java @@ -4,11 +4,11 @@ import com.azure.core.http.rest.Response; import com.azure.core.util.Context; -import com.azure.search.documents.models.DataType; -import com.azure.search.documents.models.Field; import com.azure.search.documents.models.GeoPoint; -import com.azure.search.documents.models.Index; import com.azure.search.documents.models.IndexDocumentsBatch; +import com.azure.search.documents.models.SearchField; +import com.azure.search.documents.models.SearchFieldDataType; +import com.azure.search.documents.models.SearchIndex; import 
com.azure.search.documents.test.environment.models.Hotel; import com.azure.search.documents.test.environment.models.HotelAddress; import com.azure.search.documents.test.environment.models.HotelRoom; @@ -465,41 +465,41 @@ ModelWithPrimitiveCollections preparePrimitivesModel() { } String setupIndexWithDataTypes() { - Index index = new Index() + SearchIndex index = new SearchIndex() .setName("data-types-tests-index") .setFields(Arrays.asList( - new Field() + new SearchField() .setName("Key") - .setType(DataType.EDM_STRING) + .setType(SearchFieldDataType.STRING) .setKey(true) .setHidden(false), - new Field() + new SearchField() .setName("Bools") - .setType(DataType.collection(DataType.EDM_BOOLEAN)) + .setType(SearchFieldDataType.collection(SearchFieldDataType.BOOLEAN)) .setHidden(false), - new Field() + new SearchField() .setName("Dates") - .setType(DataType.collection(DataType.EDM_DATE_TIME_OFFSET)) + .setType(SearchFieldDataType.collection(SearchFieldDataType.DATE_TIME_OFFSET)) .setHidden(false), - new Field() + new SearchField() .setName("Doubles") - .setType(DataType.collection(DataType.EDM_DOUBLE)) + .setType(SearchFieldDataType.collection(SearchFieldDataType.DOUBLE)) .setHidden(false), - new Field() + new SearchField() .setName("Points") - .setType(DataType.collection(DataType.EDM_GEOGRAPHY_POINT)) + .setType(SearchFieldDataType.collection(SearchFieldDataType.GEOGRAPHY_POINT)) .setHidden(false), - new Field() + new SearchField() .setName("Ints") - .setType(DataType.collection(DataType.EDM_INT32)) + .setType(SearchFieldDataType.collection(SearchFieldDataType.INT32)) .setHidden(false), - new Field() + new SearchField() .setName("Longs") - .setType(DataType.collection(DataType.EDM_INT64)) + .setType(SearchFieldDataType.collection(SearchFieldDataType.INT64)) .setHidden(false), - new Field() + new SearchField() .setName("Strings") - .setType(DataType.collection(DataType.EDM_STRING)) + .setType(SearchFieldDataType.collection(SearchFieldDataType.STRING)) 
.setHidden(false) )); diff --git a/sdk/search/azure-search-documents/src/test/java/com/azure/search/documents/SearchSyncTests.java b/sdk/search/azure-search-documents/src/test/java/com/azure/search/documents/SearchSyncTests.java index 11ab114741f3..ab70cd45b131 100644 --- a/sdk/search/azure-search-documents/src/test/java/com/azure/search/documents/SearchSyncTests.java +++ b/sdk/search/azure-search-documents/src/test/java/com/azure/search/documents/SearchSyncTests.java @@ -6,15 +6,15 @@ import com.azure.core.util.Context; import com.azure.core.util.CoreUtils; import com.azure.search.documents.models.CoordinateSystem; -import com.azure.search.documents.models.DataType; import com.azure.search.documents.models.FacetResult; -import com.azure.search.documents.models.Field; import com.azure.search.documents.models.GeoPoint; -import com.azure.search.documents.models.Index; import com.azure.search.documents.models.QueryType; import com.azure.search.documents.models.RangeFacetResult; import com.azure.search.documents.models.RequestOptions; import com.azure.search.documents.models.ScoringParameter; +import com.azure.search.documents.models.SearchField; +import com.azure.search.documents.models.SearchFieldDataType; +import com.azure.search.documents.models.SearchIndex; import com.azure.search.documents.models.SearchMode; import com.azure.search.documents.models.SearchOptions; import com.azure.search.documents.models.SearchResult; @@ -759,7 +759,7 @@ public void canSearchWithSynonyms() { .setSynonyms("luxury,fancy")).getName(); // Attach index field to SynonymMap - Index hotelsIndex = searchServiceClient.getIndex(client.getIndexName()); + SearchIndex hotelsIndex = searchServiceClient.getIndex(client.getIndexName()); hotelsIndex.getFields().stream() .filter(f -> fieldName.equals(f.getName())) .findFirst().get().setSynonymMaps(Collections.singletonList(synonymMapToDelete)); @@ -990,61 +990,61 @@ void assertListEqualHotelIds(List expected, List actual) { } String 
createIndexWithNonNullableTypes() { - Index index = new Index() + SearchIndex index = new SearchIndex() .setName("non-nullable-index") .setFields(Arrays.asList( - new Field() + new SearchField() .setName("Key") - .setType(DataType.EDM_STRING) + .setType(SearchFieldDataType.STRING) .setHidden(false) .setKey(true), - new Field() + new SearchField() .setName("Rating") .setHidden(false) - .setType(DataType.EDM_INT32), - new Field() + .setType(SearchFieldDataType.INT32), + new SearchField() .setName("Count") .setHidden(false) - .setType(DataType.EDM_INT64), - new Field() + .setType(SearchFieldDataType.INT64), + new SearchField() .setName("IsEnabled") .setHidden(false) - .setType(DataType.EDM_BOOLEAN), - new Field() + .setType(SearchFieldDataType.BOOLEAN), + new SearchField() .setName("Ratio") .setHidden(false) - .setType(DataType.EDM_DOUBLE), - new Field() + .setType(SearchFieldDataType.DOUBLE), + new SearchField() .setName("StartDate") .setHidden(false) - .setType(DataType.EDM_DATE_TIME_OFFSET), - new Field() + .setType(SearchFieldDataType.DATE_TIME_OFFSET), + new SearchField() .setName("EndDate") .setHidden(false) - .setType(DataType.EDM_DATE_TIME_OFFSET), - new Field() + .setType(SearchFieldDataType.DATE_TIME_OFFSET), + new SearchField() .setName("TopLevelBucket") - .setType(DataType.EDM_COMPLEX_TYPE) + .setType(SearchFieldDataType.COMPLEX) .setFields(Arrays.asList( - new Field() + new SearchField() .setName("BucketName") - .setType(DataType.EDM_STRING) + .setType(SearchFieldDataType.STRING) .setFilterable(true), - new Field() + new SearchField() .setName("Count") - .setType(DataType.EDM_INT32) + .setType(SearchFieldDataType.INT32) .setFilterable(true))), - new Field() + new SearchField() .setName("Buckets") - .setType(DataType.collection(DataType.EDM_COMPLEX_TYPE)) + .setType(SearchFieldDataType.collection(SearchFieldDataType.COMPLEX)) .setFields(Arrays.asList( - new Field() + new SearchField() .setName("BucketName") - .setType(DataType.EDM_STRING) + 
.setType(SearchFieldDataType.STRING) .setFilterable(true), - new Field() + new SearchField() .setName("Count") - .setType(DataType.EDM_INT32) + .setType(SearchFieldDataType.INT32) .setFilterable(true))))); setupIndex(index); @@ -1053,29 +1053,29 @@ String createIndexWithNonNullableTypes() { } String createIndexWithValueTypes() { - Index index = new Index() + SearchIndex index = new SearchIndex() .setName("testindex") .setFields(Arrays.asList( - new Field() + new SearchField() .setName("Key") - .setType(DataType.EDM_STRING) + .setType(SearchFieldDataType.STRING) .setKey(true) .setSearchable(true), - new Field() + new SearchField() .setName("IntValue") - .setType(DataType.EDM_INT32) + .setType(SearchFieldDataType.INT32) .setFilterable(true), - new Field() + new SearchField() .setName("Bucket") - .setType(DataType.EDM_COMPLEX_TYPE) + .setType(SearchFieldDataType.COMPLEX) .setFields(Arrays.asList( - new Field() + new SearchField() .setName("BucketName") - .setType(DataType.EDM_STRING) + .setType(SearchFieldDataType.STRING) .setFilterable(true), - new Field() + new SearchField() .setName("Count") - .setType(DataType.EDM_INT32) + .setType(SearchFieldDataType.INT32) .setFilterable(true) )) ) diff --git a/sdk/search/azure-search-documents/src/test/java/com/azure/search/documents/SearchTestBase.java b/sdk/search/azure-search-documents/src/test/java/com/azure/search/documents/SearchTestBase.java index be3123381532..afa07509b9c6 100644 --- a/sdk/search/azure-search-documents/src/test/java/com/azure/search/documents/SearchTestBase.java +++ b/sdk/search/azure-search-documents/src/test/java/com/azure/search/documents/SearchTestBase.java @@ -9,23 +9,23 @@ import com.azure.core.http.policy.RetryPolicy; import com.azure.core.test.TestBase; import com.azure.core.util.Configuration; -import com.azure.search.documents.models.AnalyzerName; import com.azure.search.documents.models.CorsOptions; import com.azure.search.documents.models.DataChangeDetectionPolicy; import 
com.azure.search.documents.models.DataDeletionDetectionPolicy; -import com.azure.search.documents.models.DataSource; -import com.azure.search.documents.models.DataType; import com.azure.search.documents.models.DistanceScoringFunction; import com.azure.search.documents.models.DistanceScoringParameters; -import com.azure.search.documents.models.Field; import com.azure.search.documents.models.FreshnessScoringFunction; import com.azure.search.documents.models.FreshnessScoringParameters; -import com.azure.search.documents.models.Index; +import com.azure.search.documents.models.LexicalAnalyzerName; import com.azure.search.documents.models.MagnitudeScoringFunction; import com.azure.search.documents.models.MagnitudeScoringParameters; import com.azure.search.documents.models.ScoringFunctionAggregation; import com.azure.search.documents.models.ScoringFunctionInterpolation; import com.azure.search.documents.models.ScoringProfile; +import com.azure.search.documents.models.SearchField; +import com.azure.search.documents.models.SearchFieldDataType; +import com.azure.search.documents.models.SearchIndex; +import com.azure.search.documents.models.SearchIndexerDataSource; import com.azure.search.documents.models.SoftDeleteColumnDeletionDetectionPolicy; import com.azure.search.documents.models.Suggester; import com.azure.search.documents.models.TagScoringFunction; @@ -82,13 +82,13 @@ protected String setupIndexFromJsonFile(String jsonFile) { Reader indexData = new InputStreamReader(Objects.requireNonNull(getClass().getClassLoader() .getResourceAsStream(jsonFile))); - return setupIndex(new ObjectMapper().readValue(indexData, Index.class)); + return setupIndex(new ObjectMapper().readValue(indexData, SearchIndex.class)); } catch (IOException ex) { throw new UncheckedIOException(ex); } } - protected String setupIndex(Index index) { + protected String setupIndex(SearchIndex index) { index.setName(testResourceNamer.randomName(index.getName(), 64)); 
getSearchServiceClientBuilder().buildClient().createOrUpdateIndex(index); @@ -146,126 +146,126 @@ protected SearchIndexClientBuilder getSearchIndexClientBuilder(String indexName) return builder; } - protected Index createTestIndex() { + protected SearchIndex createTestIndex() { Map weights = new HashMap<>(); weights.put("Description", 1.5); weights.put("Category", 2.0); - return new Index() + return new SearchIndex() .setName(randomIndexName(HOTEL_INDEX_NAME)) .setFields(Arrays.asList( - new Field() + new SearchField() .setName("HotelId") - .setType(DataType.EDM_STRING) + .setType(SearchFieldDataType.STRING) .setKey(Boolean.TRUE) .setFilterable(Boolean.TRUE) .setSortable(Boolean.TRUE) .setFacetable(Boolean.TRUE) .setHidden(Boolean.FALSE), - new Field() + new SearchField() .setName("HotelName") - .setType(DataType.EDM_STRING) + .setType(SearchFieldDataType.STRING) .setSearchable(Boolean.TRUE) .setFilterable(Boolean.TRUE) .setSortable(Boolean.TRUE) .setHidden(Boolean.FALSE), - new Field() + new SearchField() .setName("Description") - .setType(DataType.EDM_STRING) + .setType(SearchFieldDataType.STRING) .setSearchable(Boolean.TRUE) - .setAnalyzer(AnalyzerName.EN_LUCENE) + .setAnalyzer(LexicalAnalyzerName.EN_LUCENE) .setHidden(Boolean.FALSE), - new Field() + new SearchField() .setName("DescriptionFr") - .setType(DataType.EDM_STRING) + .setType(SearchFieldDataType.STRING) .setSearchable(Boolean.TRUE) - .setAnalyzer(AnalyzerName.FR_LUCENE) + .setAnalyzer(LexicalAnalyzerName.FR_LUCENE) .setHidden(Boolean.FALSE), - new Field() + new SearchField() .setName("Description_Custom") - .setType(DataType.EDM_STRING) + .setType(SearchFieldDataType.STRING) .setSearchable(Boolean.TRUE) - .setSearchAnalyzer(AnalyzerName.STOP) - .setIndexAnalyzer(AnalyzerName.STOP) + .setSearchAnalyzer(LexicalAnalyzerName.STOP) + .setIndexAnalyzer(LexicalAnalyzerName.STOP) .setHidden(Boolean.FALSE), - new Field() + new SearchField() .setName("Category") - .setType(DataType.EDM_STRING) + 
.setType(SearchFieldDataType.STRING) .setSearchable(Boolean.TRUE) .setFilterable(Boolean.TRUE) .setSortable(Boolean.TRUE) .setFacetable(Boolean.TRUE) .setHidden(Boolean.FALSE), - new Field() + new SearchField() .setName("Tags") - .setType(DataType.collection(DataType.EDM_STRING)) + .setType(SearchFieldDataType.collection(SearchFieldDataType.STRING)) .setSearchable(Boolean.TRUE) .setFilterable(Boolean.TRUE) .setFacetable(Boolean.TRUE) .setHidden(Boolean.FALSE), - new Field() + new SearchField() .setName("ParkingIncluded") - .setType(DataType.EDM_BOOLEAN) + .setType(SearchFieldDataType.BOOLEAN) .setFilterable(Boolean.TRUE) .setSortable(Boolean.TRUE) .setFacetable(Boolean.TRUE) .setHidden(Boolean.FALSE), - new Field() + new SearchField() .setName("SmokingAllowed") - .setType(DataType.EDM_BOOLEAN) + .setType(SearchFieldDataType.BOOLEAN) .setFilterable(Boolean.TRUE) .setSortable(Boolean.TRUE) .setFacetable(Boolean.TRUE) .setHidden(Boolean.FALSE), - new Field() + new SearchField() .setName("LastRenovationDate") - .setType(DataType.EDM_DATE_TIME_OFFSET) + .setType(SearchFieldDataType.DATE_TIME_OFFSET) .setFilterable(Boolean.TRUE) .setSortable(Boolean.TRUE) .setFacetable(Boolean.TRUE) .setHidden(Boolean.FALSE), - new Field() + new SearchField() .setName("Rating") - .setType(DataType.EDM_INT32) + .setType(SearchFieldDataType.INT32) .setFilterable(Boolean.TRUE) .setSortable(Boolean.TRUE) .setFacetable(Boolean.TRUE) .setHidden(Boolean.FALSE), - new Field() + new SearchField() .setName("Address") - .setType(DataType.EDM_COMPLEX_TYPE) + .setType(SearchFieldDataType.COMPLEX) .setFields(Arrays.asList( - new Field() + new SearchField() .setName("StreetAddress") - .setType(DataType.EDM_STRING) + .setType(SearchFieldDataType.STRING) .setSearchable(Boolean.TRUE) .setHidden(Boolean.FALSE), - new Field() + new SearchField() .setName("City") - .setType(DataType.EDM_STRING) + .setType(SearchFieldDataType.STRING) .setSearchable(Boolean.TRUE) .setFilterable(Boolean.TRUE) 
.setSortable(Boolean.TRUE) .setFacetable(Boolean.TRUE) .setHidden(Boolean.FALSE), - new Field() + new SearchField() .setName("StateProvince") - .setType(DataType.EDM_STRING) + .setType(SearchFieldDataType.STRING) .setSearchable(Boolean.TRUE) .setFilterable(Boolean.TRUE) .setSortable(Boolean.TRUE) .setFacetable(Boolean.TRUE) .setHidden(Boolean.FALSE), - new Field() + new SearchField() .setName("Country") - .setType(DataType.EDM_STRING) + .setType(SearchFieldDataType.STRING) .setSearchable(Boolean.TRUE) .setFilterable(Boolean.TRUE) .setSortable(Boolean.TRUE) .setFacetable(Boolean.TRUE) .setHidden(Boolean.FALSE), - new Field() + new SearchField() .setName("PostalCode") - .setType(DataType.EDM_STRING) + .setType(SearchFieldDataType.STRING) .setSearchable(Boolean.TRUE) .setFilterable(Boolean.TRUE) .setSortable(Boolean.TRUE) @@ -273,78 +273,78 @@ protected Index createTestIndex() { .setHidden(Boolean.FALSE) ) ), - new Field() + new SearchField() .setName("Location") - .setType(DataType.EDM_GEOGRAPHY_POINT) + .setType(SearchFieldDataType.GEOGRAPHY_POINT) .setFilterable(Boolean.TRUE) .setSortable(Boolean.TRUE) .setHidden(Boolean.FALSE), - new Field() + new SearchField() .setName("Rooms") - .setType(DataType.collection(DataType.EDM_COMPLEX_TYPE)) + .setType(SearchFieldDataType.collection(SearchFieldDataType.COMPLEX)) .setFields(Arrays.asList( - new Field() + new SearchField() .setName("Description") - .setType(DataType.EDM_STRING) + .setType(SearchFieldDataType.STRING) .setSearchable(Boolean.TRUE) - .setAnalyzer(AnalyzerName.EN_LUCENE), - new Field() + .setAnalyzer(LexicalAnalyzerName.EN_LUCENE), + new SearchField() .setName("DescriptionFr") - .setType(DataType.EDM_STRING) + .setType(SearchFieldDataType.STRING) .setSearchable(Boolean.TRUE) - .setAnalyzer(AnalyzerName.FR_LUCENE) + .setAnalyzer(LexicalAnalyzerName.FR_LUCENE) .setHidden(Boolean.FALSE), - new Field() + new SearchField() .setName("Type") - .setType(DataType.EDM_STRING) + .setType(SearchFieldDataType.STRING) 
.setSearchable(Boolean.TRUE) .setFilterable(Boolean.TRUE) .setFacetable(Boolean.TRUE) .setHidden(Boolean.FALSE), - new Field() + new SearchField() .setName("BaseRate") - .setType(DataType.EDM_DOUBLE) + .setType(SearchFieldDataType.DOUBLE) .setKey(Boolean.FALSE) .setFilterable(Boolean.TRUE) .setFacetable(Boolean.TRUE) .setHidden(Boolean.FALSE), - new Field() + new SearchField() .setName("BedOptions") - .setType(DataType.EDM_STRING) + .setType(SearchFieldDataType.STRING) .setSearchable(Boolean.TRUE) .setFilterable(Boolean.TRUE) .setFacetable(Boolean.TRUE) .setHidden(Boolean.FALSE), - new Field() + new SearchField() .setName("SleepsCount") - .setType(DataType.EDM_INT32) + .setType(SearchFieldDataType.INT32) .setFilterable(Boolean.TRUE) .setFacetable(Boolean.TRUE) .setHidden(Boolean.FALSE), - new Field() + new SearchField() .setName("SmokingAllowed") - .setType(DataType.EDM_BOOLEAN) + .setType(SearchFieldDataType.BOOLEAN) .setFilterable(Boolean.TRUE) .setFacetable(Boolean.TRUE) .setHidden(Boolean.FALSE), - new Field() + new SearchField() .setName("Tags") - .setType(DataType.collection(DataType.EDM_STRING)) + .setType(SearchFieldDataType.collection(SearchFieldDataType.STRING)) .setSearchable(Boolean.TRUE) .setFilterable(Boolean.TRUE) .setFacetable(Boolean.TRUE) .setHidden(Boolean.FALSE) ) ), - new Field() + new SearchField() .setName("TotalGuests") - .setType(DataType.EDM_INT64) + .setType(SearchFieldDataType.INT64) .setFilterable(Boolean.TRUE) .setSortable(Boolean.TRUE) .setFacetable(Boolean.TRUE), - new Field() + new SearchField() .setName("ProfitMargin") - .setType(DataType.EDM_DOUBLE) + .setType(SearchFieldDataType.DOUBLE) ) ) .setScoringProfiles(Arrays.asList( @@ -422,25 +422,25 @@ protected Index createTestIndex() { .setSourceFields(Collections.singletonList("HotelName")))); } - protected DataSource createTestSqlDataSourceObject() { + protected SearchIndexerDataSource createTestSqlDataSourceObject() { return createTestSqlDataSourceObject(null, null); } - protected 
DataSource createTestSqlDataSourceObject(DataDeletionDetectionPolicy dataDeletionDetectionPolicy, - DataChangeDetectionPolicy dataChangeDetectionPolicy) { - return DataSources.createFromAzureSql(testResourceNamer.randomName(SQL_DATASOURCE_NAME, 32), + protected SearchIndexerDataSource createTestSqlDataSourceObject( + DataDeletionDetectionPolicy dataDeletionDetectionPolicy, DataChangeDetectionPolicy dataChangeDetectionPolicy) { + return SearchIndexerDataSources.createFromAzureSql(testResourceNamer.randomName(SQL_DATASOURCE_NAME, 32), AZURE_SQL_CONN_STRING_READONLY_PLAYGROUND, "GeoNamesRI", FAKE_DESCRIPTION, dataChangeDetectionPolicy, dataDeletionDetectionPolicy); } - protected DataSource createBlobDataSource() { + protected SearchIndexerDataSource createBlobDataSource() { String storageConnectionString = Configuration.getGlobalConfiguration() .get("SEARCH_STORAGE_CONNECTION_STRING", "connectionString"); String blobContainerName = Configuration.getGlobalConfiguration() .get("SEARCH_STORAGE_CONTAINER_NAME", "container"); // create the new data source object for this storage account and container - return DataSources.createFromAzureBlobStorage(testResourceNamer.randomName(BLOB_DATASOURCE_NAME, 32), + return SearchIndexerDataSources.createFromAzureBlobStorage(testResourceNamer.randomName(BLOB_DATASOURCE_NAME, 32), storageConnectionString, blobContainerName, "/", "real live blob", new SoftDeleteColumnDeletionDetectionPolicy() .setSoftDeleteColumnName("fieldName") diff --git a/sdk/search/azure-search-documents/src/test/java/com/azure/search/documents/SkillsetManagementSyncTests.java b/sdk/search/azure-search-documents/src/test/java/com/azure/search/documents/SkillsetManagementSyncTests.java index 38c92c518c69..f438bc2dc09e 100644 --- a/sdk/search/azure-search-documents/src/test/java/com/azure/search/documents/SkillsetManagementSyncTests.java +++ b/sdk/search/azure-search-documents/src/test/java/com/azure/search/documents/SkillsetManagementSyncTests.java @@ -24,11 +24,11 
@@ import com.azure.search.documents.models.OcrSkillLanguage; import com.azure.search.documents.models.OutputFieldMappingEntry; import com.azure.search.documents.models.SearchErrorException; +import com.azure.search.documents.models.SearchIndexerSkill; +import com.azure.search.documents.models.SearchIndexerSkillset; import com.azure.search.documents.models.SentimentSkill; import com.azure.search.documents.models.SentimentSkillLanguage; import com.azure.search.documents.models.ShaperSkill; -import com.azure.search.documents.models.Skill; -import com.azure.search.documents.models.Skillset; import com.azure.search.documents.models.SplitSkill; import com.azure.search.documents.models.SplitSkillLanguage; import com.azure.search.documents.models.TextExtractionAlgorithm; @@ -81,8 +81,8 @@ protected void afterTest() { @Test public void createSkillsetReturnsCorrectDefinitionImageAnalysisKeyPhrase() { - Skillset expectedSkillset = createTestSkillsetImageAnalysisKeyPhrase(); - Skillset actualSkillset = client.createSkillset(expectedSkillset); + SearchIndexerSkillset expectedSkillset = createTestSkillsetImageAnalysisKeyPhrase(); + SearchIndexerSkillset actualSkillset = client.createSkillset(expectedSkillset); skillsetsToDelete.add(actualSkillset.getName()); assertObjectEquals(expectedSkillset, actualSkillset, true, "etag"); @@ -90,8 +90,8 @@ public void createSkillsetReturnsCorrectDefinitionImageAnalysisKeyPhrase() { @Test public void createSkillsetReturnsCorrectDefinitionImageAnalysisKeyPhraseWithResponse() { - Skillset expectedSkillset = createTestSkillsetImageAnalysisKeyPhrase(); - Response skillsetResponse = client.createSkillsetWithResponse(expectedSkillset, + SearchIndexerSkillset expectedSkillset = createTestSkillsetImageAnalysisKeyPhrase(); + Response skillsetResponse = client.createSkillsetWithResponse(expectedSkillset, generateRequestOptions(), Context.NONE); skillsetsToDelete.add(skillsetResponse.getValue().getName()); @@ -100,8 +100,8 @@ public void 
createSkillsetReturnsCorrectDefinitionImageAnalysisKeyPhraseWithResp @Test public void createSkillsetReturnsCorrectDefinitionLanguageDetection() { - Skillset expectedSkillset = createTestSkillsetLanguageDetection(); - Skillset actualSkillset = client.createSkillset(expectedSkillset); + SearchIndexerSkillset expectedSkillset = createTestSkillsetLanguageDetection(); + SearchIndexerSkillset actualSkillset = client.createSkillset(expectedSkillset); skillsetsToDelete.add(actualSkillset.getName()); assertObjectEquals(expectedSkillset, actualSkillset, true, "etag"); @@ -109,8 +109,8 @@ public void createSkillsetReturnsCorrectDefinitionLanguageDetection() { @Test public void createSkillsetReturnsCorrectDefinitionMergeText() { - Skillset expectedSkillset = createTestSkillsetMergeText(); - Skillset actualSkillset = client.createSkillset(expectedSkillset); + SearchIndexerSkillset expectedSkillset = createTestSkillsetMergeText(); + SearchIndexerSkillset actualSkillset = client.createSkillset(expectedSkillset); skillsetsToDelete.add(actualSkillset.getName()); assertObjectEquals(expectedSkillset, actualSkillset, true, "etag"); @@ -118,8 +118,8 @@ public void createSkillsetReturnsCorrectDefinitionMergeText() { @Test public void createSkillsetReturnsCorrectDefinitionOcrEntity() { - Skillset expectedSkillset = createTestSkillsetOcrEntity(null, null); - Skillset actualSkillset = client.createSkillset(expectedSkillset); + SearchIndexerSkillset expectedSkillset = createTestSkillsetOcrEntity(null, null); + SearchIndexerSkillset actualSkillset = client.createSkillset(expectedSkillset); skillsetsToDelete.add(actualSkillset.getName()); assertObjectEquals(expectedSkillset, actualSkillset, true, "etag"); @@ -134,9 +134,9 @@ public void createSkillsetReturnsCorrectDefinitionOcrEntity() { @Test public void createSkillsetReturnsCorrectDefinitionOcrHandwritingSentiment() { - Skillset expectedSkillset = createTestSkillsetOcrSentiment(OcrSkillLanguage.PT, + SearchIndexerSkillset expectedSkillset 
= createTestSkillsetOcrSentiment(OcrSkillLanguage.PT, SentimentSkillLanguage.PT_PT, TextExtractionAlgorithm.PRINTED); - Skillset actualSkillset = client.createSkillset(expectedSkillset); + SearchIndexerSkillset actualSkillset = client.createSkillset(expectedSkillset); skillsetsToDelete.add(actualSkillset.getName()); assertObjectEquals(expectedSkillset, actualSkillset, true, "etag"); @@ -155,9 +155,9 @@ public void createSkillsetReturnsCorrectDefinitionOcrHandwritingSentiment() { @Test public void createSkillsetReturnsCorrectDefinitionOcrKeyPhrase() { - Skillset expectedSkillset = createTestSkillsetOcrKeyPhrase(OcrSkillLanguage.EN, + SearchIndexerSkillset expectedSkillset = createTestSkillsetOcrKeyPhrase(OcrSkillLanguage.EN, KeyPhraseExtractionSkillLanguage.EN); - Skillset actualSkillset = client.createSkillset(expectedSkillset); + SearchIndexerSkillset actualSkillset = client.createSkillset(expectedSkillset); skillsetsToDelete.add(actualSkillset.getName()); assertObjectEquals(expectedSkillset, actualSkillset, true, "etag"); @@ -174,8 +174,8 @@ public void createSkillsetReturnsCorrectDefinitionOcrKeyPhrase() { @Test public void createSkillsetReturnsCorrectDefinitionOcrShaper() { - Skillset expectedSkillset = createTestSkillsetOcrShaper(); - Skillset actualSkillset = client.createSkillset(expectedSkillset); + SearchIndexerSkillset expectedSkillset = createTestSkillsetOcrShaper(); + SearchIndexerSkillset actualSkillset = client.createSkillset(expectedSkillset); skillsetsToDelete.add(actualSkillset.getName()); assertObjectEquals(expectedSkillset, actualSkillset, true, "etag"); @@ -183,9 +183,9 @@ public void createSkillsetReturnsCorrectDefinitionOcrShaper() { @Test public void createSkillsetReturnsCorrectDefinitionOcrSplitText() { - Skillset expectedSkillset = createTestSkillsetOcrSplitText(OcrSkillLanguage.EN, + SearchIndexerSkillset expectedSkillset = createTestSkillsetOcrSplitText(OcrSkillLanguage.EN, SplitSkillLanguage.EN, TextSplitMode.PAGES); - Skillset 
actualSkillset = client.createSkillset(expectedSkillset); + SearchIndexerSkillset actualSkillset = client.createSkillset(expectedSkillset); skillsetsToDelete.add(actualSkillset.getName()); assertObjectEquals(expectedSkillset, actualSkillset, true, "etag"); @@ -211,8 +211,8 @@ public void createSkillsetReturnsCorrectDefinitionOcrSplitText() { @Test public void createSkillsetReturnsCorrectDefinitionWithCognitiveServicesDefault() { - Skillset expectedSkillset = createSkillsetWithCognitiveServicesKey(); - Skillset actualSkillset = client.createSkillset(expectedSkillset); + SearchIndexerSkillset expectedSkillset = createSkillsetWithCognitiveServicesKey(); + SearchIndexerSkillset actualSkillset = client.createSkillset(expectedSkillset); skillsetsToDelete.add(actualSkillset.getName()); assertObjectEquals(expectedSkillset, actualSkillset, true, "etag"); @@ -220,8 +220,8 @@ public void createSkillsetReturnsCorrectDefinitionWithCognitiveServicesDefault() @Test public void createSkillsetReturnsCorrectDefinitionWithOcrDefaultSettings() { - Skillset expectedSkillset = createSkillsetWithOcrDefaultSettings(false); - Skillset actualSkillset = client.createSkillset(expectedSkillset); + SearchIndexerSkillset expectedSkillset = createSkillsetWithOcrDefaultSettings(false); + SearchIndexerSkillset actualSkillset = client.createSkillset(expectedSkillset); skillsetsToDelete.add(actualSkillset.getName()); assertObjectEquals(expectedSkillset, actualSkillset, true, "etag"); @@ -229,8 +229,8 @@ public void createSkillsetReturnsCorrectDefinitionWithOcrDefaultSettings() { @Test public void createSkillsetReturnsCorrectDefinitionWithImageAnalysisDefaultSettings() { - Skillset expectedSkillset = createSkillsetWithImageAnalysisDefaultSettings(); - Skillset actualSkillset = client.createSkillset(expectedSkillset); + SearchIndexerSkillset expectedSkillset = createSkillsetWithImageAnalysisDefaultSettings(); + SearchIndexerSkillset actualSkillset = client.createSkillset(expectedSkillset); 
skillsetsToDelete.add(actualSkillset.getName()); assertObjectEquals(expectedSkillset, actualSkillset, true, "etag"); @@ -238,8 +238,8 @@ public void createSkillsetReturnsCorrectDefinitionWithImageAnalysisDefaultSettin @Test public void createSkillsetReturnsCorrectDefinitionWithKeyPhraseExtractionDefaultSettings() { - Skillset expectedSkillset = createSkillsetWithKeyPhraseExtractionDefaultSettings(); - Skillset actualSkillset = client.createSkillset(expectedSkillset); + SearchIndexerSkillset expectedSkillset = createSkillsetWithKeyPhraseExtractionDefaultSettings(); + SearchIndexerSkillset actualSkillset = client.createSkillset(expectedSkillset); skillsetsToDelete.add(actualSkillset.getName()); assertObjectEquals(expectedSkillset, actualSkillset, true, "etag"); @@ -247,8 +247,8 @@ public void createSkillsetReturnsCorrectDefinitionWithKeyPhraseExtractionDefault @Test public void createSkillsetReturnsCorrectDefinitionWithMergeDefaultSettings() { - Skillset expectedSkillset = createSkillsetWithMergeDefaultSettings(); - Skillset actualSkillset = client.createSkillset(expectedSkillset); + SearchIndexerSkillset expectedSkillset = createSkillsetWithMergeDefaultSettings(); + SearchIndexerSkillset actualSkillset = client.createSkillset(expectedSkillset); skillsetsToDelete.add(actualSkillset.getName()); assertObjectEquals(expectedSkillset, actualSkillset, true, "etag"); @@ -256,8 +256,8 @@ public void createSkillsetReturnsCorrectDefinitionWithMergeDefaultSettings() { @Test public void createSkillsetReturnsCorrectDefinitionWithEntityRecognitionDefaultSettings() { - Skillset expectedSkillset = createSkillsetWithEntityRecognitionDefaultSettings(); - Skillset actualSkillset = client.createSkillset(expectedSkillset); + SearchIndexerSkillset expectedSkillset = createSkillsetWithEntityRecognitionDefaultSettings(); + SearchIndexerSkillset actualSkillset = client.createSkillset(expectedSkillset); skillsetsToDelete.add(actualSkillset.getName()); assertObjectEquals(expectedSkillset, 
actualSkillset, true, "etag"); @@ -265,39 +265,39 @@ public void createSkillsetReturnsCorrectDefinitionWithEntityRecognitionDefaultSe @Test public void getOcrSkillsetReturnsCorrectDefinition() { - Skillset expected = createSkillsetWithOcrDefaultSettings(false); + SearchIndexerSkillset expected = createSkillsetWithOcrDefaultSettings(false); client.createSkillset(expected); skillsetsToDelete.add(expected.getName()); - Skillset actual = client.getSkillset(expected.getName()); + SearchIndexerSkillset actual = client.getSkillset(expected.getName()); assertObjectEquals(expected, actual, true, "etag"); } @Test public void getOcrSkillsetReturnsCorrectDefinitionWithResponse() { - Skillset expected = createSkillsetWithOcrDefaultSettings(false); + SearchIndexerSkillset expected = createSkillsetWithOcrDefaultSettings(false); client.createSkillset(expected); skillsetsToDelete.add(expected.getName()); - Skillset actual = client.getSkillsetWithResponse(expected.getName(), generateRequestOptions(), Context.NONE) + SearchIndexerSkillset actual = client.getSkillsetWithResponse(expected.getName(), generateRequestOptions(), Context.NONE) .getValue(); assertObjectEquals(expected, actual, true, "etag"); } @Test public void getOcrSkillsetWithShouldDetectOrientationReturnsCorrectDefinition() { - Skillset expected = createSkillsetWithOcrDefaultSettings(true); + SearchIndexerSkillset expected = createSkillsetWithOcrDefaultSettings(true); client.createSkillset(expected); skillsetsToDelete.add(expected.getName()); - Skillset actual = client.getSkillset(expected.getName()); + SearchIndexerSkillset actual = client.getSkillset(expected.getName()); assertObjectEquals(expected, actual, true, "etag"); } @Test public void createSkillsetReturnsCorrectDefinitionWithSentimentDefaultSettings() { - Skillset expectedSkillset = createSkillsetWithSentimentDefaultSettings(); - Skillset actualSkillset = client.createSkillset(expectedSkillset); + SearchIndexerSkillset expectedSkillset = 
createSkillsetWithSentimentDefaultSettings(); + SearchIndexerSkillset actualSkillset = client.createSkillset(expectedSkillset); skillsetsToDelete.add(actualSkillset.getName()); assertObjectEquals(expectedSkillset, actualSkillset, true, "etag"); @@ -305,8 +305,8 @@ public void createSkillsetReturnsCorrectDefinitionWithSentimentDefaultSettings() @Test public void createSkillsetReturnsCorrectDefinitionWithSplitDefaultSettings() { - Skillset expectedSkillset = createSkillsetWithSplitDefaultSettings(); - Skillset actualSkillset = client.createSkillset(expectedSkillset); + SearchIndexerSkillset expectedSkillset = createSkillsetWithSplitDefaultSettings(); + SearchIndexerSkillset actualSkillset = client.createSkillset(expectedSkillset); skillsetsToDelete.add(actualSkillset.getName()); assertObjectEquals(expectedSkillset, actualSkillset, true, "etag"); @@ -314,8 +314,8 @@ public void createSkillsetReturnsCorrectDefinitionWithSplitDefaultSettings() { @Test public void createCustomSkillsetReturnsCorrectDefinition() { - Skillset expected = createSkillsetWithCustomSkills(); - Skillset actual = client.createSkillset(expected); + SearchIndexerSkillset expected = createSkillsetWithCustomSkills(); + SearchIndexerSkillset actual = client.createSkillset(expected); skillsetsToDelete.add(actual.getName()); assertObjectEquals(expected, actual, true, "etag"); @@ -332,16 +332,16 @@ public void getSkillsetThrowsOnNotFound() { @Test public void canCreateAndListSkillsets() { - Skillset skillset1 = createSkillsetWithCognitiveServicesKey(); - Skillset skillset2 = createSkillsetWithEntityRecognitionDefaultSettings(); + SearchIndexerSkillset skillset1 = createSkillsetWithCognitiveServicesKey(); + SearchIndexerSkillset skillset2 = createSkillsetWithEntityRecognitionDefaultSettings(); client.createSkillset(skillset1); skillsetsToDelete.add(skillset1.getName()); client.createSkillset(skillset2); skillsetsToDelete.add(skillset2.getName()); - PagedIterable actual = client.listSkillsets(); - List 
result = actual.stream().collect(Collectors.toList()); + PagedIterable actual = client.listSkillsets(); + List result = actual.stream().collect(Collectors.toList()); assertEquals(2, result.size()); assertEquals(skillset1.getName(), result.get(0).getName()); @@ -350,16 +350,16 @@ public void canCreateAndListSkillsets() { @Test public void canListSkillsetsWithSelectedField() { - Skillset skillset1 = createSkillsetWithCognitiveServicesKey(); - Skillset skillset2 = createSkillsetWithEntityRecognitionDefaultSettings(); + SearchIndexerSkillset skillset1 = createSkillsetWithCognitiveServicesKey(); + SearchIndexerSkillset skillset2 = createSkillsetWithEntityRecognitionDefaultSettings(); client.createSkillset(skillset1); skillsetsToDelete.add(skillset1.getName()); client.createSkillset(skillset2); skillsetsToDelete.add(skillset2.getName()); - PagedIterable selectedFieldListResponse = client.listSkillsets("name", generateRequestOptions(), Context.NONE); - List result = selectedFieldListResponse.stream().collect(Collectors.toList()); + PagedIterable selectedFieldListResponse = client.listSkillsets("name", generateRequestOptions(), Context.NONE); + List result = selectedFieldListResponse.stream().collect(Collectors.toList()); result.forEach(res -> { assertNotNull(res.getName()); @@ -376,7 +376,7 @@ public void canListSkillsetsWithSelectedField() { @Test public void deleteSkillsetIsIdempotent() { - Skillset skillset = createSkillsetWithOcrDefaultSettings(false); + SearchIndexerSkillset skillset = createSkillsetWithOcrDefaultSettings(false); Response deleteResponse = client.deleteSkillsetWithResponse(skillset, false, generateRequestOptions(), Context.NONE); @@ -394,7 +394,7 @@ public void deleteSkillsetIsIdempotent() { @Test public void canCreateAndDeleteSkillset() { - Skillset expected = createSkillsetWithOcrDefaultSettings(false); + SearchIndexerSkillset expected = createSkillsetWithOcrDefaultSettings(false); client.createSkillset(expected); 
client.deleteSkillset(expected.getName()); @@ -403,8 +403,8 @@ public void canCreateAndDeleteSkillset() { @Test public void createOrUpdateCreatesWhenSkillsetDoesNotExist() { - Skillset expected = createTestOcrSkillSet(1, TextExtractionAlgorithm.PRINTED); - Skillset actual = client.createOrUpdateSkillset(expected); + SearchIndexerSkillset expected = createTestOcrSkillSet(1, TextExtractionAlgorithm.PRINTED); + SearchIndexerSkillset actual = client.createOrUpdateSkillset(expected); skillsetsToDelete.add(actual.getName()); assertObjectEquals(expected, actual, true, "etag"); @@ -412,8 +412,8 @@ public void createOrUpdateCreatesWhenSkillsetDoesNotExist() { @Test public void createOrUpdateCreatesWhenSkillsetDoesNotExistWithResponse() { - Skillset expected = createTestOcrSkillSet(1, TextExtractionAlgorithm.PRINTED); - Response createOrUpdateResponse = client.createOrUpdateSkillsetWithResponse(expected, + SearchIndexerSkillset expected = createTestOcrSkillSet(1, TextExtractionAlgorithm.PRINTED); + Response createOrUpdateResponse = client.createOrUpdateSkillsetWithResponse(expected, false, generateRequestOptions(), Context.NONE); skillsetsToDelete.add(createOrUpdateResponse.getValue().getName()); @@ -422,8 +422,8 @@ public void createOrUpdateCreatesWhenSkillsetDoesNotExistWithResponse() { @Test public void createOrUpdateUpdatesWhenSkillsetExists() { - Skillset skillset = createTestOcrSkillSet(1, TextExtractionAlgorithm.HANDWRITTEN); - Response createOrUpdateResponse = client.createOrUpdateSkillsetWithResponse(skillset, false, + SearchIndexerSkillset skillset = createTestOcrSkillSet(1, TextExtractionAlgorithm.HANDWRITTEN); + Response createOrUpdateResponse = client.createOrUpdateSkillsetWithResponse(skillset, false, generateRequestOptions(), Context.NONE); skillsetsToDelete.add(createOrUpdateResponse.getValue().getName()); assertEquals(HttpURLConnection.HTTP_CREATED, createOrUpdateResponse.getStatusCode()); @@ -436,8 +436,8 @@ public void 
createOrUpdateUpdatesWhenSkillsetExists() { @Test public void createOrUpdateUpdatesSkills() { - Skillset skillset = createSkillsetWithOcrDefaultSettings(false); - Skillset createdSkillset = client.createSkillset(skillset); + SearchIndexerSkillset skillset = createSkillsetWithOcrDefaultSettings(false); + SearchIndexerSkillset createdSkillset = client.createSkillset(skillset); skillsetsToDelete.add(createdSkillset.getName()); // update skills @@ -449,8 +449,8 @@ public void createOrUpdateUpdatesSkills() { @Test public void createOrUpdateUpdatesCognitiveService() { - Skillset skillset = createSkillsetWithOcrDefaultSettings(false); - Skillset createdSkillset = client.createSkillset(skillset); + SearchIndexerSkillset skillset = createSkillsetWithOcrDefaultSettings(false); + SearchIndexerSkillset createdSkillset = client.createSkillset(skillset); skillsetsToDelete.add(createdSkillset.getName()); // update skills @@ -462,8 +462,8 @@ public void createOrUpdateUpdatesCognitiveService() { @Test public void createSkillsetReturnsCorrectDefinitionShaperWithNestedInputs() { - Skillset expected = createSkillsetWithSharperSkillWithNestedInputs(); - Skillset actual = client.createSkillset(expected); + SearchIndexerSkillset expected = createSkillsetWithSharperSkillWithNestedInputs(); + SearchIndexerSkillset actual = client.createSkillset(expected); skillsetsToDelete.add(actual.getName()); assertObjectEquals(expected, actual, true, "etag"); @@ -475,7 +475,7 @@ public void createSkillsetThrowsExceptionWithNonShaperSkillWithNestedInputs() { List inputs = this.createNestedInputFieldMappingEntry(); List outputs = this.createOutputFieldMappingEntry(); - List skills = new ArrayList<>(); + List skills = new ArrayList<>(); // Used for testing skill that shouldn't allow nested inputs skills.add(new WebApiSkill().setUri("https://contoso.example.org") .setDescription("Invalid skill with nested inputs") @@ -483,7 +483,7 @@ public void 
createSkillsetThrowsExceptionWithNonShaperSkillWithNestedInputs() { .setInputs(inputs) .setOutputs(outputs)); - Skillset skillset = new Skillset() + SearchIndexerSkillset skillset = new SearchIndexerSkillset() .setName("nested-skillset-with-nonsharperskill") .setDescription("Skillset for testing") .setSkills(skills); @@ -496,8 +496,8 @@ public void createSkillsetThrowsExceptionWithNonShaperSkillWithNestedInputs() { @Test public void createSkillsetReturnsCorrectDefinitionConditional() { - Skillset expected = createTestSkillsetConditional(); - Skillset actual = client.createSkillset(expected); + SearchIndexerSkillset expected = createTestSkillsetConditional(); + SearchIndexerSkillset actual = client.createSkillset(expected); skillsetsToDelete.add(expected.getName()); assertObjectEquals(expected, actual, true, "etag"); @@ -505,7 +505,7 @@ public void createSkillsetReturnsCorrectDefinitionConditional() { @Test public void createOrUpdateSkillsetIfNotExistsSucceedsOnNoResource() { - Skillset created = client.createOrUpdateSkillsetWithResponse(createSkillsetWithOcrDefaultSettings(false), true, + SearchIndexerSkillset created = client.createOrUpdateSkillsetWithResponse(createSkillsetWithOcrDefaultSettings(false), true, null, Context.NONE).getValue(); skillsetsToDelete.add(created.getName()); @@ -514,12 +514,12 @@ public void createOrUpdateSkillsetIfNotExistsSucceedsOnNoResource() { @Test public void createOrUpdateSkillsetIfExistsSucceedsOnExistingResource() { - Skillset original = client.createOrUpdateSkillsetWithResponse(createSkillsetWithOcrDefaultSettings(false), + SearchIndexerSkillset original = client.createOrUpdateSkillsetWithResponse(createSkillsetWithOcrDefaultSettings(false), false, null, Context.NONE).getValue(); String originalETag = original.getETag(); skillsetsToDelete.add(original.getName()); - Skillset updated = client.createOrUpdateSkillsetWithResponse(mutateSkillsInSkillset(original), false, null, + SearchIndexerSkillset updated = 
client.createOrUpdateSkillsetWithResponse(mutateSkillsInSkillset(original), false, null, Context.NONE).getValue(); String updatedETag = updated.getETag(); @@ -529,12 +529,12 @@ public void createOrUpdateSkillsetIfExistsSucceedsOnExistingResource() { @Test public void createOrUpdateSkillsetIfNotChangedSucceedsWhenResourceUnchanged() { - Skillset original = client.createOrUpdateSkillsetWithResponse(createSkillsetWithOcrDefaultSettings(false), + SearchIndexerSkillset original = client.createOrUpdateSkillsetWithResponse(createSkillsetWithOcrDefaultSettings(false), false, null, Context.NONE).getValue(); String originalETag = original.getETag(); skillsetsToDelete.add(original.getName()); - Skillset updated = client.createOrUpdateSkillsetWithResponse(mutateSkillsInSkillset(original), true, null, + SearchIndexerSkillset updated = client.createOrUpdateSkillsetWithResponse(mutateSkillsInSkillset(original), true, null, Context.NONE).getValue(); String updatedETag = updated.getETag(); @@ -545,12 +545,12 @@ public void createOrUpdateSkillsetIfNotChangedSucceedsWhenResourceUnchanged() { @Test public void createOrUpdateSkillsetIfNotChangedFailsWhenResourceChanged() { - Skillset original = client.createOrUpdateSkillsetWithResponse(createSkillsetWithOcrDefaultSettings(false), + SearchIndexerSkillset original = client.createOrUpdateSkillsetWithResponse(createSkillsetWithOcrDefaultSettings(false), false, null, Context.NONE).getValue(); String originalETag = original.getETag(); skillsetsToDelete.add(original.getName()); - Skillset updated = client.createOrUpdateSkillsetWithResponse(mutateSkillsInSkillset(original), true, null, + SearchIndexerSkillset updated = client.createOrUpdateSkillsetWithResponse(mutateSkillsInSkillset(original), true, null, Context.NONE).getValue(); String updatedETag = updated.getETag(); @@ -569,10 +569,10 @@ public void createOrUpdateSkillsetIfNotChangedFailsWhenResourceChanged() { @Test public void deleteSkillsetIfNotChangedWorksOnlyOnCurrentResource() { - 
Skillset stale = client.createOrUpdateSkillsetWithResponse(createSkillsetWithOcrDefaultSettings(false), + SearchIndexerSkillset stale = client.createOrUpdateSkillsetWithResponse(createSkillsetWithOcrDefaultSettings(false), true, null, Context.NONE).getValue(); - Skillset current = client.createOrUpdateSkillsetWithResponse(stale, true, null, Context.NONE) + SearchIndexerSkillset current = client.createOrUpdateSkillsetWithResponse(stale, true, null, Context.NONE) .getValue(); try { @@ -587,7 +587,7 @@ public void deleteSkillsetIfNotChangedWorksOnlyOnCurrentResource() { @Test public void deleteSkillsetIfExistsWorksOnlyWhenResourceExists() { - Skillset skillset = client.createOrUpdateSkillsetWithResponse(createSkillsetWithOcrDefaultSettings(false), + SearchIndexerSkillset skillset = client.createOrUpdateSkillsetWithResponse(createSkillsetWithOcrDefaultSettings(false), false, null, Context.NONE).getValue(); client.deleteSkillsetWithResponse(skillset, true, null, Context.NONE); @@ -608,8 +608,8 @@ private OutputFieldMappingEntry createOutputFieldMappingEntry(String name, Strin return new OutputFieldMappingEntry().setName(name).setTargetName(targetName); } - Skillset createTestSkillsetImageAnalysisKeyPhrase() { - List skills = new ArrayList<>(); + SearchIndexerSkillset createTestSkillsetImageAnalysisKeyPhrase() { + List skills = new ArrayList<>(); List inputs = Arrays.asList( simpleInputFieldMappingEntry("url", "/document/url"), @@ -639,20 +639,20 @@ Skillset createTestSkillsetImageAnalysisKeyPhrase() { .setInputs(inputs) .setOutputs(outputs)); - return new Skillset() + return new SearchIndexerSkillset() .setName(testResourceNamer.randomName("image-analysis-key-phrase-skillset", 48)) .setDescription("Skillset for testing") .setSkills(skills); } - Skillset createTestSkillsetLanguageDetection() { + SearchIndexerSkillset createTestSkillsetLanguageDetection() { List inputs = Collections .singletonList(simpleInputFieldMappingEntry("text", "/document/text")); List outputs = 
Collections .singletonList(createOutputFieldMappingEntry("languageCode", "myLanguageCode")); - List skills = Collections.singletonList( + List skills = Collections.singletonList( new LanguageDetectionSkill() .setName("mylanguage") .setDescription("Tested Language Detection skill") @@ -660,13 +660,13 @@ Skillset createTestSkillsetLanguageDetection() { .setInputs(inputs) .setOutputs(outputs)); - return new Skillset() + return new SearchIndexerSkillset() .setName(testResourceNamer.randomName("language-detection-skillset", 48)) .setDescription("Skillset for testing") .setSkills(skills); } - Skillset createTestSkillsetMergeText() { + SearchIndexerSkillset createTestSkillsetMergeText() { List inputs = Arrays.asList( simpleInputFieldMappingEntry("text", "/document/text"), simpleInputFieldMappingEntry("itemsToInsert", "/document/textitems"), @@ -675,7 +675,7 @@ Skillset createTestSkillsetMergeText() { List outputs = Collections .singletonList(createOutputFieldMappingEntry("mergedText", "myMergedText")); - List skills = Collections.singletonList( + List skills = Collections.singletonList( new MergeSkill() .setInsertPostTag("__e") .setInsertPreTag("__") @@ -685,14 +685,14 @@ Skillset createTestSkillsetMergeText() { .setInputs(inputs) .setOutputs(outputs)); - return new Skillset() + return new SearchIndexerSkillset() .setName(testResourceNamer.randomName("merge-text-skillset", 48)) .setDescription("Skillset for testing") .setSkills(skills); } - Skillset createTestSkillsetOcrShaper() { - List skills = new ArrayList<>(); + SearchIndexerSkillset createTestSkillsetOcrShaper() { + List skills = new ArrayList<>(); List inputs = Arrays.asList( simpleInputFieldMappingEntry("url", "/document/url"), simpleInputFieldMappingEntry("queryString", "/document/queryString")); @@ -718,13 +718,13 @@ Skillset createTestSkillsetOcrShaper() { .setInputs(inputs) .setOutputs(outputs)); - return new Skillset() + return new SearchIndexerSkillset() 
.setName(testResourceNamer.randomName("ocr-shaper-skillset", 48)) .setDescription("Skillset for testing") .setSkills(skills); } - Skillset createSkillsetWithCognitiveServicesKey() { + SearchIndexerSkillset createSkillsetWithCognitiveServicesKey() { List inputs = Arrays.asList( simpleInputFieldMappingEntry("url", "/document/url"), simpleInputFieldMappingEntry("queryString", "/document/queryString")); @@ -732,7 +732,7 @@ Skillset createSkillsetWithCognitiveServicesKey() { List outputs = Collections .singletonList(createOutputFieldMappingEntry("text", "mytext")); - List skills = Collections.singletonList( + List skills = Collections.singletonList( new OcrSkill() .setTextExtractionAlgorithm(TextExtractionAlgorithm.PRINTED) .setDefaultLanguageCode(OcrSkillLanguage.EN) @@ -743,14 +743,14 @@ Skillset createSkillsetWithCognitiveServicesKey() { .setOutputs(outputs) ); - return new Skillset() + return new SearchIndexerSkillset() .setName(testResourceNamer.randomName("cognitive-services-key-skillset", 48)) .setDescription("Skillset for testing") .setSkills(skills) .setCognitiveServicesAccount(new DefaultCognitiveServicesAccount()); } - Skillset createTestSkillsetConditional() { + SearchIndexerSkillset createTestSkillsetConditional() { List inputs = Arrays.asList( simpleInputFieldMappingEntry("condition", "= $(/document/language) == null"), simpleInputFieldMappingEntry("whenTrue", "= 'es'"), @@ -759,7 +759,7 @@ Skillset createTestSkillsetConditional() { List outputs = Collections .singletonList(createOutputFieldMappingEntry("output", "myLanguageCode")); - List skills = Collections.singletonList( + List skills = Collections.singletonList( new ConditionalSkill() .setName("myconditional") .setDescription("Tested Conditional skill") @@ -768,13 +768,13 @@ Skillset createTestSkillsetConditional() { .setOutputs(outputs) ); - return new Skillset() + return new SearchIndexerSkillset() .setName(testResourceNamer.randomName("conditional-skillset", 48)) .setDescription("Skillset for 
testing") .setSkills(skills); } - Skillset mutateSkillsInSkillset(Skillset skillset) { + SearchIndexerSkillset mutateSkillsInSkillset(SearchIndexerSkillset skillset) { return skillset.setSkills(Collections.singletonList( new KeyPhraseExtractionSkill() .setDefaultLanguageCode(KeyPhraseExtractionSkillLanguage.EN) @@ -787,8 +787,8 @@ Skillset mutateSkillsInSkillset(Skillset skillset) { )); } - Skillset createTestSkillsetOcrEntity(TextExtractionAlgorithm algorithm, List categories) { - List skills = new ArrayList<>(); + SearchIndexerSkillset createTestSkillsetOcrEntity(TextExtractionAlgorithm algorithm, List categories) { + List skills = new ArrayList<>(); List inputs = Arrays.asList( simpleInputFieldMappingEntry("url", "/document/url"), simpleInputFieldMappingEntry("queryString", "/document/queryString")); @@ -818,14 +818,15 @@ Skillset createTestSkillsetOcrEntity(TextExtractionAlgorithm algorithm, List skills = new ArrayList<>(); + SearchIndexerSkillset createTestSkillsetOcrSentiment(OcrSkillLanguage ocrLanguageCode, + SentimentSkillLanguage sentimentLanguageCode, TextExtractionAlgorithm algorithm) { + List skills = new ArrayList<>(); List inputs = Arrays.asList( simpleInputFieldMappingEntry("url", "/document/url"), simpleInputFieldMappingEntry("queryString", "/document/queryString")); @@ -851,14 +852,14 @@ Skillset createTestSkillsetOcrSentiment(OcrSkillLanguage ocrLanguageCode, Sentim .setInputs(inputs) .setOutputs(outputs)); - return new Skillset() + return new SearchIndexerSkillset() .setName(testResourceNamer.randomName("ocr-sentiment-skillset", 48)) .setDescription("Skillset for testing") .setSkills(skills); } - Skillset createTestSkillsetOcrKeyPhrase(OcrSkillLanguage ocrLanguageCode, KeyPhraseExtractionSkillLanguage keyPhraseLanguageCode) { - List skills = new ArrayList<>(); + SearchIndexerSkillset createTestSkillsetOcrKeyPhrase(OcrSkillLanguage ocrLanguageCode, KeyPhraseExtractionSkillLanguage keyPhraseLanguageCode) { + List skills = new ArrayList<>(); List 
inputs = Arrays.asList( simpleInputFieldMappingEntry("url", "/document/url"), simpleInputFieldMappingEntry("queryString", "/document/queryString")); @@ -885,14 +886,15 @@ Skillset createTestSkillsetOcrKeyPhrase(OcrSkillLanguage ocrLanguageCode, KeyPhr .setInputs(inputs) .setOutputs(outputs)); - return new Skillset() + return new SearchIndexerSkillset() .setName(testResourceNamer.randomName("ocr-key-phrase-skillset", 48)) .setDescription("Skillset for testing") .setSkills(skills); } - Skillset createTestSkillsetOcrSplitText(OcrSkillLanguage ocrLanguageCode, SplitSkillLanguage splitLanguageCode, TextSplitMode textSplitMode) { - List skills = new ArrayList<>(); + SearchIndexerSkillset createTestSkillsetOcrSplitText(OcrSkillLanguage ocrLanguageCode, + SplitSkillLanguage splitLanguageCode, TextSplitMode textSplitMode) { + List skills = new ArrayList<>(); List inputs = Arrays.asList( simpleInputFieldMappingEntry("url", "/document/url"), simpleInputFieldMappingEntry("queryString", "/document/queryString")); @@ -920,14 +922,14 @@ Skillset createTestSkillsetOcrSplitText(OcrSkillLanguage ocrLanguageCode, SplitS .setInputs(inputs) .setOutputs(outputs)); - return new Skillset() + return new SearchIndexerSkillset() .setName(testResourceNamer.randomName("ocr-split-text-skillset", 48)) .setDescription("Skillset for testing") .setSkills(skills); } - Skillset createTestOcrSkillSet(int repeat, TextExtractionAlgorithm algorithm) { - List skills = new ArrayList<>(); + SearchIndexerSkillset createTestOcrSkillSet(int repeat, TextExtractionAlgorithm algorithm) { + List skills = new ArrayList<>(); List inputs = Arrays.asList( simpleInputFieldMappingEntry("url", "/document/url"), @@ -948,13 +950,13 @@ Skillset createTestOcrSkillSet(int repeat, TextExtractionAlgorithm algorithm) { .setOutputs(outputs)); } - return new Skillset() + return new SearchIndexerSkillset() .setName(testResourceNamer.randomName("testskillset", 48)) .setDescription("Skillset for testing OCR") .setSkills(skills); } - 
Skillset createSkillsetWithOcrDefaultSettings(Boolean shouldDetectOrientation) { + SearchIndexerSkillset createSkillsetWithOcrDefaultSettings(Boolean shouldDetectOrientation) { List inputs = Arrays.asList( simpleInputFieldMappingEntry("url", "/document/url"), simpleInputFieldMappingEntry("queryString", "/document/queryString")); @@ -962,7 +964,7 @@ Skillset createSkillsetWithOcrDefaultSettings(Boolean shouldDetectOrientation) { List outputs = Collections .singletonList(createOutputFieldMappingEntry("text", "mytext")); - List skills = Collections.singletonList( + List skills = Collections.singletonList( new OcrSkill() .setShouldDetectOrientation(shouldDetectOrientation) .setName("myocr") @@ -972,13 +974,13 @@ Skillset createSkillsetWithOcrDefaultSettings(Boolean shouldDetectOrientation) { .setOutputs(outputs) ); - return new Skillset() + return new SearchIndexerSkillset() .setName(testResourceNamer.randomName(SkillsetManagementSyncTests.OCR_SKILLSET_NAME, 48)) .setDescription("Skillset for testing default configuration") .setSkills(skills); } - Skillset createSkillsetWithImageAnalysisDefaultSettings() { + SearchIndexerSkillset createSkillsetWithImageAnalysisDefaultSettings() { List inputs = Arrays.asList( simpleInputFieldMappingEntry("url", "/document/url"), simpleInputFieldMappingEntry("queryString", "/document/queryString")); @@ -986,7 +988,7 @@ Skillset createSkillsetWithImageAnalysisDefaultSettings() { List outputs = Collections .singletonList(createOutputFieldMappingEntry("description", "mydescription")); - List skills = Collections.singletonList( + List skills = Collections.singletonList( new ImageAnalysisSkill() .setName("myimage") .setDescription("Tested image analysis skill") @@ -995,20 +997,20 @@ Skillset createSkillsetWithImageAnalysisDefaultSettings() { .setOutputs(outputs) ); - return new Skillset() + return new SearchIndexerSkillset() .setName(testResourceNamer.randomName("image-analysis-skillset", 48)) .setDescription("Skillset for testing default 
configuration") .setSkills(skills); } - Skillset createSkillsetWithKeyPhraseExtractionDefaultSettings() { + SearchIndexerSkillset createSkillsetWithKeyPhraseExtractionDefaultSettings() { List inputs = Collections .singletonList(simpleInputFieldMappingEntry("text", "/document/myText")); List outputs = Collections .singletonList(createOutputFieldMappingEntry("keyPhrases", "myKeyPhrases")); - List skills = Collections.singletonList( + List skills = Collections.singletonList( new KeyPhraseExtractionSkill() .setName("mykeyphrases") .setDescription("Tested Key Phrase skill") @@ -1017,13 +1019,13 @@ Skillset createSkillsetWithKeyPhraseExtractionDefaultSettings() { .setOutputs(outputs) ); - return new Skillset() + return new SearchIndexerSkillset() .setName(testResourceNamer.randomName("key-phrase-extraction-skillset", 48)) .setDescription("Skillset for testing default configuration") .setSkills(skills); } - Skillset createSkillsetWithMergeDefaultSettings() { + SearchIndexerSkillset createSkillsetWithMergeDefaultSettings() { List inputs = Arrays.asList( simpleInputFieldMappingEntry("text", "/document/text"), simpleInputFieldMappingEntry("itemsToInsert", "/document/textitems"), @@ -1032,7 +1034,7 @@ Skillset createSkillsetWithMergeDefaultSettings() { List outputs = Collections .singletonList(createOutputFieldMappingEntry("mergedText", "myMergedText")); - List skills = Collections.singletonList( + List skills = Collections.singletonList( new MergeSkill() .setName("mymerge") .setDescription("Tested Merged Text skill") @@ -1041,20 +1043,20 @@ Skillset createSkillsetWithMergeDefaultSettings() { .setOutputs(outputs) ); - return new Skillset() + return new SearchIndexerSkillset() .setName(testResourceNamer.randomName("merge-skillset", 48)) .setDescription("Skillset for testing default configuration") .setSkills(skills); } - Skillset createSkillsetWithSentimentDefaultSettings() { + SearchIndexerSkillset createSkillsetWithSentimentDefaultSettings() { List inputs = Collections 
.singletonList(simpleInputFieldMappingEntry("text", "/document/mytext")); List outputs = Collections .singletonList(createOutputFieldMappingEntry("score", "mySentiment")); - List skills = Collections.singletonList( + List skills = Collections.singletonList( new SentimentSkill() .setName("mysentiment") .setDescription("Tested Sentiment skill") @@ -1063,20 +1065,20 @@ Skillset createSkillsetWithSentimentDefaultSettings() { .setOutputs(outputs) ); - return new Skillset() + return new SearchIndexerSkillset() .setName(testResourceNamer.randomName("sentiment-skillset", 48)) .setDescription("Skillset for testing default configuration") .setSkills(skills); } - Skillset createSkillsetWithEntityRecognitionDefaultSettings() { + SearchIndexerSkillset createSkillsetWithEntityRecognitionDefaultSettings() { List inputs = Collections .singletonList(simpleInputFieldMappingEntry("text", "/document/mytext")); List outputs = Collections .singletonList(createOutputFieldMappingEntry("entities", "myEntities")); - List skills = Collections.singletonList( + List skills = Collections.singletonList( new EntityRecognitionSkill() .setName("myentity") .setDescription("Tested Entity Recognition skill") @@ -1085,20 +1087,20 @@ Skillset createSkillsetWithEntityRecognitionDefaultSettings() { .setOutputs(outputs) ); - return new Skillset() + return new SearchIndexerSkillset() .setName(testResourceNamer.randomName("entity-recognition-skillset", 48)) .setDescription("Skillset for testing default configuration") .setSkills(skills); } - Skillset createSkillsetWithSplitDefaultSettings() { + SearchIndexerSkillset createSkillsetWithSplitDefaultSettings() { List inputs = Collections .singletonList(simpleInputFieldMappingEntry("text", "/document/mytext")); List outputs = Collections .singletonList(createOutputFieldMappingEntry("textItems", "myTextItems")); - List skills = Collections.singletonList( + List skills = Collections.singletonList( new SplitSkill() .setTextSplitMode(TextSplitMode.PAGES) 
.setName("mysplit") @@ -1108,13 +1110,13 @@ Skillset createSkillsetWithSplitDefaultSettings() { .setOutputs(outputs) ); - return new Skillset() + return new SearchIndexerSkillset() .setName(testResourceNamer.randomName("split-skillset", 48)) .setDescription("Skillset for testing default configuration") .setSkills(skills); } - Skillset createSkillsetWithCustomSkills() { + SearchIndexerSkillset createSkillsetWithCustomSkills() { HashMap headers = new HashMap<>(); headers.put("Ocp-Apim-Subscription-Key", "foobar"); @@ -1124,7 +1126,7 @@ Skillset createSkillsetWithCustomSkills() { List outputs = Collections .singletonList(createOutputFieldMappingEntry("textItems", "myTextItems")); - Skill webApiSkill = new WebApiSkill() + SearchIndexerSkill webApiSkill = new WebApiSkill() .setUri("https://indexer-e2e-webskill.azurewebsites.net/api/InvokeTextAnalyticsV3?code=foo") .setHttpMethod("POST") .setHttpHeaders(headers) @@ -1133,17 +1135,17 @@ Skillset createSkillsetWithCustomSkills() { .setName("webapi-skill") .setDescription("Calls an Azure function, which in turn calls Bing Entity Search"); - return new Skillset() + return new SearchIndexerSkillset() .setName(testResourceNamer.randomName("custom-skillset", 48)) .setDescription("Skillset for testing custom skillsets") .setSkills(Collections.singletonList(webApiSkill)); } - Skillset createSkillsetWithSharperSkillWithNestedInputs() { + SearchIndexerSkillset createSkillsetWithSharperSkillWithNestedInputs() { List inputs = this.createNestedInputFieldMappingEntry(); List outputs = this.createOutputFieldMappingEntry(); - List skills = new ArrayList<>(); + List skills = new ArrayList<>(); skills.add(new ShaperSkill() .setName("myshaper") .setDescription("Tested Shaper skill") @@ -1152,7 +1154,7 @@ Skillset createSkillsetWithSharperSkillWithNestedInputs() { .setOutputs(outputs) ); - return new Skillset() + return new SearchIndexerSkillset() .setName(testResourceNamer.randomName("nested-skillset-with-sharperskill", 48)) 
.setDescription("Skillset for testing") .setSkills(skills); @@ -1174,7 +1176,7 @@ private List createOutputFieldMappingEntry() { } - protected List getCreateOrUpdateSkills() { + protected List getCreateOrUpdateSkills() { return Collections.singletonList(new KeyPhraseExtractionSkill() .setDefaultLanguageCode(KeyPhraseExtractionSkillLanguage.EN) .setName("mykeyphrases") diff --git a/sdk/search/azure-search-documents/src/test/java/com/azure/search/documents/models/GeoPointTests.java b/sdk/search/azure-search-documents/src/test/java/com/azure/search/documents/models/GeoPointTests.java index 4d865f1f9470..9350d0cdd23a 100644 --- a/sdk/search/azure-search-documents/src/test/java/com/azure/search/documents/models/GeoPointTests.java +++ b/sdk/search/azure-search-documents/src/test/java/com/azure/search/documents/models/GeoPointTests.java @@ -71,24 +71,24 @@ public void canDeserializeGeoPoint() throws Exception { @Test public void canSerializeGeoPoint() { - Index index = new Index() + SearchIndex index = new SearchIndex() .setName("geopoints") .setFields(Arrays.asList( - new Field() + new SearchField() .setName("Id") - .setType(DataType.EDM_STRING) + .setType(SearchFieldDataType.STRING) .setKey(true) .setFilterable(true) .setSortable(true), - new Field() + new SearchField() .setName("Name") - .setType(DataType.EDM_STRING) + .setType(SearchFieldDataType.STRING) .setSearchable(true) .setFilterable(true) .setSortable(true), - new Field() + new SearchField() .setName("Location") - .setType(DataType.EDM_GEOGRAPHY_POINT) + .setType(SearchFieldDataType.GEOGRAPHY_POINT) .setFilterable(true) .setSortable(true) )); diff --git a/sdk/search/azure-search-documents/swagger/readme.md b/sdk/search/azure-search-documents/swagger/readme.md index 19b6eb127f0f..fa485fd38a43 100644 --- a/sdk/search/azure-search-documents/swagger/readme.md +++ b/sdk/search/azure-search-documents/swagger/readme.md @@ -64,7 +64,7 @@ input-file: - 
https://github.com/Azure/azure-rest-api-specs/blob/master/specification/search/data-plane/Azure.Search/preview/2019-05-06-preview/searchindex.json title: SearchIndexRestClient models-subpackage: implementation.models -custom-types: QueryType,AutocompleteResult,AutocompleteOptions,AutocompleteRequest,AutocompleteItem,IndexDocumentsResult,IndexingResult,SearchError,SearchErrorException,SearchResult,SearchRequest,SearchOptions,RequestOptions,IndexBatchBase,IndexAction,FacetResult,SuggestOptions,SuggestResult,SuggestRequest +custom-types: QueryType,AutocompleteResult,AutocompleteOptions,AutocompleteItem,IndexDocumentsResult,IndexingResult,SearchError,SearchErrorException,SearchResult,SearchOptions,RequestOptions,IndexBatchBase,IndexAction,FacetResult,SuggestOptions,SuggestResult custom-types-subpackage: models ``` @@ -272,12 +272,12 @@ directive: .replace(/(package com.azure.search.documents.models;)/g, "$1\nimport com.fasterxml.jackson.annotation.JsonIgnore;") .replace(/(public Document getDocument())/g, "@JsonIgnore\n$1") - # Add static Collection method to DataType - - from: DataType.java + # Add static Collection method to SearchFieldDataType + - from: SearchFieldDataType.java where: $ transform: >- return $ - .replace(/(public static final DataType EDM_COMPLEX_TYPE = fromString\("Edm.ComplexType"\);)/g, "$1\n\n /**\n * Returns a collection of a specific DataType\n * @param dataType the corresponding DataType\n * @return a Collection of the corresponding DataType\n */\n @JsonCreator\n public static DataType collection(DataType dataType) {\n return fromString(String.format(\"Collection(%s)\", dataType.toString()));\n }") + .replace(/(public static final SearchFieldDataType COMPLEX = fromString\("Edm.ComplexType"\);)/g, "$1\n\n /**\n * Returns a collection of a specific SearchFieldDataType\n * @param dataType the corresponding SearchFieldDataType\n * @return a Collection of the corresponding SearchFieldDataType\n */\n @JsonCreator\n public static 
SearchFieldDataType collection(SearchFieldDataType dataType) {\n return fromString(String.format(\"Collection(%s)\", dataType.toString()));\n }") # Workaround to fix bad host path parameters - from: @@ -407,6 +407,7 @@ directive: where: $ transform: >- return $ + .replace(/(import com\.azure\.search\.documents\.models\.QueryType\;)/g, "$1\nimport com.azure.search.documents.models.ScoringParameter;") .replace(/(private List\ scoringParameters\;)/g, "private List scoringParameters;") .replace(/(public List\ getScoringParameters\(\) \{)/g, "public List getScoringParameters() {") .replace(/(public SearchRequest setScoringParameters\(List\ scoringParameters\) \{)/g, "public SearchRequest setScoringParameters(List scoringParameters) {") @@ -445,7 +446,7 @@ directive: # Changed isRetrievable to isHidden - from: swagger-document - where: $.definitions.Field.properties + where: $.definitions.SearchField.properties transform: > $.hidden = $.retrievable; $.hidden = { @@ -453,12 +454,12 @@ directive: "description": "A value indicating whether the field will be returned in a search result. This property must be false for key fields, and must be null for complex fields. You can hide a field from search results if you want to use it only as a filter, for sorting, or for scoring. This property can also be changed on existing fields and enabling it does not cause an increase in index storage requirements." 
} - - from: Field.java + - from: SearchField.java where: $ transform: >- return $ .replace(/(import com\.azure\.core\.annotation\.Fluent\;)/g, "$1\nimport com.fasterxml.jackson.annotation.JsonIgnore;") - .replace(/(public Field setRetrievable\(Boolean retrievable\))/g, "private Field setRetrievable(Boolean retrievable)") + .replace(/(public SearchField setRetrievable\(Boolean retrievable\))/g, "private SearchField setRetrievable(Boolean retrievable)") .replace(/(public Boolean isRetrievable\(\))/g, "private Boolean isRetrievable()") .replace(/( return this\.hidden\;)/g, " return retrievable == null ? null : !retrievable;") .replace(/(this\.hidden \= hidden\;)/g, "$1\n retrievable = this.hidden == null ? null : !this.hidden;")